text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def new_page(self, page_number, new_chapter, **kwargs):
    """Create a new :class:`Page` for a chain that needs more containers.

    Called by :meth:`render` with the :class:`Chain`\\ s that need more
    :class:`Container`\\ s. Odd page numbers use the document's 'right'
    page template, even page numbers the 'left' one.
    """
    # Fetch both templates (right first, matching the original call order)
    # before picking the one for this page's parity.
    templates = {
        'right': self.document.get_page_template(self, 'right'),
        'left': self.document.get_page_template(self, 'left'),
    }
    chosen = templates['right'] if page_number % 2 else templates['left']
    return chosen.page(self, page_number, self.chain, new_chapter, **kwargs)
[ "def", "new_page", "(", "self", ",", "page_number", ",", "new_chapter", ",", "*", "*", "kwargs", ")", ":", "right_template", "=", "self", ".", "document", ".", "get_page_template", "(", "self", ",", "'right'", ")", "left_template", "=", "self", ".", "document", ".", "get_page_template", "(", "self", ",", "'left'", ")", "page_template", "=", "right_template", "if", "page_number", "%", "2", "else", "left_template", "return", "page_template", ".", "page", "(", "self", ",", "page_number", ",", "self", ".", "chain", ",", "new_chapter", ",", "*", "*", "kwargs", ")" ]
66.888889
21.222222
def _load_yml_config(self, config_file):
    """
    Parse a YAML string and store the normalized result on ``self._data``.

    Registers custom PyYAML constructors so scalars and collections come
    back as Config* wrapper objects that carry their source location
    (start/end marks), allowing later errors to point at the offending
    line/column of the config file.

    :param config_file: YAML document contents as a :string: loaded from a
        yaml file (the text itself, not a path).
    :raises TypeError: if ``config_file`` is not a string.
    :raises AttributeError: if the document parses to no data.
    :raises SyntaxError: if the YAML is malformed.
    """
    if not isinstance(config_file, six.string_types):
        raise TypeError('config_file must be a str.')

    try:
        def construct_yaml_int(self, node):
            # construct_yaml_int returns the int value directly.
            obj = SafeConstructor.construct_yaml_int(self, node)
            return ConfigInt(obj, node.start_mark, node.end_mark)

        def construct_yaml_float(self, node):
            # BUG FIX: SafeConstructor.construct_yaml_float returns a plain
            # float (it is not a generator), so the previous ``obj, = ...``
            # single-element unpack raised TypeError for any float value in
            # the config file.
            obj = SafeConstructor.construct_yaml_float(self, node)
            return ConfigFloat(obj, node.start_mark, node.end_mark)

        def construct_yaml_str(self, node):
            # Override the default string handling function
            # to always return unicode objects.
            obj = SafeConstructor.construct_scalar(self, node)
            assert isinstance(obj, six.string_types)
            return ConfigUnicode(obj, node.start_mark, node.end_mark)

        def construct_yaml_mapping(self, node):
            # construct_yaml_map is a generator yielding exactly one
            # mapping, hence the single-element unpack.
            obj, = SafeConstructor.construct_yaml_map(self, node)
            return ConfigDict(obj, node.start_mark, node.end_mark)

        def construct_yaml_seq(self, node):
            # construct_yaml_seq is likewise a one-item generator.
            obj, = SafeConstructor.construct_yaml_seq(self, node)
            return ConfigSeq(obj, node.start_mark, node.end_mark)

        # SafeConstructor.add_constructor(u'tag:yaml.org,2002:bool', construct_yaml_bool)
        SafeConstructor.add_constructor(u'tag:yaml.org,2002:float', construct_yaml_float)
        SafeConstructor.add_constructor(u'tag:yaml.org,2002:int', construct_yaml_int)
        SafeConstructor.add_constructor(u'tag:yaml.org,2002:map', construct_yaml_mapping)
        SafeConstructor.add_constructor(u'tag:yaml.org,2002:seq', construct_yaml_seq)
        SafeConstructor.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)

        data = SafeLoader(config_file).get_data()
        if data is None:
            raise AttributeError('The configuration file needs to have data in it.')

        self._data = normalize_keys(data, snake_case=False)
    except YAMLError as e:
        if hasattr(e, 'problem_mark'):
            mark = e.problem_mark
            raise SyntaxError(
                "There is a syntax error in your freight-forwarder config file line: {0} column: {1}".format(
                    mark.line + 1, mark.column + 1
                )
            )
        else:
            raise SyntaxError("There is a syntax error in your freight-forwarder config.")
[ "def", "_load_yml_config", "(", "self", ",", "config_file", ")", ":", "if", "not", "isinstance", "(", "config_file", ",", "six", ".", "string_types", ")", ":", "raise", "TypeError", "(", "'config_file must be a str.'", ")", "try", ":", "def", "construct_yaml_int", "(", "self", ",", "node", ")", ":", "obj", "=", "SafeConstructor", ".", "construct_yaml_int", "(", "self", ",", "node", ")", "data", "=", "ConfigInt", "(", "obj", ",", "node", ".", "start_mark", ",", "node", ".", "end_mark", ")", "return", "data", "def", "construct_yaml_float", "(", "self", ",", "node", ")", ":", "obj", ",", "=", "SafeConstructor", ".", "construct_yaml_float", "(", "self", ",", "node", ")", "data", "=", "ConfigFloat", "(", "obj", ",", "node", ".", "start_mark", ",", "node", ".", "end_mark", ")", "return", "data", "def", "construct_yaml_str", "(", "self", ",", "node", ")", ":", "# Override the default string handling function", "# to always return unicode objects", "obj", "=", "SafeConstructor", ".", "construct_scalar", "(", "self", ",", "node", ")", "assert", "isinstance", "(", "obj", ",", "six", ".", "string_types", ")", "data", "=", "ConfigUnicode", "(", "obj", ",", "node", ".", "start_mark", ",", "node", ".", "end_mark", ")", "return", "data", "def", "construct_yaml_mapping", "(", "self", ",", "node", ")", ":", "obj", ",", "=", "SafeConstructor", ".", "construct_yaml_map", "(", "self", ",", "node", ")", "data", "=", "ConfigDict", "(", "obj", ",", "node", ".", "start_mark", ",", "node", ".", "end_mark", ")", "return", "data", "def", "construct_yaml_seq", "(", "self", ",", "node", ")", ":", "obj", ",", "=", "SafeConstructor", ".", "construct_yaml_seq", "(", "self", ",", "node", ")", "data", "=", "ConfigSeq", "(", "obj", ",", "node", ".", "start_mark", ",", "node", ".", "end_mark", ")", "return", "data", "# SafeConstructor.add_constructor(u'tag:yaml.org,2002:bool', construct_yaml_bool)", "SafeConstructor", ".", "add_constructor", "(", "u'tag:yaml.org,2002:float'", ",", 
"construct_yaml_float", ")", "SafeConstructor", ".", "add_constructor", "(", "u'tag:yaml.org,2002:int'", ",", "construct_yaml_int", ")", "SafeConstructor", ".", "add_constructor", "(", "u'tag:yaml.org,2002:map'", ",", "construct_yaml_mapping", ")", "SafeConstructor", ".", "add_constructor", "(", "u'tag:yaml.org,2002:seq'", ",", "construct_yaml_seq", ")", "SafeConstructor", ".", "add_constructor", "(", "u'tag:yaml.org,2002:str'", ",", "construct_yaml_str", ")", "data", "=", "SafeLoader", "(", "config_file", ")", ".", "get_data", "(", ")", "if", "data", "is", "None", ":", "raise", "AttributeError", "(", "'The configuration file needs to have data in it.'", ")", "self", ".", "_data", "=", "normalize_keys", "(", "data", ",", "snake_case", "=", "False", ")", "except", "YAMLError", "as", "e", ":", "if", "hasattr", "(", "e", ",", "'problem_mark'", ")", ":", "mark", "=", "e", ".", "problem_mark", "raise", "SyntaxError", "(", "\"There is a syntax error in your freight-forwarder config file line: {0} column: {1}\"", ".", "format", "(", "mark", ".", "line", "+", "1", ",", "mark", ".", "column", "+", "1", ")", ")", "else", ":", "raise", "SyntaxError", "(", "\"There is a syntax error in your freight-forwarder config.\"", ")" ]
37.609195
21.701149
def _get_default_annual_spacing(nyears): """ Returns a default spacing between consecutive ticks for annual data. """ if nyears < 11: (min_spacing, maj_spacing) = (1, 1) elif nyears < 20: (min_spacing, maj_spacing) = (1, 2) elif nyears < 50: (min_spacing, maj_spacing) = (1, 5) elif nyears < 100: (min_spacing, maj_spacing) = (5, 10) elif nyears < 200: (min_spacing, maj_spacing) = (5, 25) elif nyears < 600: (min_spacing, maj_spacing) = (10, 50) else: factor = nyears // 1000 + 1 (min_spacing, maj_spacing) = (factor * 20, factor * 100) return (min_spacing, maj_spacing)
[ "def", "_get_default_annual_spacing", "(", "nyears", ")", ":", "if", "nyears", "<", "11", ":", "(", "min_spacing", ",", "maj_spacing", ")", "=", "(", "1", ",", "1", ")", "elif", "nyears", "<", "20", ":", "(", "min_spacing", ",", "maj_spacing", ")", "=", "(", "1", ",", "2", ")", "elif", "nyears", "<", "50", ":", "(", "min_spacing", ",", "maj_spacing", ")", "=", "(", "1", ",", "5", ")", "elif", "nyears", "<", "100", ":", "(", "min_spacing", ",", "maj_spacing", ")", "=", "(", "5", ",", "10", ")", "elif", "nyears", "<", "200", ":", "(", "min_spacing", ",", "maj_spacing", ")", "=", "(", "5", ",", "25", ")", "elif", "nyears", "<", "600", ":", "(", "min_spacing", ",", "maj_spacing", ")", "=", "(", "10", ",", "50", ")", "else", ":", "factor", "=", "nyears", "//", "1000", "+", "1", "(", "min_spacing", ",", "maj_spacing", ")", "=", "(", "factor", "*", "20", ",", "factor", "*", "100", ")", "return", "(", "min_spacing", ",", "maj_spacing", ")" ]
33
11.5
def get_resource_allocation(self):
    """Get the :py:class:`ResourceAllocation` element instance.

    Returns:
        ResourceAllocation: Resource allocation used to access
        information about the resource where this PE is running, or
        ``None`` when this element has no ``resourceAllocation``
        attribute.

    .. versionadded:: 1.9
    """
    if not hasattr(self, 'resourceAllocation'):
        return None
    response = self.rest_client.make_request(self.resourceAllocation)
    return ResourceAllocation(response, self.rest_client)
[ "def", "get_resource_allocation", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "'resourceAllocation'", ")", ":", "return", "ResourceAllocation", "(", "self", ".", "rest_client", ".", "make_request", "(", "self", ".", "resourceAllocation", ")", ",", "self", ".", "rest_client", ")" ]
43.5
28.2
def startElement(self, name, attrs):
    """Callback run at the start of each XML element.

    Pushes the current context onto the context stack, resets the
    per-element content buffer, updates progress tracking, then
    dispatches to the node handler matching the current parsing context.

    :param name: tag name of the element being opened
    :param attrs: SAX attributes object for the element
    """
    self._contextStack.append(self._context)
    self._contentList = []

    # Progress bookkeeping: a status tag starts a new item type; each
    # occurrence of the current item tag counts one more item.
    if name in self._statusDict:
        self._itemTag, itemType = self._statusDict[name]
        self._progress.startItem(itemType)
    elif name == self._itemTag:
        self._error = False
        self._progress.newItem()

    try:
        if self._context == 'root':
            if name == 'xtvd':
                self._context = 'xtvd'
                self._startXTVDNode(name, attrs)
        elif self._context == 'xtvd':
            self._context = name
        elif self._context == 'stations':
            self._startStationsNode(name, attrs)
        elif self._context == 'lineups':
            self._startLineupsNode(name, attrs)
        elif self._context == 'schedules':
            self._startSchedulesNode(name, attrs)
        elif self._context == 'programs':
            self._startProgramsNode(name, attrs)
        elif self._context == 'productionCrew':
            self._startProductionCrewNode(name, attrs)
        elif self._context == 'genres':
            self._startGenresNode(name, attrs)
    except Exception as e:
        # FIX: ``except Exception, e`` is Python-2-only syntax (a
        # SyntaxError on Python 3); ``as`` works on Python 2.6+ and 3.
        self._error = True
        self._progress.printMsg(str(e), error=True)
[ "def", "startElement", "(", "self", ",", "name", ",", "attrs", ")", ":", "self", ".", "_contextStack", ".", "append", "(", "self", ".", "_context", ")", "self", ".", "_contentList", "=", "[", "]", "if", "name", "in", "self", ".", "_statusDict", ":", "self", ".", "_itemTag", ",", "itemType", "=", "self", ".", "_statusDict", "[", "name", "]", "self", ".", "_progress", ".", "startItem", "(", "itemType", ")", "elif", "name", "==", "self", ".", "_itemTag", ":", "self", ".", "_error", "=", "False", "self", ".", "_progress", ".", "newItem", "(", ")", "try", ":", "if", "self", ".", "_context", "==", "'root'", ":", "if", "name", "==", "'xtvd'", ":", "self", ".", "_context", "=", "'xtvd'", "self", ".", "_startXTVDNode", "(", "name", ",", "attrs", ")", "elif", "self", ".", "_context", "==", "'xtvd'", ":", "self", ".", "_context", "=", "name", "elif", "self", ".", "_context", "==", "'stations'", ":", "self", ".", "_startStationsNode", "(", "name", ",", "attrs", ")", "elif", "self", ".", "_context", "==", "'lineups'", ":", "self", ".", "_startLineupsNode", "(", "name", ",", "attrs", ")", "elif", "self", ".", "_context", "==", "'schedules'", ":", "self", ".", "_startSchedulesNode", "(", "name", ",", "attrs", ")", "elif", "self", ".", "_context", "==", "'programs'", ":", "self", ".", "_startProgramsNode", "(", "name", ",", "attrs", ")", "elif", "self", ".", "_context", "==", "'productionCrew'", ":", "self", ".", "_startProductionCrewNode", "(", "name", ",", "attrs", ")", "elif", "self", ".", "_context", "==", "'genres'", ":", "self", ".", "_startGenresNode", "(", "name", ",", "attrs", ")", "except", "Exception", ",", "e", ":", "self", ".", "_error", "=", "True", "self", ".", "_progress", ".", "printMsg", "(", "str", "(", "e", ")", ",", "error", "=", "True", ")" ]
39.314286
11.171429
def send_email(name, ctx_dict, send_to=None, subject=u'Subject', **kwargs):
    """
    Shortcut function for EmailFromTemplate class

    @return: None
    """
    template_mail = EmailFromTemplate(name=name)
    template_mail.context = ctx_dict
    template_mail.subject = subject
    # Resolve the template object, render it, then dispatch the message.
    template_mail.get_object()
    template_mail.render_message()
    template_mail.send_email(send_to=send_to, **kwargs)
[ "def", "send_email", "(", "name", ",", "ctx_dict", ",", "send_to", "=", "None", ",", "subject", "=", "u'Subject'", ",", "*", "*", "kwargs", ")", ":", "eft", "=", "EmailFromTemplate", "(", "name", "=", "name", ")", "eft", ".", "subject", "=", "subject", "eft", ".", "context", "=", "ctx_dict", "eft", ".", "get_object", "(", ")", "eft", ".", "render_message", "(", ")", "eft", ".", "send_email", "(", "send_to", "=", "send_to", ",", "*", "*", "kwargs", ")" ]
25.615385
16.846154
def request_cached_property(func):
    """Make the given method a per-request cached property.

    This caches the value on the request context rather than on the object
    itself, preventing problems if the object gets reused across multiple
    requests.
    """
    @property
    @functools.wraps(func)
    def wrapped(self):
        # UNDEFINED is a sentinel distinguishing "not cached yet" from a
        # legitimately cached falsy value (including None).
        hit = context.get_for_view(self, func.__name__, UNDEFINED)
        if hit is UNDEFINED:
            hit = func(self)
            context.set_for_view(self, func.__name__, hit)
        return hit
    return wrapped
[ "def", "request_cached_property", "(", "func", ")", ":", "@", "property", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapped", "(", "self", ")", ":", "cached_value", "=", "context", ".", "get_for_view", "(", "self", ",", "func", ".", "__name__", ",", "UNDEFINED", ")", "if", "cached_value", "is", "not", "UNDEFINED", ":", "return", "cached_value", "value", "=", "func", "(", "self", ")", "context", ".", "set_for_view", "(", "self", ",", "func", ".", "__name__", ",", "value", ")", "return", "value", "return", "wrapped" ]
29.4
21.8
def get_chunk(self):
    """Return complete chunks or None if EOF reached.

    Reads from ``self.input_stream`` until ``self.chunk_size`` characters
    are buffered, then returns them. At end of stream the final (possibly
    short) chunk is returned once; every later call returns ``None``.
    """
    while not self._eof_reached:
        wanted = self.chunk_size - len(self._partial_chunk)
        piece = self.input_stream.read(wanted)
        if not piece:
            self._eof_reached = True
        self._partial_chunk += piece
        have_full_chunk = len(self._partial_chunk) == self.chunk_size
        if have_full_chunk or self._eof_reached:
            chunk, self._partial_chunk = self._partial_chunk, ""
            return chunk
[ "def", "get_chunk", "(", "self", ")", ":", "while", "not", "self", ".", "_eof_reached", ":", "read", "=", "self", ".", "input_stream", ".", "read", "(", "self", ".", "chunk_size", "-", "len", "(", "self", ".", "_partial_chunk", ")", ")", "if", "len", "(", "read", ")", "==", "0", ":", "self", ".", "_eof_reached", "=", "True", "self", ".", "_partial_chunk", "+=", "read", "if", "len", "(", "self", ".", "_partial_chunk", ")", "==", "self", ".", "chunk_size", "or", "self", ".", "_eof_reached", ":", "chunk", "=", "self", ".", "_partial_chunk", "self", ".", "_partial_chunk", "=", "\"\"", "return", "chunk" ]
45.454545
12.272727
def build(cls, name_dict, use_printable=False):
    """
    Creates a Name object from a dict of unicode string keys and values.
    The keys should be from NameType._map, or a dotted-integer OID
    unicode string.

    :param name_dict:
        A dict of name information, e.g. {"common_name": "Will Bond",
        "country_name": "US", "organization": "Codex Non Sufficit LC"}

    :param use_printable:
        A bool - if PrintableString should be used for encoding instead
        of UTF8String. This is for backwards compatibility with old
        software.

    :return:
        An x509.Name object
    """
    if use_printable:
        encoding_name = 'printable_string'
        encoding_class = PrintableString
    else:
        encoding_name = 'utf8_string'
        encoding_class = UTF8String

    # Sort the attributes according to NameType.preferred_order
    ordered = OrderedDict(sorted(
        name_dict.items(),
        key=lambda item: NameType.preferred_ordinal(item[0])
    ))

    # These attribute types are always PrintableString, regardless of the
    # requested encoding.
    always_printable = set(['dn_qualifier', 'country_name', 'serial_number'])

    rdns = []
    for attr_name, attr_value in ordered.items():
        attr_name = NameType.map(attr_name)
        if attr_name == 'email_address':
            value = EmailAddress(attr_value)
        elif attr_name == 'domain_component':
            value = DNSName(attr_value)
        elif attr_name in always_printable:
            value = DirectoryString(
                name='printable_string',
                value=PrintableString(attr_value)
            )
        else:
            value = DirectoryString(
                name=encoding_name,
                value=encoding_class(attr_value)
            )
        rdns.append(RelativeDistinguishedName([
            NameTypeAndValue({
                'type': attr_name,
                'value': value
            })
        ]))

    return cls(name='', value=RDNSequence(rdns))
[ "def", "build", "(", "cls", ",", "name_dict", ",", "use_printable", "=", "False", ")", ":", "rdns", "=", "[", "]", "if", "not", "use_printable", ":", "encoding_name", "=", "'utf8_string'", "encoding_class", "=", "UTF8String", "else", ":", "encoding_name", "=", "'printable_string'", "encoding_class", "=", "PrintableString", "# Sort the attributes according to NameType.preferred_order", "name_dict", "=", "OrderedDict", "(", "sorted", "(", "name_dict", ".", "items", "(", ")", ",", "key", "=", "lambda", "item", ":", "NameType", ".", "preferred_ordinal", "(", "item", "[", "0", "]", ")", ")", ")", "for", "attribute_name", ",", "attribute_value", "in", "name_dict", ".", "items", "(", ")", ":", "attribute_name", "=", "NameType", ".", "map", "(", "attribute_name", ")", "if", "attribute_name", "==", "'email_address'", ":", "value", "=", "EmailAddress", "(", "attribute_value", ")", "elif", "attribute_name", "==", "'domain_component'", ":", "value", "=", "DNSName", "(", "attribute_value", ")", "elif", "attribute_name", "in", "set", "(", "[", "'dn_qualifier'", ",", "'country_name'", ",", "'serial_number'", "]", ")", ":", "value", "=", "DirectoryString", "(", "name", "=", "'printable_string'", ",", "value", "=", "PrintableString", "(", "attribute_value", ")", ")", "else", ":", "value", "=", "DirectoryString", "(", "name", "=", "encoding_name", ",", "value", "=", "encoding_class", "(", "attribute_value", ")", ")", "rdns", ".", "append", "(", "RelativeDistinguishedName", "(", "[", "NameTypeAndValue", "(", "{", "'type'", ":", "attribute_name", ",", "'value'", ":", "value", "}", ")", "]", ")", ")", "return", "cls", "(", "name", "=", "''", ",", "value", "=", "RDNSequence", "(", "rdns", ")", ")" ]
35.677966
19.983051
def logToConsole(level=logging.INFO):
    """
    Create a log handler that logs to the console.
    """
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s %(message)s'))

    root = logging.getLogger()
    root.setLevel(level)
    # Drop any previously installed plain StreamHandlers so repeated
    # calls do not produce duplicate console output.
    root.handlers = [
        h for h in root.handlers
        if type(h) is not logging.StreamHandler]
    root.addHandler(console_handler)
[ "def", "logToConsole", "(", "level", "=", "logging", ".", "INFO", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", ")", "logger", ".", "setLevel", "(", "level", ")", "formatter", "=", "logging", ".", "Formatter", "(", "'%(asctime)s %(name)s %(levelname)s %(message)s'", ")", "handler", "=", "logging", ".", "StreamHandler", "(", ")", "handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "handlers", "=", "[", "h", "for", "h", "in", "logger", ".", "handlers", "if", "type", "(", "h", ")", "is", "not", "logging", ".", "StreamHandler", "]", "logger", ".", "addHandler", "(", "handler", ")" ]
32.642857
7.642857
def from_tfs(klass, tfs_project, labor_hours=True):
    """
    Creates CodeGovProject object from TFS/VSTS/AzureDevOps Instance

    :param klass: the project class to instantiate; called as a factory,
        so this behaves like an alternate constructor.
    :param tfs_project: TFS project wrapper exposing ``projectInfo``,
        ``projectCreateInfo`` and ``projectLastUpdateInfo``.
    :param labor_hours: when True, labor-hour calculation would run from
        the repository URL, but it is currently unsupported (see the
        commented-out call below).
        NOTE(review): when True, ``project['laborHours']`` is never
        assigned at all -- confirm downstream consumers tolerate the
        missing key.
    :return: the populated project instance.
    """
    project = klass()
    project_web_url = ''

    # -- REQUIRED FIELDS --
    project['name'] = tfs_project.projectInfo.name
    if 'web' in tfs_project.projectInfo._links.additional_properties:
        if 'href' in tfs_project.projectInfo._links.additional_properties['web']:
            # URL Encodes spaces that are in the Project Name for the Project Web URL
            project_web_url = requote_uri(tfs_project.projectInfo._links.additional_properties['web']['href'])
    project['repositoryURL'] = project_web_url
    project['homepageURL'] = project_web_url
    project['description'] = tfs_project.projectInfo.description
    project['vcs'] = 'TFS/AzureDevOps'
    project['permissions']['license'] = None
    project['tags'] = []
    if labor_hours:
        logger.debug('Sorry labor hour calculation not currently supported.')
        # project['laborHours'] = labor_hours_from_url(project['repositoryURL'])
    else:
        project['laborHours'] = 0

    # Choose the usage-type exemption based on project age.
    # NOTE(review): the creation timestamp is read from
    # projectCreateInfo.last_update_time -- confirm that field really is
    # the creation time despite its name.
    if tfs_project.projectCreateInfo.last_update_time < POLICY_START_DATE:
        project['permissions']['usageType'] = 'exemptByPolicyDate'
    else:
        project['permissions']['usageType'] = 'exemptByAgencyMission'
    project['permissions']['exemptionText'] = 'This source code resides on a private server and has not been properly evaluated for releaseability.'

    project['contact'] = {
        'email': '',
        'URL': project_web_url
    }

    project['date'] = {
        'lastModified': tfs_project.projectLastUpdateInfo.last_update_time.date().isoformat(),
        'created': tfs_project.projectCreateInfo.last_update_time.date().isoformat(),
        'metadataLastUpdated': ''
    }

    # Presumably strips null/empty-string leaf values before returning;
    # verify against _prune_dict_null_str's definition.
    _prune_dict_null_str(project)

    return project
[ "def", "from_tfs", "(", "klass", ",", "tfs_project", ",", "labor_hours", "=", "True", ")", ":", "project", "=", "klass", "(", ")", "project_web_url", "=", "''", "# -- REQUIRED FIELDS --", "project", "[", "'name'", "]", "=", "tfs_project", ".", "projectInfo", ".", "name", "if", "'web'", "in", "tfs_project", ".", "projectInfo", ".", "_links", ".", "additional_properties", ":", "if", "'href'", "in", "tfs_project", ".", "projectInfo", ".", "_links", ".", "additional_properties", "[", "'web'", "]", ":", "# URL Encodes spaces that are in the Project Name for the Project Web URL", "project_web_url", "=", "requote_uri", "(", "tfs_project", ".", "projectInfo", ".", "_links", ".", "additional_properties", "[", "'web'", "]", "[", "'href'", "]", ")", "project", "[", "'repositoryURL'", "]", "=", "project_web_url", "project", "[", "'homepageURL'", "]", "=", "project_web_url", "project", "[", "'description'", "]", "=", "tfs_project", ".", "projectInfo", ".", "description", "project", "[", "'vcs'", "]", "=", "'TFS/AzureDevOps'", "project", "[", "'permissions'", "]", "[", "'license'", "]", "=", "None", "project", "[", "'tags'", "]", "=", "[", "]", "if", "labor_hours", ":", "logger", ".", "debug", "(", "'Sorry labor hour calculation not currently supported.'", ")", "# project['laborHours'] = labor_hours_from_url(project['repositoryURL'])", "else", ":", "project", "[", "'laborHours'", "]", "=", "0", "if", "tfs_project", ".", "projectCreateInfo", ".", "last_update_time", "<", "POLICY_START_DATE", ":", "project", "[", "'permissions'", "]", "[", "'usageType'", "]", "=", "'exemptByPolicyDate'", "else", ":", "project", "[", "'permissions'", "]", "[", "'usageType'", "]", "=", "'exemptByAgencyMission'", "project", "[", "'permissions'", "]", "[", "'exemptionText'", "]", "=", "'This source code resides on a private server and has not been properly evaluated for releaseability.'", "project", "[", "'contact'", "]", "=", "{", "'email'", ":", "''", ",", "'URL'", ":", "project_web_url", "}", 
"project", "[", "'date'", "]", "=", "{", "'lastModified'", ":", "tfs_project", ".", "projectLastUpdateInfo", ".", "last_update_time", ".", "date", "(", ")", ".", "isoformat", "(", ")", ",", "'created'", ":", "tfs_project", ".", "projectCreateInfo", ".", "last_update_time", ".", "date", "(", ")", ".", "isoformat", "(", ")", ",", "'metadataLastUpdated'", ":", "''", "}", "_prune_dict_null_str", "(", "project", ")", "return", "project" ]
37.207547
28.981132
def _escape_argspec(obj, iterable, escape):
    """Helper for various string-wrapped functions."""
    # Only string-like values (or objects exposing __html__) are escaped
    # and written into ``obj``; any other value is skipped entirely.
    for key, value in iterable:
        escapable = hasattr(value, '__html__') or isinstance(value, string_types)
        if escapable:
            obj[key] = escape(value)
    return obj
[ "def", "_escape_argspec", "(", "obj", ",", "iterable", ",", "escape", ")", ":", "for", "key", ",", "value", "in", "iterable", ":", "if", "hasattr", "(", "value", ",", "'__html__'", ")", "or", "isinstance", "(", "value", ",", "string_types", ")", ":", "obj", "[", "key", "]", "=", "escape", "(", "value", ")", "return", "obj" ]
41.833333
12.5
def processEscalatedException(self, ex):
    """
    Process an exception escalated from a Replica
    """
    # Only SuspiciousNode escalations are handled; anything else is a
    # programming error and is re-raised with the original as its cause.
    if not isinstance(ex, SuspiciousNode):
        raise RuntimeError("unhandled replica-escalated exception") from ex
    self.reportSuspiciousNodeEx(ex)
[ "def", "processEscalatedException", "(", "self", ",", "ex", ")", ":", "if", "isinstance", "(", "ex", ",", "SuspiciousNode", ")", ":", "self", ".", "reportSuspiciousNodeEx", "(", "ex", ")", "else", ":", "raise", "RuntimeError", "(", "\"unhandled replica-escalated exception\"", ")", "from", "ex" ]
36.5
10.5
def _req(self, url, method='GET', **kw):
    '''Make request and convert JSON response to python objects'''
    if method == 'POST':
        send = requests.post
    else:
        send = requests.get

    try:
        resp = send(
            url,
            headers=self._token_header(),
            timeout=self.settings['timeout'],
            **kw)
    except requests.exceptions.Timeout:
        raise ApiError('Request timed out (%s seconds)' % self.settings['timeout'])

    # The API must answer with JSON carrying status == 'ok'.
    try:
        payload = resp.json()
    except ValueError:
        raise ApiError('Received not JSON response from API')

    if payload.get('status') != 'ok':
        raise ApiError('API error: received unexpected json from API: %s' % payload)
    return payload
[ "def", "_req", "(", "self", ",", "url", ",", "method", "=", "'GET'", ",", "*", "*", "kw", ")", ":", "send", "=", "requests", ".", "post", "if", "method", "==", "'POST'", "else", "requests", ".", "get", "try", ":", "r", "=", "send", "(", "url", ",", "headers", "=", "self", ".", "_token_header", "(", ")", ",", "timeout", "=", "self", ".", "settings", "[", "'timeout'", "]", ",", "*", "*", "kw", ")", "except", "requests", ".", "exceptions", ".", "Timeout", ":", "raise", "ApiError", "(", "'Request timed out (%s seconds)'", "%", "self", ".", "settings", "[", "'timeout'", "]", ")", "try", ":", "json", "=", "r", ".", "json", "(", ")", "except", "ValueError", ":", "raise", "ApiError", "(", "'Received not JSON response from API'", ")", "if", "json", ".", "get", "(", "'status'", ")", "!=", "'ok'", ":", "raise", "ApiError", "(", "'API error: received unexpected json from API: %s'", "%", "json", ")", "return", "json" ]
41.444444
19.666667
def add_directories(names):
    """Git/Mercurial/zip files omit directories, let's add them back."""
    result = list(names)
    known = set(names)
    for entry in names:
        # Walk up the ancestry, stopping at the filesystem root or at an
        # ancestor that was already recorded.
        parent = os.path.dirname(entry)
        while parent and parent not in known:
            result.append(parent)
            known.add(parent)
            parent = os.path.dirname(parent)
    return sorted(result)
[ "def", "add_directories", "(", "names", ")", ":", "res", "=", "list", "(", "names", ")", "seen", "=", "set", "(", "names", ")", "for", "name", "in", "names", ":", "while", "True", ":", "name", "=", "os", ".", "path", ".", "dirname", "(", "name", ")", "if", "not", "name", "or", "name", "in", "seen", ":", "break", "res", ".", "append", "(", "name", ")", "seen", ".", "add", "(", "name", ")", "return", "sorted", "(", "res", ")" ]
29.916667
12.75
def isxmap(xmethod, opt):
    """Return ``isxmap`` argument for ``.IterStatsConfig`` initialiser.
    """
    # Per-solver mapping of iteration-stats display names to field names.
    admm_fields = {'XPrRsdl': 'PrimalRsdl',
                   'XDlRsdl': 'DualRsdl',
                   'XRho': 'Rho'}
    fista_fields = {'X_F_Btrack': 'F_Btrack',
                    'X_Q_Btrack': 'Q_Btrack',
                    'X_ItBt': 'IterBTrack',
                    'X_L': 'L',
                    'X_Rsdl': 'Rsdl'}
    isx = dict(admm_fields if xmethod == 'admm' else fista_fields)
    # With inexact functional evaluation, fold in the evaluation map too.
    if not opt['AccurateDFid']:
        isx.update(evlmap(True))
    return isx
[ "def", "isxmap", "(", "xmethod", ",", "opt", ")", ":", "if", "xmethod", "==", "'admm'", ":", "isx", "=", "{", "'XPrRsdl'", ":", "'PrimalRsdl'", ",", "'XDlRsdl'", ":", "'DualRsdl'", ",", "'XRho'", ":", "'Rho'", "}", "else", ":", "isx", "=", "{", "'X_F_Btrack'", ":", "'F_Btrack'", ",", "'X_Q_Btrack'", ":", "'Q_Btrack'", ",", "'X_ItBt'", ":", "'IterBTrack'", ",", "'X_L'", ":", "'L'", ",", "'X_Rsdl'", ":", "'Rsdl'", "}", "if", "not", "opt", "[", "'AccurateDFid'", "]", ":", "isx", ".", "update", "(", "evlmap", "(", "True", ")", ")", "return", "isx" ]
33.769231
17.769231
def generate_apiary_doc(task_router):
    """Generate apiary documentation.

    Create a Apiary generator and add application packages to it.

    :param task_router: task router, injected
    :type task_router: TaskRouter
    :return: apiary generator
    :rtype: ApiaryDoc
    """
    generator = ApiaryDoc()
    package_names = task_router.get_task_packages() + get_method_packages()
    for package_name in package_names:
        module = importlib.import_module(package_name)
        generator.docmodule(module)
    return generator
[ "def", "generate_apiary_doc", "(", "task_router", ")", ":", "generator", "=", "ApiaryDoc", "(", ")", "for", "m", "in", "task_router", ".", "get_task_packages", "(", ")", "+", "get_method_packages", "(", ")", ":", "m", "=", "importlib", ".", "import_module", "(", "m", ")", "generator", ".", "docmodule", "(", "m", ")", "return", "generator" ]
26.941176
17.882353
def _mmult(self, a, b): """ Returns the 3x3 matrix multiplication of A and B. Note that scale(), translate(), rotate() work with premultiplication, e.g. the matrix A followed by B = BA and not AB. """ # No need to optimize (C version is just as fast). return [ a[0] * b[0] + a[1] * b[3], a[0] * b[1] + a[1] * b[4], 0, a[3] * b[0] + a[4] * b[3], a[3] * b[1] + a[4] * b[4], 0, a[6] * b[0] + a[7] * b[3] + b[6], a[6] * b[1] + a[7] * b[4] + b[7], 1 ]
[ "def", "_mmult", "(", "self", ",", "a", ",", "b", ")", ":", "# No need to optimize (C version is just as fast).\r", "return", "[", "a", "[", "0", "]", "*", "b", "[", "0", "]", "+", "a", "[", "1", "]", "*", "b", "[", "3", "]", ",", "a", "[", "0", "]", "*", "b", "[", "1", "]", "+", "a", "[", "1", "]", "*", "b", "[", "4", "]", ",", "0", ",", "a", "[", "3", "]", "*", "b", "[", "0", "]", "+", "a", "[", "4", "]", "*", "b", "[", "3", "]", ",", "a", "[", "3", "]", "*", "b", "[", "1", "]", "+", "a", "[", "4", "]", "*", "b", "[", "4", "]", ",", "0", ",", "a", "[", "6", "]", "*", "b", "[", "0", "]", "+", "a", "[", "7", "]", "*", "b", "[", "3", "]", "+", "b", "[", "6", "]", ",", "a", "[", "6", "]", "*", "b", "[", "1", "]", "+", "a", "[", "7", "]", "*", "b", "[", "4", "]", "+", "b", "[", "7", "]", ",", "1", "]" ]
36.352941
14.352941
def _init_actions(self, create_standard_actions): """ Init context menu action """ menu_advanced = QtWidgets.QMenu(_('Advanced')) self.add_menu(menu_advanced) self._sub_menus = { 'Advanced': menu_advanced } if create_standard_actions: # Undo action = QtWidgets.QAction(_('Undo'), self) action.setShortcut('Ctrl+Z') action.setIcon(icons.icon( 'edit-undo', ':/pyqode-icons/rc/edit-undo.png', 'fa.undo')) action.triggered.connect(self.undo) self.undoAvailable.connect(action.setVisible) action.setVisible(False) self.add_action(action, sub_menu=None) self.action_undo = action # Redo action = QtWidgets.QAction(_('Redo'), self) action.setShortcut('Ctrl+Y') action.setIcon(icons.icon( 'edit-redo', ':/pyqode-icons/rc/edit-redo.png', 'fa.repeat')) action.triggered.connect(self.redo) self.redoAvailable.connect(action.setVisible) action.setVisible(False) self.add_action(action, sub_menu=None) self.action_redo = action # Copy action = QtWidgets.QAction(_('Copy'), self) action.setShortcut(QtGui.QKeySequence.Copy) action.setIcon(icons.icon( 'edit-copy', ':/pyqode-icons/rc/edit-copy.png', 'fa.copy')) action.triggered.connect(self.copy) self.add_action(action, sub_menu=None) self.action_copy = action # cut action = QtWidgets.QAction(_('Cut'), self) action.setShortcut(QtGui.QKeySequence.Cut) action.setIcon(icons.icon( 'edit-cut', ':/pyqode-icons/rc/edit-cut.png', 'fa.cut')) action.triggered.connect(self.cut) self.add_action(action, sub_menu=None) self.action_cut = action # paste action = QtWidgets.QAction(_('Paste'), self) action.setShortcut(QtGui.QKeySequence.Paste) action.setIcon(icons.icon( 'edit-paste', ':/pyqode-icons/rc/edit-paste.png', 'fa.paste')) action.triggered.connect(self.paste) self.add_action(action, sub_menu=None) self.action_paste = action # duplicate line action = QtWidgets.QAction(_('Duplicate line'), self) action.setShortcut('Ctrl+D') action.triggered.connect(self.duplicate_line) self.add_action(action, sub_menu=None) self.action_duplicate_line = action # swap line up 
action = QtWidgets.QAction(_('Swap line up'), self) action.setShortcut("Alt++") action.triggered.connect(self.swapLineUp) self.add_action(action, sub_menu=None) self.action_swap_line_up = action # swap line down action = QtWidgets.QAction(_('Swap line down'), self) action.setShortcut("Alt+-") action.triggered.connect(self.swapLineDown) self.add_action(action, sub_menu=None) self.action_swap_line_down = action # select all action = QtWidgets.QAction(_('Select all'), self) action.setShortcut(QtGui.QKeySequence.SelectAll) action.triggered.connect(self.selectAll) self.action_select_all = action self.add_action(self.action_select_all, sub_menu=None) self.add_separator(sub_menu=None) if create_standard_actions: # indent action = QtWidgets.QAction(_('Indent'), self) action.setShortcut('Tab') action.setIcon(icons.icon( 'format-indent-more', ':/pyqode-icons/rc/format-indent-more.png', 'fa.indent')) action.triggered.connect(self.indent) self.add_action(action) self.action_indent = action # unindent action = QtWidgets.QAction(_('Un-indent'), self) action.setShortcut('Shift+Tab') action.setIcon(icons.icon( 'format-indent-less', ':/pyqode-icons/rc/format-indent-less.png', 'fa.dedent')) action.triggered.connect(self.un_indent) self.add_action(action) self.action_un_indent = action self.add_separator() # goto action = QtWidgets.QAction(_('Go to line'), self) action.setShortcut('Ctrl+G') action.setIcon(icons.icon( 'go-jump', ':/pyqode-icons/rc/goto-line.png', 'fa.share')) action.triggered.connect(self.goto_line) self.add_action(action) self.action_goto_line = action
[ "def", "_init_actions", "(", "self", ",", "create_standard_actions", ")", ":", "menu_advanced", "=", "QtWidgets", ".", "QMenu", "(", "_", "(", "'Advanced'", ")", ")", "self", ".", "add_menu", "(", "menu_advanced", ")", "self", ".", "_sub_menus", "=", "{", "'Advanced'", ":", "menu_advanced", "}", "if", "create_standard_actions", ":", "# Undo", "action", "=", "QtWidgets", ".", "QAction", "(", "_", "(", "'Undo'", ")", ",", "self", ")", "action", ".", "setShortcut", "(", "'Ctrl+Z'", ")", "action", ".", "setIcon", "(", "icons", ".", "icon", "(", "'edit-undo'", ",", "':/pyqode-icons/rc/edit-undo.png'", ",", "'fa.undo'", ")", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "undo", ")", "self", ".", "undoAvailable", ".", "connect", "(", "action", ".", "setVisible", ")", "action", ".", "setVisible", "(", "False", ")", "self", ".", "add_action", "(", "action", ",", "sub_menu", "=", "None", ")", "self", ".", "action_undo", "=", "action", "# Redo", "action", "=", "QtWidgets", ".", "QAction", "(", "_", "(", "'Redo'", ")", ",", "self", ")", "action", ".", "setShortcut", "(", "'Ctrl+Y'", ")", "action", ".", "setIcon", "(", "icons", ".", "icon", "(", "'edit-redo'", ",", "':/pyqode-icons/rc/edit-redo.png'", ",", "'fa.repeat'", ")", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "redo", ")", "self", ".", "redoAvailable", ".", "connect", "(", "action", ".", "setVisible", ")", "action", ".", "setVisible", "(", "False", ")", "self", ".", "add_action", "(", "action", ",", "sub_menu", "=", "None", ")", "self", ".", "action_redo", "=", "action", "# Copy", "action", "=", "QtWidgets", ".", "QAction", "(", "_", "(", "'Copy'", ")", ",", "self", ")", "action", ".", "setShortcut", "(", "QtGui", ".", "QKeySequence", ".", "Copy", ")", "action", ".", "setIcon", "(", "icons", ".", "icon", "(", "'edit-copy'", ",", "':/pyqode-icons/rc/edit-copy.png'", ",", "'fa.copy'", ")", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "copy", ")", "self", 
".", "add_action", "(", "action", ",", "sub_menu", "=", "None", ")", "self", ".", "action_copy", "=", "action", "# cut", "action", "=", "QtWidgets", ".", "QAction", "(", "_", "(", "'Cut'", ")", ",", "self", ")", "action", ".", "setShortcut", "(", "QtGui", ".", "QKeySequence", ".", "Cut", ")", "action", ".", "setIcon", "(", "icons", ".", "icon", "(", "'edit-cut'", ",", "':/pyqode-icons/rc/edit-cut.png'", ",", "'fa.cut'", ")", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "cut", ")", "self", ".", "add_action", "(", "action", ",", "sub_menu", "=", "None", ")", "self", ".", "action_cut", "=", "action", "# paste", "action", "=", "QtWidgets", ".", "QAction", "(", "_", "(", "'Paste'", ")", ",", "self", ")", "action", ".", "setShortcut", "(", "QtGui", ".", "QKeySequence", ".", "Paste", ")", "action", ".", "setIcon", "(", "icons", ".", "icon", "(", "'edit-paste'", ",", "':/pyqode-icons/rc/edit-paste.png'", ",", "'fa.paste'", ")", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "paste", ")", "self", ".", "add_action", "(", "action", ",", "sub_menu", "=", "None", ")", "self", ".", "action_paste", "=", "action", "# duplicate line", "action", "=", "QtWidgets", ".", "QAction", "(", "_", "(", "'Duplicate line'", ")", ",", "self", ")", "action", ".", "setShortcut", "(", "'Ctrl+D'", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "duplicate_line", ")", "self", ".", "add_action", "(", "action", ",", "sub_menu", "=", "None", ")", "self", ".", "action_duplicate_line", "=", "action", "# swap line up", "action", "=", "QtWidgets", ".", "QAction", "(", "_", "(", "'Swap line up'", ")", ",", "self", ")", "action", ".", "setShortcut", "(", "\"Alt++\"", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "swapLineUp", ")", "self", ".", "add_action", "(", "action", ",", "sub_menu", "=", "None", ")", "self", ".", "action_swap_line_up", "=", "action", "# swap line down", "action", "=", "QtWidgets", ".", "QAction", "(", 
"_", "(", "'Swap line down'", ")", ",", "self", ")", "action", ".", "setShortcut", "(", "\"Alt+-\"", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "swapLineDown", ")", "self", ".", "add_action", "(", "action", ",", "sub_menu", "=", "None", ")", "self", ".", "action_swap_line_down", "=", "action", "# select all", "action", "=", "QtWidgets", ".", "QAction", "(", "_", "(", "'Select all'", ")", ",", "self", ")", "action", ".", "setShortcut", "(", "QtGui", ".", "QKeySequence", ".", "SelectAll", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "selectAll", ")", "self", ".", "action_select_all", "=", "action", "self", ".", "add_action", "(", "self", ".", "action_select_all", ",", "sub_menu", "=", "None", ")", "self", ".", "add_separator", "(", "sub_menu", "=", "None", ")", "if", "create_standard_actions", ":", "# indent", "action", "=", "QtWidgets", ".", "QAction", "(", "_", "(", "'Indent'", ")", ",", "self", ")", "action", ".", "setShortcut", "(", "'Tab'", ")", "action", ".", "setIcon", "(", "icons", ".", "icon", "(", "'format-indent-more'", ",", "':/pyqode-icons/rc/format-indent-more.png'", ",", "'fa.indent'", ")", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "indent", ")", "self", ".", "add_action", "(", "action", ")", "self", ".", "action_indent", "=", "action", "# unindent", "action", "=", "QtWidgets", ".", "QAction", "(", "_", "(", "'Un-indent'", ")", ",", "self", ")", "action", ".", "setShortcut", "(", "'Shift+Tab'", ")", "action", ".", "setIcon", "(", "icons", ".", "icon", "(", "'format-indent-less'", ",", "':/pyqode-icons/rc/format-indent-less.png'", ",", "'fa.dedent'", ")", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "un_indent", ")", "self", ".", "add_action", "(", "action", ")", "self", ".", "action_un_indent", "=", "action", "self", ".", "add_separator", "(", ")", "# goto", "action", "=", "QtWidgets", ".", "QAction", "(", "_", "(", "'Go to line'", ")", ",", "self", ")", 
"action", ".", "setShortcut", "(", "'Ctrl+G'", ")", "action", ".", "setIcon", "(", "icons", ".", "icon", "(", "'go-jump'", ",", "':/pyqode-icons/rc/goto-line.png'", ",", "'fa.share'", ")", ")", "action", ".", "triggered", ".", "connect", "(", "self", ".", "goto_line", ")", "self", ".", "add_action", "(", "action", ")", "self", ".", "action_goto_line", "=", "action" ]
43.537736
11.424528
def sha_hash(self) -> str: """ Return uppercase hex sha256 hash from signed raw document :return: """ return hashlib.sha256(self.signed_raw().encode("ascii")).hexdigest().upper()
[ "def", "sha_hash", "(", "self", ")", "->", "str", ":", "return", "hashlib", ".", "sha256", "(", "self", ".", "signed_raw", "(", ")", ".", "encode", "(", "\"ascii\"", ")", ")", ".", "hexdigest", "(", ")", ".", "upper", "(", ")" ]
30.428571
21
def create_output_directories(self): """Create output directories for thumbnails and original images.""" check_or_create_dir(self.dst_path) if self.medias: check_or_create_dir(join(self.dst_path, self.settings['thumb_dir'])) if self.medias and self.settings['keep_orig']: self.orig_path = join(self.dst_path, self.settings['orig_dir']) check_or_create_dir(self.orig_path)
[ "def", "create_output_directories", "(", "self", ")", ":", "check_or_create_dir", "(", "self", ".", "dst_path", ")", "if", "self", ".", "medias", ":", "check_or_create_dir", "(", "join", "(", "self", ".", "dst_path", ",", "self", ".", "settings", "[", "'thumb_dir'", "]", ")", ")", "if", "self", ".", "medias", "and", "self", ".", "settings", "[", "'keep_orig'", "]", ":", "self", ".", "orig_path", "=", "join", "(", "self", ".", "dst_path", ",", "self", ".", "settings", "[", "'orig_dir'", "]", ")", "check_or_create_dir", "(", "self", ".", "orig_path", ")" ]
42.545455
17.727273
def _get_pull_requests(self): """ Gets all pull requests from the repo since we can't do a filtered date merged search """ for pull in self.repo.pull_requests( state="closed", base=self.github_info["master_branch"], direction="asc" ): if self._include_pull_request(pull): yield pull
[ "def", "_get_pull_requests", "(", "self", ")", ":", "for", "pull", "in", "self", ".", "repo", ".", "pull_requests", "(", "state", "=", "\"closed\"", ",", "base", "=", "self", ".", "github_info", "[", "\"master_branch\"", "]", ",", "direction", "=", "\"asc\"", ")", ":", "if", "self", ".", "_include_pull_request", "(", "pull", ")", ":", "yield", "pull" ]
43.375
13.75
def deserialize(stream_or_string, **options): ''' Deserialize any string or stream like object into a Python data structure. :param stream_or_string: stream or string to deserialize. :param options: options given to lower configparser module. ''' if six.PY3: cp = configparser.ConfigParser(**options) else: cp = configparser.SafeConfigParser(**options) try: if not isinstance(stream_or_string, (bytes, six.string_types)): if six.PY3: cp.read_file(stream_or_string) else: cp.readfp(stream_or_string) else: if six.PY3: cp.read_file(six.moves.StringIO(stream_or_string)) else: # python2's ConfigParser cannot parse a config from a string cp.readfp(six.moves.StringIO(stream_or_string)) data = {} for section_name in cp.sections(): section = {} for k, v in cp.items(section_name): section[k] = v data[section_name] = section return data except Exception as error: raise DeserializationError(error)
[ "def", "deserialize", "(", "stream_or_string", ",", "*", "*", "options", ")", ":", "if", "six", ".", "PY3", ":", "cp", "=", "configparser", ".", "ConfigParser", "(", "*", "*", "options", ")", "else", ":", "cp", "=", "configparser", ".", "SafeConfigParser", "(", "*", "*", "options", ")", "try", ":", "if", "not", "isinstance", "(", "stream_or_string", ",", "(", "bytes", ",", "six", ".", "string_types", ")", ")", ":", "if", "six", ".", "PY3", ":", "cp", ".", "read_file", "(", "stream_or_string", ")", "else", ":", "cp", ".", "readfp", "(", "stream_or_string", ")", "else", ":", "if", "six", ".", "PY3", ":", "cp", ".", "read_file", "(", "six", ".", "moves", ".", "StringIO", "(", "stream_or_string", ")", ")", "else", ":", "# python2's ConfigParser cannot parse a config from a string", "cp", ".", "readfp", "(", "six", ".", "moves", ".", "StringIO", "(", "stream_or_string", ")", ")", "data", "=", "{", "}", "for", "section_name", "in", "cp", ".", "sections", "(", ")", ":", "section", "=", "{", "}", "for", "k", ",", "v", "in", "cp", ".", "items", "(", "section_name", ")", ":", "section", "[", "k", "]", "=", "v", "data", "[", "section_name", "]", "=", "section", "return", "data", "except", "Exception", "as", "error", ":", "raise", "DeserializationError", "(", "error", ")" ]
33.617647
20.735294
def config(self): """Implements Munin Plugin Graph Configuration. Prints out configuration for graphs. Use as is. Not required to be overwritten in child classes. The plugin will work correctly as long as the Munin Graph objects have been populated. """ for parent_name in self._graphNames: graph = self._graphDict[parent_name] if self.isMultigraph: print "multigraph %s" % self._getMultigraphID(parent_name) print self._formatConfig(graph.getConfig()) print if (self.isMultigraph and self._nestedGraphs and self._subgraphDict and self._subgraphNames): for (parent_name, subgraph_names) in self._subgraphNames.iteritems(): for graph_name in subgraph_names: graph = self._subgraphDict[parent_name][graph_name] print "multigraph %s" % self.getMultigraphID(parent_name, graph_name) print self._formatConfig(graph.getConfig()) print return True
[ "def", "config", "(", "self", ")", ":", "for", "parent_name", "in", "self", ".", "_graphNames", ":", "graph", "=", "self", ".", "_graphDict", "[", "parent_name", "]", "if", "self", ".", "isMultigraph", ":", "print", "\"multigraph %s\"", "%", "self", ".", "_getMultigraphID", "(", "parent_name", ")", "print", "self", ".", "_formatConfig", "(", "graph", ".", "getConfig", "(", ")", ")", "print", "if", "(", "self", ".", "isMultigraph", "and", "self", ".", "_nestedGraphs", "and", "self", ".", "_subgraphDict", "and", "self", ".", "_subgraphNames", ")", ":", "for", "(", "parent_name", ",", "subgraph_names", ")", "in", "self", ".", "_subgraphNames", ".", "iteritems", "(", ")", ":", "for", "graph_name", "in", "subgraph_names", ":", "graph", "=", "self", ".", "_subgraphDict", "[", "parent_name", "]", "[", "graph_name", "]", "print", "\"multigraph %s\"", "%", "self", ".", "getMultigraphID", "(", "parent_name", ",", "graph_name", ")", "print", "self", ".", "_formatConfig", "(", "graph", ".", "getConfig", "(", ")", ")", "print", "return", "True" ]
44.230769
21.923077
def set_stdev(self, col, row, stdev): """ Sets the standard deviation at this location (if valid location). :param col: the 0-based column index :type col: int :param row: the 0-based row index :type row: int :param stdev: the standard deviation to set :type stdev: float """ javabridge.call(self.jobject, "setStdDev", "(IID)V", col, row, stdev)
[ "def", "set_stdev", "(", "self", ",", "col", ",", "row", ",", "stdev", ")", ":", "javabridge", ".", "call", "(", "self", ".", "jobject", ",", "\"setStdDev\"", ",", "\"(IID)V\"", ",", "col", ",", "row", ",", "stdev", ")" ]
34.583333
14.916667
def create_for_block( cls, i=None, name=None, cname=None, version=None, **kwargs): """ return a new datacol with the block i """ if cname is None: cname = name or 'values_block_{idx}'.format(idx=i) if name is None: name = cname # prior to 0.10.1, we named values blocks like: values_block_0 an the # name values_0 try: if version[0] == 0 and version[1] <= 10 and version[2] == 0: m = re.search(r"values_block_(\d+)", name) if m: name = "values_{group}".format(group=m.groups()[0]) except IndexError: pass return cls(name=name, cname=cname, **kwargs)
[ "def", "create_for_block", "(", "cls", ",", "i", "=", "None", ",", "name", "=", "None", ",", "cname", "=", "None", ",", "version", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "cname", "is", "None", ":", "cname", "=", "name", "or", "'values_block_{idx}'", ".", "format", "(", "idx", "=", "i", ")", "if", "name", "is", "None", ":", "name", "=", "cname", "# prior to 0.10.1, we named values blocks like: values_block_0 an the", "# name values_0", "try", ":", "if", "version", "[", "0", "]", "==", "0", "and", "version", "[", "1", "]", "<=", "10", "and", "version", "[", "2", "]", "==", "0", ":", "m", "=", "re", ".", "search", "(", "r\"values_block_(\\d+)\"", ",", "name", ")", "if", "m", ":", "name", "=", "\"values_{group}\"", ".", "format", "(", "group", "=", "m", ".", "groups", "(", ")", "[", "0", "]", ")", "except", "IndexError", ":", "pass", "return", "cls", "(", "name", "=", "name", ",", "cname", "=", "cname", ",", "*", "*", "kwargs", ")" ]
35.45
23.6
def vmomentsurfacemass(self,*args,**kwargs): """ NAME: vmomentsurfacemass PURPOSE: calculate the an arbitrary moment of the velocity distribution at R times the surfacmass INPUT: R - radius at which to calculate the moment (in natural units) n - vR^n m - vT^m OPTIONAL INPUT: nsigma - number of sigma to integrate the velocities over KEYWORDS: romberg - if True, use a romberg integrator (default: False) deriv= None, 'R', or 'phi': calculates derivative of the moment wrt R or phi OUTPUT: <vR^n vT^m x surface-mass> at R (no support for units) HISTORY: 2011-03-30 - Written - Bovy (NYU) """ use_physical= kwargs.pop('use_physical',True) ro= kwargs.pop('ro',None) if ro is None and hasattr(self,'_roSet') and self._roSet: ro= self._ro if _APY_LOADED and isinstance(ro,units.Quantity): ro= ro.to(units.kpc).value vo= kwargs.pop('vo',None) if vo is None and hasattr(self,'_voSet') and self._voSet: vo= self._vo if _APY_LOADED and isinstance(vo,units.Quantity): vo= vo.to(units.km/units.s).value if use_physical and not vo is None and not ro is None: fac= surfdens_in_msolpc2(vo,ro)*vo**(args[1]+args[2]) if _APY_UNITS: u= units.Msun/units.pc**2*(units.km/units.s)**(args[1]+args[2]) out= self._vmomentsurfacemass(*args,**kwargs) if _APY_UNITS: return units.Quantity(out*fac,unit=u) else: return out*fac else: return self._vmomentsurfacemass(*args,**kwargs)
[ "def", "vmomentsurfacemass", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "use_physical", "=", "kwargs", ".", "pop", "(", "'use_physical'", ",", "True", ")", "ro", "=", "kwargs", ".", "pop", "(", "'ro'", ",", "None", ")", "if", "ro", "is", "None", "and", "hasattr", "(", "self", ",", "'_roSet'", ")", "and", "self", ".", "_roSet", ":", "ro", "=", "self", ".", "_ro", "if", "_APY_LOADED", "and", "isinstance", "(", "ro", ",", "units", ".", "Quantity", ")", ":", "ro", "=", "ro", ".", "to", "(", "units", ".", "kpc", ")", ".", "value", "vo", "=", "kwargs", ".", "pop", "(", "'vo'", ",", "None", ")", "if", "vo", "is", "None", "and", "hasattr", "(", "self", ",", "'_voSet'", ")", "and", "self", ".", "_voSet", ":", "vo", "=", "self", ".", "_vo", "if", "_APY_LOADED", "and", "isinstance", "(", "vo", ",", "units", ".", "Quantity", ")", ":", "vo", "=", "vo", ".", "to", "(", "units", ".", "km", "/", "units", ".", "s", ")", ".", "value", "if", "use_physical", "and", "not", "vo", "is", "None", "and", "not", "ro", "is", "None", ":", "fac", "=", "surfdens_in_msolpc2", "(", "vo", ",", "ro", ")", "*", "vo", "**", "(", "args", "[", "1", "]", "+", "args", "[", "2", "]", ")", "if", "_APY_UNITS", ":", "u", "=", "units", ".", "Msun", "/", "units", ".", "pc", "**", "2", "*", "(", "units", ".", "km", "/", "units", ".", "s", ")", "**", "(", "args", "[", "1", "]", "+", "args", "[", "2", "]", ")", "out", "=", "self", ".", "_vmomentsurfacemass", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "_APY_UNITS", ":", "return", "units", ".", "Quantity", "(", "out", "*", "fac", ",", "unit", "=", "u", ")", "else", ":", "return", "out", "*", "fac", "else", ":", "return", "self", ".", "_vmomentsurfacemass", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
29.3
24.533333
def validate_card_issue_modes(issue_mode: int, cards: list) -> list: """validate cards against deck_issue modes""" supported_mask = 63 # sum of all issue_mode values if not bool(issue_mode & supported_mask): return [] # return empty list for i in [1 << x for x in range(len(IssueMode))]: if bool(i & issue_mode): try: parser_fn = cast( Callable[[list], Optional[list]], parsers[IssueMode(i).name] ) except ValueError: continue parsed_cards = parser_fn(cards) if not parsed_cards: return [] cards = parsed_cards return cards
[ "def", "validate_card_issue_modes", "(", "issue_mode", ":", "int", ",", "cards", ":", "list", ")", "->", "list", ":", "supported_mask", "=", "63", "# sum of all issue_mode values", "if", "not", "bool", "(", "issue_mode", "&", "supported_mask", ")", ":", "return", "[", "]", "# return empty list", "for", "i", "in", "[", "1", "<<", "x", "for", "x", "in", "range", "(", "len", "(", "IssueMode", ")", ")", "]", ":", "if", "bool", "(", "i", "&", "issue_mode", ")", ":", "try", ":", "parser_fn", "=", "cast", "(", "Callable", "[", "[", "list", "]", ",", "Optional", "[", "list", "]", "]", ",", "parsers", "[", "IssueMode", "(", "i", ")", ".", "name", "]", ")", "except", "ValueError", ":", "continue", "parsed_cards", "=", "parser_fn", "(", "cards", ")", "if", "not", "parsed_cards", ":", "return", "[", "]", "cards", "=", "parsed_cards", "return", "cards" ]
28.28
18.72
def compare_table_cols(a, b): """ Return False if the two tables a and b have the same columns (ignoring order) according to LIGO LW name conventions, return True otherwise. """ return cmp(sorted((col.Name, col.Type) for col in a.getElementsByTagName(ligolw.Column.tagName)), sorted((col.Name, col.Type) for col in b.getElementsByTagName(ligolw.Column.tagName)))
[ "def", "compare_table_cols", "(", "a", ",", "b", ")", ":", "return", "cmp", "(", "sorted", "(", "(", "col", ".", "Name", ",", "col", ".", "Type", ")", "for", "col", "in", "a", ".", "getElementsByTagName", "(", "ligolw", ".", "Column", ".", "tagName", ")", ")", ",", "sorted", "(", "(", "col", ".", "Name", ",", "col", ".", "Type", ")", "for", "col", "in", "b", ".", "getElementsByTagName", "(", "ligolw", ".", "Column", ".", "tagName", ")", ")", ")" ]
51.714286
33.428571
def str_transmission_rate(self): """Returns a tuple of human readable transmission rates in bytes.""" upstream, downstream = self.transmission_rate return ( fritztools.format_num(upstream), fritztools.format_num(downstream) )
[ "def", "str_transmission_rate", "(", "self", ")", ":", "upstream", ",", "downstream", "=", "self", ".", "transmission_rate", "return", "(", "fritztools", ".", "format_num", "(", "upstream", ")", ",", "fritztools", ".", "format_num", "(", "downstream", ")", ")" ]
39.285714
12.142857
def mark(self, lineno, count=1): """Mark a given source line as executed count times. Multiple calls to mark for the same lineno add up. """ self.sourcelines[lineno] = self.sourcelines.get(lineno, 0) + count
[ "def", "mark", "(", "self", ",", "lineno", ",", "count", "=", "1", ")", ":", "self", ".", "sourcelines", "[", "lineno", "]", "=", "self", ".", "sourcelines", ".", "get", "(", "lineno", ",", "0", ")", "+", "count" ]
39.166667
16.666667
def withdraw(self, **params): """Submit a withdraw request. https://www.binance.com/restapipub.html Assumptions: - You must have Withdraw permissions enabled on your API key - You must have withdrawn to the address specified through the website and approved the transaction via email :param asset: required :type asset: str :type address: required :type address: str :type addressTag: optional - Secondary address identifier for coins like XRP,XMR etc. :type address: str :param amount: required :type amount: decimal :param name: optional - Description of the address, default asset value passed will be used :type name: str :param recvWindow: the number of milliseconds the request is valid for :type recvWindow: int :returns: API response .. code-block:: python { "msg": "success", "success": true, "id":"7213fea8e94b4a5593d507237e5a555b" } :raises: BinanceRequestException, BinanceAPIException, BinanceWithdrawException """ # force a name for the withdrawal if one not set if 'asset' in params and 'name' not in params: params['name'] = params['asset'] res = self._request_withdraw_api('post', 'withdraw.html', True, data=params) if not res['success']: raise BinanceWithdrawException(res['msg']) return res
[ "def", "withdraw", "(", "self", ",", "*", "*", "params", ")", ":", "# force a name for the withdrawal if one not set", "if", "'asset'", "in", "params", "and", "'name'", "not", "in", "params", ":", "params", "[", "'name'", "]", "=", "params", "[", "'asset'", "]", "res", "=", "self", ".", "_request_withdraw_api", "(", "'post'", ",", "'withdraw.html'", ",", "True", ",", "data", "=", "params", ")", "if", "not", "res", "[", "'success'", "]", ":", "raise", "BinanceWithdrawException", "(", "res", "[", "'msg'", "]", ")", "return", "res" ]
34.44186
24.162791
def add_archive(self, src_file, remove_final=False): """ Adds the contents of another tarfile to the build. It will be repackaged during context generation, and added to the root level of the file system. Therefore, it is not required that tar (or compression utilities) is present in the base image. :param src_file: Tar archive to add. :type src_file: unicode | str :param remove_final: Remove the contents after the build operation has completed. Note that this will remove all top-level components of the tar archive recursively. Therefore, you should not use this on standard unix folders. This will also not reduce the size of the resulting image (actually may increase instead) unless the image is squashed. :type remove_final: bool :return: Name of the root files / directories added to the Dockerfile. :rtype: list[unicode | str] """ with tarfile.open(src_file, 'r') as tf: member_names = [member.name for member in tf.getmembers() if posixpath.sep not in member.name] self.prefix_all('ADD', *zip(member_names, member_names)) if remove_final: self._remove_files.update(member_names) self._archives.append(src_file) return member_names
[ "def", "add_archive", "(", "self", ",", "src_file", ",", "remove_final", "=", "False", ")", ":", "with", "tarfile", ".", "open", "(", "src_file", ",", "'r'", ")", "as", "tf", ":", "member_names", "=", "[", "member", ".", "name", "for", "member", "in", "tf", ".", "getmembers", "(", ")", "if", "posixpath", ".", "sep", "not", "in", "member", ".", "name", "]", "self", ".", "prefix_all", "(", "'ADD'", ",", "*", "zip", "(", "member_names", ",", "member_names", ")", ")", "if", "remove_final", ":", "self", ".", "_remove_files", ".", "update", "(", "member_names", ")", "self", ".", "_archives", ".", "append", "(", "src_file", ")", "return", "member_names" ]
54.2
25
def __make_another_index(self, list_of_entries, url=False, hs_admin=False): ''' Find an index not yet used in the handle record and not reserved for any (other) special type. :param: list_of_entries: List of all entries to find which indices are used already. :param url: If True, an index for an URL entry is returned (1, unless it is already in use). :param hs_admin: If True, an index for HS_ADMIN is returned (100 or one of the following). :return: An integer. ''' start = 2 # reserved indices: reserved_for_url = set([1]) reserved_for_admin = set(range(100, 200)) prohibited_indices = reserved_for_url | reserved_for_admin if url: prohibited_indices = prohibited_indices - reserved_for_url start = 1 elif hs_admin: prohibited_indices = prohibited_indices - reserved_for_admin start = 100 # existing indices existing_indices = set() if list_of_entries is not None: for entry in list_of_entries: existing_indices.add(int(entry['index'])) # find new index: all_prohibited_indices = existing_indices | prohibited_indices searchmax = max(start, max(all_prohibited_indices)) + 2 for index in xrange(start, searchmax): if index not in all_prohibited_indices: return index
[ "def", "__make_another_index", "(", "self", ",", "list_of_entries", ",", "url", "=", "False", ",", "hs_admin", "=", "False", ")", ":", "start", "=", "2", "# reserved indices:", "reserved_for_url", "=", "set", "(", "[", "1", "]", ")", "reserved_for_admin", "=", "set", "(", "range", "(", "100", ",", "200", ")", ")", "prohibited_indices", "=", "reserved_for_url", "|", "reserved_for_admin", "if", "url", ":", "prohibited_indices", "=", "prohibited_indices", "-", "reserved_for_url", "start", "=", "1", "elif", "hs_admin", ":", "prohibited_indices", "=", "prohibited_indices", "-", "reserved_for_admin", "start", "=", "100", "# existing indices", "existing_indices", "=", "set", "(", ")", "if", "list_of_entries", "is", "not", "None", ":", "for", "entry", "in", "list_of_entries", ":", "existing_indices", ".", "add", "(", "int", "(", "entry", "[", "'index'", "]", ")", ")", "# find new index:", "all_prohibited_indices", "=", "existing_indices", "|", "prohibited_indices", "searchmax", "=", "max", "(", "start", ",", "max", "(", "all_prohibited_indices", ")", ")", "+", "2", "for", "index", "in", "xrange", "(", "start", ",", "searchmax", ")", ":", "if", "index", "not", "in", "all_prohibited_indices", ":", "return", "index" ]
36.4
22.1
def parseFASTACommandLineOptions(args): """ Examine parsed command-line options and return a Reads instance. @param args: An argparse namespace, as returned by the argparse C{parse_args} function. @return: A C{Reads} subclass instance, depending on the type of FASTA file given. """ # Set default FASTA type. if not (args.fasta or args.fastq or args.fasta_ss): args.fasta = True readClass = readClassNameToClass[args.readClass] if args.fasta: from dark.fasta import FastaReads return FastaReads(args.fastaFile, readClass=readClass) elif args.fastq: from dark.fastq import FastqReads return FastqReads(args.fastaFile, readClass=readClass) else: from dark.fasta_ss import SSFastaReads return SSFastaReads(args.fastaFile, readClass=readClass)
[ "def", "parseFASTACommandLineOptions", "(", "args", ")", ":", "# Set default FASTA type.", "if", "not", "(", "args", ".", "fasta", "or", "args", ".", "fastq", "or", "args", ".", "fasta_ss", ")", ":", "args", ".", "fasta", "=", "True", "readClass", "=", "readClassNameToClass", "[", "args", ".", "readClass", "]", "if", "args", ".", "fasta", ":", "from", "dark", ".", "fasta", "import", "FastaReads", "return", "FastaReads", "(", "args", ".", "fastaFile", ",", "readClass", "=", "readClass", ")", "elif", "args", ".", "fastq", ":", "from", "dark", ".", "fastq", "import", "FastqReads", "return", "FastqReads", "(", "args", ".", "fastaFile", ",", "readClass", "=", "readClass", ")", "else", ":", "from", "dark", ".", "fasta_ss", "import", "SSFastaReads", "return", "SSFastaReads", "(", "args", ".", "fastaFile", ",", "readClass", "=", "readClass", ")" ]
34.791667
18.791667
def check_nodes_count(baremetal_client, stack, parameters, defaults): """Check if there are enough available nodes for creating/scaling stack""" count = 0 if stack: for param in defaults: try: current = int(stack.parameters[param]) except KeyError: raise ValueError( "Parameter '%s' was not found in existing stack" % param) count += parameters.get(param, current) else: for param, default in defaults.items(): count += parameters.get(param, default) available = len(baremetal_client.node.list(associated=False, maintenance=False)) if count > available: raise exceptions.DeploymentError( "Not enough nodes - available: {0}, requested: {1}".format( available, count)) else: return True
[ "def", "check_nodes_count", "(", "baremetal_client", ",", "stack", ",", "parameters", ",", "defaults", ")", ":", "count", "=", "0", "if", "stack", ":", "for", "param", "in", "defaults", ":", "try", ":", "current", "=", "int", "(", "stack", ".", "parameters", "[", "param", "]", ")", "except", "KeyError", ":", "raise", "ValueError", "(", "\"Parameter '%s' was not found in existing stack\"", "%", "param", ")", "count", "+=", "parameters", ".", "get", "(", "param", ",", "current", ")", "else", ":", "for", "param", ",", "default", "in", "defaults", ".", "items", "(", ")", ":", "count", "+=", "parameters", ".", "get", "(", "param", ",", "default", ")", "available", "=", "len", "(", "baremetal_client", ".", "node", ".", "list", "(", "associated", "=", "False", ",", "maintenance", "=", "False", ")", ")", "if", "count", ">", "available", ":", "raise", "exceptions", ".", "DeploymentError", "(", "\"Not enough nodes - available: {0}, requested: {1}\"", ".", "format", "(", "available", ",", "count", ")", ")", "else", ":", "return", "True" ]
39.043478
19.217391
def _add_model(self, model_list_or_dict, core_element, model_class, model_key=None, load_meta_data=True): """Adds one model for a given core element. The method will add a model for a given core object and checks if there is a corresponding model object in the future expected model list. The method does not check if an object with corresponding model has already been inserted. :param model_list_or_dict: could be a list or dictionary of one model type :param core_element: the core element to a model for, can be state or state element :param model_class: model-class of the elements that should be insert :param model_key: if model_list_or_dict is a dictionary the key is the id of the respective element (e.g. 'state_id') :param load_meta_data: specific argument for loading meta data :return: """ found_model = self._get_future_expected_model(core_element) if found_model: found_model.parent = self if model_class is IncomeModel: self.income = found_model if found_model else IncomeModel(core_element, self) return if model_key is None: model_list_or_dict.append(found_model if found_model else model_class(core_element, self)) else: model_list_or_dict[model_key] = found_model if found_model else model_class(core_element, self, load_meta_data=load_meta_data)
[ "def", "_add_model", "(", "self", ",", "model_list_or_dict", ",", "core_element", ",", "model_class", ",", "model_key", "=", "None", ",", "load_meta_data", "=", "True", ")", ":", "found_model", "=", "self", ".", "_get_future_expected_model", "(", "core_element", ")", "if", "found_model", ":", "found_model", ".", "parent", "=", "self", "if", "model_class", "is", "IncomeModel", ":", "self", ".", "income", "=", "found_model", "if", "found_model", "else", "IncomeModel", "(", "core_element", ",", "self", ")", "return", "if", "model_key", "is", "None", ":", "model_list_or_dict", ".", "append", "(", "found_model", "if", "found_model", "else", "model_class", "(", "core_element", ",", "self", ")", ")", "else", ":", "model_list_or_dict", "[", "model_key", "]", "=", "found_model", "if", "found_model", "else", "model_class", "(", "core_element", ",", "self", ",", "load_meta_data", "=", "load_meta_data", ")" ]
53.310345
36.62069
def get_storage_policies(profile_manager, policy_names=None, get_all_policies=False): ''' Returns a list of the storage policies, filtered by name. profile_manager Reference to the profile manager. policy_names List of policy names to filter by. Default is None. get_all_policies Flag specifying to return all policies, regardless of the specified filter. ''' res_type = pbm.profile.ResourceType( resourceType=pbm.profile.ResourceTypeEnum.STORAGE) try: policy_ids = profile_manager.QueryProfile(res_type) except vim.fault.NoPermission as exc: log.exception(exc) raise VMwareApiError('Not enough permissions. Required privilege: ' '{0}'.format(exc.privilegeId)) except vim.fault.VimFault as exc: log.exception(exc) raise VMwareApiError(exc.msg) except vmodl.RuntimeFault as exc: log.exception(exc) raise VMwareRuntimeError(exc.msg) log.trace('policy_ids = %s', policy_ids) # More policies are returned so we need to filter again policies = [p for p in get_policies_by_id(profile_manager, policy_ids) if p.resourceType.resourceType == pbm.profile.ResourceTypeEnum.STORAGE] if get_all_policies: return policies if not policy_names: policy_names = [] return [p for p in policies if p.name in policy_names]
[ "def", "get_storage_policies", "(", "profile_manager", ",", "policy_names", "=", "None", ",", "get_all_policies", "=", "False", ")", ":", "res_type", "=", "pbm", ".", "profile", ".", "ResourceType", "(", "resourceType", "=", "pbm", ".", "profile", ".", "ResourceTypeEnum", ".", "STORAGE", ")", "try", ":", "policy_ids", "=", "profile_manager", ".", "QueryProfile", "(", "res_type", ")", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "log", ".", "trace", "(", "'policy_ids = %s'", ",", "policy_ids", ")", "# More policies are returned so we need to filter again", "policies", "=", "[", "p", "for", "p", "in", "get_policies_by_id", "(", "profile_manager", ",", "policy_ids", ")", "if", "p", ".", "resourceType", ".", "resourceType", "==", "pbm", ".", "profile", ".", "ResourceTypeEnum", ".", "STORAGE", "]", "if", "get_all_policies", ":", "return", "policies", "if", "not", "policy_names", ":", "policy_names", "=", "[", "]", "return", "[", "p", "for", "p", "in", "policies", "if", "p", ".", "name", "in", "policy_names", "]" ]
35.975
17.925
def regex_match_any(self, line, codes=None): """Match any regex.""" for selector in self.regex_selectors: for match in selector.regex.finditer(line): if codes and match.lastindex: # Currently the group name must be 'codes' try: disabled_codes = match.group('codes') except IndexError: return True disabled_codes = _stripped_codes(disabled_codes) current_code = codes[-1] if current_code in disabled_codes: return True else: return True return False
[ "def", "regex_match_any", "(", "self", ",", "line", ",", "codes", "=", "None", ")", ":", "for", "selector", "in", "self", ".", "regex_selectors", ":", "for", "match", "in", "selector", ".", "regex", ".", "finditer", "(", "line", ")", ":", "if", "codes", "and", "match", ".", "lastindex", ":", "# Currently the group name must be 'codes'", "try", ":", "disabled_codes", "=", "match", ".", "group", "(", "'codes'", ")", "except", "IndexError", ":", "return", "True", "disabled_codes", "=", "_stripped_codes", "(", "disabled_codes", ")", "current_code", "=", "codes", "[", "-", "1", "]", "if", "current_code", "in", "disabled_codes", ":", "return", "True", "else", ":", "return", "True", "return", "False" ]
35.6
15.7
def multidict_to_dict(d): """ Turns a werkzeug.MultiDict or django.MultiValueDict into a dict with list values :param d: a MultiDict or MultiValueDict instance :return: a dict instance """ return dict((k, v[0] if len(v) == 1 else v) for k, v in iterlists(d))
[ "def", "multidict_to_dict", "(", "d", ")", ":", "return", "dict", "(", "(", "k", ",", "v", "[", "0", "]", "if", "len", "(", "v", ")", "==", "1", "else", "v", ")", "for", "k", ",", "v", "in", "iterlists", "(", "d", ")", ")" ]
34.875
16.125
def quadrant(xcoord, ycoord): """ Find the quadrant a pair of coordinates are located in :type xcoord: integer :param xcoord: The x coordinate to find the quadrant for :type ycoord: integer :param ycoord: The y coordinate to find the quadrant for """ xneg = bool(xcoord < 0) yneg = bool(ycoord < 0) if xneg is True: if yneg is False: return 2 return 3 if yneg is False: return 1 return 4
[ "def", "quadrant", "(", "xcoord", ",", "ycoord", ")", ":", "xneg", "=", "bool", "(", "xcoord", "<", "0", ")", "yneg", "=", "bool", "(", "ycoord", "<", "0", ")", "if", "xneg", "is", "True", ":", "if", "yneg", "is", "False", ":", "return", "2", "return", "3", "if", "yneg", "is", "False", ":", "return", "1", "return", "4" ]
22.75
19.75
def add_moving_summary(*args, **kwargs): """ Summarize the moving average for scalar tensors. This function is a no-op if not calling from main training tower. Args: args: scalar tensors to summarize decay (float): the decay rate. Defaults to 0.95. collection (str or None): the name of the collection to add EMA-maintaining ops. The default will work together with the default :class:`MovingAverageSummary` callback. summary_collections ([str]): the names of collections to add the summary op. Default is TF's default (`tf.GraphKeys.SUMMARIES`). Returns: [tf.Tensor]: list of tensors returned by assign_moving_average, which can be used to maintain the EMA. """ decay = kwargs.pop('decay', 0.95) coll = kwargs.pop('collection', MOVING_SUMMARY_OPS_KEY) summ_coll = kwargs.pop('summary_collections', None) assert len(kwargs) == 0, "Unknown arguments: " + str(kwargs) ctx = get_current_tower_context() # allow ctx to be none if ctx is not None and not ctx.is_main_training_tower: return [] graph = tf.get_default_graph() try: control_flow_ctx = graph._get_control_flow_context() # XLA does not support summaries anyway # However, this function will generate unnecessary dependency edges, # which makes the tower function harder to compile under XLA, so we skip it if control_flow_ctx is not None and control_flow_ctx.IsXLAContext(): return except Exception: pass if tf.get_variable_scope().reuse is True: logger.warn("add_moving_summary() called under reuse=True scope, ignored.") return [] for x in args: assert isinstance(x, (tf.Tensor, tf.Variable)), x assert x.get_shape().ndims == 0, \ "add_moving_summary() only accepts scalar tensor! Got one with {}".format(x.get_shape()) ema_ops = [] for c in args: name = re.sub('tower[0-9]+/', '', c.op.name) with tf.name_scope(None): if not c.dtype.is_floating: c = tf.cast(c, tf.float32) # assign_moving_average creates variables with op names, therefore clear ns first. 
with _enter_vs_reuse_ns('EMA') as vs: ema_var = tf.get_variable(name, shape=c.shape, dtype=c.dtype, initializer=tf.constant_initializer(), trainable=False) ns = vs.original_name_scope with tf.name_scope(ns): # reuse VS&NS so that EMA_1 won't appear ema_op = moving_averages.assign_moving_average( ema_var, c, decay, zero_debias=True, name=name + '_EMA_apply') ema_ops.append(ema_op) with tf.name_scope(None): tf.summary.scalar( name + '-summary', ema_op, collections=summ_coll) # write the EMA value as a summary if coll is not None: for op in ema_ops: tf.add_to_collection(coll, op) return ema_ops
[ "def", "add_moving_summary", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "decay", "=", "kwargs", ".", "pop", "(", "'decay'", ",", "0.95", ")", "coll", "=", "kwargs", ".", "pop", "(", "'collection'", ",", "MOVING_SUMMARY_OPS_KEY", ")", "summ_coll", "=", "kwargs", ".", "pop", "(", "'summary_collections'", ",", "None", ")", "assert", "len", "(", "kwargs", ")", "==", "0", ",", "\"Unknown arguments: \"", "+", "str", "(", "kwargs", ")", "ctx", "=", "get_current_tower_context", "(", ")", "# allow ctx to be none", "if", "ctx", "is", "not", "None", "and", "not", "ctx", ".", "is_main_training_tower", ":", "return", "[", "]", "graph", "=", "tf", ".", "get_default_graph", "(", ")", "try", ":", "control_flow_ctx", "=", "graph", ".", "_get_control_flow_context", "(", ")", "# XLA does not support summaries anyway", "# However, this function will generate unnecessary dependency edges,", "# which makes the tower function harder to compile under XLA, so we skip it", "if", "control_flow_ctx", "is", "not", "None", "and", "control_flow_ctx", ".", "IsXLAContext", "(", ")", ":", "return", "except", "Exception", ":", "pass", "if", "tf", ".", "get_variable_scope", "(", ")", ".", "reuse", "is", "True", ":", "logger", ".", "warn", "(", "\"add_moving_summary() called under reuse=True scope, ignored.\"", ")", "return", "[", "]", "for", "x", "in", "args", ":", "assert", "isinstance", "(", "x", ",", "(", "tf", ".", "Tensor", ",", "tf", ".", "Variable", ")", ")", ",", "x", "assert", "x", ".", "get_shape", "(", ")", ".", "ndims", "==", "0", ",", "\"add_moving_summary() only accepts scalar tensor! 
Got one with {}\"", ".", "format", "(", "x", ".", "get_shape", "(", ")", ")", "ema_ops", "=", "[", "]", "for", "c", "in", "args", ":", "name", "=", "re", ".", "sub", "(", "'tower[0-9]+/'", ",", "''", ",", "c", ".", "op", ".", "name", ")", "with", "tf", ".", "name_scope", "(", "None", ")", ":", "if", "not", "c", ".", "dtype", ".", "is_floating", ":", "c", "=", "tf", ".", "cast", "(", "c", ",", "tf", ".", "float32", ")", "# assign_moving_average creates variables with op names, therefore clear ns first.", "with", "_enter_vs_reuse_ns", "(", "'EMA'", ")", "as", "vs", ":", "ema_var", "=", "tf", ".", "get_variable", "(", "name", ",", "shape", "=", "c", ".", "shape", ",", "dtype", "=", "c", ".", "dtype", ",", "initializer", "=", "tf", ".", "constant_initializer", "(", ")", ",", "trainable", "=", "False", ")", "ns", "=", "vs", ".", "original_name_scope", "with", "tf", ".", "name_scope", "(", "ns", ")", ":", "# reuse VS&NS so that EMA_1 won't appear", "ema_op", "=", "moving_averages", ".", "assign_moving_average", "(", "ema_var", ",", "c", ",", "decay", ",", "zero_debias", "=", "True", ",", "name", "=", "name", "+", "'_EMA_apply'", ")", "ema_ops", ".", "append", "(", "ema_op", ")", "with", "tf", ".", "name_scope", "(", "None", ")", ":", "tf", ".", "summary", ".", "scalar", "(", "name", "+", "'-summary'", ",", "ema_op", ",", "collections", "=", "summ_coll", ")", "# write the EMA value as a summary", "if", "coll", "is", "not", "None", ":", "for", "op", "in", "ema_ops", ":", "tf", ".", "add_to_collection", "(", "coll", ",", "op", ")", "return", "ema_ops" ]
42.178082
21.30137
def _get_index(n_items, item_size, n): """Prepare an index attribute for GPU uploading.""" index = np.arange(n_items) index = np.repeat(index, item_size) index = index.astype(np.float64) assert index.shape == (n,) return index
[ "def", "_get_index", "(", "n_items", ",", "item_size", ",", "n", ")", ":", "index", "=", "np", ".", "arange", "(", "n_items", ")", "index", "=", "np", ".", "repeat", "(", "index", ",", "item_size", ")", "index", "=", "index", ".", "astype", "(", "np", ".", "float64", ")", "assert", "index", ".", "shape", "==", "(", "n", ",", ")", "return", "index" ]
34.857143
7.285714
def as_wfn(self): """ Returns the CPE Name as WFN string of version 2.3. Only shows the first seven components. :return: CPE Name as WFN string :rtype: string :exception: TypeError - incompatible version """ wfn = [] wfn.append(CPE2_3_WFN.CPE_PREFIX) for ck in CPEComponent.CPE_COMP_KEYS: lc = self._get_attribute_components(ck) comp = lc[0] if (isinstance(comp, CPEComponentUndefined) or isinstance(comp, CPEComponentEmpty)): # Do not set the attribute continue else: v = [] v.append(ck) v.append("=") # Get the value of WFN of component v.append('"') v.append(comp.as_wfn()) v.append('"') # Append v to the WFN and add a separator wfn.append("".join(v)) wfn.append(CPEComponent2_3_WFN.SEPARATOR_COMP) # Del the last separator wfn = wfn[:-1] # Return the WFN string wfn.append(CPE2_3_WFN.CPE_SUFFIX) return "".join(wfn)
[ "def", "as_wfn", "(", "self", ")", ":", "wfn", "=", "[", "]", "wfn", ".", "append", "(", "CPE2_3_WFN", ".", "CPE_PREFIX", ")", "for", "ck", "in", "CPEComponent", ".", "CPE_COMP_KEYS", ":", "lc", "=", "self", ".", "_get_attribute_components", "(", "ck", ")", "comp", "=", "lc", "[", "0", "]", "if", "(", "isinstance", "(", "comp", ",", "CPEComponentUndefined", ")", "or", "isinstance", "(", "comp", ",", "CPEComponentEmpty", ")", ")", ":", "# Do not set the attribute", "continue", "else", ":", "v", "=", "[", "]", "v", ".", "append", "(", "ck", ")", "v", ".", "append", "(", "\"=\"", ")", "# Get the value of WFN of component", "v", ".", "append", "(", "'\"'", ")", "v", ".", "append", "(", "comp", ".", "as_wfn", "(", ")", ")", "v", ".", "append", "(", "'\"'", ")", "# Append v to the WFN and add a separator", "wfn", ".", "append", "(", "\"\"", ".", "join", "(", "v", ")", ")", "wfn", ".", "append", "(", "CPEComponent2_3_WFN", ".", "SEPARATOR_COMP", ")", "# Del the last separator", "wfn", "=", "wfn", "[", ":", "-", "1", "]", "# Return the WFN string", "wfn", ".", "append", "(", "CPE2_3_WFN", ".", "CPE_SUFFIX", ")", "return", "\"\"", ".", "join", "(", "wfn", ")" ]
26.431818
18.431818
def data_transforms_cifar10(args): """ data_transforms for cifar10 dataset """ cifar_mean = [0.49139968, 0.48215827, 0.44653124] cifar_std = [0.24703233, 0.24348505, 0.26158768] train_transform = transforms.Compose( [ transforms.RandomCrop(32, padding=4), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(cifar_mean, cifar_std), ] ) if args.cutout: train_transform.transforms.append(Cutout(args.cutout_length)) valid_transform = transforms.Compose( [transforms.ToTensor(), transforms.Normalize(cifar_mean, cifar_std)] ) return train_transform, valid_transform
[ "def", "data_transforms_cifar10", "(", "args", ")", ":", "cifar_mean", "=", "[", "0.49139968", ",", "0.48215827", ",", "0.44653124", "]", "cifar_std", "=", "[", "0.24703233", ",", "0.24348505", ",", "0.26158768", "]", "train_transform", "=", "transforms", ".", "Compose", "(", "[", "transforms", ".", "RandomCrop", "(", "32", ",", "padding", "=", "4", ")", ",", "transforms", ".", "RandomHorizontalFlip", "(", ")", ",", "transforms", ".", "ToTensor", "(", ")", ",", "transforms", ".", "Normalize", "(", "cifar_mean", ",", "cifar_std", ")", ",", "]", ")", "if", "args", ".", "cutout", ":", "train_transform", ".", "transforms", ".", "append", "(", "Cutout", "(", "args", ".", "cutout_length", ")", ")", "valid_transform", "=", "transforms", ".", "Compose", "(", "[", "transforms", ".", "ToTensor", "(", ")", ",", "transforms", ".", "Normalize", "(", "cifar_mean", ",", "cifar_std", ")", "]", ")", "return", "train_transform", ",", "valid_transform" ]
31.409091
18.681818
def moignard15() -> AnnData: """Hematopoiesis in early mouse embryos [Moignard15]_. Returns ------- Annotated data matrix. """ filename = settings.datasetdir / 'moignard15/nbt.3154-S3.xlsx' backup_url = 'http://www.nature.com/nbt/journal/v33/n3/extref/nbt.3154-S3.xlsx' adata = sc.read(filename, sheet='dCt_values.txt', backup_url=backup_url) # filter out 4 genes as in Haghverdi et al. (2016) gene_subset = ~np.in1d(adata.var_names, ['Eif2b1', 'Mrpl19', 'Polr2a', 'Ubc']) adata = adata[:, gene_subset] # retain non-removed genes # choose root cell for DPT analysis as in Haghverdi et al. (2016) adata.uns['iroot'] = 532 # note that in Matlab/R, counting starts at 1 # annotate with Moignard et al. (2015) experimental cell groups groups_order = ['HF', 'NP', 'PS', '4SG', '4SFG'] # annotate each observation/cell adata.obs['exp_groups'] = [ next(gname for gname in groups_order if sname.startswith(gname)) for sname in adata.obs_names] # fix the order and colors of names in "groups" adata.obs['exp_groups'] = pd.Categorical(adata.obs['exp_groups'], categories=groups_order) adata.uns['exp_groups_colors'] = ['#D7A83E', '#7AAE5D', '#497ABC', '#AF353A', '#765099'] return adata
[ "def", "moignard15", "(", ")", "->", "AnnData", ":", "filename", "=", "settings", ".", "datasetdir", "/", "'moignard15/nbt.3154-S3.xlsx'", "backup_url", "=", "'http://www.nature.com/nbt/journal/v33/n3/extref/nbt.3154-S3.xlsx'", "adata", "=", "sc", ".", "read", "(", "filename", ",", "sheet", "=", "'dCt_values.txt'", ",", "backup_url", "=", "backup_url", ")", "# filter out 4 genes as in Haghverdi et al. (2016)", "gene_subset", "=", "~", "np", ".", "in1d", "(", "adata", ".", "var_names", ",", "[", "'Eif2b1'", ",", "'Mrpl19'", ",", "'Polr2a'", ",", "'Ubc'", "]", ")", "adata", "=", "adata", "[", ":", ",", "gene_subset", "]", "# retain non-removed genes", "# choose root cell for DPT analysis as in Haghverdi et al. (2016)", "adata", ".", "uns", "[", "'iroot'", "]", "=", "532", "# note that in Matlab/R, counting starts at 1", "# annotate with Moignard et al. (2015) experimental cell groups", "groups_order", "=", "[", "'HF'", ",", "'NP'", ",", "'PS'", ",", "'4SG'", ",", "'4SFG'", "]", "# annotate each observation/cell", "adata", ".", "obs", "[", "'exp_groups'", "]", "=", "[", "next", "(", "gname", "for", "gname", "in", "groups_order", "if", "sname", ".", "startswith", "(", "gname", ")", ")", "for", "sname", "in", "adata", ".", "obs_names", "]", "# fix the order and colors of names in \"groups\"", "adata", ".", "obs", "[", "'exp_groups'", "]", "=", "pd", ".", "Categorical", "(", "adata", ".", "obs", "[", "'exp_groups'", "]", ",", "categories", "=", "groups_order", ")", "adata", ".", "uns", "[", "'exp_groups_colors'", "]", "=", "[", "'#D7A83E'", ",", "'#7AAE5D'", ",", "'#497ABC'", ",", "'#AF353A'", ",", "'#765099'", "]", "return", "adata" ]
49.961538
23.153846
def get_features(self, mapobject_type_name): '''Gets features for a given object type. Parameters ---------- mapobject_type_name: str type of the segmented objects Returns ------- List[Dict[str, str]] information about each feature See also -------- :func:`tmserver.api.feature.get_features` :class:`tmlib.models.feature.Feature` ''' logger.info( 'get features of experiment "%s", object type "%s"', self.experiment_name, mapobject_type_name ) mapobject_type_id = self._get_mapobject_type_id(mapobject_type_name) url = self._build_api_url( '/experiments/{experiment_id}/mapobject_types/{mapobject_type_id}/features'.format( experiment_id=self._experiment_id, mapobject_type_id=mapobject_type_id ) ) res = self._session.get(url) res.raise_for_status() return res.json()['data']
[ "def", "get_features", "(", "self", ",", "mapobject_type_name", ")", ":", "logger", ".", "info", "(", "'get features of experiment \"%s\", object type \"%s\"'", ",", "self", ".", "experiment_name", ",", "mapobject_type_name", ")", "mapobject_type_id", "=", "self", ".", "_get_mapobject_type_id", "(", "mapobject_type_name", ")", "url", "=", "self", ".", "_build_api_url", "(", "'/experiments/{experiment_id}/mapobject_types/{mapobject_type_id}/features'", ".", "format", "(", "experiment_id", "=", "self", ".", "_experiment_id", ",", "mapobject_type_id", "=", "mapobject_type_id", ")", ")", "res", "=", "self", ".", "_session", ".", "get", "(", "url", ")", "res", ".", "raise_for_status", "(", ")", "return", "res", ".", "json", "(", ")", "[", "'data'", "]" ]
31.625
19.5625
def _process_download_descriptor(self, dd): # type: (Downloader, blobxfer.models.download.Descriptor) -> None """Process download descriptor :param Downloader self: this :param blobxfer.models.download.Descriptor dd: download descriptor """ # update progress bar self._update_progress_bar() # get download offsets offsets, resume_bytes = dd.next_offsets() # add resume bytes to counter if resume_bytes is not None: with self._disk_operation_lock: self._download_bytes_sofar += resume_bytes logger.debug('adding {} sofar {} from {}'.format( resume_bytes, self._download_bytes_sofar, dd.entity.name)) del resume_bytes # check if all operations completed if offsets is None and dd.all_operations_completed: finalize = True sfpath = str(dd.final_path) # finalize integrity dd.finalize_integrity() # vectored io checks if dd.entity.vectored_io is not None: with self._transfer_lock: if sfpath not in self._vio_map: self._vio_map[sfpath] = 1 else: self._vio_map[sfpath] += 1 if (self._vio_map[sfpath] == dd.entity.vectored_io.total_slices): self._vio_map.pop(sfpath) else: finalize = False # finalize file if finalize: dd.finalize_file() # accounting with self._transfer_lock: self._download_sofar += 1 if dd.entity.is_encrypted: self._dd_map.pop(sfpath) self._transfer_set.remove( blobxfer.operations.download.Downloader. create_unique_transfer_operation_id(dd.entity)) self._transfer_cc.pop(dd.entity.path, None) return # re-enqueue for other threads to download if offsets is None: self._transfer_queue.put(dd) return # ensure forthcoming disk operation is accounted for with self._disk_operation_lock: self._disk_set.add( blobxfer.operations.download.Downloader. 
create_unique_disk_operation_id(dd, offsets)) # check if there are too many concurrent connections with self._transfer_lock: self._transfer_cc[dd.entity.path] += 1 cc_xfer = self._transfer_cc[dd.entity.path] if cc_xfer <= self._spec.options.max_single_object_concurrency: self._transfer_queue.put(dd) # issue get range if dd.entity.mode == blobxfer.models.azure.StorageModes.File: data = blobxfer.operations.azure.file.get_file_range( dd.entity, offsets) else: data = blobxfer.operations.azure.blob.get_blob_range( dd.entity, offsets) with self._transfer_lock: self._transfer_cc[dd.entity.path] -= 1 if cc_xfer > self._spec.options.max_single_object_concurrency: self._transfer_queue.put(dd) # enqueue data for processing self._disk_queue.put((dd, offsets, data))
[ "def", "_process_download_descriptor", "(", "self", ",", "dd", ")", ":", "# type: (Downloader, blobxfer.models.download.Descriptor) -> None", "# update progress bar", "self", ".", "_update_progress_bar", "(", ")", "# get download offsets", "offsets", ",", "resume_bytes", "=", "dd", ".", "next_offsets", "(", ")", "# add resume bytes to counter", "if", "resume_bytes", "is", "not", "None", ":", "with", "self", ".", "_disk_operation_lock", ":", "self", ".", "_download_bytes_sofar", "+=", "resume_bytes", "logger", ".", "debug", "(", "'adding {} sofar {} from {}'", ".", "format", "(", "resume_bytes", ",", "self", ".", "_download_bytes_sofar", ",", "dd", ".", "entity", ".", "name", ")", ")", "del", "resume_bytes", "# check if all operations completed", "if", "offsets", "is", "None", "and", "dd", ".", "all_operations_completed", ":", "finalize", "=", "True", "sfpath", "=", "str", "(", "dd", ".", "final_path", ")", "# finalize integrity", "dd", ".", "finalize_integrity", "(", ")", "# vectored io checks", "if", "dd", ".", "entity", ".", "vectored_io", "is", "not", "None", ":", "with", "self", ".", "_transfer_lock", ":", "if", "sfpath", "not", "in", "self", ".", "_vio_map", ":", "self", ".", "_vio_map", "[", "sfpath", "]", "=", "1", "else", ":", "self", ".", "_vio_map", "[", "sfpath", "]", "+=", "1", "if", "(", "self", ".", "_vio_map", "[", "sfpath", "]", "==", "dd", ".", "entity", ".", "vectored_io", ".", "total_slices", ")", ":", "self", ".", "_vio_map", ".", "pop", "(", "sfpath", ")", "else", ":", "finalize", "=", "False", "# finalize file", "if", "finalize", ":", "dd", ".", "finalize_file", "(", ")", "# accounting", "with", "self", ".", "_transfer_lock", ":", "self", ".", "_download_sofar", "+=", "1", "if", "dd", ".", "entity", ".", "is_encrypted", ":", "self", ".", "_dd_map", ".", "pop", "(", "sfpath", ")", "self", ".", "_transfer_set", ".", "remove", "(", "blobxfer", ".", "operations", ".", "download", ".", "Downloader", ".", "create_unique_transfer_operation_id", 
"(", "dd", ".", "entity", ")", ")", "self", ".", "_transfer_cc", ".", "pop", "(", "dd", ".", "entity", ".", "path", ",", "None", ")", "return", "# re-enqueue for other threads to download", "if", "offsets", "is", "None", ":", "self", ".", "_transfer_queue", ".", "put", "(", "dd", ")", "return", "# ensure forthcoming disk operation is accounted for", "with", "self", ".", "_disk_operation_lock", ":", "self", ".", "_disk_set", ".", "add", "(", "blobxfer", ".", "operations", ".", "download", ".", "Downloader", ".", "create_unique_disk_operation_id", "(", "dd", ",", "offsets", ")", ")", "# check if there are too many concurrent connections", "with", "self", ".", "_transfer_lock", ":", "self", ".", "_transfer_cc", "[", "dd", ".", "entity", ".", "path", "]", "+=", "1", "cc_xfer", "=", "self", ".", "_transfer_cc", "[", "dd", ".", "entity", ".", "path", "]", "if", "cc_xfer", "<=", "self", ".", "_spec", ".", "options", ".", "max_single_object_concurrency", ":", "self", ".", "_transfer_queue", ".", "put", "(", "dd", ")", "# issue get range", "if", "dd", ".", "entity", ".", "mode", "==", "blobxfer", ".", "models", ".", "azure", ".", "StorageModes", ".", "File", ":", "data", "=", "blobxfer", ".", "operations", ".", "azure", ".", "file", ".", "get_file_range", "(", "dd", ".", "entity", ",", "offsets", ")", "else", ":", "data", "=", "blobxfer", ".", "operations", ".", "azure", ".", "blob", ".", "get_blob_range", "(", "dd", ".", "entity", ",", "offsets", ")", "with", "self", ".", "_transfer_lock", ":", "self", ".", "_transfer_cc", "[", "dd", ".", "entity", ".", "path", "]", "-=", "1", "if", "cc_xfer", ">", "self", ".", "_spec", ".", "options", ".", "max_single_object_concurrency", ":", "self", ".", "_transfer_queue", ".", "put", "(", "dd", ")", "# enqueue data for processing", "self", ".", "_disk_queue", ".", "put", "(", "(", "dd", ",", "offsets", ",", "data", ")", ")" ]
43.644737
12.052632
def simplified_rayliegh_vel(self): """Simplified Rayliegh velocity of the site. This follows the simplifications proposed by Urzua et al. (2017) Returns ------- rayleigh_vel : float Equivalent shear-wave velocity. """ # FIXME: What if last layer has no thickness? thicks = np.array([l.thickness for l in self]) depths_mid = np.array([l.depth_mid for l in self]) shear_vels = np.array([l.shear_vel for l in self]) mode_incr = depths_mid * thicks / shear_vels ** 2 # Mode shape is computed as the sumation from the base of # the profile. Need to append a 0 for the roll performed in the next # step shape = np.r_[np.cumsum(mode_incr[::-1])[::-1], 0] freq_fund = np.sqrt(4 * np.sum( thicks * depths_mid ** 2 / shear_vels ** 2 ) / np.sum( thicks * # Roll is used to offset the mode_shape so that the sum # can be calculated for two adjacent layers np.sum(np.c_[shape, np.roll(shape, -1)], axis=1)[:-1] ** 2)) period_fun = 2 * np.pi / freq_fund rayleigh_vel = 4 * thicks.sum() / period_fun return rayleigh_vel
[ "def", "simplified_rayliegh_vel", "(", "self", ")", ":", "# FIXME: What if last layer has no thickness?", "thicks", "=", "np", ".", "array", "(", "[", "l", ".", "thickness", "for", "l", "in", "self", "]", ")", "depths_mid", "=", "np", ".", "array", "(", "[", "l", ".", "depth_mid", "for", "l", "in", "self", "]", ")", "shear_vels", "=", "np", ".", "array", "(", "[", "l", ".", "shear_vel", "for", "l", "in", "self", "]", ")", "mode_incr", "=", "depths_mid", "*", "thicks", "/", "shear_vels", "**", "2", "# Mode shape is computed as the sumation from the base of", "# the profile. Need to append a 0 for the roll performed in the next", "# step", "shape", "=", "np", ".", "r_", "[", "np", ".", "cumsum", "(", "mode_incr", "[", ":", ":", "-", "1", "]", ")", "[", ":", ":", "-", "1", "]", ",", "0", "]", "freq_fund", "=", "np", ".", "sqrt", "(", "4", "*", "np", ".", "sum", "(", "thicks", "*", "depths_mid", "**", "2", "/", "shear_vels", "**", "2", ")", "/", "np", ".", "sum", "(", "thicks", "*", "# Roll is used to offset the mode_shape so that the sum", "# can be calculated for two adjacent layers", "np", ".", "sum", "(", "np", ".", "c_", "[", "shape", ",", "np", ".", "roll", "(", "shape", ",", "-", "1", ")", "]", ",", "axis", "=", "1", ")", "[", ":", "-", "1", "]", "**", "2", ")", ")", "period_fun", "=", "2", "*", "np", ".", "pi", "/", "freq_fund", "rayleigh_vel", "=", "4", "*", "thicks", ".", "sum", "(", ")", "/", "period_fun", "return", "rayleigh_vel" ]
39.032258
19.516129
def base_image_inspect(self): """ inspect base image :return: dict """ if self._base_image_inspect is None: if self.base_from_scratch: self._base_image_inspect = {} elif self.parents_pulled or self.custom_base_image: try: self._base_image_inspect = self.tasker.inspect_image(self.base_image) except docker.errors.NotFound: # If the base image cannot be found throw KeyError - # as this property should behave like a dict raise KeyError("Unprocessed base image Dockerfile cannot be inspected") else: self._base_image_inspect =\ atomic_reactor.util.get_inspect_for_image(self.base_image, self.base_image.registry, self.base_image_insecure, self.base_image_dockercfg_path) base_image_str = str(self.base_image) if base_image_str not in self._parent_images_inspect: self._parent_images_inspect[base_image_str] = self._base_image_inspect return self._base_image_inspect
[ "def", "base_image_inspect", "(", "self", ")", ":", "if", "self", ".", "_base_image_inspect", "is", "None", ":", "if", "self", ".", "base_from_scratch", ":", "self", ".", "_base_image_inspect", "=", "{", "}", "elif", "self", ".", "parents_pulled", "or", "self", ".", "custom_base_image", ":", "try", ":", "self", ".", "_base_image_inspect", "=", "self", ".", "tasker", ".", "inspect_image", "(", "self", ".", "base_image", ")", "except", "docker", ".", "errors", ".", "NotFound", ":", "# If the base image cannot be found throw KeyError -", "# as this property should behave like a dict", "raise", "KeyError", "(", "\"Unprocessed base image Dockerfile cannot be inspected\"", ")", "else", ":", "self", ".", "_base_image_inspect", "=", "atomic_reactor", ".", "util", ".", "get_inspect_for_image", "(", "self", ".", "base_image", ",", "self", ".", "base_image", ".", "registry", ",", "self", ".", "base_image_insecure", ",", "self", ".", "base_image_dockercfg_path", ")", "base_image_str", "=", "str", "(", "self", ".", "base_image", ")", "if", "base_image_str", "not", "in", "self", ".", "_parent_images_inspect", ":", "self", ".", "_parent_images_inspect", "[", "base_image_str", "]", "=", "self", ".", "_base_image_inspect", "return", "self", ".", "_base_image_inspect" ]
43.8
25.066667
def _create_contextualvals_obj_from_context(cls, context): """ Gathers all of the 'contextual' data needed to render a menu instance and returns it in a structure that can be conveniently referenced throughout the process of preparing the menu and menu items and for rendering. """ context_processor_vals = context.get('wagtailmenus_vals', {}) return ContextualVals( context, context['request'], get_site_from_request(context['request']), context.get('current_level', 0) + 1, context.get('original_menu_tag', cls.related_templatetag_name), context.get('original_menu_instance'), context_processor_vals.get('current_page'), context_processor_vals.get('section_root'), context_processor_vals.get('current_page_ancestor_ids', ()), )
[ "def", "_create_contextualvals_obj_from_context", "(", "cls", ",", "context", ")", ":", "context_processor_vals", "=", "context", ".", "get", "(", "'wagtailmenus_vals'", ",", "{", "}", ")", "return", "ContextualVals", "(", "context", ",", "context", "[", "'request'", "]", ",", "get_site_from_request", "(", "context", "[", "'request'", "]", ")", ",", "context", ".", "get", "(", "'current_level'", ",", "0", ")", "+", "1", ",", "context", ".", "get", "(", "'original_menu_tag'", ",", "cls", ".", "related_templatetag_name", ")", ",", "context", ".", "get", "(", "'original_menu_instance'", ")", ",", "context_processor_vals", ".", "get", "(", "'current_page'", ")", ",", "context_processor_vals", ".", "get", "(", "'section_root'", ")", ",", "context_processor_vals", ".", "get", "(", "'current_page_ancestor_ids'", ",", "(", ")", ")", ",", ")" ]
46.894737
19.210526
def virtual_network_present(name, address_prefixes, resource_group, dns_servers=None, tags=None, connection_auth=None, **kwargs): ''' .. versionadded:: 2019.2.0 Ensure a virtual network exists. :param name: Name of the virtual network. :param resource_group: The resource group assigned to the virtual network. :param address_prefixes: A list of CIDR blocks which can be used by subnets within the virtual network. :param dns_servers: A list of DNS server addresses. :param tags: A dictionary of strings can be passed as tag metadata to the virtual network object. :param connection_auth: A dict with subscription and authentication parameters to be used in connecting to the Azure Resource Manager API. Example usage: .. code-block:: yaml Ensure virtual network exists: azurearm_network.virtual_network_present: - name: vnet1 - resource_group: group1 - address_prefixes: - '10.0.0.0/8' - '192.168.0.0/16' - dns_servers: - '8.8.8.8' - tags: contact_name: Elmer Fudd Gantry - connection_auth: {{ profile }} - require: - azurearm_resource: Ensure resource group exists ''' ret = { 'name': name, 'result': False, 'comment': '', 'changes': {} } if not isinstance(connection_auth, dict): ret['comment'] = 'Connection information must be specified via connection_auth dictionary!' 
return ret vnet = __salt__['azurearm_network.virtual_network_get']( name, resource_group, azurearm_log_level='info', **connection_auth ) if 'error' not in vnet: tag_changes = __utils__['dictdiffer.deep_diff'](vnet.get('tags', {}), tags or {}) if tag_changes: ret['changes']['tags'] = tag_changes dns_changes = set(dns_servers or []).symmetric_difference( set(vnet.get('dhcp_options', {}).get('dns_servers', []))) if dns_changes: ret['changes']['dns_servers'] = { 'old': vnet.get('dhcp_options', {}).get('dns_servers', []), 'new': dns_servers, } addr_changes = set(address_prefixes or []).symmetric_difference( set(vnet.get('address_space', {}).get('address_prefixes', []))) if addr_changes: ret['changes']['address_space'] = { 'address_prefixes': { 'old': vnet.get('address_space', {}).get('address_prefixes', []), 'new': address_prefixes, } } if kwargs.get('enable_ddos_protection', False) != vnet.get('enable_ddos_protection'): ret['changes']['enable_ddos_protection'] = { 'old': vnet.get('enable_ddos_protection'), 'new': kwargs.get('enable_ddos_protection') } if kwargs.get('enable_vm_protection', False) != vnet.get('enable_vm_protection'): ret['changes']['enable_vm_protection'] = { 'old': vnet.get('enable_vm_protection'), 'new': kwargs.get('enable_vm_protection') } if not ret['changes']: ret['result'] = True ret['comment'] = 'Virtual network {0} is already present.'.format(name) return ret if __opts__['test']: ret['result'] = None ret['comment'] = 'Virtual network {0} would be updated.'.format(name) return ret else: ret['changes'] = { 'old': {}, 'new': { 'name': name, 'resource_group': resource_group, 'address_space': {'address_prefixes': address_prefixes}, 'dhcp_options': {'dns_servers': dns_servers}, 'enable_ddos_protection': kwargs.get('enable_ddos_protection', False), 'enable_vm_protection': kwargs.get('enable_vm_protection', False), 'tags': tags, } } if __opts__['test']: ret['comment'] = 'Virtual network {0} would be created.'.format(name) ret['result'] = None return ret 
vnet_kwargs = kwargs.copy() vnet_kwargs.update(connection_auth) vnet = __salt__['azurearm_network.virtual_network_create_or_update']( name=name, resource_group=resource_group, address_prefixes=address_prefixes, dns_servers=dns_servers, tags=tags, **vnet_kwargs ) if 'error' not in vnet: ret['result'] = True ret['comment'] = 'Virtual network {0} has been created.'.format(name) return ret ret['comment'] = 'Failed to create virtual network {0}! ({1})'.format(name, vnet.get('error')) return ret
[ "def", "virtual_network_present", "(", "name", ",", "address_prefixes", ",", "resource_group", ",", "dns_servers", "=", "None", ",", "tags", "=", "None", ",", "connection_auth", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'result'", ":", "False", ",", "'comment'", ":", "''", ",", "'changes'", ":", "{", "}", "}", "if", "not", "isinstance", "(", "connection_auth", ",", "dict", ")", ":", "ret", "[", "'comment'", "]", "=", "'Connection information must be specified via connection_auth dictionary!'", "return", "ret", "vnet", "=", "__salt__", "[", "'azurearm_network.virtual_network_get'", "]", "(", "name", ",", "resource_group", ",", "azurearm_log_level", "=", "'info'", ",", "*", "*", "connection_auth", ")", "if", "'error'", "not", "in", "vnet", ":", "tag_changes", "=", "__utils__", "[", "'dictdiffer.deep_diff'", "]", "(", "vnet", ".", "get", "(", "'tags'", ",", "{", "}", ")", ",", "tags", "or", "{", "}", ")", "if", "tag_changes", ":", "ret", "[", "'changes'", "]", "[", "'tags'", "]", "=", "tag_changes", "dns_changes", "=", "set", "(", "dns_servers", "or", "[", "]", ")", ".", "symmetric_difference", "(", "set", "(", "vnet", ".", "get", "(", "'dhcp_options'", ",", "{", "}", ")", ".", "get", "(", "'dns_servers'", ",", "[", "]", ")", ")", ")", "if", "dns_changes", ":", "ret", "[", "'changes'", "]", "[", "'dns_servers'", "]", "=", "{", "'old'", ":", "vnet", ".", "get", "(", "'dhcp_options'", ",", "{", "}", ")", ".", "get", "(", "'dns_servers'", ",", "[", "]", ")", ",", "'new'", ":", "dns_servers", ",", "}", "addr_changes", "=", "set", "(", "address_prefixes", "or", "[", "]", ")", ".", "symmetric_difference", "(", "set", "(", "vnet", ".", "get", "(", "'address_space'", ",", "{", "}", ")", ".", "get", "(", "'address_prefixes'", ",", "[", "]", ")", ")", ")", "if", "addr_changes", ":", "ret", "[", "'changes'", "]", "[", "'address_space'", "]", "=", "{", "'address_prefixes'", ":", "{", "'old'", ":", "vnet", ".", "get", "(", 
"'address_space'", ",", "{", "}", ")", ".", "get", "(", "'address_prefixes'", ",", "[", "]", ")", ",", "'new'", ":", "address_prefixes", ",", "}", "}", "if", "kwargs", ".", "get", "(", "'enable_ddos_protection'", ",", "False", ")", "!=", "vnet", ".", "get", "(", "'enable_ddos_protection'", ")", ":", "ret", "[", "'changes'", "]", "[", "'enable_ddos_protection'", "]", "=", "{", "'old'", ":", "vnet", ".", "get", "(", "'enable_ddos_protection'", ")", ",", "'new'", ":", "kwargs", ".", "get", "(", "'enable_ddos_protection'", ")", "}", "if", "kwargs", ".", "get", "(", "'enable_vm_protection'", ",", "False", ")", "!=", "vnet", ".", "get", "(", "'enable_vm_protection'", ")", ":", "ret", "[", "'changes'", "]", "[", "'enable_vm_protection'", "]", "=", "{", "'old'", ":", "vnet", ".", "get", "(", "'enable_vm_protection'", ")", ",", "'new'", ":", "kwargs", ".", "get", "(", "'enable_vm_protection'", ")", "}", "if", "not", "ret", "[", "'changes'", "]", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'Virtual network {0} is already present.'", ".", "format", "(", "name", ")", "return", "ret", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'comment'", "]", "=", "'Virtual network {0} would be updated.'", ".", "format", "(", "name", ")", "return", "ret", "else", ":", "ret", "[", "'changes'", "]", "=", "{", "'old'", ":", "{", "}", ",", "'new'", ":", "{", "'name'", ":", "name", ",", "'resource_group'", ":", "resource_group", ",", "'address_space'", ":", "{", "'address_prefixes'", ":", "address_prefixes", "}", ",", "'dhcp_options'", ":", "{", "'dns_servers'", ":", "dns_servers", "}", ",", "'enable_ddos_protection'", ":", "kwargs", ".", "get", "(", "'enable_ddos_protection'", ",", "False", ")", ",", "'enable_vm_protection'", ":", "kwargs", ".", "get", "(", "'enable_vm_protection'", ",", "False", ")", ",", "'tags'", ":", "tags", ",", "}", "}", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", 
"'comment'", "]", "=", "'Virtual network {0} would be created.'", ".", "format", "(", "name", ")", "ret", "[", "'result'", "]", "=", "None", "return", "ret", "vnet_kwargs", "=", "kwargs", ".", "copy", "(", ")", "vnet_kwargs", ".", "update", "(", "connection_auth", ")", "vnet", "=", "__salt__", "[", "'azurearm_network.virtual_network_create_or_update'", "]", "(", "name", "=", "name", ",", "resource_group", "=", "resource_group", ",", "address_prefixes", "=", "address_prefixes", ",", "dns_servers", "=", "dns_servers", ",", "tags", "=", "tags", ",", "*", "*", "vnet_kwargs", ")", "if", "'error'", "not", "in", "vnet", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'Virtual network {0} has been created.'", ".", "format", "(", "name", ")", "return", "ret", "ret", "[", "'comment'", "]", "=", "'Failed to create virtual network {0}! ({1})'", ".", "format", "(", "name", ",", "vnet", ".", "get", "(", "'error'", ")", ")", "return", "ret" ]
33.07483
24.176871
def add_to_playlist(self, items, playlist='video'): '''Adds the provided list of items to the specified playlist. Available playlists include *video* and *music*. ''' playlists = {'music': 0, 'video': 1} assert playlist in playlists.keys(), ('Playlist "%s" is invalid.' % playlist) selected_playlist = xbmc.PlayList(playlists[playlist]) _items = [] for item in items: if not hasattr(item, 'as_xbmc_listitem'): if 'info_type' in item.keys(): log.warning('info_type key has no affect for playlist ' 'items as the info_type is inferred from the ' 'playlist type.') # info_type has to be same as the playlist type item['info_type'] = playlist item = xbmcswift2.ListItem.from_dict(**item) _items.append(item) selected_playlist.add(item.get_path(), item.as_xbmc_listitem()) return _items
[ "def", "add_to_playlist", "(", "self", ",", "items", ",", "playlist", "=", "'video'", ")", ":", "playlists", "=", "{", "'music'", ":", "0", ",", "'video'", ":", "1", "}", "assert", "playlist", "in", "playlists", ".", "keys", "(", ")", ",", "(", "'Playlist \"%s\" is invalid.'", "%", "playlist", ")", "selected_playlist", "=", "xbmc", ".", "PlayList", "(", "playlists", "[", "playlist", "]", ")", "_items", "=", "[", "]", "for", "item", "in", "items", ":", "if", "not", "hasattr", "(", "item", ",", "'as_xbmc_listitem'", ")", ":", "if", "'info_type'", "in", "item", ".", "keys", "(", ")", ":", "log", ".", "warning", "(", "'info_type key has no affect for playlist '", "'items as the info_type is inferred from the '", "'playlist type.'", ")", "# info_type has to be same as the playlist type", "item", "[", "'info_type'", "]", "=", "playlist", "item", "=", "xbmcswift2", ".", "ListItem", ".", "from_dict", "(", "*", "*", "item", ")", "_items", ".", "append", "(", "item", ")", "selected_playlist", ".", "add", "(", "item", ".", "get_path", "(", ")", ",", "item", ".", "as_xbmc_listitem", "(", ")", ")", "return", "_items" ]
48.318182
20.318182
def set_pre_processing_parameters(self, image_input_names = [], is_bgr = False, red_bias = 0.0, green_bias = 0.0, blue_bias = 0.0, gray_bias = 0.0, image_scale = 1.0): """Add pre-processing parameters to the neural network object Parameters ---------- image_input_names: [str] Name of input blobs that are images is_bgr: boolean | dict() Channel order for input blobs that are images. BGR if True else RGB. To specify a different value for each image input, provide a dictionary with input names as keys. red_bias: float | dict() Image re-centering parameter (red channel) blue_bias: float | dict() Image re-centering parameter (blue channel) green_bias: float | dict() Image re-centering parameter (green channel) gray_bias: float | dict() Image re-centering parameter (for grayscale images) image_scale: float | dict() Value by which to scale the images. See Also -------- set_input, set_output, set_class_labels """ spec = self.spec if not image_input_names: return # nothing to do here if not isinstance(is_bgr, dict): is_bgr = dict.fromkeys(image_input_names, is_bgr) if not isinstance(red_bias, dict): red_bias = dict.fromkeys(image_input_names, red_bias) if not isinstance(blue_bias, dict): blue_bias = dict.fromkeys(image_input_names, blue_bias) if not isinstance(green_bias, dict): green_bias = dict.fromkeys(image_input_names, green_bias) if not isinstance(gray_bias, dict): gray_bias = dict.fromkeys(image_input_names, gray_bias) if not isinstance(image_scale, dict): image_scale = dict.fromkeys(image_input_names, image_scale) # Add image inputs for input_ in spec.description.input: if input_.name in image_input_names: if input_.type.WhichOneof('Type') == 'multiArrayType': array_shape = tuple(input_.type.multiArrayType.shape) channels, height, width = array_shape if channels == 1: input_.type.imageType.colorSpace = _FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value('GRAYSCALE') elif channels == 3: if input_.name in is_bgr: if is_bgr[input_.name]: 
input_.type.imageType.colorSpace = _FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value('BGR') else: input_.type.imageType.colorSpace = _FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value('RGB') else: input_.type.imageType.colorSpace = _FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value('RGB') else: raise ValueError("Channel Value %d not supported for image inputs" % channels) input_.type.imageType.width = width input_.type.imageType.height = height preprocessing = self.nn_spec.preprocessing.add() preprocessing.featureName = input_.name scaler = preprocessing.scaler if input_.name in image_scale: scaler.channelScale = image_scale[input_.name] else: scaler.channelScale = 1.0 if input_.name in red_bias: scaler.redBias = red_bias[input_.name] if input_.name in blue_bias: scaler.blueBias = blue_bias[input_.name] if input_.name in green_bias: scaler.greenBias = green_bias[input_.name] if input_.name in gray_bias: scaler.grayBias = gray_bias[input_.name]
[ "def", "set_pre_processing_parameters", "(", "self", ",", "image_input_names", "=", "[", "]", ",", "is_bgr", "=", "False", ",", "red_bias", "=", "0.0", ",", "green_bias", "=", "0.0", ",", "blue_bias", "=", "0.0", ",", "gray_bias", "=", "0.0", ",", "image_scale", "=", "1.0", ")", ":", "spec", "=", "self", ".", "spec", "if", "not", "image_input_names", ":", "return", "# nothing to do here", "if", "not", "isinstance", "(", "is_bgr", ",", "dict", ")", ":", "is_bgr", "=", "dict", ".", "fromkeys", "(", "image_input_names", ",", "is_bgr", ")", "if", "not", "isinstance", "(", "red_bias", ",", "dict", ")", ":", "red_bias", "=", "dict", ".", "fromkeys", "(", "image_input_names", ",", "red_bias", ")", "if", "not", "isinstance", "(", "blue_bias", ",", "dict", ")", ":", "blue_bias", "=", "dict", ".", "fromkeys", "(", "image_input_names", ",", "blue_bias", ")", "if", "not", "isinstance", "(", "green_bias", ",", "dict", ")", ":", "green_bias", "=", "dict", ".", "fromkeys", "(", "image_input_names", ",", "green_bias", ")", "if", "not", "isinstance", "(", "gray_bias", ",", "dict", ")", ":", "gray_bias", "=", "dict", ".", "fromkeys", "(", "image_input_names", ",", "gray_bias", ")", "if", "not", "isinstance", "(", "image_scale", ",", "dict", ")", ":", "image_scale", "=", "dict", ".", "fromkeys", "(", "image_input_names", ",", "image_scale", ")", "# Add image inputs", "for", "input_", "in", "spec", ".", "description", ".", "input", ":", "if", "input_", ".", "name", "in", "image_input_names", ":", "if", "input_", ".", "type", ".", "WhichOneof", "(", "'Type'", ")", "==", "'multiArrayType'", ":", "array_shape", "=", "tuple", "(", "input_", ".", "type", ".", "multiArrayType", ".", "shape", ")", "channels", ",", "height", ",", "width", "=", "array_shape", "if", "channels", "==", "1", ":", "input_", ".", "type", ".", "imageType", ".", "colorSpace", "=", "_FeatureTypes_pb2", ".", "ImageFeatureType", ".", "ColorSpace", ".", "Value", "(", "'GRAYSCALE'", ")", "elif", "channels", "==", "3", 
":", "if", "input_", ".", "name", "in", "is_bgr", ":", "if", "is_bgr", "[", "input_", ".", "name", "]", ":", "input_", ".", "type", ".", "imageType", ".", "colorSpace", "=", "_FeatureTypes_pb2", ".", "ImageFeatureType", ".", "ColorSpace", ".", "Value", "(", "'BGR'", ")", "else", ":", "input_", ".", "type", ".", "imageType", ".", "colorSpace", "=", "_FeatureTypes_pb2", ".", "ImageFeatureType", ".", "ColorSpace", ".", "Value", "(", "'RGB'", ")", "else", ":", "input_", ".", "type", ".", "imageType", ".", "colorSpace", "=", "_FeatureTypes_pb2", ".", "ImageFeatureType", ".", "ColorSpace", ".", "Value", "(", "'RGB'", ")", "else", ":", "raise", "ValueError", "(", "\"Channel Value %d not supported for image inputs\"", "%", "channels", ")", "input_", ".", "type", ".", "imageType", ".", "width", "=", "width", "input_", ".", "type", ".", "imageType", ".", "height", "=", "height", "preprocessing", "=", "self", ".", "nn_spec", ".", "preprocessing", ".", "add", "(", ")", "preprocessing", ".", "featureName", "=", "input_", ".", "name", "scaler", "=", "preprocessing", ".", "scaler", "if", "input_", ".", "name", "in", "image_scale", ":", "scaler", ".", "channelScale", "=", "image_scale", "[", "input_", ".", "name", "]", "else", ":", "scaler", ".", "channelScale", "=", "1.0", "if", "input_", ".", "name", "in", "red_bias", ":", "scaler", ".", "redBias", "=", "red_bias", "[", "input_", ".", "name", "]", "if", "input_", ".", "name", "in", "blue_bias", ":", "scaler", ".", "blueBias", "=", "blue_bias", "[", "input_", ".", "name", "]", "if", "input_", ".", "name", "in", "green_bias", ":", "scaler", ".", "greenBias", "=", "green_bias", "[", "input_", ".", "name", "]", "if", "input_", ".", "name", "in", "gray_bias", ":", "scaler", ".", "grayBias", "=", "gray_bias", "[", "input_", ".", "name", "]" ]
49.155844
27.727273
def get_assignable_bin_ids(self, bin_id): """Gets a list of bins including and under the given bin node in which any resource can be assigned. arg: bin_id (osid.id.Id): the ``Id`` of the ``Bin`` return: (osid.id.IdList) - list of assignable bin ``Ids`` raise: NullArgument - ``bin_id`` is ``null`` raise: OperationFailed - unable to complete request *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids # This will likely be overridden by an authorization adapter mgr = self._get_provider_manager('RESOURCE', local=True) lookup_session = mgr.get_bin_lookup_session(proxy=self._proxy) bins = lookup_session.get_bins() id_list = [] for bin in bins: id_list.append(bin.get_id()) return IdList(id_list)
[ "def", "get_assignable_bin_ids", "(", "self", ",", "bin_id", ")", ":", "# Implemented from template for", "# osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids", "# This will likely be overridden by an authorization adapter", "mgr", "=", "self", ".", "_get_provider_manager", "(", "'RESOURCE'", ",", "local", "=", "True", ")", "lookup_session", "=", "mgr", ".", "get_bin_lookup_session", "(", "proxy", "=", "self", ".", "_proxy", ")", "bins", "=", "lookup_session", ".", "get_bins", "(", ")", "id_list", "=", "[", "]", "for", "bin", "in", "bins", ":", "id_list", ".", "append", "(", "bin", ".", "get_id", "(", ")", ")", "return", "IdList", "(", "id_list", ")" ]
46.85
17.6
def to_file(self, file_name=None): """Saves a DataFrame with all the needed info about the experiment""" file_name = self._check_file_name(file_name) pages = self.pages top_level_dict = { 'info_df': pages, 'metadata': self._prm_packer() } jason_string = json.dumps( top_level_dict, default=lambda info_df: json.loads( info_df.to_json() ) ) self.paginate() with open(file_name, 'w') as outfile: outfile.write(jason_string) self.file_name = file_name logging.info("Saved file to {}".format(file_name))
[ "def", "to_file", "(", "self", ",", "file_name", "=", "None", ")", ":", "file_name", "=", "self", ".", "_check_file_name", "(", "file_name", ")", "pages", "=", "self", ".", "pages", "top_level_dict", "=", "{", "'info_df'", ":", "pages", ",", "'metadata'", ":", "self", ".", "_prm_packer", "(", ")", "}", "jason_string", "=", "json", ".", "dumps", "(", "top_level_dict", ",", "default", "=", "lambda", "info_df", ":", "json", ".", "loads", "(", "info_df", ".", "to_json", "(", ")", ")", ")", "self", ".", "paginate", "(", ")", "with", "open", "(", "file_name", ",", "'w'", ")", "as", "outfile", ":", "outfile", ".", "write", "(", "jason_string", ")", "self", ".", "file_name", "=", "file_name", "logging", ".", "info", "(", "\"Saved file to {}\"", ".", "format", "(", "file_name", ")", ")" ]
26.28
18.72
def process_reply(self, reply, status, description): """ Process a web service operation SOAP reply. Depending on how the ``retxml`` option is set, may return the SOAP reply XML or process it and return the Python object representing the returned value. @param reply: The SOAP reply envelope. @type reply: I{bytes} @param status: The HTTP status code (None indicates httplib.OK). @type status: int|I{None} @param description: Additional status description. @type description: str @return: The invoked web service operation return value. @rtype: I{builtin}|I{subclass of} L{Object}|I{bytes}|I{None} """ if status is None: status = httplib.OK debug_message = "Reply HTTP status - %d" % (status,) if status in (httplib.ACCEPTED, httplib.NO_CONTENT): log.debug(debug_message) return #TODO: Consider whether and how to allow plugins to handle error, # httplib.ACCEPTED & httplib.NO_CONTENT replies as well as successful # ones. if status == httplib.OK: log.debug("%s\n%s", debug_message, reply) else: log.debug("%s - %s\n%s", debug_message, description, reply) plugins = PluginContainer(self.options.plugins) ctx = plugins.message.received(reply=reply) reply = ctx.reply # SOAP standard states that SOAP errors must be accompanied by HTTP # status code 500 - internal server error: # # From SOAP 1.1 specification: # In case of a SOAP error while processing the request, the SOAP HTTP # server MUST issue an HTTP 500 "Internal Server Error" response and # include a SOAP message in the response containing a SOAP Fault # element (see section 4.4) indicating the SOAP processing error. # # From WS-I Basic profile: # An INSTANCE MUST use a "500 Internal Server Error" HTTP status code # if the response message is a SOAP Fault. 
replyroot = None if status in (httplib.OK, httplib.INTERNAL_SERVER_ERROR): replyroot = _parse(reply) plugins.message.parsed(reply=replyroot) fault = self.__get_fault(replyroot) if fault: if status != httplib.INTERNAL_SERVER_ERROR: log.warn("Web service reported a SOAP processing fault " "using an unexpected HTTP status code %d. Reporting " "as an internal server error.", status) if self.options.faults: raise WebFault(fault, replyroot) return httplib.INTERNAL_SERVER_ERROR, fault if status != httplib.OK: if self.options.faults: #TODO: Use a more specific exception class here. raise Exception((status, description)) return status, description if self.options.retxml: return reply result = replyroot and self.method.binding.output.get_reply( self.method, replyroot) ctx = plugins.message.unmarshalled(reply=result) result = ctx.reply if self.options.faults: return result return httplib.OK, result
[ "def", "process_reply", "(", "self", ",", "reply", ",", "status", ",", "description", ")", ":", "if", "status", "is", "None", ":", "status", "=", "httplib", ".", "OK", "debug_message", "=", "\"Reply HTTP status - %d\"", "%", "(", "status", ",", ")", "if", "status", "in", "(", "httplib", ".", "ACCEPTED", ",", "httplib", ".", "NO_CONTENT", ")", ":", "log", ".", "debug", "(", "debug_message", ")", "return", "#TODO: Consider whether and how to allow plugins to handle error,", "# httplib.ACCEPTED & httplib.NO_CONTENT replies as well as successful", "# ones.", "if", "status", "==", "httplib", ".", "OK", ":", "log", ".", "debug", "(", "\"%s\\n%s\"", ",", "debug_message", ",", "reply", ")", "else", ":", "log", ".", "debug", "(", "\"%s - %s\\n%s\"", ",", "debug_message", ",", "description", ",", "reply", ")", "plugins", "=", "PluginContainer", "(", "self", ".", "options", ".", "plugins", ")", "ctx", "=", "plugins", ".", "message", ".", "received", "(", "reply", "=", "reply", ")", "reply", "=", "ctx", ".", "reply", "# SOAP standard states that SOAP errors must be accompanied by HTTP", "# status code 500 - internal server error:", "#", "# From SOAP 1.1 specification:", "# In case of a SOAP error while processing the request, the SOAP HTTP", "# server MUST issue an HTTP 500 \"Internal Server Error\" response and", "# include a SOAP message in the response containing a SOAP Fault", "# element (see section 4.4) indicating the SOAP processing error.", "#", "# From WS-I Basic profile:", "# An INSTANCE MUST use a \"500 Internal Server Error\" HTTP status code", "# if the response message is a SOAP Fault.", "replyroot", "=", "None", "if", "status", "in", "(", "httplib", ".", "OK", ",", "httplib", ".", "INTERNAL_SERVER_ERROR", ")", ":", "replyroot", "=", "_parse", "(", "reply", ")", "plugins", ".", "message", ".", "parsed", "(", "reply", "=", "replyroot", ")", "fault", "=", "self", ".", "__get_fault", "(", "replyroot", ")", "if", "fault", ":", "if", "status", "!=", "httplib", ".", 
"INTERNAL_SERVER_ERROR", ":", "log", ".", "warn", "(", "\"Web service reported a SOAP processing fault \"", "\"using an unexpected HTTP status code %d. Reporting \"", "\"as an internal server error.\"", ",", "status", ")", "if", "self", ".", "options", ".", "faults", ":", "raise", "WebFault", "(", "fault", ",", "replyroot", ")", "return", "httplib", ".", "INTERNAL_SERVER_ERROR", ",", "fault", "if", "status", "!=", "httplib", ".", "OK", ":", "if", "self", ".", "options", ".", "faults", ":", "#TODO: Use a more specific exception class here.", "raise", "Exception", "(", "(", "status", ",", "description", ")", ")", "return", "status", ",", "description", "if", "self", ".", "options", ".", "retxml", ":", "return", "reply", "result", "=", "replyroot", "and", "self", ".", "method", ".", "binding", ".", "output", ".", "get_reply", "(", "self", ".", "method", ",", "replyroot", ")", "ctx", "=", "plugins", ".", "message", ".", "unmarshalled", "(", "reply", "=", "result", ")", "result", "=", "ctx", ".", "reply", "if", "self", ".", "options", ".", "faults", ":", "return", "result", "return", "httplib", ".", "OK", ",", "result" ]
42.350649
19.935065
def get_plugin_modules(plugins): """ Get plugin modules from input strings :param tuple plugins: a tuple of plugin names in str """ if not plugins: raise MissingPluginNames("input plugin names are required") modules = [] for plugin in plugins: short_name = PLUGIN_MAPPING.get(plugin.lower(), plugin.lower()) full_path = '%s%s' % (module_prefix, short_name) modules.append(importlib.import_module(full_path)) return tuple(modules)
[ "def", "get_plugin_modules", "(", "plugins", ")", ":", "if", "not", "plugins", ":", "raise", "MissingPluginNames", "(", "\"input plugin names are required\"", ")", "modules", "=", "[", "]", "for", "plugin", "in", "plugins", ":", "short_name", "=", "PLUGIN_MAPPING", ".", "get", "(", "plugin", ".", "lower", "(", ")", ",", "plugin", ".", "lower", "(", ")", ")", "full_path", "=", "'%s%s'", "%", "(", "module_prefix", ",", "short_name", ")", "modules", ".", "append", "(", "importlib", ".", "import_module", "(", "full_path", ")", ")", "return", "tuple", "(", "modules", ")" ]
30.0625
19.4375
def get_traceback_html(self, **kwargs): "Return HTML version of debug 500 HTTP error page." t = Template(TECHNICAL_500_TEMPLATE) c = self.get_traceback_data() c['kwargs'] = kwargs return t.render(Context(c))
[ "def", "get_traceback_html", "(", "self", ",", "*", "*", "kwargs", ")", ":", "t", "=", "Template", "(", "TECHNICAL_500_TEMPLATE", ")", "c", "=", "self", ".", "get_traceback_data", "(", ")", "c", "[", "'kwargs'", "]", "=", "kwargs", "return", "t", ".", "render", "(", "Context", "(", "c", ")", ")" ]
40.333333
7.333333
def _unflatten_beam_dim(tensor, batch_size, beam_size): """Reshapes first dimension back to [batch_size, beam_size]. Args: tensor: Tensor to reshape of shape [batch_size*beam_size, ...] batch_size: Tensor, original batch size. beam_size: int, original beam size. Returns: Reshaped tensor of shape [batch_size, beam_size, ...] """ shape = _shape_list(tensor) new_shape = [batch_size, beam_size] + shape[1:] return tf.reshape(tensor, new_shape)
[ "def", "_unflatten_beam_dim", "(", "tensor", ",", "batch_size", ",", "beam_size", ")", ":", "shape", "=", "_shape_list", "(", "tensor", ")", "new_shape", "=", "[", "batch_size", ",", "beam_size", "]", "+", "shape", "[", "1", ":", "]", "return", "tf", ".", "reshape", "(", "tensor", ",", "new_shape", ")" ]
32.928571
16.285714
def template_render(template, context=None, request=None): """ Passing Context or RequestContext to Template.render is deprecated in 1.9+, see https://github.com/django/django/pull/3883 and https://github.com/django/django/blob/1.9rc1/django/template/backends/django.py#L82-L84 :param template: Template instance :param context: dict :param request: Request instance :return: rendered template as SafeText instance """ if django.VERSION < (1, 8) or isinstance(template, Template): if request: context = RequestContext(request, context) else: context = Context(context) return template.render(context) # backends template, e.g. django.template.backends.django.Template else: return template.render(context, request=request)
[ "def", "template_render", "(", "template", ",", "context", "=", "None", ",", "request", "=", "None", ")", ":", "if", "django", ".", "VERSION", "<", "(", "1", ",", "8", ")", "or", "isinstance", "(", "template", ",", "Template", ")", ":", "if", "request", ":", "context", "=", "RequestContext", "(", "request", ",", "context", ")", "else", ":", "context", "=", "Context", "(", "context", ")", "return", "template", ".", "render", "(", "context", ")", "# backends template, e.g. django.template.backends.django.Template", "else", ":", "return", "template", ".", "render", "(", "context", ",", "request", "=", "request", ")" ]
40.4
18.1
def read(self): """ Reads the todo.txt file and returns a list of todo items. """ todos = [] try: todofile = codecs.open(self.path, 'r', encoding="utf-8") todos = todofile.readlines() todofile.close() except IOError: pass return todos
[ "def", "read", "(", "self", ")", ":", "todos", "=", "[", "]", "try", ":", "todofile", "=", "codecs", ".", "open", "(", "self", ".", "path", ",", "'r'", ",", "encoding", "=", "\"utf-8\"", ")", "todos", "=", "todofile", ".", "readlines", "(", ")", "todofile", ".", "close", "(", ")", "except", "IOError", ":", "pass", "return", "todos" ]
28.454545
19.636364
def get_subdomain_ops_at_txid(txid, proxy=None, hostport=None): """ Get the list of subdomain operations added by a txid Returns the list of operations ([{...}]) on success Returns {'error': ...} on failure """ assert proxy or hostport, 'Need proxy or hostport' if proxy is None: proxy = connect_hostport(hostport) subdomain_ops_schema = { 'type': 'object', 'properties': { 'subdomain_ops': { 'type': 'array', 'items': { 'type': 'object', 'properties': OP_HISTORY_SCHEMA['properties'], 'required': SUBDOMAIN_HISTORY_REQUIRED, }, }, }, 'required': ['subdomain_ops'], } schema = json_response_schema(subdomain_ops_schema) resp = {} try: resp = proxy.get_subdomain_ops_at_txid(txid) resp = json_validate(schema, resp) if json_is_error(resp): return resp # names must be valid for op in resp['subdomain_ops']: assert is_subdomain(str(op['fully_qualified_subdomain'])), ('Invalid subdomain "{}"'.format(op['fully_qualified_subdomain'])) except ValidationError as ve: if BLOCKSTACK_DEBUG: log.exception(ve) resp = {'error': 'Server response did not match expected schema. You are likely communicating with an out-of-date Blockstack node.', 'http_status': 502} return resp except AssertionError as e: if BLOCKSTACK_DEBUG: log.exception(e) resp = {'error': 'Server response included an invalid subdomain', 'http_status': 500} return resp except socket.timeout: log.error("Connection timed out") resp = {'error': 'Connection to remote host timed out.', 'http_status': 503} return resp except socket.error as se: log.error("Connection error {}".format(se.errno)) resp = {'error': 'Connection to remote host failed.', 'http_status': 502} return resp except Exception as ee: if BLOCKSTACK_DEBUG: log.exception(ee) log.error("Caught exception while connecting to Blockstack node: {}".format(ee)) resp = {'error': 'Failed to contact Blockstack node. Try again with `--debug`.', 'http_status': 500} return resp return resp['subdomain_ops']
[ "def", "get_subdomain_ops_at_txid", "(", "txid", ",", "proxy", "=", "None", ",", "hostport", "=", "None", ")", ":", "assert", "proxy", "or", "hostport", ",", "'Need proxy or hostport'", "if", "proxy", "is", "None", ":", "proxy", "=", "connect_hostport", "(", "hostport", ")", "subdomain_ops_schema", "=", "{", "'type'", ":", "'object'", ",", "'properties'", ":", "{", "'subdomain_ops'", ":", "{", "'type'", ":", "'array'", ",", "'items'", ":", "{", "'type'", ":", "'object'", ",", "'properties'", ":", "OP_HISTORY_SCHEMA", "[", "'properties'", "]", ",", "'required'", ":", "SUBDOMAIN_HISTORY_REQUIRED", ",", "}", ",", "}", ",", "}", ",", "'required'", ":", "[", "'subdomain_ops'", "]", ",", "}", "schema", "=", "json_response_schema", "(", "subdomain_ops_schema", ")", "resp", "=", "{", "}", "try", ":", "resp", "=", "proxy", ".", "get_subdomain_ops_at_txid", "(", "txid", ")", "resp", "=", "json_validate", "(", "schema", ",", "resp", ")", "if", "json_is_error", "(", "resp", ")", ":", "return", "resp", "# names must be valid", "for", "op", "in", "resp", "[", "'subdomain_ops'", "]", ":", "assert", "is_subdomain", "(", "str", "(", "op", "[", "'fully_qualified_subdomain'", "]", ")", ")", ",", "(", "'Invalid subdomain \"{}\"'", ".", "format", "(", "op", "[", "'fully_qualified_subdomain'", "]", ")", ")", "except", "ValidationError", "as", "ve", ":", "if", "BLOCKSTACK_DEBUG", ":", "log", ".", "exception", "(", "ve", ")", "resp", "=", "{", "'error'", ":", "'Server response did not match expected schema. 
You are likely communicating with an out-of-date Blockstack node.'", ",", "'http_status'", ":", "502", "}", "return", "resp", "except", "AssertionError", "as", "e", ":", "if", "BLOCKSTACK_DEBUG", ":", "log", ".", "exception", "(", "e", ")", "resp", "=", "{", "'error'", ":", "'Server response included an invalid subdomain'", ",", "'http_status'", ":", "500", "}", "return", "resp", "except", "socket", ".", "timeout", ":", "log", ".", "error", "(", "\"Connection timed out\"", ")", "resp", "=", "{", "'error'", ":", "'Connection to remote host timed out.'", ",", "'http_status'", ":", "503", "}", "return", "resp", "except", "socket", ".", "error", "as", "se", ":", "log", ".", "error", "(", "\"Connection error {}\"", ".", "format", "(", "se", ".", "errno", ")", ")", "resp", "=", "{", "'error'", ":", "'Connection to remote host failed.'", ",", "'http_status'", ":", "502", "}", "return", "resp", "except", "Exception", "as", "ee", ":", "if", "BLOCKSTACK_DEBUG", ":", "log", ".", "exception", "(", "ee", ")", "log", ".", "error", "(", "\"Caught exception while connecting to Blockstack node: {}\"", ".", "format", "(", "ee", ")", ")", "resp", "=", "{", "'error'", ":", "'Failed to contact Blockstack node. Try again with `--debug`.'", ",", "'http_status'", ":", "500", "}", "return", "resp", "return", "resp", "[", "'subdomain_ops'", "]" ]
32.971831
23.985915
def md_report(self, file_path): """Generate and save MD report""" self.logger.debug('Generating MD report') report = self.zap.core.mdreport() self._write_report(report, file_path)
[ "def", "md_report", "(", "self", ",", "file_path", ")", ":", "self", ".", "logger", ".", "debug", "(", "'Generating MD report'", ")", "report", "=", "self", ".", "zap", ".", "core", ".", "mdreport", "(", ")", "self", ".", "_write_report", "(", "report", ",", "file_path", ")" ]
41.4
4.8
def sys_access(self, buf, mode): """ Checks real user's permissions for a file :rtype: int :param buf: a buffer containing the pathname to the file to check its permissions. :param mode: the access permissions to check. :return: - C{0} if the calling process can access the file in the desired mode. - C{-1} if the calling process can not access the file in the desired mode. """ filename = b'' for i in range(0, 255): c = Operators.CHR(self.current.read_int(buf + i, 8)) if c == b'\x00': break filename += c if os.access(filename, mode): return 0 else: return -1
[ "def", "sys_access", "(", "self", ",", "buf", ",", "mode", ")", ":", "filename", "=", "b''", "for", "i", "in", "range", "(", "0", ",", "255", ")", ":", "c", "=", "Operators", ".", "CHR", "(", "self", ".", "current", ".", "read_int", "(", "buf", "+", "i", ",", "8", ")", ")", "if", "c", "==", "b'\\x00'", ":", "break", "filename", "+=", "c", "if", "os", ".", "access", "(", "filename", ",", "mode", ")", ":", "return", "0", "else", ":", "return", "-", "1" ]
33.318182
20.954545
def log_uuid(self, uuid): """Logs the object with the specified `uuid` to `self.uuids` if possible. Args: uuid (str): string value of :meth:`uuid.uuid4` value for the object. """ #We only need to try and describe an object once; if it is already in #our database, then just move along. if uuid not in self.uuids and uuid in uuids: self.uuids[uuid] = uuids[uuid].describe()
[ "def", "log_uuid", "(", "self", ",", "uuid", ")", ":", "#We only need to try and describe an object once; if it is already in", "#our database, then just move along.", "if", "uuid", "not", "in", "self", ".", "uuids", "and", "uuid", "in", "uuids", ":", "self", ".", "uuids", "[", "uuid", "]", "=", "uuids", "[", "uuid", "]", ".", "describe", "(", ")" ]
38
18.5
def get_terminal_size(default_rows=25, default_cols=80): """ Returns the number of lines and columns of the current terminal. It attempts several strategies to determine the size and if all fail, it returns (80, 25). :rtype: int, int :return: The rows and columns of the terminal. """ # Collect a list of viable input channels that may tell us something # about the terminal dimensions. fileno_list = [] try: fileno_list.append(sys.stdout.fileno()) except AttributeError: # Channel was redirected to an object that has no fileno() pass except ValueError: # Channel was closed while attemting to read it pass try: fileno_list.append(sys.stdin.fileno()) except AttributeError: pass except ValueError: # Channel was closed while attemting to read it pass try: fileno_list.append(sys.stderr.fileno()) except AttributeError: pass except ValueError: # Channel was closed while attemting to read it pass # Ask each channel for the terminal window size. for fd in fileno_list: try: rows, cols = _get_terminal_size(fd) except TypeError: # _get_terminal_size() returned None. pass else: return rows, cols # Try os.ctermid() try: fd = os.open(os.ctermid(), os.O_RDONLY) except AttributeError: # os.ctermid does not exist on Windows. pass except OSError: # The device pointed to by os.ctermid() does not exist. pass else: try: rows, cols = _get_terminal_size(fd) except TypeError: # _get_terminal_size() returned None. pass else: return rows, cols finally: os.close(fd) # Try `stty size` with open(os.devnull, 'w') as devnull: try: process = Popen(['stty', 'size'], stderr=devnull, stdout=PIPE, close_fds=True) except (OSError, ValueError): pass else: errcode = process.wait() output = process.stdout.read() try: rows, cols = output.split() return int(rows), int(cols) except (ValueError, TypeError): pass # Try environment variables. try: return tuple(int(os.getenv(var)) for var in ('LINES', 'COLUMNS')) except (ValueError, TypeError): pass # Give up. return default_rows, default_cols
[ "def", "get_terminal_size", "(", "default_rows", "=", "25", ",", "default_cols", "=", "80", ")", ":", "# Collect a list of viable input channels that may tell us something", "# about the terminal dimensions.", "fileno_list", "=", "[", "]", "try", ":", "fileno_list", ".", "append", "(", "sys", ".", "stdout", ".", "fileno", "(", ")", ")", "except", "AttributeError", ":", "# Channel was redirected to an object that has no fileno()", "pass", "except", "ValueError", ":", "# Channel was closed while attemting to read it", "pass", "try", ":", "fileno_list", ".", "append", "(", "sys", ".", "stdin", ".", "fileno", "(", ")", ")", "except", "AttributeError", ":", "pass", "except", "ValueError", ":", "# Channel was closed while attemting to read it", "pass", "try", ":", "fileno_list", ".", "append", "(", "sys", ".", "stderr", ".", "fileno", "(", ")", ")", "except", "AttributeError", ":", "pass", "except", "ValueError", ":", "# Channel was closed while attemting to read it", "pass", "# Ask each channel for the terminal window size.", "for", "fd", "in", "fileno_list", ":", "try", ":", "rows", ",", "cols", "=", "_get_terminal_size", "(", "fd", ")", "except", "TypeError", ":", "# _get_terminal_size() returned None.", "pass", "else", ":", "return", "rows", ",", "cols", "# Try os.ctermid()", "try", ":", "fd", "=", "os", ".", "open", "(", "os", ".", "ctermid", "(", ")", ",", "os", ".", "O_RDONLY", ")", "except", "AttributeError", ":", "# os.ctermid does not exist on Windows.", "pass", "except", "OSError", ":", "# The device pointed to by os.ctermid() does not exist.", "pass", "else", ":", "try", ":", "rows", ",", "cols", "=", "_get_terminal_size", "(", "fd", ")", "except", "TypeError", ":", "# _get_terminal_size() returned None.", "pass", "else", ":", "return", "rows", ",", "cols", "finally", ":", "os", ".", "close", "(", "fd", ")", "# Try `stty size`", "with", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "as", "devnull", ":", "try", ":", "process", "=", "Popen", "(", "[", 
"'stty'", ",", "'size'", "]", ",", "stderr", "=", "devnull", ",", "stdout", "=", "PIPE", ",", "close_fds", "=", "True", ")", "except", "(", "OSError", ",", "ValueError", ")", ":", "pass", "else", ":", "errcode", "=", "process", ".", "wait", "(", ")", "output", "=", "process", ".", "stdout", ".", "read", "(", ")", "try", ":", "rows", ",", "cols", "=", "output", ".", "split", "(", ")", "return", "int", "(", "rows", ")", ",", "int", "(", "cols", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "pass", "# Try environment variables.", "try", ":", "return", "tuple", "(", "int", "(", "os", ".", "getenv", "(", "var", ")", ")", "for", "var", "in", "(", "'LINES'", ",", "'COLUMNS'", ")", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "pass", "# Give up.", "return", "default_rows", ",", "default_cols" ]
28.337079
19.325843
def validate_expires_at(form, field): """Validate that date is in the future.""" if form.accept.data: if not field.data or datetime.utcnow().date() >= field.data: raise validators.StopValidation(_( "Please provide a future date." )) if not field.data or \ datetime.utcnow().date() + timedelta(days=365) < field.data: raise validators.StopValidation(_( "Please provide a date no more than 1 year into the future." ))
[ "def", "validate_expires_at", "(", "form", ",", "field", ")", ":", "if", "form", ".", "accept", ".", "data", ":", "if", "not", "field", ".", "data", "or", "datetime", ".", "utcnow", "(", ")", ".", "date", "(", ")", ">=", "field", ".", "data", ":", "raise", "validators", ".", "StopValidation", "(", "_", "(", "\"Please provide a future date.\"", ")", ")", "if", "not", "field", ".", "data", "or", "datetime", ".", "utcnow", "(", ")", ".", "date", "(", ")", "+", "timedelta", "(", "days", "=", "365", ")", "<", "field", ".", "data", ":", "raise", "validators", ".", "StopValidation", "(", "_", "(", "\"Please provide a date no more than 1 year into the future.\"", ")", ")" ]
43.666667
16.666667
def sample(self, size=1): """Generate samples of the random variable. Parameters ---------- size : int The number of samples to generate. Returns ------- :obj:`numpy.ndarray` of int or int The samples of the random variable. If `size == 1`, then the returned value will not be wrapped in an array. """ samples = scipy.stats.bernoulli.rvs(self.p, size=size) if size == 1: return samples[0] return samples
[ "def", "sample", "(", "self", ",", "size", "=", "1", ")", ":", "samples", "=", "scipy", ".", "stats", ".", "bernoulli", ".", "rvs", "(", "self", ".", "p", ",", "size", "=", "size", ")", "if", "size", "==", "1", ":", "return", "samples", "[", "0", "]", "return", "samples" ]
29.111111
18.888889
def single_feature_fit(self, feature): """Get the log2 bayes factor of the fit for each modality""" if np.isfinite(feature).sum() == 0: series = pd.Series(index=MODALITY_ORDER) else: logbf_one_param = pd.Series( {k: v.logsumexp_logliks(feature) for k, v in self.one_param_models.items()}) # Check if none of the previous features fit if (logbf_one_param <= self.logbf_thresh).all(): logbf_two_param = pd.Series( {k: v.logsumexp_logliks(feature) for k, v in self.two_param_models.items()}) series = pd.concat([logbf_one_param, logbf_two_param]) series[NULL_MODEL] = self.logbf_thresh else: series = logbf_one_param series.index.name = 'Modality' series.name = self.score_name return series
[ "def", "single_feature_fit", "(", "self", ",", "feature", ")", ":", "if", "np", ".", "isfinite", "(", "feature", ")", ".", "sum", "(", ")", "==", "0", ":", "series", "=", "pd", ".", "Series", "(", "index", "=", "MODALITY_ORDER", ")", "else", ":", "logbf_one_param", "=", "pd", ".", "Series", "(", "{", "k", ":", "v", ".", "logsumexp_logliks", "(", "feature", ")", "for", "k", ",", "v", "in", "self", ".", "one_param_models", ".", "items", "(", ")", "}", ")", "# Check if none of the previous features fit", "if", "(", "logbf_one_param", "<=", "self", ".", "logbf_thresh", ")", ".", "all", "(", ")", ":", "logbf_two_param", "=", "pd", ".", "Series", "(", "{", "k", ":", "v", ".", "logsumexp_logliks", "(", "feature", ")", "for", "k", ",", "v", "in", "self", ".", "two_param_models", ".", "items", "(", ")", "}", ")", "series", "=", "pd", ".", "concat", "(", "[", "logbf_one_param", ",", "logbf_two_param", "]", ")", "series", "[", "NULL_MODEL", "]", "=", "self", ".", "logbf_thresh", "else", ":", "series", "=", "logbf_one_param", "series", ".", "index", ".", "name", "=", "'Modality'", "series", ".", "name", "=", "self", ".", "score_name", "return", "series" ]
43.571429
13.285714
def ParseMessageRow(self, parser_mediator, query, row, **unused_kwargs): """Parses a message row. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row. """ query_hash = hash(query) event_data = KikIOSMessageEventData() event_data.body = self._GetRowValue(query_hash, row, 'ZBODY') event_data.displayname = self._GetRowValue(query_hash, row, 'ZDISPLAYNAME') event_data.message_status = self._GetRowValue(query_hash, row, 'ZSTATE') event_data.message_type = self._GetRowValue(query_hash, row, 'ZTYPE') event_data.offset = self._GetRowValue(query_hash, row, 'id') event_data.query = query event_data.username = self._GetRowValue(query_hash, row, 'ZUSERNAME') timestamp = self._GetRowValue(query_hash, row, 'ZRECEIVEDTIMESTAMP') # Convert the floating point value to an integer. timestamp = int(timestamp) date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_CREATION) parser_mediator.ProduceEventWithEventData(event, event_data)
[ "def", "ParseMessageRow", "(", "self", ",", "parser_mediator", ",", "query", ",", "row", ",", "*", "*", "unused_kwargs", ")", ":", "query_hash", "=", "hash", "(", "query", ")", "event_data", "=", "KikIOSMessageEventData", "(", ")", "event_data", ".", "body", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'ZBODY'", ")", "event_data", ".", "displayname", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'ZDISPLAYNAME'", ")", "event_data", ".", "message_status", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'ZSTATE'", ")", "event_data", ".", "message_type", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'ZTYPE'", ")", "event_data", ".", "offset", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'id'", ")", "event_data", ".", "query", "=", "query", "event_data", ".", "username", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'ZUSERNAME'", ")", "timestamp", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'ZRECEIVEDTIMESTAMP'", ")", "# Convert the floating point value to an integer.", "timestamp", "=", "int", "(", "timestamp", ")", "date_time", "=", "dfdatetime_cocoa_time", ".", "CocoaTime", "(", "timestamp", "=", "timestamp", ")", "event", "=", "time_events", ".", "DateTimeValuesEvent", "(", "date_time", ",", "definitions", ".", "TIME_DESCRIPTION_CREATION", ")", "parser_mediator", ".", "ProduceEventWithEventData", "(", "event", ",", "event_data", ")" ]
45.962963
22.148148
def do_windowed(self, line): """ Un-fullscreen the current window """ self.bot.canvas.sink.trigger_fullscreen_action(False) print(self.response_prompt, file=self.stdout)
[ "def", "do_windowed", "(", "self", ",", "line", ")", ":", "self", ".", "bot", ".", "canvas", ".", "sink", ".", "trigger_fullscreen_action", "(", "False", ")", "print", "(", "self", ".", "response_prompt", ",", "file", "=", "self", ".", "stdout", ")" ]
34
7.666667
def amount_object_to_dict(self, amount): """Return the dictionary representation of an Amount object. Amount object must have amount and currency properties and as_tuple method which will return (currency, amount) and as_quantized method to quantize amount property. :param amount: instance of Amount object :return: dict with amount and currency keys. """ currency, amount = ( amount.as_quantized(digits=2).as_tuple() if not isinstance(amount, dict) else (amount["currency"], amount["amount"]) ) if currency not in self.currencies: raise ValueError(self.err_unknown_currency.format(currency=currency)) return { "amount": str(amount), "currency": str(currency), }
[ "def", "amount_object_to_dict", "(", "self", ",", "amount", ")", ":", "currency", ",", "amount", "=", "(", "amount", ".", "as_quantized", "(", "digits", "=", "2", ")", ".", "as_tuple", "(", ")", "if", "not", "isinstance", "(", "amount", ",", "dict", ")", "else", "(", "amount", "[", "\"currency\"", "]", ",", "amount", "[", "\"amount\"", "]", ")", ")", "if", "currency", "not", "in", "self", ".", "currencies", ":", "raise", "ValueError", "(", "self", ".", "err_unknown_currency", ".", "format", "(", "currency", "=", "currency", ")", ")", "return", "{", "\"amount\"", ":", "str", "(", "amount", ")", ",", "\"currency\"", ":", "str", "(", "currency", ")", ",", "}" ]
38.380952
19.952381
def _get_default_mapping(self, obj): """Return default mapping if there are no special needs.""" mapping = {v: k for k, v in obj.TYPE_MAPPING.items()} mapping.update({ fields.Email: text_type, fields.Dict: dict, fields.Url: text_type, fields.List: list, fields.LocalDateTime: datetime.datetime, fields.Nested: '_from_nested_schema', }) return mapping
[ "def", "_get_default_mapping", "(", "self", ",", "obj", ")", ":", "mapping", "=", "{", "v", ":", "k", "for", "k", ",", "v", "in", "obj", ".", "TYPE_MAPPING", ".", "items", "(", ")", "}", "mapping", ".", "update", "(", "{", "fields", ".", "Email", ":", "text_type", ",", "fields", ".", "Dict", ":", "dict", ",", "fields", ".", "Url", ":", "text_type", ",", "fields", ".", "List", ":", "list", ",", "fields", ".", "LocalDateTime", ":", "datetime", ".", "datetime", ",", "fields", ".", "Nested", ":", "'_from_nested_schema'", ",", "}", ")", "return", "mapping" ]
37.583333
11.666667
def revision(self): """Revision number""" rev = self._p4dict.get('haveRev', -1) if rev == 'none': rev = 0 return int(rev)
[ "def", "revision", "(", "self", ")", ":", "rev", "=", "self", ".", "_p4dict", ".", "get", "(", "'haveRev'", ",", "-", "1", ")", "if", "rev", "==", "'none'", ":", "rev", "=", "0", "return", "int", "(", "rev", ")" ]
26.666667
13.166667
def run_sex_check(in_prefix, in_type, out_prefix, base_dir, options): """Runs step6 (sexcheck). :param in_prefix: the prefix of the input files. :param in_type: the type of the input files. :param out_prefix: the output prefix. :param base_dir: the output directory. :param options: the options needed. :type in_prefix: str :type in_type: str :type out_prefix: str :type base_dir: str :type options: list :returns: a tuple containing the prefix of the output files (the input prefix for the next script) and the type of the output files (``bfile``). This function calls the :py:mod:`pyGenClean.SexCheck.sex_check` module. The required file type for this module is ``bfile``, hence the need to use the :py:func:`check_input_files` to check if the file input file type is the good one, or to create it if needed. .. note:: The :py:mod:`pyGenClean.SexCheck.sex_check` module doesn't return usable output files. Hence, this function returns the input file prefix and its type. """ # Creating the output directory os.mkdir(out_prefix) # We know we need a bfile required_type = "bfile" check_input_files(in_prefix, in_type, required_type) # We need to inject the name of the input file and the name of the output # prefix script_prefix = os.path.join(out_prefix, "sexcheck") options += ["--{}".format(required_type), in_prefix, "--out", script_prefix] # We run the script try: sex_check.main(options) except sex_check.ProgramError as e: msg = "sex_check {}".format(e) raise ProgramError(msg) # Reading the hetero file on X hetero = {} if os.path.isfile(script_prefix + ".chr23_recodeA.raw.hetero"): with open(script_prefix + ".chr23_recodeA.raw.hetero", "r") as i_file: header = { name: i for i, name in enumerate(createRowFromPlinkSpacedOutput(i_file.readline())) } for required_col in ("PED", "ID", "HETERO"): if required_col not in header: msg = "{}: no column named {}".format( script_prefix + ".chr23_recodeA.raw.hetero", required_col, ) raise ProgramError(msg) # Reading the data for line in i_file: 
row = line.rstrip("\r\n").split("\t") famid = row[header["PED"]] indid = row[header["ID"]] # Formatting the hetero value het = None try: het = "{:.4f}".format(float(row[header["HETERO"]])) except: het = "N/A" hetero[(famid, indid)] = het # Reading the number of no call on Y nb_no_call = {} if os.path.isfile(script_prefix + ".chr24_recodeA.raw.noCall"): with open(script_prefix + ".chr24_recodeA.raw.noCall", "r") as i_file: header = { name: i for i, name in enumerate(createRowFromPlinkSpacedOutput(i_file.readline())) } for required_col in ("PED", "ID", "nbGeno", "nbNoCall"): if required_col not in header: msg = "{}: no column named {}".format( script_prefix + ".chr24_recodeA.raw.noCall", required_col, ) raise ProgramError(msg) # Reading the data for line in i_file: row = line.rstrip("\r\n").split("\t") famid = row[header["PED"]] indid = row[header["ID"]] # Getting the statistics nb_geno = row[header["nbGeno"]] nb_nocall = row[header["nbNoCall"]] percent = None try: percent = "{:.4f}".format( float(nb_nocall) / float(nb_geno), ) except: percent = "N/A" nb_no_call[(famid, indid)] = percent # Reading the problem file to gather statistics. 
Note that dataset without # problem will only have the header line (and no data) nb_problems = 0 table = [] nb_no_genetic = 0 nb_discordant = 0 with open(script_prefix + ".list_problem_sex", "r") as i_file: # Reading the header header = i_file.readline().rstrip("\r\n").split("\t") table.append(header) header = {name: i for i, name in enumerate(header)} for required_col in ("FID", "IID", "SNPSEX"): if required_col not in header: msg = "{}: no column named {}".format( script_prefix + ".list_problem_sex", required_col, ) raise ProgramError(msg) # Adding the missing column name table[-1].append("HET") table[-1].append(r"\%NOCALL") # Reading the rest of the data for line in i_file: nb_problems += 1 # Creating the row row = line.rstrip("\r\n").split("\t") # Counting if row[header["SNPSEX"]] == "0": nb_no_genetic += 1 else: nb_discordant += 1 table.append([ latex_template.sanitize_tex(row[header[name]]) for name in ("FID", "IID", "PEDSEX", "SNPSEX", "STATUS", "F") ]) table[-1].append( hetero.get((row[header["FID"]], row[header["IID"]]), "N/A"), ) table[-1].append( nb_no_call.get((row[header["FID"]], row[header["IID"]]), "N/A") ) # Getting the value for the maleF option male_f = sex_check.parser.get_default("maleF") if "--maleF" in options: male_f = options[options.index("--maleF") + 1] # Getting the value for the femaleF option female_f = sex_check.parser.get_default("femaleF") if "--femaleF" in options: female_f = options[options.index("--femaleF") + 1] # We write a LaTeX summary latex_file = os.path.join(script_prefix + ".summary.tex") graphics_paths = set() try: with open(latex_file, "w") as o_file: print >>o_file, latex_template.subsection(sex_check.pretty_name) text = ( "Using $F$ thresholds of {male_f} and {female_f} for males " "and females respectively, {nb_problems:,d} sample{plural} " "had gender problem according to Plink.".format( male_f=male_f, female_f=female_f, nb_problems=nb_problems, plural="s" if nb_problems > 1 else "", ) ) print >>o_file, 
latex_template.wrap_lines(text) # The float template float_template = latex_template.jinja2_env.get_template( "float_template.tex", ) if nb_problems > 0: # The label and text for the table table_label = re.sub( r"[/\\]", "_", script_prefix, ) + "_problems" text = ( r"Table~\ref{" + table_label + "} summarizes the gender " "problems encountered during the analysis." ) print >>o_file, latex_template.wrap_lines(text) # Getting the template longtable_template = latex_template.jinja2_env.get_template( "longtable_template.tex", ) # Rendering print >>o_file, longtable_template.render( table_caption="Summarization of the gender problems " "encountered during Plink's analysis. " "HET is the heterozygosity rate on the X " r"chromosome. \%NOCALL is the percentage of " "no calls on the Y chromosome.", table_label=table_label, nb_col=len(table[1]), col_alignments="llrrlrrrr", text_size="scriptsize", header_data=zip(table[0], [1 for i in table[0]]), tabular_data=sorted(table[1:], key=lambda item: item[1]), ) # Getting the templates graphic_template = latex_template.jinja2_env.get_template( "graphics_template.tex", ) # If there is a figure, we add it here if os.path.isfile(script_prefix + ".png"): # Adding the figure figure_label = re.sub(r"[/\\]", "_", script_prefix) text = ( r"Figure~\ref{" + figure_label + r"} shows the $\bar{y}$ " r"intensities versus the $\bar{x}$ intensities for each " "samples. Problematic samples are shown using triangles." ) print >>o_file, latex_template.wrap_lines(text) # Getting the paths graphics_path, path = os.path.split(script_prefix + ".png") graphics_path = os.path.relpath(graphics_path, base_dir) print >>o_file, float_template.render( float_type="figure", float_placement="H", float_caption="Gender check using Plink. Mean $x$ and $y$ " "intensities are shown for each sample. " "Males are shown in blue, and females in " "red. Triangles show problematic samples " "(green for males, mauve for females). 
" "Unknown gender are shown in gray.", float_label=figure_label, float_content=graphic_template.render( width=r"0.8\textwidth", path=latex_template.sanitize_fig_name(path), ), ) # Adding the path where the graphic is graphics_paths.add(graphics_path) # If there is a 'sexcheck.LRR_BAF' directory, then there are LRR # and BAF plots. if os.path.isdir(script_prefix + ".LRR_BAF"): figures = glob( os.path.join(script_prefix + ".LRR_BAF", "*.png"), ) if len(figures) > 0: # Getting the sample IDs sample_ids = [ re.search( "^baf_lrr_(\S+)_lrr_baf.png$", os.path.basename(figure), ) for figure in figures ] sample_ids = [ "unknown sample" if not sample else sample.group(1) for sample in sample_ids ] # Sorting according to sample IDs sorted_indexes = sorted(range(len(figures)), key=figures.__getitem__) figures = [figures[i] for i in sorted_indexes] sample_ids = [sample_ids[i] for i in sorted_indexes] # Getting the labels labels = [ re.sub( r"[/\\]", "_", script_prefix + "_baf_lrr_" + os.path.splitext(sample)[0], ) for sample in sample_ids ] fig_1 = labels[0] fig_2 = "" if len(figures) > 1: fig_2 = labels[-1] text = ( "Figure" + ("s" if len(figures) > 1 else "") + r"~\ref{" + fig_1 + "} " + (r"to \ref{" + fig_2 + "} " if fig_2 else "") + "show" + (" " if len(figures) > 1 else "s ") + "the " "log R ratio and the B allele frequency versus the " "position on chromosome X and Y for the problematic " "sample{}.".format("s" if len(figures) > 1 else "") ) print >>o_file, latex_template.wrap_lines(text) zipped = zip(figures, sample_ids, labels) for figure, sample_id, label in zipped: sample_id = latex_template.sanitize_tex(sample_id) # Getting the paths graphics_path, path = os.path.split(figure) graphics_path = os.path.relpath(graphics_path, base_dir) caption = ( "Plots showing the log R ratio and the B allele " "frequency for chromosome X and Y (on the left " "and right, respectively) for sample " "{}.".format(sample_id) ) print >>o_file, float_template.render( float_type="figure", 
float_placement="H", float_caption=caption, float_label=label, float_content=graphic_template.render( width=r"\textwidth", path=latex_template.sanitize_fig_name(path), ), ) # Adding the path where the graphic is graphics_paths.add(graphics_path) except IOError: msg = "{}: cannot write LaTeX summary".format(latex_file) raise ProgramError(msg) # Writing the summary results with open(os.path.join(base_dir, "results_summary.txt"), "a") as o_file: print >>o_file, "# {}".format(script_prefix) print >>o_file, "Number of samples with gender problem" print >>o_file, " - no genetic gender\t{:,d}".format(nb_no_genetic) print >>o_file, " - discordant gender\t{:,d}".format(nb_discordant) print >>o_file, "---" # We know this step does not produce a new data set, so we return the # original one return _StepResult( next_file=in_prefix, next_file_type=required_type, latex_summary=latex_file, description=sex_check.desc, long_description=sex_check.long_desc, graph_path=graphics_paths, )
[ "def", "run_sex_check", "(", "in_prefix", ",", "in_type", ",", "out_prefix", ",", "base_dir", ",", "options", ")", ":", "# Creating the output directory", "os", ".", "mkdir", "(", "out_prefix", ")", "# We know we need a bfile", "required_type", "=", "\"bfile\"", "check_input_files", "(", "in_prefix", ",", "in_type", ",", "required_type", ")", "# We need to inject the name of the input file and the name of the output", "# prefix", "script_prefix", "=", "os", ".", "path", ".", "join", "(", "out_prefix", ",", "\"sexcheck\"", ")", "options", "+=", "[", "\"--{}\"", ".", "format", "(", "required_type", ")", ",", "in_prefix", ",", "\"--out\"", ",", "script_prefix", "]", "# We run the script", "try", ":", "sex_check", ".", "main", "(", "options", ")", "except", "sex_check", ".", "ProgramError", "as", "e", ":", "msg", "=", "\"sex_check {}\"", ".", "format", "(", "e", ")", "raise", "ProgramError", "(", "msg", ")", "# Reading the hetero file on X", "hetero", "=", "{", "}", "if", "os", ".", "path", ".", "isfile", "(", "script_prefix", "+", "\".chr23_recodeA.raw.hetero\"", ")", ":", "with", "open", "(", "script_prefix", "+", "\".chr23_recodeA.raw.hetero\"", ",", "\"r\"", ")", "as", "i_file", ":", "header", "=", "{", "name", ":", "i", "for", "i", ",", "name", "in", "enumerate", "(", "createRowFromPlinkSpacedOutput", "(", "i_file", ".", "readline", "(", ")", ")", ")", "}", "for", "required_col", "in", "(", "\"PED\"", ",", "\"ID\"", ",", "\"HETERO\"", ")", ":", "if", "required_col", "not", "in", "header", ":", "msg", "=", "\"{}: no column named {}\"", ".", "format", "(", "script_prefix", "+", "\".chr23_recodeA.raw.hetero\"", ",", "required_col", ",", ")", "raise", "ProgramError", "(", "msg", ")", "# Reading the data", "for", "line", "in", "i_file", ":", "row", "=", "line", ".", "rstrip", "(", "\"\\r\\n\"", ")", ".", "split", "(", "\"\\t\"", ")", "famid", "=", "row", "[", "header", "[", "\"PED\"", "]", "]", "indid", "=", "row", "[", "header", "[", "\"ID\"", "]", "]", "# 
Formatting the hetero value", "het", "=", "None", "try", ":", "het", "=", "\"{:.4f}\"", ".", "format", "(", "float", "(", "row", "[", "header", "[", "\"HETERO\"", "]", "]", ")", ")", "except", ":", "het", "=", "\"N/A\"", "hetero", "[", "(", "famid", ",", "indid", ")", "]", "=", "het", "# Reading the number of no call on Y", "nb_no_call", "=", "{", "}", "if", "os", ".", "path", ".", "isfile", "(", "script_prefix", "+", "\".chr24_recodeA.raw.noCall\"", ")", ":", "with", "open", "(", "script_prefix", "+", "\".chr24_recodeA.raw.noCall\"", ",", "\"r\"", ")", "as", "i_file", ":", "header", "=", "{", "name", ":", "i", "for", "i", ",", "name", "in", "enumerate", "(", "createRowFromPlinkSpacedOutput", "(", "i_file", ".", "readline", "(", ")", ")", ")", "}", "for", "required_col", "in", "(", "\"PED\"", ",", "\"ID\"", ",", "\"nbGeno\"", ",", "\"nbNoCall\"", ")", ":", "if", "required_col", "not", "in", "header", ":", "msg", "=", "\"{}: no column named {}\"", ".", "format", "(", "script_prefix", "+", "\".chr24_recodeA.raw.noCall\"", ",", "required_col", ",", ")", "raise", "ProgramError", "(", "msg", ")", "# Reading the data", "for", "line", "in", "i_file", ":", "row", "=", "line", ".", "rstrip", "(", "\"\\r\\n\"", ")", ".", "split", "(", "\"\\t\"", ")", "famid", "=", "row", "[", "header", "[", "\"PED\"", "]", "]", "indid", "=", "row", "[", "header", "[", "\"ID\"", "]", "]", "# Getting the statistics", "nb_geno", "=", "row", "[", "header", "[", "\"nbGeno\"", "]", "]", "nb_nocall", "=", "row", "[", "header", "[", "\"nbNoCall\"", "]", "]", "percent", "=", "None", "try", ":", "percent", "=", "\"{:.4f}\"", ".", "format", "(", "float", "(", "nb_nocall", ")", "/", "float", "(", "nb_geno", ")", ",", ")", "except", ":", "percent", "=", "\"N/A\"", "nb_no_call", "[", "(", "famid", ",", "indid", ")", "]", "=", "percent", "# Reading the problem file to gather statistics. 
Note that dataset without", "# problem will only have the header line (and no data)", "nb_problems", "=", "0", "table", "=", "[", "]", "nb_no_genetic", "=", "0", "nb_discordant", "=", "0", "with", "open", "(", "script_prefix", "+", "\".list_problem_sex\"", ",", "\"r\"", ")", "as", "i_file", ":", "# Reading the header", "header", "=", "i_file", ".", "readline", "(", ")", ".", "rstrip", "(", "\"\\r\\n\"", ")", ".", "split", "(", "\"\\t\"", ")", "table", ".", "append", "(", "header", ")", "header", "=", "{", "name", ":", "i", "for", "i", ",", "name", "in", "enumerate", "(", "header", ")", "}", "for", "required_col", "in", "(", "\"FID\"", ",", "\"IID\"", ",", "\"SNPSEX\"", ")", ":", "if", "required_col", "not", "in", "header", ":", "msg", "=", "\"{}: no column named {}\"", ".", "format", "(", "script_prefix", "+", "\".list_problem_sex\"", ",", "required_col", ",", ")", "raise", "ProgramError", "(", "msg", ")", "# Adding the missing column name", "table", "[", "-", "1", "]", ".", "append", "(", "\"HET\"", ")", "table", "[", "-", "1", "]", ".", "append", "(", "r\"\\%NOCALL\"", ")", "# Reading the rest of the data", "for", "line", "in", "i_file", ":", "nb_problems", "+=", "1", "# Creating the row", "row", "=", "line", ".", "rstrip", "(", "\"\\r\\n\"", ")", ".", "split", "(", "\"\\t\"", ")", "# Counting", "if", "row", "[", "header", "[", "\"SNPSEX\"", "]", "]", "==", "\"0\"", ":", "nb_no_genetic", "+=", "1", "else", ":", "nb_discordant", "+=", "1", "table", ".", "append", "(", "[", "latex_template", ".", "sanitize_tex", "(", "row", "[", "header", "[", "name", "]", "]", ")", "for", "name", "in", "(", "\"FID\"", ",", "\"IID\"", ",", "\"PEDSEX\"", ",", "\"SNPSEX\"", ",", "\"STATUS\"", ",", "\"F\"", ")", "]", ")", "table", "[", "-", "1", "]", ".", "append", "(", "hetero", ".", "get", "(", "(", "row", "[", "header", "[", "\"FID\"", "]", "]", ",", "row", "[", "header", "[", "\"IID\"", "]", "]", ")", ",", "\"N/A\"", ")", ",", ")", "table", "[", "-", "1", "]", ".", "append", "(", 
"nb_no_call", ".", "get", "(", "(", "row", "[", "header", "[", "\"FID\"", "]", "]", ",", "row", "[", "header", "[", "\"IID\"", "]", "]", ")", ",", "\"N/A\"", ")", ")", "# Getting the value for the maleF option", "male_f", "=", "sex_check", ".", "parser", ".", "get_default", "(", "\"maleF\"", ")", "if", "\"--maleF\"", "in", "options", ":", "male_f", "=", "options", "[", "options", ".", "index", "(", "\"--maleF\"", ")", "+", "1", "]", "# Getting the value for the femaleF option", "female_f", "=", "sex_check", ".", "parser", ".", "get_default", "(", "\"femaleF\"", ")", "if", "\"--femaleF\"", "in", "options", ":", "female_f", "=", "options", "[", "options", ".", "index", "(", "\"--femaleF\"", ")", "+", "1", "]", "# We write a LaTeX summary", "latex_file", "=", "os", ".", "path", ".", "join", "(", "script_prefix", "+", "\".summary.tex\"", ")", "graphics_paths", "=", "set", "(", ")", "try", ":", "with", "open", "(", "latex_file", ",", "\"w\"", ")", "as", "o_file", ":", "print", ">>", "o_file", ",", "latex_template", ".", "subsection", "(", "sex_check", ".", "pretty_name", ")", "text", "=", "(", "\"Using $F$ thresholds of {male_f} and {female_f} for males \"", "\"and females respectively, {nb_problems:,d} sample{plural} \"", "\"had gender problem according to Plink.\"", ".", "format", "(", "male_f", "=", "male_f", ",", "female_f", "=", "female_f", ",", "nb_problems", "=", "nb_problems", ",", "plural", "=", "\"s\"", "if", "nb_problems", ">", "1", "else", "\"\"", ",", ")", ")", "print", ">>", "o_file", ",", "latex_template", ".", "wrap_lines", "(", "text", ")", "# The float template", "float_template", "=", "latex_template", ".", "jinja2_env", ".", "get_template", "(", "\"float_template.tex\"", ",", ")", "if", "nb_problems", ">", "0", ":", "# The label and text for the table", "table_label", "=", "re", ".", "sub", "(", "r\"[/\\\\]\"", ",", "\"_\"", ",", "script_prefix", ",", ")", "+", "\"_problems\"", "text", "=", "(", "r\"Table~\\ref{\"", "+", "table_label", "+", "\"} 
summarizes the gender \"", "\"problems encountered during the analysis.\"", ")", "print", ">>", "o_file", ",", "latex_template", ".", "wrap_lines", "(", "text", ")", "# Getting the template", "longtable_template", "=", "latex_template", ".", "jinja2_env", ".", "get_template", "(", "\"longtable_template.tex\"", ",", ")", "# Rendering", "print", ">>", "o_file", ",", "longtable_template", ".", "render", "(", "table_caption", "=", "\"Summarization of the gender problems \"", "\"encountered during Plink's analysis. \"", "\"HET is the heterozygosity rate on the X \"", "r\"chromosome. \\%NOCALL is the percentage of \"", "\"no calls on the Y chromosome.\"", ",", "table_label", "=", "table_label", ",", "nb_col", "=", "len", "(", "table", "[", "1", "]", ")", ",", "col_alignments", "=", "\"llrrlrrrr\"", ",", "text_size", "=", "\"scriptsize\"", ",", "header_data", "=", "zip", "(", "table", "[", "0", "]", ",", "[", "1", "for", "i", "in", "table", "[", "0", "]", "]", ")", ",", "tabular_data", "=", "sorted", "(", "table", "[", "1", ":", "]", ",", "key", "=", "lambda", "item", ":", "item", "[", "1", "]", ")", ",", ")", "# Getting the templates", "graphic_template", "=", "latex_template", ".", "jinja2_env", ".", "get_template", "(", "\"graphics_template.tex\"", ",", ")", "# If there is a figure, we add it here", "if", "os", ".", "path", ".", "isfile", "(", "script_prefix", "+", "\".png\"", ")", ":", "# Adding the figure", "figure_label", "=", "re", ".", "sub", "(", "r\"[/\\\\]\"", ",", "\"_\"", ",", "script_prefix", ")", "text", "=", "(", "r\"Figure~\\ref{\"", "+", "figure_label", "+", "r\"} shows the $\\bar{y}$ \"", "r\"intensities versus the $\\bar{x}$ intensities for each \"", "\"samples. 
Problematic samples are shown using triangles.\"", ")", "print", ">>", "o_file", ",", "latex_template", ".", "wrap_lines", "(", "text", ")", "# Getting the paths", "graphics_path", ",", "path", "=", "os", ".", "path", ".", "split", "(", "script_prefix", "+", "\".png\"", ")", "graphics_path", "=", "os", ".", "path", ".", "relpath", "(", "graphics_path", ",", "base_dir", ")", "print", ">>", "o_file", ",", "float_template", ".", "render", "(", "float_type", "=", "\"figure\"", ",", "float_placement", "=", "\"H\"", ",", "float_caption", "=", "\"Gender check using Plink. Mean $x$ and $y$ \"", "\"intensities are shown for each sample. \"", "\"Males are shown in blue, and females in \"", "\"red. Triangles show problematic samples \"", "\"(green for males, mauve for females). \"", "\"Unknown gender are shown in gray.\"", ",", "float_label", "=", "figure_label", ",", "float_content", "=", "graphic_template", ".", "render", "(", "width", "=", "r\"0.8\\textwidth\"", ",", "path", "=", "latex_template", ".", "sanitize_fig_name", "(", "path", ")", ",", ")", ",", ")", "# Adding the path where the graphic is", "graphics_paths", ".", "add", "(", "graphics_path", ")", "# If there is a 'sexcheck.LRR_BAF' directory, then there are LRR", "# and BAF plots.", "if", "os", ".", "path", ".", "isdir", "(", "script_prefix", "+", "\".LRR_BAF\"", ")", ":", "figures", "=", "glob", "(", "os", ".", "path", ".", "join", "(", "script_prefix", "+", "\".LRR_BAF\"", ",", "\"*.png\"", ")", ",", ")", "if", "len", "(", "figures", ")", ">", "0", ":", "# Getting the sample IDs", "sample_ids", "=", "[", "re", ".", "search", "(", "\"^baf_lrr_(\\S+)_lrr_baf.png$\"", ",", "os", ".", "path", ".", "basename", "(", "figure", ")", ",", ")", "for", "figure", "in", "figures", "]", "sample_ids", "=", "[", "\"unknown sample\"", "if", "not", "sample", "else", "sample", ".", "group", "(", "1", ")", "for", "sample", "in", "sample_ids", "]", "# Sorting according to sample IDs", "sorted_indexes", "=", "sorted", "(", "range", 
"(", "len", "(", "figures", ")", ")", ",", "key", "=", "figures", ".", "__getitem__", ")", "figures", "=", "[", "figures", "[", "i", "]", "for", "i", "in", "sorted_indexes", "]", "sample_ids", "=", "[", "sample_ids", "[", "i", "]", "for", "i", "in", "sorted_indexes", "]", "# Getting the labels", "labels", "=", "[", "re", ".", "sub", "(", "r\"[/\\\\]\"", ",", "\"_\"", ",", "script_prefix", "+", "\"_baf_lrr_\"", "+", "os", ".", "path", ".", "splitext", "(", "sample", ")", "[", "0", "]", ",", ")", "for", "sample", "in", "sample_ids", "]", "fig_1", "=", "labels", "[", "0", "]", "fig_2", "=", "\"\"", "if", "len", "(", "figures", ")", ">", "1", ":", "fig_2", "=", "labels", "[", "-", "1", "]", "text", "=", "(", "\"Figure\"", "+", "(", "\"s\"", "if", "len", "(", "figures", ")", ">", "1", "else", "\"\"", ")", "+", "r\"~\\ref{\"", "+", "fig_1", "+", "\"} \"", "+", "(", "r\"to \\ref{\"", "+", "fig_2", "+", "\"} \"", "if", "fig_2", "else", "\"\"", ")", "+", "\"show\"", "+", "(", "\" \"", "if", "len", "(", "figures", ")", ">", "1", "else", "\"s \"", ")", "+", "\"the \"", "\"log R ratio and the B allele frequency versus the \"", "\"position on chromosome X and Y for the problematic \"", "\"sample{}.\"", ".", "format", "(", "\"s\"", "if", "len", "(", "figures", ")", ">", "1", "else", "\"\"", ")", ")", "print", ">>", "o_file", ",", "latex_template", ".", "wrap_lines", "(", "text", ")", "zipped", "=", "zip", "(", "figures", ",", "sample_ids", ",", "labels", ")", "for", "figure", ",", "sample_id", ",", "label", "in", "zipped", ":", "sample_id", "=", "latex_template", ".", "sanitize_tex", "(", "sample_id", ")", "# Getting the paths", "graphics_path", ",", "path", "=", "os", ".", "path", ".", "split", "(", "figure", ")", "graphics_path", "=", "os", ".", "path", ".", "relpath", "(", "graphics_path", ",", "base_dir", ")", "caption", "=", "(", "\"Plots showing the log R ratio and the B allele \"", "\"frequency for chromosome X and Y (on the left \"", "\"and right, respectively) for 
sample \"", "\"{}.\"", ".", "format", "(", "sample_id", ")", ")", "print", ">>", "o_file", ",", "float_template", ".", "render", "(", "float_type", "=", "\"figure\"", ",", "float_placement", "=", "\"H\"", ",", "float_caption", "=", "caption", ",", "float_label", "=", "label", ",", "float_content", "=", "graphic_template", ".", "render", "(", "width", "=", "r\"\\textwidth\"", ",", "path", "=", "latex_template", ".", "sanitize_fig_name", "(", "path", ")", ",", ")", ",", ")", "# Adding the path where the graphic is", "graphics_paths", ".", "add", "(", "graphics_path", ")", "except", "IOError", ":", "msg", "=", "\"{}: cannot write LaTeX summary\"", ".", "format", "(", "latex_file", ")", "raise", "ProgramError", "(", "msg", ")", "# Writing the summary results", "with", "open", "(", "os", ".", "path", ".", "join", "(", "base_dir", ",", "\"results_summary.txt\"", ")", ",", "\"a\"", ")", "as", "o_file", ":", "print", ">>", "o_file", ",", "\"# {}\"", ".", "format", "(", "script_prefix", ")", "print", ">>", "o_file", ",", "\"Number of samples with gender problem\"", "print", ">>", "o_file", ",", "\" - no genetic gender\\t{:,d}\"", ".", "format", "(", "nb_no_genetic", ")", "print", ">>", "o_file", ",", "\" - discordant gender\\t{:,d}\"", ".", "format", "(", "nb_discordant", ")", "print", ">>", "o_file", ",", "\"---\"", "# We know this step does not produce a new data set, so we return the", "# original one", "return", "_StepResult", "(", "next_file", "=", "in_prefix", ",", "next_file_type", "=", "required_type", ",", "latex_summary", "=", "latex_file", ",", "description", "=", "sex_check", ".", "desc", ",", "long_description", "=", "sex_check", ".", "long_desc", ",", "graph_path", "=", "graphics_paths", ",", ")" ]
39.880435
19.956522
def interpolate_cubic(self, lons, lats, data): """ Interpolate using cubic spline approximation Returns the same as interpolate(lons,lats,data,order=3) """ return self.interpolate(lons, lats, data, order=3)
[ "def", "interpolate_cubic", "(", "self", ",", "lons", ",", "lats", ",", "data", ")", ":", "return", "self", ".", "interpolate", "(", "lons", ",", "lats", ",", "data", ",", "order", "=", "3", ")" ]
40.166667
9.833333
def redraw_label(self): """ Re-draws the text by calculating its position. Currently, the text will always be centered on the position of the layer. """ # Convenience variables x,y,_,_ = self.getPos() sx,sy = self.getSize() if self.font_name is not None: self._label.font_name = self.font_name if self.font_size is not None: self._label.font_size = self.font_size if self.font_color is not None: self._label.color = self.font_color self._label.x = x+sx/2. self._label.y = y+sy/2. self._label.width = sx # Height is not set, would look weird otherwise #self._label.height = sx self._label._update()
[ "def", "redraw_label", "(", "self", ")", ":", "# Convenience variables", "x", ",", "y", ",", "_", ",", "_", "=", "self", ".", "getPos", "(", ")", "sx", ",", "sy", "=", "self", ".", "getSize", "(", ")", "if", "self", ".", "font_name", "is", "not", "None", ":", "self", ".", "_label", ".", "font_name", "=", "self", ".", "font_name", "if", "self", ".", "font_size", "is", "not", "None", ":", "self", ".", "_label", ".", "font_size", "=", "self", ".", "font_size", "if", "self", ".", "font_color", "is", "not", "None", ":", "self", ".", "_label", ".", "color", "=", "self", ".", "font_color", "self", ".", "_label", ".", "x", "=", "x", "+", "sx", "/", "2.", "self", ".", "_label", ".", "y", "=", "y", "+", "sy", "/", "2.", "self", ".", "_label", ".", "width", "=", "sx", "# Height is not set, would look weird otherwise", "#self._label.height = sx", "self", ".", "_label", ".", "_update", "(", ")" ]
33.304348
12.608696
def Open(self): """Opens the USB device for this setting, and claims the interface.""" # Make sure we close any previous handle open to this usb device. port_path = tuple(self.port_path) with self._HANDLE_CACHE_LOCK: old_handle = self._HANDLE_CACHE.get(port_path) if old_handle is not None: old_handle.Close() self._read_endpoint = None self._write_endpoint = None for endpoint in self._setting.iterEndpoints(): address = endpoint.getAddress() if address & libusb1.USB_ENDPOINT_DIR_MASK: self._read_endpoint = address self._max_read_packet_len = endpoint.getMaxPacketSize() else: self._write_endpoint = address assert self._read_endpoint is not None assert self._write_endpoint is not None handle = self._device.open() iface_number = self._setting.getNumber() try: if (platform.system() != 'Windows' and handle.kernelDriverActive(iface_number)): handle.detachKernelDriver(iface_number) except libusb1.USBError as e: if e.value == libusb1.LIBUSB_ERROR_NOT_FOUND: _LOG.warning('Kernel driver not found for interface: %s.', iface_number) else: raise handle.claimInterface(iface_number) self._handle = handle self._interface_number = iface_number with self._HANDLE_CACHE_LOCK: self._HANDLE_CACHE[port_path] = self # When this object is deleted, make sure it's closed. weakref.ref(self, self.Close)
[ "def", "Open", "(", "self", ")", ":", "# Make sure we close any previous handle open to this usb device.", "port_path", "=", "tuple", "(", "self", ".", "port_path", ")", "with", "self", ".", "_HANDLE_CACHE_LOCK", ":", "old_handle", "=", "self", ".", "_HANDLE_CACHE", ".", "get", "(", "port_path", ")", "if", "old_handle", "is", "not", "None", ":", "old_handle", ".", "Close", "(", ")", "self", ".", "_read_endpoint", "=", "None", "self", ".", "_write_endpoint", "=", "None", "for", "endpoint", "in", "self", ".", "_setting", ".", "iterEndpoints", "(", ")", ":", "address", "=", "endpoint", ".", "getAddress", "(", ")", "if", "address", "&", "libusb1", ".", "USB_ENDPOINT_DIR_MASK", ":", "self", ".", "_read_endpoint", "=", "address", "self", ".", "_max_read_packet_len", "=", "endpoint", ".", "getMaxPacketSize", "(", ")", "else", ":", "self", ".", "_write_endpoint", "=", "address", "assert", "self", ".", "_read_endpoint", "is", "not", "None", "assert", "self", ".", "_write_endpoint", "is", "not", "None", "handle", "=", "self", ".", "_device", ".", "open", "(", ")", "iface_number", "=", "self", ".", "_setting", ".", "getNumber", "(", ")", "try", ":", "if", "(", "platform", ".", "system", "(", ")", "!=", "'Windows'", "and", "handle", ".", "kernelDriverActive", "(", "iface_number", ")", ")", ":", "handle", ".", "detachKernelDriver", "(", "iface_number", ")", "except", "libusb1", ".", "USBError", "as", "e", ":", "if", "e", ".", "value", "==", "libusb1", ".", "LIBUSB_ERROR_NOT_FOUND", ":", "_LOG", ".", "warning", "(", "'Kernel driver not found for interface: %s.'", ",", "iface_number", ")", "else", ":", "raise", "handle", ".", "claimInterface", "(", "iface_number", ")", "self", ".", "_handle", "=", "handle", "self", ".", "_interface_number", "=", "iface_number", "with", "self", ".", "_HANDLE_CACHE_LOCK", ":", "self", ".", "_HANDLE_CACHE", "[", "port_path", "]", "=", "self", "# When this object is deleted, make sure it's closed.", "weakref", ".", "ref", "(", "self", ",", "self", 
".", "Close", ")" ]
39.261905
15.690476
def trim(self, count, approximate=True): """ Trim the stream to the given "count" of messages, discarding the oldest messages first. :param count: maximum size of stream :param approximate: allow size to be approximate """ return self.database.xtrim(self.key, count, approximate)
[ "def", "trim", "(", "self", ",", "count", ",", "approximate", "=", "True", ")", ":", "return", "self", ".", "database", ".", "xtrim", "(", "self", ".", "key", ",", "count", ",", "approximate", ")" ]
36.444444
15.555556
def swish(x, name='swish'): """Swish function. See `Swish: a Self-Gated Activation Function <https://arxiv.org/abs/1710.05941>`__. Parameters ---------- x : Tensor input. name: str function name (optional). Returns ------- Tensor A ``Tensor`` in the same type as ``x``. """ with tf.name_scope(name): x = tf.nn.sigmoid(x) * x return x
[ "def", "swish", "(", "x", ",", "name", "=", "'swish'", ")", ":", "with", "tf", ".", "name_scope", "(", "name", ")", ":", "x", "=", "tf", ".", "nn", ".", "sigmoid", "(", "x", ")", "*", "x", "return", "x" ]
18.952381
23.857143
def collect_conflicts_between_fields_and_fragment( context: ValidationContext, conflicts: List[Conflict], cached_fields_and_fragment_names: Dict, compared_fragments: Set[str], compared_fragment_pairs: "PairSet", are_mutually_exclusive: bool, field_map: NodeAndDefCollection, fragment_name: str, ) -> None: """Collect conflicts between fields and fragment. Collect all conflicts found between a set of fields and a fragment reference including via spreading in any nested fragments. """ # Memoize so a fragment is not compared for conflicts more than once. if fragment_name in compared_fragments: return compared_fragments.add(fragment_name) fragment = context.get_fragment(fragment_name) if not fragment: return None field_map2, fragment_names2 = get_referenced_fields_and_fragment_names( context, cached_fields_and_fragment_names, fragment ) # Do not compare a fragment's fieldMap to itself. if field_map is field_map2: return # (D) First collect any conflicts between the provided collection of fields and the # collection of fields represented by the given fragment. collect_conflicts_between( context, conflicts, cached_fields_and_fragment_names, compared_fragment_pairs, are_mutually_exclusive, field_map, field_map2, ) # (E) Then collect any conflicts between the provided collection of fields and any # fragment names found in the given fragment. for fragment_name2 in fragment_names2: collect_conflicts_between_fields_and_fragment( context, conflicts, cached_fields_and_fragment_names, compared_fragments, compared_fragment_pairs, are_mutually_exclusive, field_map, fragment_name2, )
[ "def", "collect_conflicts_between_fields_and_fragment", "(", "context", ":", "ValidationContext", ",", "conflicts", ":", "List", "[", "Conflict", "]", ",", "cached_fields_and_fragment_names", ":", "Dict", ",", "compared_fragments", ":", "Set", "[", "str", "]", ",", "compared_fragment_pairs", ":", "\"PairSet\"", ",", "are_mutually_exclusive", ":", "bool", ",", "field_map", ":", "NodeAndDefCollection", ",", "fragment_name", ":", "str", ",", ")", "->", "None", ":", "# Memoize so a fragment is not compared for conflicts more than once.", "if", "fragment_name", "in", "compared_fragments", ":", "return", "compared_fragments", ".", "add", "(", "fragment_name", ")", "fragment", "=", "context", ".", "get_fragment", "(", "fragment_name", ")", "if", "not", "fragment", ":", "return", "None", "field_map2", ",", "fragment_names2", "=", "get_referenced_fields_and_fragment_names", "(", "context", ",", "cached_fields_and_fragment_names", ",", "fragment", ")", "# Do not compare a fragment's fieldMap to itself.", "if", "field_map", "is", "field_map2", ":", "return", "# (D) First collect any conflicts between the provided collection of fields and the", "# collection of fields represented by the given fragment.", "collect_conflicts_between", "(", "context", ",", "conflicts", ",", "cached_fields_and_fragment_names", ",", "compared_fragment_pairs", ",", "are_mutually_exclusive", ",", "field_map", ",", "field_map2", ",", ")", "# (E) Then collect any conflicts between the provided collection of fields and any", "# fragment names found in the given fragment.", "for", "fragment_name2", "in", "fragment_names2", ":", "collect_conflicts_between_fields_and_fragment", "(", "context", ",", "conflicts", ",", "cached_fields_and_fragment_names", ",", "compared_fragments", ",", "compared_fragment_pairs", ",", "are_mutually_exclusive", ",", "field_map", ",", "fragment_name2", ",", ")" ]
32.508772
18.508772
def list_vpnservices(self, retrieve_all=True, **_params): """Fetches a list of all configured VPN services for a project.""" return self.list('vpnservices', self.vpnservices_path, retrieve_all, **_params)
[ "def", "list_vpnservices", "(", "self", ",", "retrieve_all", "=", "True", ",", "*", "*", "_params", ")", ":", "return", "self", ".", "list", "(", "'vpnservices'", ",", "self", ".", "vpnservices_path", ",", "retrieve_all", ",", "*", "*", "_params", ")" ]
60.5
14.5
def _inform_if_path_does_not_exist(path): """ If the path does not exist, print a message saying so. This is intended to be helpful to users if they specify a custom path that eg cannot find. """ expanded_path = get_expanded_path(path) if not os.path.exists(expanded_path): print('Could not find custom path at: {}'.format(expanded_path))
[ "def", "_inform_if_path_does_not_exist", "(", "path", ")", ":", "expanded_path", "=", "get_expanded_path", "(", "path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "expanded_path", ")", ":", "print", "(", "'Could not find custom path at: {}'", ".", "format", "(", "expanded_path", ")", ")" ]
45.375
13.625
def CORS(func=None): """ CORS support """ def w(r=None): from uliweb import request, response if request.method == 'OPTIONS': response = Response(status=204) response.headers['Access-Control-Allow-Credentials'] = 'true' response.headers['Access-Control-Allow-Origin'] = request.headers['Origin'] response.headers['Access-Control-Allow-Methods'] = 'GET, POST, PUT, DELETE, OPTIONS' response.headers['Access-Control-Allow-Headers'] = 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range' response.headers['Access-Control-Max-Age'] = 24*3600 response.headers['Content-Type'] = 'text/plain; charset=utf-8' response.headers['Content-Length'] = 0 return response elif request.method in ('GET', 'POST'): if isinstance(r, Response): response = r response.headers['Access-Control-Allow-Credentials'] = 'true' if 'Origin' in request.headers: response.headers['Access-Control-Allow-Origin'] = request.headers['Origin'] response.headers['Access-Control-Allow-Methods'] = 'GET, POST, PUT, DELETE, OPTIONS' response.headers['Access-Control-Allow-Headers'] = 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range' response.headers['Access-Control-Expose-Headers'] = 'Content-Length,Content-Range' if callable(func): @wraps(func) def f(*arg, **kwargs): if request.method == 'OPTIONS': return w() ret = func(*arg, **kwargs) w(ret) return ret return f else: w()
[ "def", "CORS", "(", "func", "=", "None", ")", ":", "def", "w", "(", "r", "=", "None", ")", ":", "from", "uliweb", "import", "request", ",", "response", "if", "request", ".", "method", "==", "'OPTIONS'", ":", "response", "=", "Response", "(", "status", "=", "204", ")", "response", ".", "headers", "[", "'Access-Control-Allow-Credentials'", "]", "=", "'true'", "response", ".", "headers", "[", "'Access-Control-Allow-Origin'", "]", "=", "request", ".", "headers", "[", "'Origin'", "]", "response", ".", "headers", "[", "'Access-Control-Allow-Methods'", "]", "=", "'GET, POST, PUT, DELETE, OPTIONS'", "response", ".", "headers", "[", "'Access-Control-Allow-Headers'", "]", "=", "'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range'", "response", ".", "headers", "[", "'Access-Control-Max-Age'", "]", "=", "24", "*", "3600", "response", ".", "headers", "[", "'Content-Type'", "]", "=", "'text/plain; charset=utf-8'", "response", ".", "headers", "[", "'Content-Length'", "]", "=", "0", "return", "response", "elif", "request", ".", "method", "in", "(", "'GET'", ",", "'POST'", ")", ":", "if", "isinstance", "(", "r", ",", "Response", ")", ":", "response", "=", "r", "response", ".", "headers", "[", "'Access-Control-Allow-Credentials'", "]", "=", "'true'", "if", "'Origin'", "in", "request", ".", "headers", ":", "response", ".", "headers", "[", "'Access-Control-Allow-Origin'", "]", "=", "request", ".", "headers", "[", "'Origin'", "]", "response", ".", "headers", "[", "'Access-Control-Allow-Methods'", "]", "=", "'GET, POST, PUT, DELETE, OPTIONS'", "response", ".", "headers", "[", "'Access-Control-Allow-Headers'", "]", "=", "'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range'", "response", ".", "headers", "[", "'Access-Control-Expose-Headers'", "]", "=", "'Content-Length,Content-Range'", "if", "callable", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "f", "(", "*", "arg", ",", "*", "*", "kwargs", ")", ":", 
"if", "request", ".", "method", "==", "'OPTIONS'", ":", "return", "w", "(", ")", "ret", "=", "func", "(", "*", "arg", ",", "*", "*", "kwargs", ")", "w", "(", "ret", ")", "return", "ret", "return", "f", "else", ":", "w", "(", ")" ]
43.125
26.825
def _folder_item_remarks(self, analysis_brain, item): """Renders the Remarks field for the passed in analysis If the edition of the analysis is permitted, adds the field into the list of editable fields. :param analysis_brain: Brain that represents an analysis :param item: analysis' dictionary counterpart that represents a row """ if self.analysis_remarks_enabled(): item["Remarks"] = analysis_brain.getRemarks if self.is_analysis_edition_allowed(analysis_brain): item["allow_edit"].extend(["Remarks"])
[ "def", "_folder_item_remarks", "(", "self", ",", "analysis_brain", ",", "item", ")", ":", "if", "self", ".", "analysis_remarks_enabled", "(", ")", ":", "item", "[", "\"Remarks\"", "]", "=", "analysis_brain", ".", "getRemarks", "if", "self", ".", "is_analysis_edition_allowed", "(", "analysis_brain", ")", ":", "item", "[", "\"allow_edit\"", "]", ".", "extend", "(", "[", "\"Remarks\"", "]", ")" ]
38.8
21.6
def populate_user_events(): """Generate a list of all registered authorized and anonymous events""" global AuthorizedEvents global AnonymousEvents def inheritors(klass): """Find inheritors of a specified object class""" subclasses = {} subclasses_set = set() work = [klass] while work: parent = work.pop() for child in parent.__subclasses__(): if child not in subclasses_set: # pprint(child.__dict__) name = child.__module__ + "." + child.__name__ if name.startswith('hfos'): subclasses_set.add(child) event = { 'event': child, 'name': name, 'doc': child.__doc__, 'args': [] } if child.__module__ in subclasses: subclasses[child.__module__][ child.__name__] = event else: subclasses[child.__module__] = { child.__name__: event } work.append(child) return subclasses # TODO: Change event system again, to catch authorized (i.e. "user") as # well as normal events, so they can be processed by Automat # NormalEvents = inheritors(Event) AuthorizedEvents = inheritors(authorizedevent) AnonymousEvents = inheritors(anonymousevent)
[ "def", "populate_user_events", "(", ")", ":", "global", "AuthorizedEvents", "global", "AnonymousEvents", "def", "inheritors", "(", "klass", ")", ":", "\"\"\"Find inheritors of a specified object class\"\"\"", "subclasses", "=", "{", "}", "subclasses_set", "=", "set", "(", ")", "work", "=", "[", "klass", "]", "while", "work", ":", "parent", "=", "work", ".", "pop", "(", ")", "for", "child", "in", "parent", ".", "__subclasses__", "(", ")", ":", "if", "child", "not", "in", "subclasses_set", ":", "# pprint(child.__dict__)", "name", "=", "child", ".", "__module__", "+", "\".\"", "+", "child", ".", "__name__", "if", "name", ".", "startswith", "(", "'hfos'", ")", ":", "subclasses_set", ".", "add", "(", "child", ")", "event", "=", "{", "'event'", ":", "child", ",", "'name'", ":", "name", ",", "'doc'", ":", "child", ".", "__doc__", ",", "'args'", ":", "[", "]", "}", "if", "child", ".", "__module__", "in", "subclasses", ":", "subclasses", "[", "child", ".", "__module__", "]", "[", "child", ".", "__name__", "]", "=", "event", "else", ":", "subclasses", "[", "child", ".", "__module__", "]", "=", "{", "child", ".", "__name__", ":", "event", "}", "work", ".", "append", "(", "child", ")", "return", "subclasses", "# TODO: Change event system again, to catch authorized (i.e. \"user\") as", "# well as normal events, so they can be processed by Automat", "# NormalEvents = inheritors(Event)", "AuthorizedEvents", "=", "inheritors", "(", "authorizedevent", ")", "AnonymousEvents", "=", "inheritors", "(", "anonymousevent", ")" ]
35.75
16.113636
def integrate(datasets_full, genes_list, batch_size=BATCH_SIZE, verbose=VERBOSE, ds_names=None, dimred=DIMRED, approx=APPROX, sigma=SIGMA, alpha=ALPHA, knn=KNN, geosketch=False, geosketch_max=20000, n_iter=1, union=False, hvg=None): """Integrate a list of data sets. Parameters ---------- datasets_full : `list` of `scipy.sparse.csr_matrix` or of `numpy.ndarray` Data sets to integrate and correct. genes_list: `list` of `list` of `string` List of genes for each data set. batch_size: `int`, optional (default: `5000`) The batch size used in the alignment vector computation. Useful when correcting very large (>100k samples) data sets. Set to large value that runs within available memory. verbose: `bool` or `int`, optional (default: 2) When `True` or not equal to 0, prints logging output. ds_names: `list` of `string`, optional When `verbose=True`, reports data set names in logging output. dimred: `int`, optional (default: 100) Dimensionality of integrated embedding. approx: `bool`, optional (default: `True`) Use approximate nearest neighbors, greatly speeds up matching runtime. sigma: `float`, optional (default: 15) Correction smoothing parameter on Gaussian kernel. alpha: `float`, optional (default: 0.10) Alignment score minimum cutoff. knn: `int`, optional (default: 20) Number of nearest neighbors to use for matching. hvg: `int`, optional (default: None) Use this number of top highly variable genes based on dispersion. Returns ------- integrated, genes Returns a two-tuple containing a list of `numpy.ndarray` with integrated low dimensional embeddings and a single list of genes containing the intersection of inputted genes. """ datasets_full = check_datasets(datasets_full) datasets, genes = merge_datasets(datasets_full, genes_list, ds_names=ds_names, union=union) datasets_dimred, genes = process_data(datasets, genes, hvg=hvg, dimred=dimred) for _ in range(n_iter): datasets_dimred = assemble( datasets_dimred, # Assemble in low dimensional space. 
verbose=verbose, knn=knn, sigma=sigma, approx=approx, alpha=alpha, ds_names=ds_names, batch_size=batch_size, geosketch=geosketch, geosketch_max=geosketch_max, ) return datasets_dimred, genes
[ "def", "integrate", "(", "datasets_full", ",", "genes_list", ",", "batch_size", "=", "BATCH_SIZE", ",", "verbose", "=", "VERBOSE", ",", "ds_names", "=", "None", ",", "dimred", "=", "DIMRED", ",", "approx", "=", "APPROX", ",", "sigma", "=", "SIGMA", ",", "alpha", "=", "ALPHA", ",", "knn", "=", "KNN", ",", "geosketch", "=", "False", ",", "geosketch_max", "=", "20000", ",", "n_iter", "=", "1", ",", "union", "=", "False", ",", "hvg", "=", "None", ")", ":", "datasets_full", "=", "check_datasets", "(", "datasets_full", ")", "datasets", ",", "genes", "=", "merge_datasets", "(", "datasets_full", ",", "genes_list", ",", "ds_names", "=", "ds_names", ",", "union", "=", "union", ")", "datasets_dimred", ",", "genes", "=", "process_data", "(", "datasets", ",", "genes", ",", "hvg", "=", "hvg", ",", "dimred", "=", "dimred", ")", "for", "_", "in", "range", "(", "n_iter", ")", ":", "datasets_dimred", "=", "assemble", "(", "datasets_dimred", ",", "# Assemble in low dimensional space.", "verbose", "=", "verbose", ",", "knn", "=", "knn", ",", "sigma", "=", "sigma", ",", "approx", "=", "approx", ",", "alpha", "=", "alpha", ",", "ds_names", "=", "ds_names", ",", "batch_size", "=", "batch_size", ",", "geosketch", "=", "geosketch", ",", "geosketch_max", "=", "geosketch_max", ",", ")", "return", "datasets_dimred", ",", "genes" ]
44.964286
19.464286
def _setContent(self): '''GED defines element name, so also define typecode aname ''' kw = KW.copy() try: kw.update(dict(klass=self.getClassName(), element='ElementDeclaration', literal=self.literalTag(), substitutionGroup=self._substitutionGroupTag(), schema=self.schemaTag(), init=self.simpleConstructor(), ns=self.ns, name=self.name, aname=self.getAttributeName(self.name), baseslogic=self.getBasesLogic(ID3), #ofwhat=self.getTypecodeList(), #atypecode=self.attribute_typecode, #pyclass=self.getPyClass(), alias=NAD.getAlias(self.sKlassNS), subclass=type_class_name(self.sKlass), )) except Exception, ex: args = ['Failure processing an element w/local complexType: %s' %( self._item.getItemTrace())] args += ex.args ex.args = tuple(args) raise if self.local: kw['element'] = 'LocalElementDeclaration' element = [ '%(ID1)sclass %(klass)s(%(element)s):', '%(ID2)s%(literal)s', '%(ID2)s%(schema)s', '%(ID2)s%(substitutionGroup)s', '%(ID2)s%(init)s', '%(ID3)skw["pname"] = ("%(ns)s","%(name)s")', '%(ID3)skw["aname"] = "%(aname)s"', '%(baseslogic)s', '%(ID3)s%(alias)s.%(subclass)s.__init__(self, **kw)', '%(ID3)sif self.pyclass is not None: self.pyclass.__name__ = "%(klass)s_Holder"', ] self.writeArray(map(lambda l: l %kw, element))
[ "def", "_setContent", "(", "self", ")", ":", "kw", "=", "KW", ".", "copy", "(", ")", "try", ":", "kw", ".", "update", "(", "dict", "(", "klass", "=", "self", ".", "getClassName", "(", ")", ",", "element", "=", "'ElementDeclaration'", ",", "literal", "=", "self", ".", "literalTag", "(", ")", ",", "substitutionGroup", "=", "self", ".", "_substitutionGroupTag", "(", ")", ",", "schema", "=", "self", ".", "schemaTag", "(", ")", ",", "init", "=", "self", ".", "simpleConstructor", "(", ")", ",", "ns", "=", "self", ".", "ns", ",", "name", "=", "self", ".", "name", ",", "aname", "=", "self", ".", "getAttributeName", "(", "self", ".", "name", ")", ",", "baseslogic", "=", "self", ".", "getBasesLogic", "(", "ID3", ")", ",", "#ofwhat=self.getTypecodeList(),", "#atypecode=self.attribute_typecode,", "#pyclass=self.getPyClass(),", "alias", "=", "NAD", ".", "getAlias", "(", "self", ".", "sKlassNS", ")", ",", "subclass", "=", "type_class_name", "(", "self", ".", "sKlass", ")", ",", ")", ")", "except", "Exception", ",", "ex", ":", "args", "=", "[", "'Failure processing an element w/local complexType: %s'", "%", "(", "self", ".", "_item", ".", "getItemTrace", "(", ")", ")", "]", "args", "+=", "ex", ".", "args", "ex", ".", "args", "=", "tuple", "(", "args", ")", "raise", "if", "self", ".", "local", ":", "kw", "[", "'element'", "]", "=", "'LocalElementDeclaration'", "element", "=", "[", "'%(ID1)sclass %(klass)s(%(element)s):'", ",", "'%(ID2)s%(literal)s'", ",", "'%(ID2)s%(schema)s'", ",", "'%(ID2)s%(substitutionGroup)s'", ",", "'%(ID2)s%(init)s'", ",", "'%(ID3)skw[\"pname\"] = (\"%(ns)s\",\"%(name)s\")'", ",", "'%(ID3)skw[\"aname\"] = \"%(aname)s\"'", ",", "'%(baseslogic)s'", ",", "'%(ID3)s%(alias)s.%(subclass)s.__init__(self, **kw)'", ",", "'%(ID3)sif self.pyclass is not None: self.pyclass.__name__ = \"%(klass)s_Holder\"'", ",", "]", "self", ".", "writeArray", "(", "map", "(", "lambda", "l", ":", "l", "%", "kw", ",", "element", ")", ")" ]
41.568182
18.295455
def rdopkg(*cargs): """ rdopkg CLI interface Execute rdopkg action with specified arguments and return shell friendly exit code. This is the default high level way to interact with rdopkg. py> rdopkg('new-version', '1.2.3') is equivalent to $> rdopkg new-version 1.2.3 """ runner = rdopkg_runner() return shell.run(runner, cargs=cargs, prog='rdopkg', version=__version__)
[ "def", "rdopkg", "(", "*", "cargs", ")", ":", "runner", "=", "rdopkg_runner", "(", ")", "return", "shell", ".", "run", "(", "runner", ",", "cargs", "=", "cargs", ",", "prog", "=", "'rdopkg'", ",", "version", "=", "__version__", ")" ]
23.6
17.8
def _calc_uca_chunk(self, data, dX, dY, direction, mag, flats, area_edges, plotflag=False, edge_todo_i_no_mask=True): """ Calculates the upstream contributing area for the interior, and includes edge contributions if they are provided through area_edges. """ # %% # Figure out which section the drainage goes towards, and what # proportion goes to the straight-sided (as opposed to diagonal) node. section, proportion = self._calc_uca_section_proportion( data, dX, dY, direction, flats) # Build the drainage or adjacency matrix A = self._mk_adjacency_matrix(section, proportion, flats, data, mag, dX, dY) if CYTHON: B = A.tocsr() colsum = np.array(A.sum(1)).ravel() ids = colsum == 0 # If no one drains into me area = (dX * dY) # Record minimum area min_area = np.nanmin(area) self.twi_min_area = min(self.twi_min_area, min_area) area = np.concatenate((area[0:1], area)).reshape(area.size+1, 1) area = area.repeat(data.shape[1], 1) # Set the edge areas to zero, will add those contributions later area[:, 0] = area_edges[:, 0] area[:, -1] = area_edges[:, -1] area[-1, :] = area_edges[-1, :] area[0, :] = area_edges[0, :] # These edges are done, they have been drained already ids[area_edges.ravel() > 0] = True done = np.zeros(data.shape, bool) done.ravel()[ids] = True # deal with no-data values done[1:-1, 1:-1] = done[1:-1, 1:-1] | data.mask[1:-1, 1:-1] # Check the inlet edges edge_todo = np.zeros_like(done) ids_ed = np.arange(data.size).reshape(data.shape) # left edge_todo[:, 0] = (A[:, ids_ed[:, 0]].sum(0) > 0) \ & (area_edges[:, 0] == 0) edge_todo[:, -1] = (A[:, ids_ed[:, -1]].sum(0) > 0) \ & (area_edges[:, -1] == 0) edge_todo[0, :] = (A[:, ids_ed[0, :]].sum(0) > 0) \ & (area_edges[0, :] == 0) edge_todo[-1, :] = (A[:, ids_ed[-1, :]].sum(0) > 0) \ & (area_edges[-1, :] == 0) # Will do the tile-level doneness edge_todo_i_no_mask = edge_todo.copy() & edge_todo_i_no_mask edge_todo_no_mask = edge_todo_i_no_mask.copy() # tile-level doneness edge_todo[data.mask] = False # Don't do masked 
areas # Initialize done edges edge_todo_i = edge_todo.copy() ids_old = np.zeros_like(ids) # %% count = 1 if CYTHON: area_ = area.ravel() done_ = done.ravel() edge_todo_ = edge_todo.astype('float64').ravel() edge_todo_no_mask_ = edge_todo_no_mask.astype('float64').ravel() data_ = data.ravel() while (np.any(~done) and count < self.circular_ref_maxcount): print ".", count += 1 if CYTHON: area_, done_, edge_todo_, edge_todo_no_mask_ = cyutils.drain_area(area_, done_, ids, A.indptr, A.indices, A.data, B.indptr, B.indices, area.shape[0], area.shape[1], edge_todo_, edge_todo_no_mask_) else: # If I use ids.sum() > 0 then I might get stuck in # circular references. while (ids - ids_old).sum() > 0: # %% ids_old = ids.copy() ids, area, done, edge_todo = \ self._drain_step(A, ids, area, done, edge_todo) # figure(1);clf();imshow(area, interpolation='none');colorbar() # figure(2);clf();imshow(ids.reshape(area.shape), interpolation='none');colorbar() # figure(3);clf();imshow(done, interpolation='none');colorbar() done_ = done.ravel() #%% ids[:] = False max_elev = (data_ * (~done_)).max() ids[((data_ * (~done_) - max_elev) / max_elev > -0.01)] = True if CYTHON: area = area_.reshape(area.shape) done = done_.reshape(done.shape) edge_todo = edge_todo_.reshape(edge_todo.shape).astype(bool) edge_todo_no_mask = edge_todo_no_mask_.reshape(edge_todo_no_mask.shape).astype(bool) area[flats] = np.nan edge_done = ~edge_todo edge_done[data.mask] = True # Don't do masked areas if self.apply_uca_limit_edges: # 2x because of bifurcations (maybe should be more than 2x, but # should be ok edge_done[area > self.uca_saturation_limit * 2 * min_area] = True # %% if plotflag: # TODO DTYPE self._plot_connectivity(A, (done.astype('float64') is False) + flats.astype('float64') * 2, [0, 3]) return area, edge_todo_i, edge_done, edge_todo_i_no_mask, edge_todo_no_mask
[ "def", "_calc_uca_chunk", "(", "self", ",", "data", ",", "dX", ",", "dY", ",", "direction", ",", "mag", ",", "flats", ",", "area_edges", ",", "plotflag", "=", "False", ",", "edge_todo_i_no_mask", "=", "True", ")", ":", "# %%", "# Figure out which section the drainage goes towards, and what", "# proportion goes to the straight-sided (as opposed to diagonal) node.", "section", ",", "proportion", "=", "self", ".", "_calc_uca_section_proportion", "(", "data", ",", "dX", ",", "dY", ",", "direction", ",", "flats", ")", "# Build the drainage or adjacency matrix", "A", "=", "self", ".", "_mk_adjacency_matrix", "(", "section", ",", "proportion", ",", "flats", ",", "data", ",", "mag", ",", "dX", ",", "dY", ")", "if", "CYTHON", ":", "B", "=", "A", ".", "tocsr", "(", ")", "colsum", "=", "np", ".", "array", "(", "A", ".", "sum", "(", "1", ")", ")", ".", "ravel", "(", ")", "ids", "=", "colsum", "==", "0", "# If no one drains into me", "area", "=", "(", "dX", "*", "dY", ")", "# Record minimum area", "min_area", "=", "np", ".", "nanmin", "(", "area", ")", "self", ".", "twi_min_area", "=", "min", "(", "self", ".", "twi_min_area", ",", "min_area", ")", "area", "=", "np", ".", "concatenate", "(", "(", "area", "[", "0", ":", "1", "]", ",", "area", ")", ")", ".", "reshape", "(", "area", ".", "size", "+", "1", ",", "1", ")", "area", "=", "area", ".", "repeat", "(", "data", ".", "shape", "[", "1", "]", ",", "1", ")", "# Set the edge areas to zero, will add those contributions later", "area", "[", ":", ",", "0", "]", "=", "area_edges", "[", ":", ",", "0", "]", "area", "[", ":", ",", "-", "1", "]", "=", "area_edges", "[", ":", ",", "-", "1", "]", "area", "[", "-", "1", ",", ":", "]", "=", "area_edges", "[", "-", "1", ",", ":", "]", "area", "[", "0", ",", ":", "]", "=", "area_edges", "[", "0", ",", ":", "]", "# These edges are done, they have been drained already", "ids", "[", "area_edges", ".", "ravel", "(", ")", ">", "0", "]", "=", "True", "done", "=", "np", ".", "zeros", "(", "data", 
".", "shape", ",", "bool", ")", "done", ".", "ravel", "(", ")", "[", "ids", "]", "=", "True", "# deal with no-data values", "done", "[", "1", ":", "-", "1", ",", "1", ":", "-", "1", "]", "=", "done", "[", "1", ":", "-", "1", ",", "1", ":", "-", "1", "]", "|", "data", ".", "mask", "[", "1", ":", "-", "1", ",", "1", ":", "-", "1", "]", "# Check the inlet edges", "edge_todo", "=", "np", ".", "zeros_like", "(", "done", ")", "ids_ed", "=", "np", ".", "arange", "(", "data", ".", "size", ")", ".", "reshape", "(", "data", ".", "shape", ")", "# left", "edge_todo", "[", ":", ",", "0", "]", "=", "(", "A", "[", ":", ",", "ids_ed", "[", ":", ",", "0", "]", "]", ".", "sum", "(", "0", ")", ">", "0", ")", "&", "(", "area_edges", "[", ":", ",", "0", "]", "==", "0", ")", "edge_todo", "[", ":", ",", "-", "1", "]", "=", "(", "A", "[", ":", ",", "ids_ed", "[", ":", ",", "-", "1", "]", "]", ".", "sum", "(", "0", ")", ">", "0", ")", "&", "(", "area_edges", "[", ":", ",", "-", "1", "]", "==", "0", ")", "edge_todo", "[", "0", ",", ":", "]", "=", "(", "A", "[", ":", ",", "ids_ed", "[", "0", ",", ":", "]", "]", ".", "sum", "(", "0", ")", ">", "0", ")", "&", "(", "area_edges", "[", "0", ",", ":", "]", "==", "0", ")", "edge_todo", "[", "-", "1", ",", ":", "]", "=", "(", "A", "[", ":", ",", "ids_ed", "[", "-", "1", ",", ":", "]", "]", ".", "sum", "(", "0", ")", ">", "0", ")", "&", "(", "area_edges", "[", "-", "1", ",", ":", "]", "==", "0", ")", "# Will do the tile-level doneness", "edge_todo_i_no_mask", "=", "edge_todo", ".", "copy", "(", ")", "&", "edge_todo_i_no_mask", "edge_todo_no_mask", "=", "edge_todo_i_no_mask", ".", "copy", "(", ")", "# tile-level doneness", "edge_todo", "[", "data", ".", "mask", "]", "=", "False", "# Don't do masked areas", "# Initialize done edges", "edge_todo_i", "=", "edge_todo", ".", "copy", "(", ")", "ids_old", "=", "np", ".", "zeros_like", "(", "ids", ")", "# %%", "count", "=", "1", "if", "CYTHON", ":", "area_", "=", "area", ".", "ravel", "(", ")", "done_", "=", 
"done", ".", "ravel", "(", ")", "edge_todo_", "=", "edge_todo", ".", "astype", "(", "'float64'", ")", ".", "ravel", "(", ")", "edge_todo_no_mask_", "=", "edge_todo_no_mask", ".", "astype", "(", "'float64'", ")", ".", "ravel", "(", ")", "data_", "=", "data", ".", "ravel", "(", ")", "while", "(", "np", ".", "any", "(", "~", "done", ")", "and", "count", "<", "self", ".", "circular_ref_maxcount", ")", ":", "print", "\".\"", ",", "count", "+=", "1", "if", "CYTHON", ":", "area_", ",", "done_", ",", "edge_todo_", ",", "edge_todo_no_mask_", "=", "cyutils", ".", "drain_area", "(", "area_", ",", "done_", ",", "ids", ",", "A", ".", "indptr", ",", "A", ".", "indices", ",", "A", ".", "data", ",", "B", ".", "indptr", ",", "B", ".", "indices", ",", "area", ".", "shape", "[", "0", "]", ",", "area", ".", "shape", "[", "1", "]", ",", "edge_todo_", ",", "edge_todo_no_mask_", ")", "else", ":", "# If I use ids.sum() > 0 then I might get stuck in", "# circular references.", "while", "(", "ids", "-", "ids_old", ")", ".", "sum", "(", ")", ">", "0", ":", "# %%", "ids_old", "=", "ids", ".", "copy", "(", ")", "ids", ",", "area", ",", "done", ",", "edge_todo", "=", "self", ".", "_drain_step", "(", "A", ",", "ids", ",", "area", ",", "done", ",", "edge_todo", ")", "# figure(1);clf();imshow(area, interpolation='none');colorbar()", "# figure(2);clf();imshow(ids.reshape(area.shape), interpolation='none');colorbar()", "# figure(3);clf();imshow(done, interpolation='none');colorbar()", "done_", "=", "done", ".", "ravel", "(", ")", "#%%", "ids", "[", ":", "]", "=", "False", "max_elev", "=", "(", "data_", "*", "(", "~", "done_", ")", ")", ".", "max", "(", ")", "ids", "[", "(", "(", "data_", "*", "(", "~", "done_", ")", "-", "max_elev", ")", "/", "max_elev", ">", "-", "0.01", ")", "]", "=", "True", "if", "CYTHON", ":", "area", "=", "area_", ".", "reshape", "(", "area", ".", "shape", ")", "done", "=", "done_", ".", "reshape", "(", "done", ".", "shape", ")", "edge_todo", "=", "edge_todo_", ".", "reshape", 
"(", "edge_todo", ".", "shape", ")", ".", "astype", "(", "bool", ")", "edge_todo_no_mask", "=", "edge_todo_no_mask_", ".", "reshape", "(", "edge_todo_no_mask", ".", "shape", ")", ".", "astype", "(", "bool", ")", "area", "[", "flats", "]", "=", "np", ".", "nan", "edge_done", "=", "~", "edge_todo", "edge_done", "[", "data", ".", "mask", "]", "=", "True", "# Don't do masked areas", "if", "self", ".", "apply_uca_limit_edges", ":", "# 2x because of bifurcations (maybe should be more than 2x, but", "# should be ok", "edge_done", "[", "area", ">", "self", ".", "uca_saturation_limit", "*", "2", "*", "min_area", "]", "=", "True", "# %%", "if", "plotflag", ":", "# TODO DTYPE", "self", ".", "_plot_connectivity", "(", "A", ",", "(", "done", ".", "astype", "(", "'float64'", ")", "is", "False", ")", "+", "flats", ".", "astype", "(", "'float64'", ")", "*", "2", ",", "[", "0", ",", "3", "]", ")", "return", "area", ",", "edge_todo_i", ",", "edge_done", ",", "edge_todo_i_no_mask", ",", "edge_todo_no_mask" ]
41.618644
20.838983
def _execute(self, func, command, **new_attributes): """ Execute command. """ if self._cfg_factory: # if we have a cfg_factory try: # we try to load a config with the factory if self.cfg_file: self.cfg = self._cfg_factory(self.cfg_file) except Exception as error: # raise se exception self._cfg_error(error) # let's get command(function) argspec arg_specs = inspect.getargspec(command) if arg_specs.defaults: # if we have defaults # count defaults arguments count = len(arg_specs.defaults) # get arguments names args_names = arg_specs.args[:count] # get keyword arguments names kwargs_name = arg_specs.args[count:] else: # else all names are the args only args_names = arg_specs.args # and keyword arguments is empty kwargs_name = [] pargs = [] kwargs = {} # for every argument in argument names for name in args_names: if name == 'args': # if argument name is *special name* **args** # we append a reference to self.args pargs.append(self.args) elif name == 'self': # else if argment name is *special name* **self** if ArgParseInated in inspect.getmro(func.__cls__): # if the class that holds the function is subclass of # ArgParseInated we'll instantiate it, passing some # parameter pargs.append(func.__cls__(self, **new_attributes)) else: # else we'll instatiate the class without parameters pargs.append(func.__cls__()) else: # else we'll append the argument getting it from the self.args pargs.append(getattr(self.args, name)) # for every argument in keyword arguments for name in kwargs_name: if name == 'args': # if argument name is *special name* **args** # we set for the arg a reference to self.args kwargs[name] = self.args elif name in self.args: # else if name is in self.args we'll set the relative value. kwargs[name] = getattr(self.args, name) # set the **global** write function setattr(__builtin__, self._write_name, self.write) # set the **global** write line function setattr(__builtin__, self._write_line_name, self.writeln) # let's setup something. 
for setup_func in self.setup: setup_func(self) # call event before_execute self._self_event('before_execute', command, *pargs, **kwargs) # if events returns a non None value we use it as retrval. retval, pargs, kwargs = self._call_event( 'before_execute', command, pargs, kwargs) # if before_execute event returns None go on with command if retval is None: # let's execute the command and assign the returned value to retval retval = command(*pargs, **kwargs) # call event after_execute self._call_event('after_execute', command, pargs, kwargs) self._self_event('after_execute', command, *pargs, **kwargs) if self.auto_exit: # if we have auto_exit is True if retval is None: self._self_event( 'before_exit_ok', command, retval=EXIT_OK, *pargs, **kwargs) # if retval is None we'll assume it's EXIT_OK self.exit(EXIT_OK) elif isinstance(retval, basestring): self._self_event('before_exit_ok', command, retval=retval, *pargs, **kwargs) # else if retval is a string we will exit with the message and # ERRORCODE is equal to 0 self.exit(EXIT_OK, retval) elif isinstance(retval, int): if retval == EXIT_OK: self._self_event('before_exit_ok', command, retval=retval, *pargs, **kwargs) else: self._self_event('before_exit_error', command, retval=retval, *pargs, **kwargs) # else if retval is an integer we'll exits with it as ERRORCODE self.exit(retval) elif isinstance(retval, (tuple, list,)): self._self_event('before_exit_error', command, retval=retval, *pargs, **kwargs) # if retval is a tuple or a list we'll exist with ERRORCODE and # message self.exit(retval[0], retval[1]) self._self_event('before_exit', command, retval=retval, *pargs, **kwargs) self.exit() else: # else if auto_exit is not True # we'll simply return retval return retval
[ "def", "_execute", "(", "self", ",", "func", ",", "command", ",", "*", "*", "new_attributes", ")", ":", "if", "self", ".", "_cfg_factory", ":", "# if we have a cfg_factory", "try", ":", "# we try to load a config with the factory", "if", "self", ".", "cfg_file", ":", "self", ".", "cfg", "=", "self", ".", "_cfg_factory", "(", "self", ".", "cfg_file", ")", "except", "Exception", "as", "error", ":", "# raise se exception", "self", ".", "_cfg_error", "(", "error", ")", "# let's get command(function) argspec", "arg_specs", "=", "inspect", ".", "getargspec", "(", "command", ")", "if", "arg_specs", ".", "defaults", ":", "# if we have defaults", "# count defaults arguments", "count", "=", "len", "(", "arg_specs", ".", "defaults", ")", "# get arguments names", "args_names", "=", "arg_specs", ".", "args", "[", ":", "count", "]", "# get keyword arguments names", "kwargs_name", "=", "arg_specs", ".", "args", "[", "count", ":", "]", "else", ":", "# else all names are the args only", "args_names", "=", "arg_specs", ".", "args", "# and keyword arguments is empty", "kwargs_name", "=", "[", "]", "pargs", "=", "[", "]", "kwargs", "=", "{", "}", "# for every argument in argument names", "for", "name", "in", "args_names", ":", "if", "name", "==", "'args'", ":", "# if argument name is *special name* **args**", "# we append a reference to self.args", "pargs", ".", "append", "(", "self", ".", "args", ")", "elif", "name", "==", "'self'", ":", "# else if argment name is *special name* **self**", "if", "ArgParseInated", "in", "inspect", ".", "getmro", "(", "func", ".", "__cls__", ")", ":", "# if the class that holds the function is subclass of", "# ArgParseInated we'll instantiate it, passing some", "# parameter", "pargs", ".", "append", "(", "func", ".", "__cls__", "(", "self", ",", "*", "*", "new_attributes", ")", ")", "else", ":", "# else we'll instatiate the class without parameters", "pargs", ".", "append", "(", "func", ".", "__cls__", "(", ")", ")", "else", ":", "# else we'll append 
the argument getting it from the self.args", "pargs", ".", "append", "(", "getattr", "(", "self", ".", "args", ",", "name", ")", ")", "# for every argument in keyword arguments", "for", "name", "in", "kwargs_name", ":", "if", "name", "==", "'args'", ":", "# if argument name is *special name* **args**", "# we set for the arg a reference to self.args", "kwargs", "[", "name", "]", "=", "self", ".", "args", "elif", "name", "in", "self", ".", "args", ":", "# else if name is in self.args we'll set the relative value.", "kwargs", "[", "name", "]", "=", "getattr", "(", "self", ".", "args", ",", "name", ")", "# set the **global** write function", "setattr", "(", "__builtin__", ",", "self", ".", "_write_name", ",", "self", ".", "write", ")", "# set the **global** write line function", "setattr", "(", "__builtin__", ",", "self", ".", "_write_line_name", ",", "self", ".", "writeln", ")", "# let's setup something.", "for", "setup_func", "in", "self", ".", "setup", ":", "setup_func", "(", "self", ")", "# call event before_execute", "self", ".", "_self_event", "(", "'before_execute'", ",", "command", ",", "*", "pargs", ",", "*", "*", "kwargs", ")", "# if events returns a non None value we use it as retrval.", "retval", ",", "pargs", ",", "kwargs", "=", "self", ".", "_call_event", "(", "'before_execute'", ",", "command", ",", "pargs", ",", "kwargs", ")", "# if before_execute event returns None go on with command", "if", "retval", "is", "None", ":", "# let's execute the command and assign the returned value to retval", "retval", "=", "command", "(", "*", "pargs", ",", "*", "*", "kwargs", ")", "# call event after_execute", "self", ".", "_call_event", "(", "'after_execute'", ",", "command", ",", "pargs", ",", "kwargs", ")", "self", ".", "_self_event", "(", "'after_execute'", ",", "command", ",", "*", "pargs", ",", "*", "*", "kwargs", ")", "if", "self", ".", "auto_exit", ":", "# if we have auto_exit is True", "if", "retval", "is", "None", ":", "self", ".", "_self_event", "(", 
"'before_exit_ok'", ",", "command", ",", "retval", "=", "EXIT_OK", ",", "*", "pargs", ",", "*", "*", "kwargs", ")", "# if retval is None we'll assume it's EXIT_OK", "self", ".", "exit", "(", "EXIT_OK", ")", "elif", "isinstance", "(", "retval", ",", "basestring", ")", ":", "self", ".", "_self_event", "(", "'before_exit_ok'", ",", "command", ",", "retval", "=", "retval", ",", "*", "pargs", ",", "*", "*", "kwargs", ")", "# else if retval is a string we will exit with the message and", "# ERRORCODE is equal to 0", "self", ".", "exit", "(", "EXIT_OK", ",", "retval", ")", "elif", "isinstance", "(", "retval", ",", "int", ")", ":", "if", "retval", "==", "EXIT_OK", ":", "self", ".", "_self_event", "(", "'before_exit_ok'", ",", "command", ",", "retval", "=", "retval", ",", "*", "pargs", ",", "*", "*", "kwargs", ")", "else", ":", "self", ".", "_self_event", "(", "'before_exit_error'", ",", "command", ",", "retval", "=", "retval", ",", "*", "pargs", ",", "*", "*", "kwargs", ")", "# else if retval is an integer we'll exits with it as ERRORCODE", "self", ".", "exit", "(", "retval", ")", "elif", "isinstance", "(", "retval", ",", "(", "tuple", ",", "list", ",", ")", ")", ":", "self", ".", "_self_event", "(", "'before_exit_error'", ",", "command", ",", "retval", "=", "retval", ",", "*", "pargs", ",", "*", "*", "kwargs", ")", "# if retval is a tuple or a list we'll exist with ERRORCODE and", "# message", "self", ".", "exit", "(", "retval", "[", "0", "]", ",", "retval", "[", "1", "]", ")", "self", ".", "_self_event", "(", "'before_exit'", ",", "command", ",", "retval", "=", "retval", ",", "*", "pargs", ",", "*", "*", "kwargs", ")", "self", ".", "exit", "(", ")", "else", ":", "# else if auto_exit is not True", "# we'll simply return retval", "return", "retval" ]
46.485981
15.682243
def getopt(args, shortopts, longopts = []): """getopt(args, options[, long_options]) -> opts, args Parses command line options and parameter list. args is the argument list to be parsed, without the leading reference to the running program. Typically, this means "sys.argv[1:]". shortopts is the string of option letters that the script wants to recognize, with options that require an argument followed by a colon (i.e., the same format that Unix getopt() uses). If specified, longopts is a list of strings with the names of the long options which should be supported. The leading '--' characters should not be included in the option name. Options which require an argument should be followed by an equal sign ('='). The return value consists of two elements: the first is a list of (option, value) pairs; the second is the list of program arguments left after the option list was stripped (this is a trailing slice of the first argument). Each option-and-value pair returned has the option as its first element, prefixed with a hyphen (e.g., '-x'), and the option argument as its second element, or an empty string if the option has no argument. The options occur in the list in the same order in which they were found, thus allowing multiple occurrences. Long and short options may be mixed. """ opts = [] if type(longopts) == type(""): longopts = [longopts] else: longopts = list(longopts) while args and args[0].startswith('-') and args[0] != '-': if args[0] == '--': args = args[1:] break if args[0].startswith('--'): opts, args = do_longs(opts, args[0][2:], longopts, args[1:]) else: opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:]) return opts, args
[ "def", "getopt", "(", "args", ",", "shortopts", ",", "longopts", "=", "[", "]", ")", ":", "opts", "=", "[", "]", "if", "type", "(", "longopts", ")", "==", "type", "(", "\"\"", ")", ":", "longopts", "=", "[", "longopts", "]", "else", ":", "longopts", "=", "list", "(", "longopts", ")", "while", "args", "and", "args", "[", "0", "]", ".", "startswith", "(", "'-'", ")", "and", "args", "[", "0", "]", "!=", "'-'", ":", "if", "args", "[", "0", "]", "==", "'--'", ":", "args", "=", "args", "[", "1", ":", "]", "break", "if", "args", "[", "0", "]", ".", "startswith", "(", "'--'", ")", ":", "opts", ",", "args", "=", "do_longs", "(", "opts", ",", "args", "[", "0", "]", "[", "2", ":", "]", ",", "longopts", ",", "args", "[", "1", ":", "]", ")", "else", ":", "opts", ",", "args", "=", "do_shorts", "(", "opts", ",", "args", "[", "0", "]", "[", "1", ":", "]", ",", "shortopts", ",", "args", "[", "1", ":", "]", ")", "return", "opts", ",", "args" ]
43.809524
23.738095
def set_chassis_location(location, host=None, admin_username=None, admin_password=None): ''' Set the location of the chassis. location The name of the location to be set on the chassis. host The chassis host. admin_username The username used to access the chassis. admin_password The password used to access the chassis. CLI Example: .. code-block:: bash salt '*' dracr.set_chassis_location location-name host=111.222.333.444 admin_username=root admin_password=secret ''' return __execute_cmd('setsysinfo -c chassislocation {0}'.format(location), host=host, admin_username=admin_username, admin_password=admin_password)
[ "def", "set_chassis_location", "(", "location", ",", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ")", ":", "return", "__execute_cmd", "(", "'setsysinfo -c chassislocation {0}'", ".", "format", "(", "location", ")", ",", "host", "=", "host", ",", "admin_username", "=", "admin_username", ",", "admin_password", "=", "admin_password", ")" ]
27.166667
24.5
def read_record(fp, first_line=None): """ Read a record from a file of AMOS messages On success returns a Message object On end of file raises EOFError """ if first_line is None: first_line = fp.readline() if not first_line: raise EOFError() match = _START.match(first_line) if not match: raise Exception('Bad start of message', first_line) type = match.group(1) message = Message(type) while True: row = fp.readline() match = _MULTILINE_FIELD.match(row) if match: key = match.group(1) val = "" while row: pos = fp.tell() row = fp.readline() if row[0] in '.': break elif row[0] in '{}': fp.seek(pos) # put the line back break val += row message.contents.append((key, val, True)) continue match = _FIELD.match(row) if match: key, val = match.group(1), match.group(2) message.contents.append((key, val, False)) continue match = _START.match(row) if match: message.append(read_record(fp, row)) continue if row[0] == '}': break raise Exception('Bad line', row) return message
[ "def", "read_record", "(", "fp", ",", "first_line", "=", "None", ")", ":", "if", "first_line", "is", "None", ":", "first_line", "=", "fp", ".", "readline", "(", ")", "if", "not", "first_line", ":", "raise", "EOFError", "(", ")", "match", "=", "_START", ".", "match", "(", "first_line", ")", "if", "not", "match", ":", "raise", "Exception", "(", "'Bad start of message'", ",", "first_line", ")", "type", "=", "match", ".", "group", "(", "1", ")", "message", "=", "Message", "(", "type", ")", "while", "True", ":", "row", "=", "fp", ".", "readline", "(", ")", "match", "=", "_MULTILINE_FIELD", ".", "match", "(", "row", ")", "if", "match", ":", "key", "=", "match", ".", "group", "(", "1", ")", "val", "=", "\"\"", "while", "row", ":", "pos", "=", "fp", ".", "tell", "(", ")", "row", "=", "fp", ".", "readline", "(", ")", "if", "row", "[", "0", "]", "in", "'.'", ":", "break", "elif", "row", "[", "0", "]", "in", "'{}'", ":", "fp", ".", "seek", "(", "pos", ")", "# put the line back", "break", "val", "+=", "row", "message", ".", "contents", ".", "append", "(", "(", "key", ",", "val", ",", "True", ")", ")", "continue", "match", "=", "_FIELD", ".", "match", "(", "row", ")", "if", "match", ":", "key", ",", "val", "=", "match", ".", "group", "(", "1", ")", ",", "match", ".", "group", "(", "2", ")", "message", ".", "contents", ".", "append", "(", "(", "key", ",", "val", ",", "False", ")", ")", "continue", "match", "=", "_START", ".", "match", "(", "row", ")", "if", "match", ":", "message", ".", "append", "(", "read_record", "(", "fp", ",", "row", ")", ")", "continue", "if", "row", "[", "0", "]", "==", "'}'", ":", "break", "raise", "Exception", "(", "'Bad line'", ",", "row", ")", "return", "message" ]
23.649123
18.315789
def _radixPass(a, b, r, n, K): """ Stable sort of the sequence a according to the keys given in r. >>> a=range(5) >>> b=[0]*5 >>> r=[2,1,3,0,4] >>> _radixPass(a, b, r, 5, 5) >>> b [3, 1, 0, 2, 4] When n is less than the length of a, the end of b must be left unaltered. >>> b=[5]*5 >>> _radixPass(a, b, r, 2, 2) >>> b [1, 0, 5, 5, 5] >>> _a=a=[1, 0] >>> b= [0]*2 >>> r=[0, 1] >>> _radixPass(a, b, r, 2, 2) >>> a=_a >>> b [0, 1] >>> a=[1, 1] >>> _radixPass(a, b, r, 2, 2) >>> b [1, 1] >>> a=[0, 1, 1, 0] >>> b= [0]*4 >>> r=[0, 1] >>> _radixPass(a, b, r, 4, 2) >>> a=_a >>> b [0, 0, 1, 1] """ c = _array("i", [0] * (K + 1)) # counter array for i in range(n): # count occurrences c[r[a[i]]] += 1 sum = 0 for i in range(K + 1): # exclusive prefix sums t = c[i] c[i] = sum sum += t for a_i in a[:n]: # sort b[c[r[a_i]]] = a_i c[r[a_i]] += 1
[ "def", "_radixPass", "(", "a", ",", "b", ",", "r", ",", "n", ",", "K", ")", ":", "c", "=", "_array", "(", "\"i\"", ",", "[", "0", "]", "*", "(", "K", "+", "1", ")", ")", "# counter array", "for", "i", "in", "range", "(", "n", ")", ":", "# count occurrences", "c", "[", "r", "[", "a", "[", "i", "]", "]", "]", "+=", "1", "sum", "=", "0", "for", "i", "in", "range", "(", "K", "+", "1", ")", ":", "# exclusive prefix sums", "t", "=", "c", "[", "i", "]", "c", "[", "i", "]", "=", "sum", "sum", "+=", "t", "for", "a_i", "in", "a", "[", ":", "n", "]", ":", "# sort", "b", "[", "c", "[", "r", "[", "a_i", "]", "]", "]", "=", "a_i", "c", "[", "r", "[", "a_i", "]", "]", "+=", "1" ]
18.37037
23.703704
def export_node(bpmn_graph, export_elements, node, nodes_classification, order=0, prefix="", condition="", who="", add_join=False): """ General method for node exporting :param bpmn_graph: an instance of BpmnDiagramGraph class, :param export_elements: a dictionary object. The key is a node ID, value is a dictionary of parameters that will be used in exported CSV document, :param node: networkx.Node object, :param nodes_classification: dictionary of classification labels. Key - node id. Value - a list of labels, :param order: the order param of exported node, :param prefix: the prefix of exported node - if the task appears after some gateway, the prefix will identify the branch :param condition: the condition param of exported node, :param who: the condition param of exported node, :param add_join: boolean flag. Used to indicate if "Join" element should be added to CSV. :return: None or the next node object if the exported node was a gateway join. """ node_type = node[1][consts.Consts.type] if node_type == consts.Consts.start_event: return BpmnDiagramGraphCsvExport.export_start_event(bpmn_graph, export_elements, node, nodes_classification, order=order, prefix=prefix, condition=condition, who=who) elif node_type == consts.Consts.end_event: return BpmnDiagramGraphCsvExport.export_end_event(export_elements, node, order=order, prefix=prefix, condition=condition, who=who) else: return BpmnDiagramGraphCsvExport.export_element(bpmn_graph, export_elements, node, nodes_classification, order=order, prefix=prefix, condition=condition, who=who, add_join=add_join)
[ "def", "export_node", "(", "bpmn_graph", ",", "export_elements", ",", "node", ",", "nodes_classification", ",", "order", "=", "0", ",", "prefix", "=", "\"\"", ",", "condition", "=", "\"\"", ",", "who", "=", "\"\"", ",", "add_join", "=", "False", ")", ":", "node_type", "=", "node", "[", "1", "]", "[", "consts", ".", "Consts", ".", "type", "]", "if", "node_type", "==", "consts", ".", "Consts", ".", "start_event", ":", "return", "BpmnDiagramGraphCsvExport", ".", "export_start_event", "(", "bpmn_graph", ",", "export_elements", ",", "node", ",", "nodes_classification", ",", "order", "=", "order", ",", "prefix", "=", "prefix", ",", "condition", "=", "condition", ",", "who", "=", "who", ")", "elif", "node_type", "==", "consts", ".", "Consts", ".", "end_event", ":", "return", "BpmnDiagramGraphCsvExport", ".", "export_end_event", "(", "export_elements", ",", "node", ",", "order", "=", "order", ",", "prefix", "=", "prefix", ",", "condition", "=", "condition", ",", "who", "=", "who", ")", "else", ":", "return", "BpmnDiagramGraphCsvExport", ".", "export_element", "(", "bpmn_graph", ",", "export_elements", ",", "node", ",", "nodes_classification", ",", "order", "=", "order", ",", "prefix", "=", "prefix", ",", "condition", "=", "condition", ",", "who", "=", "who", ",", "add_join", "=", "add_join", ")" ]
69.333333
37
def react(reactor, main, argv): """ Call C{main} and run the reactor until the L{Deferred} it returns fires. @param reactor: An unstarted L{IReactorCore} provider which will be run and later stopped. @param main: A callable which returns a L{Deferred}. It should take as many arguments as there are elements in the list C{argv}. @param argv: A list of arguments to pass to C{main}. @return: C{None} """ stopping = [] reactor.addSystemEventTrigger('before', 'shutdown', stopping.append, True) finished = main(reactor, *argv) finished.addErrback(err, "main function encountered error") def cbFinish(ignored): if not stopping: reactor.callWhenRunning(reactor.stop) finished.addCallback(cbFinish) reactor.run()
[ "def", "react", "(", "reactor", ",", "main", ",", "argv", ")", ":", "stopping", "=", "[", "]", "reactor", ".", "addSystemEventTrigger", "(", "'before'", ",", "'shutdown'", ",", "stopping", ".", "append", ",", "True", ")", "finished", "=", "main", "(", "reactor", ",", "*", "argv", ")", "finished", ".", "addErrback", "(", "err", ",", "\"main function encountered error\"", ")", "def", "cbFinish", "(", "ignored", ")", ":", "if", "not", "stopping", ":", "reactor", ".", "callWhenRunning", "(", "reactor", ".", "stop", ")", "finished", ".", "addCallback", "(", "cbFinish", ")", "reactor", ".", "run", "(", ")" ]
33.956522
22.391304
def format_help(self, description): """ Format the setting's description into HTML. """ for bold in ("``", "*"): parts = [] if description is None: description = "" for i, s in enumerate(description.split(bold)): parts.append(s if i % 2 == 0 else "<b>%s</b>" % s) description = "".join(parts) description = urlize(description, autoescape=False) return mark_safe(description.replace("\n", "<br>"))
[ "def", "format_help", "(", "self", ",", "description", ")", ":", "for", "bold", "in", "(", "\"``\"", ",", "\"*\"", ")", ":", "parts", "=", "[", "]", "if", "description", "is", "None", ":", "description", "=", "\"\"", "for", "i", ",", "s", "in", "enumerate", "(", "description", ".", "split", "(", "bold", ")", ")", ":", "parts", ".", "append", "(", "s", "if", "i", "%", "2", "==", "0", "else", "\"<b>%s</b>\"", "%", "s", ")", "description", "=", "\"\"", ".", "join", "(", "parts", ")", "description", "=", "urlize", "(", "description", ",", "autoescape", "=", "False", ")", "return", "mark_safe", "(", "description", ".", "replace", "(", "\"\\n\"", ",", "\"<br>\"", ")", ")" ]
39.384615
10.615385
def add_tour_step(self, message, selector=None, name=None, title=None, theme=None, alignment=None, duration=None): """ Allows the user to add tour steps for a website. @Params message - The message to display. selector - The CSS Selector of the Element to attach to. name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. title - Additional header text that appears above the message. theme - (NON-Bootstrap Tours ONLY) The styling of the tour step. Choose from "light"/"arrows", "dark", "default", "square", and "square-dark". ("arrows" is used if None is selected.) alignment - Choose from "top", "bottom", "left", and "right". ("top" is the default alignment). duration - (Bootstrap Tours ONLY) The amount of time, in seconds, before automatically advancing to the next tour step. """ if not selector: selector = "html" if page_utils.is_xpath_selector(selector): selector = self.convert_to_css_selector(selector, By.XPATH) selector = self.__escape_quotes_if_needed(selector) if not name: name = "default" if name not in self._tour_steps: # By default, will create an IntroJS tour if no tours exist self.create_tour(name=name, theme="introjs") if not title: title = "" title = self.__escape_quotes_if_needed(title) if message: message = self.__escape_quotes_if_needed(message) else: message = "" if not alignment or ( alignment not in ["top", "bottom", "left", "right"]): if "Hopscotch" not in self._tour_steps[name][0]: alignment = "top" else: alignment = "bottom" if "Bootstrap" in self._tour_steps[name][0]: self.__add_bootstrap_tour_step( message, selector=selector, name=name, title=title, alignment=alignment, duration=duration) elif "Hopscotch" in self._tour_steps[name][0]: self.__add_hopscotch_tour_step( message, selector=selector, name=name, title=title, alignment=alignment) elif "IntroJS" in self._tour_steps[name][0]: self.__add_introjs_tour_step( message, selector=selector, name=name, title=title, alignment=alignment) else: 
self.__add_shepherd_tour_step( message, selector=selector, name=name, title=title, theme=theme, alignment=alignment)
[ "def", "add_tour_step", "(", "self", ",", "message", ",", "selector", "=", "None", ",", "name", "=", "None", ",", "title", "=", "None", ",", "theme", "=", "None", ",", "alignment", "=", "None", ",", "duration", "=", "None", ")", ":", "if", "not", "selector", ":", "selector", "=", "\"html\"", "if", "page_utils", ".", "is_xpath_selector", "(", "selector", ")", ":", "selector", "=", "self", ".", "convert_to_css_selector", "(", "selector", ",", "By", ".", "XPATH", ")", "selector", "=", "self", ".", "__escape_quotes_if_needed", "(", "selector", ")", "if", "not", "name", ":", "name", "=", "\"default\"", "if", "name", "not", "in", "self", ".", "_tour_steps", ":", "# By default, will create an IntroJS tour if no tours exist", "self", ".", "create_tour", "(", "name", "=", "name", ",", "theme", "=", "\"introjs\"", ")", "if", "not", "title", ":", "title", "=", "\"\"", "title", "=", "self", ".", "__escape_quotes_if_needed", "(", "title", ")", "if", "message", ":", "message", "=", "self", ".", "__escape_quotes_if_needed", "(", "message", ")", "else", ":", "message", "=", "\"\"", "if", "not", "alignment", "or", "(", "alignment", "not", "in", "[", "\"top\"", ",", "\"bottom\"", ",", "\"left\"", ",", "\"right\"", "]", ")", ":", "if", "\"Hopscotch\"", "not", "in", "self", ".", "_tour_steps", "[", "name", "]", "[", "0", "]", ":", "alignment", "=", "\"top\"", "else", ":", "alignment", "=", "\"bottom\"", "if", "\"Bootstrap\"", "in", "self", ".", "_tour_steps", "[", "name", "]", "[", "0", "]", ":", "self", ".", "__add_bootstrap_tour_step", "(", "message", ",", "selector", "=", "selector", ",", "name", "=", "name", ",", "title", "=", "title", ",", "alignment", "=", "alignment", ",", "duration", "=", "duration", ")", "elif", "\"Hopscotch\"", "in", "self", ".", "_tour_steps", "[", "name", "]", "[", "0", "]", ":", "self", ".", "__add_hopscotch_tour_step", "(", "message", ",", "selector", "=", "selector", ",", "name", "=", "name", ",", "title", "=", "title", ",", "alignment", "=", 
"alignment", ")", "elif", "\"IntroJS\"", "in", "self", ".", "_tour_steps", "[", "name", "]", "[", "0", "]", ":", "self", ".", "__add_introjs_tour_step", "(", "message", ",", "selector", "=", "selector", ",", "name", "=", "name", ",", "title", "=", "title", ",", "alignment", "=", "alignment", ")", "else", ":", "self", ".", "__add_shepherd_tour_step", "(", "message", ",", "selector", "=", "selector", ",", "name", "=", "name", ",", "title", "=", "title", ",", "theme", "=", "theme", ",", "alignment", "=", "alignment", ")" ]
45.098361
20.360656