text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def make_quadratic(poly, strength, vartype=None, bqm=None): """Create a binary quadratic model from a higher order polynomial. Args: poly (dict): Polynomial as a dict of form {term: bias, ...}, where `term` is a tuple of variables and `bias` the associated bias. strength (float): Strength of the reduction constraint. Insufficient strength can result in the binary quadratic model not having the same minimizations as the polynomial. vartype (:class:`.Vartype`, optional): Vartype of the polynomial. If `bqm` is provided, vartype is not required. bqm (:class:`.BinaryQuadraticModel`, optional): The terms of the reduced polynomial are added to this binary quadratic model. If not provided, a new binary quadratic model is created. Returns: :class:`.BinaryQuadraticModel` Examples: >>> poly = {(0,): -1, (1,): 1, (2,): 1.5, (0, 1): -1, (0, 1, 2): -2} >>> bqm = dimod.make_quadratic(poly, 5.0, dimod.SPIN) """ if bqm is None: if vartype is None: raise ValueError("one of vartype and bqm must be provided") bqm = BinaryQuadraticModel.empty(vartype) else: if not isinstance(bqm, BinaryQuadraticModel): raise TypeError('create_using must be a BinaryQuadraticModel') if vartype is not None and vartype is not bqm.vartype: raise ValueError("one of vartype and create_using must be provided") bqm.info['reduction'] = {} new_poly = {} for term, bias in iteritems(poly): if len(term) == 0: bqm.add_offset(bias) elif len(term) == 1: v, = term bqm.add_variable(v, bias) else: new_poly[term] = bias return _reduce_degree(bqm, new_poly, vartype, strength)
[ "def", "make_quadratic", "(", "poly", ",", "strength", ",", "vartype", "=", "None", ",", "bqm", "=", "None", ")", ":", "if", "bqm", "is", "None", ":", "if", "vartype", "is", "None", ":", "raise", "ValueError", "(", "\"one of vartype and bqm must be provided\"", ")", "bqm", "=", "BinaryQuadraticModel", ".", "empty", "(", "vartype", ")", "else", ":", "if", "not", "isinstance", "(", "bqm", ",", "BinaryQuadraticModel", ")", ":", "raise", "TypeError", "(", "'create_using must be a BinaryQuadraticModel'", ")", "if", "vartype", "is", "not", "None", "and", "vartype", "is", "not", "bqm", ".", "vartype", ":", "raise", "ValueError", "(", "\"one of vartype and create_using must be provided\"", ")", "bqm", ".", "info", "[", "'reduction'", "]", "=", "{", "}", "new_poly", "=", "{", "}", "for", "term", ",", "bias", "in", "iteritems", "(", "poly", ")", ":", "if", "len", "(", "term", ")", "==", "0", ":", "bqm", ".", "add_offset", "(", "bias", ")", "elif", "len", "(", "term", ")", "==", "1", ":", "v", ",", "=", "term", "bqm", ".", "add_variable", "(", "v", ",", "bias", ")", "else", ":", "new_poly", "[", "term", "]", "=", "bias", "return", "_reduce_degree", "(", "bqm", ",", "new_poly", ",", "vartype", ",", "strength", ")" ]
35.823529
25.411765
def add_item(self, item, replace = False): """ Add an item to the roster. This will not automatically update the roster on the server. :Parameters: - `item`: the item to add - `replace`: if `True` then existing item will be replaced, otherwise a `ValueError` will be raised on conflict :Types: - `item`: `RosterItem` - `replace`: `bool` """ if item.jid in self._jids: if replace: self.remove_item(item.jid) else: raise ValueError("JID already in the roster") index = len(self._items) self._items.append(item) self._jids[item.jid] = index
[ "def", "add_item", "(", "self", ",", "item", ",", "replace", "=", "False", ")", ":", "if", "item", ".", "jid", "in", "self", ".", "_jids", ":", "if", "replace", ":", "self", ".", "remove_item", "(", "item", ".", "jid", ")", "else", ":", "raise", "ValueError", "(", "\"JID already in the roster\"", ")", "index", "=", "len", "(", "self", ".", "_items", ")", "self", ".", "_items", ".", "append", "(", "item", ")", "self", ".", "_jids", "[", "item", ".", "jid", "]", "=", "index" ]
32.545455
14.727273
def _reinit_daq_daemons(sender, instance, **kwargs): """ update the daq daemon configuration when changes be applied in the models """ if type(instance) is VISADevice: post_save.send_robust(sender=Device, instance=instance.visa_device) elif type(instance) is VISAVariable: post_save.send_robust(sender=Variable, instance=instance.visa_variable) elif type(instance) is VISADeviceHandler: # todo pass elif type(instance) is ExtendedVISAVariable: post_save.send_robust(sender=Variable, instance=Variable.objects.get(pk=instance.pk)) elif type(instance) is ExtendedVISADevice: post_save.send_robust(sender=Device, instance=Device.objects.get(pk=instance.pk))
[ "def", "_reinit_daq_daemons", "(", "sender", ",", "instance", ",", "*", "*", "kwargs", ")", ":", "if", "type", "(", "instance", ")", "is", "VISADevice", ":", "post_save", ".", "send_robust", "(", "sender", "=", "Device", ",", "instance", "=", "instance", ".", "visa_device", ")", "elif", "type", "(", "instance", ")", "is", "VISAVariable", ":", "post_save", ".", "send_robust", "(", "sender", "=", "Variable", ",", "instance", "=", "instance", ".", "visa_variable", ")", "elif", "type", "(", "instance", ")", "is", "VISADeviceHandler", ":", "# todo", "pass", "elif", "type", "(", "instance", ")", "is", "ExtendedVISAVariable", ":", "post_save", ".", "send_robust", "(", "sender", "=", "Variable", ",", "instance", "=", "Variable", ".", "objects", ".", "get", "(", "pk", "=", "instance", ".", "pk", ")", ")", "elif", "type", "(", "instance", ")", "is", "ExtendedVISADevice", ":", "post_save", ".", "send_robust", "(", "sender", "=", "Device", ",", "instance", "=", "Device", ".", "objects", ".", "get", "(", "pk", "=", "instance", ".", "pk", ")", ")" ]
48
20.133333
def get_requester(self): """ Returns an object to make authenticated requests. See python `requests` for the API. """ # TODO(abrahms): Perhaps pull this out into a factory function for # generating an EdxApi instance with the proper requester & credentials. session = requests.session() session.headers.update({ 'Authorization': 'Bearer {}'.format(self.credentials['access_token']) }) old_request = session.request def patched_request(*args, **kwargs): """ adds timeout param to session.request """ return old_request(*args, timeout=self.timeout, **kwargs) session.request = patched_request return session
[ "def", "get_requester", "(", "self", ")", ":", "# TODO(abrahms): Perhaps pull this out into a factory function for", "# generating an EdxApi instance with the proper requester & credentials.", "session", "=", "requests", ".", "session", "(", ")", "session", ".", "headers", ".", "update", "(", "{", "'Authorization'", ":", "'Bearer {}'", ".", "format", "(", "self", ".", "credentials", "[", "'access_token'", "]", ")", "}", ")", "old_request", "=", "session", ".", "request", "def", "patched_request", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"\n adds timeout param to session.request\n \"\"\"", "return", "old_request", "(", "*", "args", ",", "timeout", "=", "self", ".", "timeout", ",", "*", "*", "kwargs", ")", "session", ".", "request", "=", "patched_request", "return", "session" ]
35.428571
19.52381
def filter(self, cls_or_collection, query, raw = False,only = None,include = None): """ Filter objects from the database that correspond to a given set of properties. See :py:meth:`blitzdb.backends.base.Backend.filter` for documentation of individual parameters .. note:: This function supports all query operators that are available in SQLAlchemy and returns a query set that is based on a SQLAlchemy cursor. """ if not isinstance(cls_or_collection, six.string_types): collection = self.get_collection_for_cls(cls_or_collection) cls = cls_or_collection else: collection = cls_or_collection cls = self.get_cls_for_collection(collection) table = self._collection_tables[collection] joins = defaultdict(dict) joins_list = [] group_bys = [] havings = [] def compile_query(collection,query,table = None,path = None): if path is None: path = [] """ This function emits a list of WHERE statements that can be used to retrieve """ if table is None: table = self._collection_tables[collection] where_statements = [] if any([True if key.startswith('$') else False for key in query.keys()]): #this is a special operator query if len(query) > 1: raise AttributeError('Currently not supported!') operator = list(query.keys())[0][1:] if not operator in ('and','or','not'): raise AttributeError("Non-supported logical operator: $%s" % operator) if operator in ('and','or'): where_statements = [sq for expr in query['$%s' % operator] for sq in compile_query(collection,expr,path = path)] if operator == 'and': return [and_(*where_statements)] else: return [or_(*where_statements)] elif operator == 'not': return [not_(*compile_query(collection,query['$not'],table = table,path = path))] def compile_one_to_many_query(key,query,field_name,related_table,count_column,path): def prepare_subquery(tail,query_dict): d = {} if not tail: if isinstance(query_dict,dict): return query_dict.copy() if not isinstance(query_dict,Document): raise AttributeError("Must be a document!") if not query_dict.pk: raise 
AttributeError("Performing a query without a primary key!") return {'pk' : query_dict.pk} return {tail : query_dict} tail = key[len(field_name)+1:] if isinstance(query,Document) and not tail: query = {'pk' : query.pk} #to do: implement $size and $not: {$size} operators... if isinstance(query,dict) and len(query) == 1 and list(query.keys())[0] in ('$all','$in','$elemMatch','$nin'): #this is an $in/$all/$nin query query_type = list(query.keys())[0][1:] subquery = list(query.values())[0] if query_type == 'elemMatch': queries = compile_query(params['collection'], prepare_subquery(tail,query['$elemMatch']), table = related_table, path = path) return queries else: if isinstance(subquery,(ManyToManyProxy,QuerySet)): if tail: #this query has a tail query = {tail : query} queries = compile_query(params['collection'],query, table = related_table, path = path) return queries #this is a query with a ManyToManyProxy/QuerySet if isinstance(subquery,ManyToManyProxy): qs = subquery.get_queryset() else: qs = subquery if not query_type in ('in','nin','all'): raise AttributeError if query_type == 'all': op = 'in' else: op = query_type if query_type == 'all': cnt = func.count(count_column) condition = cnt == qs.get_count_select() havings.append(condition) return [getattr(related_table.c['pk'],op+'_')(qs.get_select(columns = ['pk']))] elif isinstance(subquery,(list,tuple)): if subquery and isinstance(subquery[0],dict) and len(subquery[0]) == 1 and \ list(subquery[0].keys())[0] == '$elemMatch': queries = [sq for v in subquery for sq in compile_query(params['collection'], prepare_subquery(tail,v['$elemMatch']), table = related_table, path = path)] else: queries = [sq for v in subquery for sq in compile_query(params['collection'], prepare_subquery(tail,v), table = related_table, path = path)] where_statement = or_(*queries) if query_type == 'nin': where_statement = not_(where_statement) if query_type == 'all' and len(queries) > 1: cnt = func.count(count_column) havings.append(cnt == 
len(queries)) return [where_statement] else: raise AttributeError("$in/$nin/$all query requires a list/tuple/QuerySet/ManyToManyProxy") else: return compile_query(params['collection'],prepare_subquery(tail,query), table = related_table, path = path) def compile_many_to_many_query(key,value,field_name,params,relationship_table,path): related_collection = params['collection'] related_table = self._collection_tables[related_collection] path_str = ".".join(path) if path_str in joins[relationship_table]: relationship_table_alias = joins[relationship_table][path_str] else: relationship_table_alias = relationship_table.alias() joins[relationship_table][path_str] = relationship_table_alias joins_list.append((relationship_table_alias, relationship_table_alias.c[params['pk_field_name']] == table.c['pk'])) if path_str in joins[related_table]: related_table_alias = joins[related_table][path_str] else: related_table_alias = related_table.alias() joins[related_table][path_str] = related_table_alias joins_list.append((related_table_alias,relationship_table_alias.c[params['related_pk_field_name']] == related_table_alias.c['pk'])) return compile_one_to_many_query(key,value,field_name,related_table_alias,relationship_table_alias.c[params['pk_field_name']],new_path) def prepare_special_query(field_name,params,query): def sanitize(value): if isinstance(value,(list,tuple)): return [v.pk if isinstance(v,Document) else v for v in value] return value column_name = params['column'] if '$not' in query: return [not_(*prepare_special_query(column_name,params,sanitize(query['$not'])))] elif '$in' in query: if not query['$in']: #we return an impossible condition since the $in query does not contain any values return [expression.cast(True,Boolean) == expression.cast(False,Boolean)] return [table.c[column_name].in_(sanitize(query['$in']))] elif '$nin' in query: if not query['$nin']: return [expression.cast(True,Boolean) == expression.cast(False,Boolean)] return 
[~table.c[column_name].in_(sanitize(query['$nin']))] elif '$eq' in query: return [table.c[column_name] == sanitize(query['$eq'])] elif '$ne' in query: return [table.c[column_name] != sanitize(query['$ne'])] elif '$gt' in query: return [table.c[column_name] > sanitize(query['$gt'])] elif '$gte' in query: return [table.c[column_name] >= sanitize(query['$gte'])] elif '$lt' in query: return [table.c[column_name] < sanitize(query['$lt'])] elif '$lte' in query: return [table.c[column_name] <= sanitize(query['$lte'])] elif '$exists' in query: if query['$exists']: return [table.c[column_name] != None] else: return [table.c[column_name] == None] elif '$like' in query: return [table.c[column_name].like(expression.cast(query['$like'],String))] elif '$ilike' in query: return [table.c[column_name].ilike(expression.cast(query['$ilike'],String))] elif '$regex' in query: if not self.engine.url.drivername in ('postgres','mysql','sqlite'): raise AttributeError("Regex queries not supported with %s engine!" 
% self.engine.url.drivername) return [table.c[column_name].op('REGEXP')(expression.cast(query['$regex'],String))] else: raise AttributeError("Invalid query!") #this is a normal, field-base query for key,value in query.items(): for field_name,params in self._index_fields[collection].items(): if key == field_name: if isinstance(value,re._pattern_type): value = {'$regex' : value.pattern} if isinstance(value,dict): #this is a special query where_statements.extend(prepare_special_query(field_name,params,value)) else: #this is a normal value query where_statements.append(table.c[params['column']] == expression.cast(value,params['type'])) break else: #we check the normal relationships for field_name,params in self._related_fields[collection].items(): if key.startswith(field_name): head,tail = key[:len(field_name)],key[len(field_name)+1:] new_path = path + [head] path_str = ".".join(new_path) #ManyToManyField if isinstance(params['field'],ManyToManyField): relationship_table = self._relationship_tables[collection][field_name] where_statements.extend(compile_many_to_many_query(key,value,field_name,params,relationship_table,path = new_path)) elif isinstance(params['field'],ForeignKeyField):#this is a normal ForeignKey relation if key == field_name: #this is a ForeignKey query if isinstance(value,dict): if len(value) == 1: key,query = list(value.items())[0] if key == '$exists': if not isinstance(query,bool): raise AttributeError("$exists operator requires a Boolean operator") if query: where_statements.append(table.c[params['column']] != None) else: where_statements.append(table.c[params['column']] == None) break elif not key in ('$in','$nin'): raise AttributeError("Invalid query!") query_type = key[1:] else: raise AttributeError("Invalid query!") else: query_type = 'exact' query = value if isinstance(query,(QuerySet,ManyToManyProxy)): if not query_type in ('in','nin'): raise AttributeError("QuerySet/ManyToManyProxy objects must be used in conjunction with $in/$nin when 
querying a ForeignKey relationship") if isinstance(query,ManyToManyProxy): qs = query.get_queryset() else: qs = query if qs.count is not None and qs.count == 0: raise AttributeError("$in/$nin query with empty QuerySet/ManyToManyProxy!") if qs.cls is not params['class']: raise AttributeError("Invalid QuerySet class!") condition = getattr(table.c[params['column']],query_type+'_')(qs.get_select(columns = ['pk'])) where_statements.append(condition) elif isinstance(query,(list,tuple)): if not query_type in ('in','nin'): raise AttributeError("Lists/tuples must be used in conjunction with $in/$nin when querying a ForeignKey relationship") if not query: raise AttributeError("in/nin query with empty list!") if query[0].__class__ is params['class']: if any((element.__class__ is not params['class'] for element in query)): raise AttributeError("Invalid document type in ForeignKey query") where_statements.append(getattr(table.c[params['column']],query_type+'_')([expression.cast(doc.pk,params['type']) for doc in query])) else: where_statements.append(getattr(table.c[params['column']],query_type+'_')([expression.cast(element,params['type']) for element in query])) elif isinstance(query,Document): #we need an exact clas match here... 
if query.__class__ is not params['class']: raise AttributeError("Invalid Document class!") where_statements.append(table.c[params['column']] == query.pk) else: where_statements.append(table.c[params['column']] == expression.cast(query,params['class'].Meta.PkType)) else: #we query a sub-field of the relation related_table = self._collection_tables[params['collection']] if path_str in joins[related_table]: related_table_alias = joins[related_table][path_str] else: related_table_alias = related_table.alias() joins[related_table][path_str] = related_table_alias joins_list.append((related_table_alias,table.c[params['column']] == related_table_alias.c['pk'])) where_statements.extend(compile_query(params['collection'],{tail : value},table = related_table_alias,path = new_path)) elif isinstance(params['field'],OneToManyField): related_table = self._collection_tables[params['collection']] if path_str in joins[related_table]: related_table_alias = joins[related_table][path_str] else: related_table_alias = related_table.alias() joins[related_table][path_str] = related_table_alias joins_list.append((related_table_alias,related_table_alias.c[params['backref']['column']] == table.c['pk'])) where_statements.extend(compile_one_to_many_query(key,value,field_name,related_table_alias,table.c.pk,new_path)) break else: raise AttributeError("Query over non-indexed field %s in collection %s!" % (key,collection)) return where_statements compiled_query = compile_query(collection,query) if len(compiled_query) > 1: compiled_query = and_(*compiled_query) elif compiled_query: compiled_query = compiled_query[0] else: compiled_query = None return QuerySet(backend = self, table = table, joins = joins_list, cls = cls, condition = compiled_query, raw = raw, group_bys = group_bys, only = only, include = include, havings = havings )
[ "def", "filter", "(", "self", ",", "cls_or_collection", ",", "query", ",", "raw", "=", "False", ",", "only", "=", "None", ",", "include", "=", "None", ")", ":", "if", "not", "isinstance", "(", "cls_or_collection", ",", "six", ".", "string_types", ")", ":", "collection", "=", "self", ".", "get_collection_for_cls", "(", "cls_or_collection", ")", "cls", "=", "cls_or_collection", "else", ":", "collection", "=", "cls_or_collection", "cls", "=", "self", ".", "get_cls_for_collection", "(", "collection", ")", "table", "=", "self", ".", "_collection_tables", "[", "collection", "]", "joins", "=", "defaultdict", "(", "dict", ")", "joins_list", "=", "[", "]", "group_bys", "=", "[", "]", "havings", "=", "[", "]", "def", "compile_query", "(", "collection", ",", "query", ",", "table", "=", "None", ",", "path", "=", "None", ")", ":", "if", "path", "is", "None", ":", "path", "=", "[", "]", "\"\"\"\n This function emits a list of WHERE statements that can be used to retrieve\n \"\"\"", "if", "table", "is", "None", ":", "table", "=", "self", ".", "_collection_tables", "[", "collection", "]", "where_statements", "=", "[", "]", "if", "any", "(", "[", "True", "if", "key", ".", "startswith", "(", "'$'", ")", "else", "False", "for", "key", "in", "query", ".", "keys", "(", ")", "]", ")", ":", "#this is a special operator query", "if", "len", "(", "query", ")", ">", "1", ":", "raise", "AttributeError", "(", "'Currently not supported!'", ")", "operator", "=", "list", "(", "query", ".", "keys", "(", ")", ")", "[", "0", "]", "[", "1", ":", "]", "if", "not", "operator", "in", "(", "'and'", ",", "'or'", ",", "'not'", ")", ":", "raise", "AttributeError", "(", "\"Non-supported logical operator: $%s\"", "%", "operator", ")", "if", "operator", "in", "(", "'and'", ",", "'or'", ")", ":", "where_statements", "=", "[", "sq", "for", "expr", "in", "query", "[", "'$%s'", "%", "operator", "]", "for", "sq", "in", "compile_query", "(", "collection", ",", "expr", ",", "path", "=", "path", ")", "]", "if", 
"operator", "==", "'and'", ":", "return", "[", "and_", "(", "*", "where_statements", ")", "]", "else", ":", "return", "[", "or_", "(", "*", "where_statements", ")", "]", "elif", "operator", "==", "'not'", ":", "return", "[", "not_", "(", "*", "compile_query", "(", "collection", ",", "query", "[", "'$not'", "]", ",", "table", "=", "table", ",", "path", "=", "path", ")", ")", "]", "def", "compile_one_to_many_query", "(", "key", ",", "query", ",", "field_name", ",", "related_table", ",", "count_column", ",", "path", ")", ":", "def", "prepare_subquery", "(", "tail", ",", "query_dict", ")", ":", "d", "=", "{", "}", "if", "not", "tail", ":", "if", "isinstance", "(", "query_dict", ",", "dict", ")", ":", "return", "query_dict", ".", "copy", "(", ")", "if", "not", "isinstance", "(", "query_dict", ",", "Document", ")", ":", "raise", "AttributeError", "(", "\"Must be a document!\"", ")", "if", "not", "query_dict", ".", "pk", ":", "raise", "AttributeError", "(", "\"Performing a query without a primary key!\"", ")", "return", "{", "'pk'", ":", "query_dict", ".", "pk", "}", "return", "{", "tail", ":", "query_dict", "}", "tail", "=", "key", "[", "len", "(", "field_name", ")", "+", "1", ":", "]", "if", "isinstance", "(", "query", ",", "Document", ")", "and", "not", "tail", ":", "query", "=", "{", "'pk'", ":", "query", ".", "pk", "}", "#to do: implement $size and $not: {$size} operators...", "if", "isinstance", "(", "query", ",", "dict", ")", "and", "len", "(", "query", ")", "==", "1", "and", "list", "(", "query", ".", "keys", "(", ")", ")", "[", "0", "]", "in", "(", "'$all'", ",", "'$in'", ",", "'$elemMatch'", ",", "'$nin'", ")", ":", "#this is an $in/$all/$nin query", "query_type", "=", "list", "(", "query", ".", "keys", "(", ")", ")", "[", "0", "]", "[", "1", ":", "]", "subquery", "=", "list", "(", "query", ".", "values", "(", ")", ")", "[", "0", "]", "if", "query_type", "==", "'elemMatch'", ":", "queries", "=", "compile_query", "(", "params", "[", "'collection'", "]", ",", 
"prepare_subquery", "(", "tail", ",", "query", "[", "'$elemMatch'", "]", ")", ",", "table", "=", "related_table", ",", "path", "=", "path", ")", "return", "queries", "else", ":", "if", "isinstance", "(", "subquery", ",", "(", "ManyToManyProxy", ",", "QuerySet", ")", ")", ":", "if", "tail", ":", "#this query has a tail", "query", "=", "{", "tail", ":", "query", "}", "queries", "=", "compile_query", "(", "params", "[", "'collection'", "]", ",", "query", ",", "table", "=", "related_table", ",", "path", "=", "path", ")", "return", "queries", "#this is a query with a ManyToManyProxy/QuerySet", "if", "isinstance", "(", "subquery", ",", "ManyToManyProxy", ")", ":", "qs", "=", "subquery", ".", "get_queryset", "(", ")", "else", ":", "qs", "=", "subquery", "if", "not", "query_type", "in", "(", "'in'", ",", "'nin'", ",", "'all'", ")", ":", "raise", "AttributeError", "if", "query_type", "==", "'all'", ":", "op", "=", "'in'", "else", ":", "op", "=", "query_type", "if", "query_type", "==", "'all'", ":", "cnt", "=", "func", ".", "count", "(", "count_column", ")", "condition", "=", "cnt", "==", "qs", ".", "get_count_select", "(", ")", "havings", ".", "append", "(", "condition", ")", "return", "[", "getattr", "(", "related_table", ".", "c", "[", "'pk'", "]", ",", "op", "+", "'_'", ")", "(", "qs", ".", "get_select", "(", "columns", "=", "[", "'pk'", "]", ")", ")", "]", "elif", "isinstance", "(", "subquery", ",", "(", "list", ",", "tuple", ")", ")", ":", "if", "subquery", "and", "isinstance", "(", "subquery", "[", "0", "]", ",", "dict", ")", "and", "len", "(", "subquery", "[", "0", "]", ")", "==", "1", "and", "list", "(", "subquery", "[", "0", "]", ".", "keys", "(", ")", ")", "[", "0", "]", "==", "'$elemMatch'", ":", "queries", "=", "[", "sq", "for", "v", "in", "subquery", "for", "sq", "in", "compile_query", "(", "params", "[", "'collection'", "]", ",", "prepare_subquery", "(", "tail", ",", "v", "[", "'$elemMatch'", "]", ")", ",", "table", "=", "related_table", ",", "path", "=", 
"path", ")", "]", "else", ":", "queries", "=", "[", "sq", "for", "v", "in", "subquery", "for", "sq", "in", "compile_query", "(", "params", "[", "'collection'", "]", ",", "prepare_subquery", "(", "tail", ",", "v", ")", ",", "table", "=", "related_table", ",", "path", "=", "path", ")", "]", "where_statement", "=", "or_", "(", "*", "queries", ")", "if", "query_type", "==", "'nin'", ":", "where_statement", "=", "not_", "(", "where_statement", ")", "if", "query_type", "==", "'all'", "and", "len", "(", "queries", ")", ">", "1", ":", "cnt", "=", "func", ".", "count", "(", "count_column", ")", "havings", ".", "append", "(", "cnt", "==", "len", "(", "queries", ")", ")", "return", "[", "where_statement", "]", "else", ":", "raise", "AttributeError", "(", "\"$in/$nin/$all query requires a list/tuple/QuerySet/ManyToManyProxy\"", ")", "else", ":", "return", "compile_query", "(", "params", "[", "'collection'", "]", ",", "prepare_subquery", "(", "tail", ",", "query", ")", ",", "table", "=", "related_table", ",", "path", "=", "path", ")", "def", "compile_many_to_many_query", "(", "key", ",", "value", ",", "field_name", ",", "params", ",", "relationship_table", ",", "path", ")", ":", "related_collection", "=", "params", "[", "'collection'", "]", "related_table", "=", "self", ".", "_collection_tables", "[", "related_collection", "]", "path_str", "=", "\".\"", ".", "join", "(", "path", ")", "if", "path_str", "in", "joins", "[", "relationship_table", "]", ":", "relationship_table_alias", "=", "joins", "[", "relationship_table", "]", "[", "path_str", "]", "else", ":", "relationship_table_alias", "=", "relationship_table", ".", "alias", "(", ")", "joins", "[", "relationship_table", "]", "[", "path_str", "]", "=", "relationship_table_alias", "joins_list", ".", "append", "(", "(", "relationship_table_alias", ",", "relationship_table_alias", ".", "c", "[", "params", "[", "'pk_field_name'", "]", "]", "==", "table", ".", "c", "[", "'pk'", "]", ")", ")", "if", "path_str", "in", "joins", "[", 
"related_table", "]", ":", "related_table_alias", "=", "joins", "[", "related_table", "]", "[", "path_str", "]", "else", ":", "related_table_alias", "=", "related_table", ".", "alias", "(", ")", "joins", "[", "related_table", "]", "[", "path_str", "]", "=", "related_table_alias", "joins_list", ".", "append", "(", "(", "related_table_alias", ",", "relationship_table_alias", ".", "c", "[", "params", "[", "'related_pk_field_name'", "]", "]", "==", "related_table_alias", ".", "c", "[", "'pk'", "]", ")", ")", "return", "compile_one_to_many_query", "(", "key", ",", "value", ",", "field_name", ",", "related_table_alias", ",", "relationship_table_alias", ".", "c", "[", "params", "[", "'pk_field_name'", "]", "]", ",", "new_path", ")", "def", "prepare_special_query", "(", "field_name", ",", "params", ",", "query", ")", ":", "def", "sanitize", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "[", "v", ".", "pk", "if", "isinstance", "(", "v", ",", "Document", ")", "else", "v", "for", "v", "in", "value", "]", "return", "value", "column_name", "=", "params", "[", "'column'", "]", "if", "'$not'", "in", "query", ":", "return", "[", "not_", "(", "*", "prepare_special_query", "(", "column_name", ",", "params", ",", "sanitize", "(", "query", "[", "'$not'", "]", ")", ")", ")", "]", "elif", "'$in'", "in", "query", ":", "if", "not", "query", "[", "'$in'", "]", ":", "#we return an impossible condition since the $in query does not contain any values", "return", "[", "expression", ".", "cast", "(", "True", ",", "Boolean", ")", "==", "expression", ".", "cast", "(", "False", ",", "Boolean", ")", "]", "return", "[", "table", ".", "c", "[", "column_name", "]", ".", "in_", "(", "sanitize", "(", "query", "[", "'$in'", "]", ")", ")", "]", "elif", "'$nin'", "in", "query", ":", "if", "not", "query", "[", "'$nin'", "]", ":", "return", "[", "expression", ".", "cast", "(", "True", ",", "Boolean", ")", "==", "expression", ".", "cast", 
"(", "False", ",", "Boolean", ")", "]", "return", "[", "~", "table", ".", "c", "[", "column_name", "]", ".", "in_", "(", "sanitize", "(", "query", "[", "'$nin'", "]", ")", ")", "]", "elif", "'$eq'", "in", "query", ":", "return", "[", "table", ".", "c", "[", "column_name", "]", "==", "sanitize", "(", "query", "[", "'$eq'", "]", ")", "]", "elif", "'$ne'", "in", "query", ":", "return", "[", "table", ".", "c", "[", "column_name", "]", "!=", "sanitize", "(", "query", "[", "'$ne'", "]", ")", "]", "elif", "'$gt'", "in", "query", ":", "return", "[", "table", ".", "c", "[", "column_name", "]", ">", "sanitize", "(", "query", "[", "'$gt'", "]", ")", "]", "elif", "'$gte'", "in", "query", ":", "return", "[", "table", ".", "c", "[", "column_name", "]", ">=", "sanitize", "(", "query", "[", "'$gte'", "]", ")", "]", "elif", "'$lt'", "in", "query", ":", "return", "[", "table", ".", "c", "[", "column_name", "]", "<", "sanitize", "(", "query", "[", "'$lt'", "]", ")", "]", "elif", "'$lte'", "in", "query", ":", "return", "[", "table", ".", "c", "[", "column_name", "]", "<=", "sanitize", "(", "query", "[", "'$lte'", "]", ")", "]", "elif", "'$exists'", "in", "query", ":", "if", "query", "[", "'$exists'", "]", ":", "return", "[", "table", ".", "c", "[", "column_name", "]", "!=", "None", "]", "else", ":", "return", "[", "table", ".", "c", "[", "column_name", "]", "==", "None", "]", "elif", "'$like'", "in", "query", ":", "return", "[", "table", ".", "c", "[", "column_name", "]", ".", "like", "(", "expression", ".", "cast", "(", "query", "[", "'$like'", "]", ",", "String", ")", ")", "]", "elif", "'$ilike'", "in", "query", ":", "return", "[", "table", ".", "c", "[", "column_name", "]", ".", "ilike", "(", "expression", ".", "cast", "(", "query", "[", "'$ilike'", "]", ",", "String", ")", ")", "]", "elif", "'$regex'", "in", "query", ":", "if", "not", "self", ".", "engine", ".", "url", ".", "drivername", "in", "(", "'postgres'", ",", "'mysql'", ",", "'sqlite'", ")", ":", "raise", "AttributeError", 
"(", "\"Regex queries not supported with %s engine!\"", "%", "self", ".", "engine", ".", "url", ".", "drivername", ")", "return", "[", "table", ".", "c", "[", "column_name", "]", ".", "op", "(", "'REGEXP'", ")", "(", "expression", ".", "cast", "(", "query", "[", "'$regex'", "]", ",", "String", ")", ")", "]", "else", ":", "raise", "AttributeError", "(", "\"Invalid query!\"", ")", "#this is a normal, field-base query", "for", "key", ",", "value", "in", "query", ".", "items", "(", ")", ":", "for", "field_name", ",", "params", "in", "self", ".", "_index_fields", "[", "collection", "]", ".", "items", "(", ")", ":", "if", "key", "==", "field_name", ":", "if", "isinstance", "(", "value", ",", "re", ".", "_pattern_type", ")", ":", "value", "=", "{", "'$regex'", ":", "value", ".", "pattern", "}", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "#this is a special query", "where_statements", ".", "extend", "(", "prepare_special_query", "(", "field_name", ",", "params", ",", "value", ")", ")", "else", ":", "#this is a normal value query", "where_statements", ".", "append", "(", "table", ".", "c", "[", "params", "[", "'column'", "]", "]", "==", "expression", ".", "cast", "(", "value", ",", "params", "[", "'type'", "]", ")", ")", "break", "else", ":", "#we check the normal relationships", "for", "field_name", ",", "params", "in", "self", ".", "_related_fields", "[", "collection", "]", ".", "items", "(", ")", ":", "if", "key", ".", "startswith", "(", "field_name", ")", ":", "head", ",", "tail", "=", "key", "[", ":", "len", "(", "field_name", ")", "]", ",", "key", "[", "len", "(", "field_name", ")", "+", "1", ":", "]", "new_path", "=", "path", "+", "[", "head", "]", "path_str", "=", "\".\"", ".", "join", "(", "new_path", ")", "#ManyToManyField", "if", "isinstance", "(", "params", "[", "'field'", "]", ",", "ManyToManyField", ")", ":", "relationship_table", "=", "self", ".", "_relationship_tables", "[", "collection", "]", "[", "field_name", "]", "where_statements", ".", 
"extend", "(", "compile_many_to_many_query", "(", "key", ",", "value", ",", "field_name", ",", "params", ",", "relationship_table", ",", "path", "=", "new_path", ")", ")", "elif", "isinstance", "(", "params", "[", "'field'", "]", ",", "ForeignKeyField", ")", ":", "#this is a normal ForeignKey relation", "if", "key", "==", "field_name", ":", "#this is a ForeignKey query", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "if", "len", "(", "value", ")", "==", "1", ":", "key", ",", "query", "=", "list", "(", "value", ".", "items", "(", ")", ")", "[", "0", "]", "if", "key", "==", "'$exists'", ":", "if", "not", "isinstance", "(", "query", ",", "bool", ")", ":", "raise", "AttributeError", "(", "\"$exists operator requires a Boolean operator\"", ")", "if", "query", ":", "where_statements", ".", "append", "(", "table", ".", "c", "[", "params", "[", "'column'", "]", "]", "!=", "None", ")", "else", ":", "where_statements", ".", "append", "(", "table", ".", "c", "[", "params", "[", "'column'", "]", "]", "==", "None", ")", "break", "elif", "not", "key", "in", "(", "'$in'", ",", "'$nin'", ")", ":", "raise", "AttributeError", "(", "\"Invalid query!\"", ")", "query_type", "=", "key", "[", "1", ":", "]", "else", ":", "raise", "AttributeError", "(", "\"Invalid query!\"", ")", "else", ":", "query_type", "=", "'exact'", "query", "=", "value", "if", "isinstance", "(", "query", ",", "(", "QuerySet", ",", "ManyToManyProxy", ")", ")", ":", "if", "not", "query_type", "in", "(", "'in'", ",", "'nin'", ")", ":", "raise", "AttributeError", "(", "\"QuerySet/ManyToManyProxy objects must be used in conjunction with $in/$nin when querying a ForeignKey relationship\"", ")", "if", "isinstance", "(", "query", ",", "ManyToManyProxy", ")", ":", "qs", "=", "query", ".", "get_queryset", "(", ")", "else", ":", "qs", "=", "query", "if", "qs", ".", "count", "is", "not", "None", "and", "qs", ".", "count", "==", "0", ":", "raise", "AttributeError", "(", "\"$in/$nin query with empty 
QuerySet/ManyToManyProxy!\"", ")", "if", "qs", ".", "cls", "is", "not", "params", "[", "'class'", "]", ":", "raise", "AttributeError", "(", "\"Invalid QuerySet class!\"", ")", "condition", "=", "getattr", "(", "table", ".", "c", "[", "params", "[", "'column'", "]", "]", ",", "query_type", "+", "'_'", ")", "(", "qs", ".", "get_select", "(", "columns", "=", "[", "'pk'", "]", ")", ")", "where_statements", ".", "append", "(", "condition", ")", "elif", "isinstance", "(", "query", ",", "(", "list", ",", "tuple", ")", ")", ":", "if", "not", "query_type", "in", "(", "'in'", ",", "'nin'", ")", ":", "raise", "AttributeError", "(", "\"Lists/tuples must be used in conjunction with $in/$nin when querying a ForeignKey relationship\"", ")", "if", "not", "query", ":", "raise", "AttributeError", "(", "\"in/nin query with empty list!\"", ")", "if", "query", "[", "0", "]", ".", "__class__", "is", "params", "[", "'class'", "]", ":", "if", "any", "(", "(", "element", ".", "__class__", "is", "not", "params", "[", "'class'", "]", "for", "element", "in", "query", ")", ")", ":", "raise", "AttributeError", "(", "\"Invalid document type in ForeignKey query\"", ")", "where_statements", ".", "append", "(", "getattr", "(", "table", ".", "c", "[", "params", "[", "'column'", "]", "]", ",", "query_type", "+", "'_'", ")", "(", "[", "expression", ".", "cast", "(", "doc", ".", "pk", ",", "params", "[", "'type'", "]", ")", "for", "doc", "in", "query", "]", ")", ")", "else", ":", "where_statements", ".", "append", "(", "getattr", "(", "table", ".", "c", "[", "params", "[", "'column'", "]", "]", ",", "query_type", "+", "'_'", ")", "(", "[", "expression", ".", "cast", "(", "element", ",", "params", "[", "'type'", "]", ")", "for", "element", "in", "query", "]", ")", ")", "elif", "isinstance", "(", "query", ",", "Document", ")", ":", "#we need an exact clas match here...", "if", "query", ".", "__class__", "is", "not", "params", "[", "'class'", "]", ":", "raise", "AttributeError", "(", "\"Invalid Document 
class!\"", ")", "where_statements", ".", "append", "(", "table", ".", "c", "[", "params", "[", "'column'", "]", "]", "==", "query", ".", "pk", ")", "else", ":", "where_statements", ".", "append", "(", "table", ".", "c", "[", "params", "[", "'column'", "]", "]", "==", "expression", ".", "cast", "(", "query", ",", "params", "[", "'class'", "]", ".", "Meta", ".", "PkType", ")", ")", "else", ":", "#we query a sub-field of the relation", "related_table", "=", "self", ".", "_collection_tables", "[", "params", "[", "'collection'", "]", "]", "if", "path_str", "in", "joins", "[", "related_table", "]", ":", "related_table_alias", "=", "joins", "[", "related_table", "]", "[", "path_str", "]", "else", ":", "related_table_alias", "=", "related_table", ".", "alias", "(", ")", "joins", "[", "related_table", "]", "[", "path_str", "]", "=", "related_table_alias", "joins_list", ".", "append", "(", "(", "related_table_alias", ",", "table", ".", "c", "[", "params", "[", "'column'", "]", "]", "==", "related_table_alias", ".", "c", "[", "'pk'", "]", ")", ")", "where_statements", ".", "extend", "(", "compile_query", "(", "params", "[", "'collection'", "]", ",", "{", "tail", ":", "value", "}", ",", "table", "=", "related_table_alias", ",", "path", "=", "new_path", ")", ")", "elif", "isinstance", "(", "params", "[", "'field'", "]", ",", "OneToManyField", ")", ":", "related_table", "=", "self", ".", "_collection_tables", "[", "params", "[", "'collection'", "]", "]", "if", "path_str", "in", "joins", "[", "related_table", "]", ":", "related_table_alias", "=", "joins", "[", "related_table", "]", "[", "path_str", "]", "else", ":", "related_table_alias", "=", "related_table", ".", "alias", "(", ")", "joins", "[", "related_table", "]", "[", "path_str", "]", "=", "related_table_alias", "joins_list", ".", "append", "(", "(", "related_table_alias", ",", "related_table_alias", ".", "c", "[", "params", "[", "'backref'", "]", "[", "'column'", "]", "]", "==", "table", ".", "c", "[", "'pk'", "]", ")", 
")", "where_statements", ".", "extend", "(", "compile_one_to_many_query", "(", "key", ",", "value", ",", "field_name", ",", "related_table_alias", ",", "table", ".", "c", ".", "pk", ",", "new_path", ")", ")", "break", "else", ":", "raise", "AttributeError", "(", "\"Query over non-indexed field %s in collection %s!\"", "%", "(", "key", ",", "collection", ")", ")", "return", "where_statements", "compiled_query", "=", "compile_query", "(", "collection", ",", "query", ")", "if", "len", "(", "compiled_query", ")", ">", "1", ":", "compiled_query", "=", "and_", "(", "*", "compiled_query", ")", "elif", "compiled_query", ":", "compiled_query", "=", "compiled_query", "[", "0", "]", "else", ":", "compiled_query", "=", "None", "return", "QuerySet", "(", "backend", "=", "self", ",", "table", "=", "table", ",", "joins", "=", "joins_list", ",", "cls", "=", "cls", ",", "condition", "=", "compiled_query", ",", "raw", "=", "raw", ",", "group_bys", "=", "group_bys", ",", "only", "=", "only", ",", "include", "=", "include", ",", "havings", "=", "havings", ")" ]
59.095522
31.322388
def _memoize(self, name, getter, *args, **kwargs): """ Cache a stable expensive-to-get item value for later (optimized) retrieval. """ field = "custom_m_" + name cached = self.fetch(field) if cached: value = cached else: value = getter(*args, **kwargs) self._make_it_so("caching %s=%r for" % (name, value,), ["custom.set"], field[7:], value) self._fields[field] = value return value
[ "def", "_memoize", "(", "self", ",", "name", ",", "getter", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "field", "=", "\"custom_m_\"", "+", "name", "cached", "=", "self", ".", "fetch", "(", "field", ")", "if", "cached", ":", "value", "=", "cached", "else", ":", "value", "=", "getter", "(", "*", "args", ",", "*", "*", "kwargs", ")", "self", ".", "_make_it_so", "(", "\"caching %s=%r for\"", "%", "(", "name", ",", "value", ",", ")", ",", "[", "\"custom.set\"", "]", ",", "field", "[", "7", ":", "]", ",", "value", ")", "self", ".", "_fields", "[", "field", "]", "=", "value", "return", "value" ]
39.583333
14.083333
def on_pytoml_dumps(self, pytoml, config, dictionary, **kwargs): """ The `pytoml <https://pypi.org/project/pytoml/>`_ dumps method. :param module pytoml: The ``pytoml`` module :param class config: The instance's config class :param dict dictionary: The dictionary to serialize :returns: The TOML serialization :rtype: str """ inline_tables = set(kwargs.get("inline_tables", [])) if len(inline_tables) > 0: warnings.warn("pytoml does not support 'inline_tables' argument") return pytoml.dumps(dictionary)
[ "def", "on_pytoml_dumps", "(", "self", ",", "pytoml", ",", "config", ",", "dictionary", ",", "*", "*", "kwargs", ")", ":", "inline_tables", "=", "set", "(", "kwargs", ".", "get", "(", "\"inline_tables\"", ",", "[", "]", ")", ")", "if", "len", "(", "inline_tables", ")", ">", "0", ":", "warnings", ".", "warn", "(", "\"pytoml does not support 'inline_tables' argument\"", ")", "return", "pytoml", ".", "dumps", "(", "dictionary", ")" ]
41.714286
16.785714
def run_all(logdir, verbose=False): """Generate waves of the shapes defined above. Arguments: logdir: the directory into which to store all the runs' data verbose: if true, print out each run's name as it begins """ waves = [sine_wave, square_wave, triangle_wave, bisine_wave, bisine_wahwah_wave] for (i, wave_constructor) in enumerate(waves): wave_name = wave_constructor.__name__ run_name = 'wave:%02d,%s' % (i + 1, wave_name) if verbose: print('--- Running: %s' % run_name) run(logdir, run_name, wave_name, wave_constructor)
[ "def", "run_all", "(", "logdir", ",", "verbose", "=", "False", ")", ":", "waves", "=", "[", "sine_wave", ",", "square_wave", ",", "triangle_wave", ",", "bisine_wave", ",", "bisine_wahwah_wave", "]", "for", "(", "i", ",", "wave_constructor", ")", "in", "enumerate", "(", "waves", ")", ":", "wave_name", "=", "wave_constructor", ".", "__name__", "run_name", "=", "'wave:%02d,%s'", "%", "(", "i", "+", "1", ",", "wave_name", ")", "if", "verbose", ":", "print", "(", "'--- Running: %s'", "%", "run_name", ")", "run", "(", "logdir", ",", "run_name", ",", "wave_name", ",", "wave_constructor", ")" ]
37.666667
12.533333
def track_event(self, name: str, properties: Dict[str, object] = None, measurements: Dict[str, object] = None) -> None: """ Send information about a single event that has occurred in the context of the application. :param name: the data to associate to this event. :param properties: the set of custom properties the client wants attached to this data item. (defaults to: None) :param measurements: the set of custom measurements the client wants to attach to this data item. (defaults to: None) """ pass
[ "def", "track_event", "(", "self", ",", "name", ":", "str", ",", "properties", ":", "Dict", "[", "str", ",", "object", "]", "=", "None", ",", "measurements", ":", "Dict", "[", "str", ",", "object", "]", "=", "None", ")", "->", "None", ":", "pass" ]
63.777778
36.333333
def getConfig(self): """Returns dictionary of config entries for Munin Graph. @return: Dictionary of config entries. """ return {'graph': self._graphAttrDict, 'fields': [(field_name, self._fieldAttrDict.get(field_name)) for field_name in self._fieldNameList]}
[ "def", "getConfig", "(", "self", ")", ":", "return", "{", "'graph'", ":", "self", ".", "_graphAttrDict", ",", "'fields'", ":", "[", "(", "field_name", ",", "self", ".", "_fieldAttrDict", ".", "get", "(", "field_name", ")", ")", "for", "field_name", "in", "self", ".", "_fieldNameList", "]", "}" ]
38.333333
17.555556
def __unset_binding(self, dependency, service, reference): # type: (Any, Any, ServiceReference) -> None """ Removes a service from the component :param dependency: The dependency handler :param service: The injected service :param reference: The reference of the injected service """ # Call the component back self.__safe_field_callback( dependency.get_field(), constants.IPOPO_CALLBACK_UNBIND_FIELD, service, reference, ) self.safe_callback(constants.IPOPO_CALLBACK_UNBIND, service, reference) # Update the injected field setattr(self.instance, dependency.get_field(), dependency.get_value()) # Unget the service self.bundle_context.unget_service(reference)
[ "def", "__unset_binding", "(", "self", ",", "dependency", ",", "service", ",", "reference", ")", ":", "# type: (Any, Any, ServiceReference) -> None", "# Call the component back", "self", ".", "__safe_field_callback", "(", "dependency", ".", "get_field", "(", ")", ",", "constants", ".", "IPOPO_CALLBACK_UNBIND_FIELD", ",", "service", ",", "reference", ",", ")", "self", ".", "safe_callback", "(", "constants", ".", "IPOPO_CALLBACK_UNBIND", ",", "service", ",", "reference", ")", "# Update the injected field", "setattr", "(", "self", ".", "instance", ",", "dependency", ".", "get_field", "(", ")", ",", "dependency", ".", "get_value", "(", ")", ")", "# Unget the service", "self", ".", "bundle_context", ".", "unget_service", "(", "reference", ")" ]
33.625
18.041667
def b58ToC32(b58check, version=-1): """ >>> b58ToC32('1FzTxL9Mxnm2fdmnQEArfhzJHevwbvcH6d') 'SP2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKNRV9EJ7' >>> b58ToC32('3GgUssdoWh5QkoUDXKqT6LMESBDf8aqp2y') 'SM2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQVX8X0G' >>> b58ToC32('mvWRFPELmpCHSkFQ7o9EVdCd9eXeUTa9T8') 'ST2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQYAC0RQ' >>> b58ToC32('2N8EgwcZq89akxb6mCTTKiHLVeXRpxjuy98') 'SN2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKP6D2ZK9' >>> b58ToC32('1FzTxL9Mxnm2fdmnQEArfhzJHevwbvcH6d', 22) 'SP2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKNRV9EJ7' >>> b58ToC32('1FzTxL9Mxnm2fdmnQEArfhzJHevwbvcH6d', 0) 'S02J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKPVKG2CE' >>> b58ToC32('1FzTxL9Mxnm2fdmnQEArfhzJHevwbvcH6d', 31) 'SZ2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQ9H6DPR' >>> b58ToC32('1FzTxL9Mxnm2fdmnQEArfhzJHevwbvcH6d', 20) 'SM2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQVX8X0G' >>> b58ToC32('1FzTxL9Mxnm2fdmnQEArfhzJHevwbvcH6d', 26) 'ST2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKQYAC0RQ' >>> b58ToC32('1FzTxL9Mxnm2fdmnQEArfhzJHevwbvcH6d', 21) 'SN2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKP6D2ZK9' """ addr_version_byte, addr_bin, addr_checksum = keylib.b58check.b58check_unpack(b58check) addr_version = ord(addr_version_byte) addr_hash160 = addr_bin.encode('hex') stacks_version = None if version < 0: stacks_version = addr_version if ADDR_BITCOIN_TO_STACKS.get(addr_version) is not None: stacks_version = ADDR_BITCOIN_TO_STACKS[addr_version] else: stacks_version = version return c32address(stacks_version, addr_hash160)
[ "def", "b58ToC32", "(", "b58check", ",", "version", "=", "-", "1", ")", ":", "addr_version_byte", ",", "addr_bin", ",", "addr_checksum", "=", "keylib", ".", "b58check", ".", "b58check_unpack", "(", "b58check", ")", "addr_version", "=", "ord", "(", "addr_version_byte", ")", "addr_hash160", "=", "addr_bin", ".", "encode", "(", "'hex'", ")", "stacks_version", "=", "None", "if", "version", "<", "0", ":", "stacks_version", "=", "addr_version", "if", "ADDR_BITCOIN_TO_STACKS", ".", "get", "(", "addr_version", ")", "is", "not", "None", ":", "stacks_version", "=", "ADDR_BITCOIN_TO_STACKS", "[", "addr_version", "]", "else", ":", "stacks_version", "=", "version", "return", "c32address", "(", "stacks_version", ",", "addr_hash160", ")" ]
42.081081
14.837838
def _fully_random_weights(n_features, lam_scale, prng): """Generate a symmetric random matrix with zeros along the diagonal.""" weights = np.zeros((n_features, n_features)) n_off_diag = int((n_features ** 2 - n_features) / 2) weights[np.triu_indices(n_features, k=1)] = 0.1 * lam_scale * prng.randn( n_off_diag ) + (0.25 * lam_scale) weights[weights < 0] = 0 weights = weights + weights.T return weights
[ "def", "_fully_random_weights", "(", "n_features", ",", "lam_scale", ",", "prng", ")", ":", "weights", "=", "np", ".", "zeros", "(", "(", "n_features", ",", "n_features", ")", ")", "n_off_diag", "=", "int", "(", "(", "n_features", "**", "2", "-", "n_features", ")", "/", "2", ")", "weights", "[", "np", ".", "triu_indices", "(", "n_features", ",", "k", "=", "1", ")", "]", "=", "0.1", "*", "lam_scale", "*", "prng", ".", "randn", "(", "n_off_diag", ")", "+", "(", "0.25", "*", "lam_scale", ")", "weights", "[", "weights", "<", "0", "]", "=", "0", "weights", "=", "weights", "+", "weights", ".", "T", "return", "weights" ]
43.4
15.3
def merge(self, other): """ We can merge unless the merge results in an empty set -- a contradiction """ other = self.coerce(other) if self.is_equal(other): # pick among dependencies return self elif other.is_entailed_by(self): # other is a subset of self return self elif self.is_entailed_by(other): # self is a subset of other. self.values = other.values.copy() elif self.is_contradictory(other): raise Contradiction("Cannot merge set with %s" % (str(other))) else: # merge mutual information if self.values: self.values = self.values.intersection(other.values) else: self.values = other.values.copy() return self
[ "def", "merge", "(", "self", ",", "other", ")", ":", "other", "=", "self", ".", "coerce", "(", "other", ")", "if", "self", ".", "is_equal", "(", "other", ")", ":", "# pick among dependencies", "return", "self", "elif", "other", ".", "is_entailed_by", "(", "self", ")", ":", "# other is a subset of self", "return", "self", "elif", "self", ".", "is_entailed_by", "(", "other", ")", ":", "# self is a subset of other.", "self", ".", "values", "=", "other", ".", "values", ".", "copy", "(", ")", "elif", "self", ".", "is_contradictory", "(", "other", ")", ":", "raise", "Contradiction", "(", "\"Cannot merge set with %s\"", "%", "(", "str", "(", "other", ")", ")", ")", "else", ":", "# merge mutual information", "if", "self", ".", "values", ":", "self", ".", "values", "=", "self", ".", "values", ".", "intersection", "(", "other", ".", "values", ")", "else", ":", "self", ".", "values", "=", "other", ".", "values", ".", "copy", "(", ")", "return", "self" ]
34.666667
11.583333
def get_metrics(self, name=None): """Get metrics for this operator. Args: name(str, optional): Only return metrics matching `name`, where `name` can be a regular expression. If `name` is not supplied, then all metrics for this operator are returned. Returns: list(Metric): List of matching metrics. Retrieving a list of metrics whose name contains the string "temperatureSensor" could be performed as followed Example: >>> from streamsx import rest >>> sc = rest.StreamingAnalyticsConnection() >>> instances = sc.get_instances() >>> operator = instances[0].get_operators()[0] >>> metrics = op.get_metrics(name='*temperatureSensor*') """ return self._get_elements(self.metrics, 'metrics', Metric, name=name)
[ "def", "get_metrics", "(", "self", ",", "name", "=", "None", ")", ":", "return", "self", ".", "_get_elements", "(", "self", ".", "metrics", ",", "'metrics'", ",", "Metric", ",", "name", "=", "name", ")" ]
44.684211
27.421053
def visitMembersDef(self, ctx: jsgParser.MembersDefContext): """ membersDef: COMMA | member+ (BAR altMemberDef)* (BAR lastComma)? ; altMemberDef: member* ; member: pairDef COMMA? lastComma: COMMA ; """ if not self._name: self._name = self._context.anon_id() if ctx.COMMA(): # lone comma - wild card self._strict = False if not ctx.BAR(): # member+ self.visitChildren(ctx) else: entry = 1 self._add_choice(entry, ctx.member()) # add first brance (member+) for alt in ctx.altMemberDef(): entry += 1 self._add_choice(entry, alt.member()) if ctx.lastComma(): entry += 1 self._add_choice(entry, [])
[ "def", "visitMembersDef", "(", "self", ",", "ctx", ":", "jsgParser", ".", "MembersDefContext", ")", ":", "if", "not", "self", ".", "_name", ":", "self", ".", "_name", "=", "self", ".", "_context", ".", "anon_id", "(", ")", "if", "ctx", ".", "COMMA", "(", ")", ":", "# lone comma - wild card", "self", ".", "_strict", "=", "False", "if", "not", "ctx", ".", "BAR", "(", ")", ":", "# member+", "self", ".", "visitChildren", "(", "ctx", ")", "else", ":", "entry", "=", "1", "self", ".", "_add_choice", "(", "entry", ",", "ctx", ".", "member", "(", ")", ")", "# add first brance (member+)", "for", "alt", "in", "ctx", ".", "altMemberDef", "(", ")", ":", "entry", "+=", "1", "self", ".", "_add_choice", "(", "entry", ",", "alt", ".", "member", "(", ")", ")", "if", "ctx", ".", "lastComma", "(", ")", ":", "entry", "+=", "1", "self", ".", "_add_choice", "(", "entry", ",", "[", "]", ")" ]
41.190476
13.238095
def setModelData( self, editor, model, index ): """ Sets the data for the given index from the editor's value. :param editor | <QWidget> model | <QAbstractItemModel> index | <QModelIndex> """ tree = self.parent() querywidget = tree.parent() factory = querywidget.factory() item = tree.itemFromIndex(index) value = factory.editorData(editor) item.setValue(value)
[ "def", "setModelData", "(", "self", ",", "editor", ",", "model", ",", "index", ")", ":", "tree", "=", "self", ".", "parent", "(", ")", "querywidget", "=", "tree", ".", "parent", "(", ")", "factory", "=", "querywidget", ".", "factory", "(", ")", "item", "=", "tree", ".", "itemFromIndex", "(", "index", ")", "value", "=", "factory", ".", "editorData", "(", "editor", ")", "item", ".", "setValue", "(", "value", ")" ]
34.933333
9.733333
def p_common_scalar_magic_method(p): 'common_scalar : METHOD_C' p[0] = ast.MagicConstant(p[1].upper(), None, lineno=p.lineno(1))
[ "def", "p_common_scalar_magic_method", "(", "p", ")", ":", "p", "[", "0", "]", "=", "ast", ".", "MagicConstant", "(", "p", "[", "1", "]", ".", "upper", "(", ")", ",", "None", ",", "lineno", "=", "p", ".", "lineno", "(", "1", ")", ")" ]
44.666667
14
def terminate_ex(self, nodes, threads=False, attempts=3): """Wrapper method for terminate. :param nodes: Nodes to be destroyed. :type nodes: ``list`` :param attempts: The amount of attempts for retrying to terminate failed instances. :type attempts: ``int`` :param threads: Whether to use the threaded approach or not. :type threads: ``bool`` """ while nodes and attempts > 0: if threads: nodes = self.terminate_with_threads(nodes) else: nodes = self.terminate(nodes) if nodes: logger.info("Attempt to terminate the remaining instances once more.") attempts -= 1 return nodes
[ "def", "terminate_ex", "(", "self", ",", "nodes", ",", "threads", "=", "False", ",", "attempts", "=", "3", ")", ":", "while", "nodes", "and", "attempts", ">", "0", ":", "if", "threads", ":", "nodes", "=", "self", ".", "terminate_with_threads", "(", "nodes", ")", "else", ":", "nodes", "=", "self", ".", "terminate", "(", "nodes", ")", "if", "nodes", ":", "logger", ".", "info", "(", "\"Attempt to terminate the remaining instances once more.\"", ")", "attempts", "-=", "1", "return", "nodes" ]
34.227273
20.363636
def normalize(self, inplace=False, **kwargs): """Normalize data to a given value. Currently only supports normalizing to a specific time. Parameters ---------- inplace: bool, default False if True, do operation inplace and return None kwargs: the values on which to normalize (e.g., `year=2005`) """ if len(kwargs) > 1 or self.time_col not in kwargs: raise ValueError('Only time(year)-based normalization supported') ret = copy.deepcopy(self) if not inplace else self df = ret.data # change all below if supporting more in the future cols = self.time_col value = kwargs[self.time_col] x = df.set_index(IAMC_IDX) x['value'] /= x[x[cols] == value]['value'] ret.data = x.reset_index() if not inplace: return ret
[ "def", "normalize", "(", "self", ",", "inplace", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "len", "(", "kwargs", ")", ">", "1", "or", "self", ".", "time_col", "not", "in", "kwargs", ":", "raise", "ValueError", "(", "'Only time(year)-based normalization supported'", ")", "ret", "=", "copy", ".", "deepcopy", "(", "self", ")", "if", "not", "inplace", "else", "self", "df", "=", "ret", ".", "data", "# change all below if supporting more in the future", "cols", "=", "self", ".", "time_col", "value", "=", "kwargs", "[", "self", ".", "time_col", "]", "x", "=", "df", ".", "set_index", "(", "IAMC_IDX", ")", "x", "[", "'value'", "]", "/=", "x", "[", "x", "[", "cols", "]", "==", "value", "]", "[", "'value'", "]", "ret", ".", "data", "=", "x", ".", "reset_index", "(", ")", "if", "not", "inplace", ":", "return", "ret" ]
39.090909
15.181818
def create_rst(self, nb, in_dir, odir): """Create the rst file from the notebook node""" raw_rst, resources = nbconvert.export_by_name('rst', nb) # remove ipython magics rst_content = '' i0 = 0 m = None # HACK: we insert the bokeh style sheets here as well, since for some # themes (e.g. the sphinx_rtd_theme) it is not sufficient to include # the style sheets only via app.add_stylesheet bokeh_str = '' if 'bokeh' in raw_rst and self.insert_bokeh: bokeh_str += self.BOKEH_TEMPLATE.format( version=self.insert_bokeh) if 'bokeh' in raw_rst and self.insert_bokeh_widgets: bokeh_str += self.BOKEH_WIDGETS_TEMPLATE.format( version=self.insert_bokeh_widgets) for m in code_blocks.finditer(raw_rst): lines = m.group().splitlines(True) header, content = lines[0], ''.join(lines[1:]) no_magics = magic_patt.sub('\g<1>', content) # if the code cell only contained magic commands, we skip it if no_magics.strip(): rst_content += ( raw_rst[i0:m.start()] + bokeh_str + header + no_magics) bokeh_str = '' i0 = m.end() else: rst_content += raw_rst[i0:m.start()] i0 = m.end() if m is not None: rst_content += bokeh_str + raw_rst[m.end():] else: rst_content = raw_rst rst_content = '.. 
_%s:\n\n' % self.reference + \ rst_content url = self.url if url is not None: rst_content += self.CODE_DOWNLOAD_NBVIEWER.format( pyfile=os.path.basename(self.py_file), nbfile=os.path.basename(self.outfile), url=url) else: rst_content += self.CODE_DOWNLOAD.format( pyfile=os.path.basename(self.py_file), nbfile=os.path.basename(self.outfile)) supplementary_files = self.supplementary_files other_supplementary_files = self.other_supplementary_files if supplementary_files or other_supplementary_files: for f in (supplementary_files or []) + ( other_supplementary_files or []): if not os.path.exists(os.path.join(odir, f)): copyfile(os.path.join(in_dir, f), os.path.join(odir, f)) if supplementary_files: rst_content += self.data_download(supplementary_files) rst_file = self.get_out_file() outputs = sorted(resources['outputs'], key=rst_content.find) base = os.path.join('images', os.path.splitext( os.path.basename(self.infile))[0] + '_%i.png') out_map = {os.path.basename(original): base % i for i, original in enumerate(outputs)} for original, final in six.iteritems(out_map): rst_content = rst_content.replace(original, final) with open(rst_file, 'w') \ as f: f.write(rst_content.rstrip() + '\n') pictures = [] for original in outputs: fname = os.path.join(odir, out_map[os.path.basename(original)]) pictures.append(fname) if six.PY3: f = open(fname, 'w+b') else: f = open(fname, 'w') f.write(resources['outputs'][original]) f.close() self.pictures = pictures
[ "def", "create_rst", "(", "self", ",", "nb", ",", "in_dir", ",", "odir", ")", ":", "raw_rst", ",", "resources", "=", "nbconvert", ".", "export_by_name", "(", "'rst'", ",", "nb", ")", "# remove ipython magics", "rst_content", "=", "''", "i0", "=", "0", "m", "=", "None", "# HACK: we insert the bokeh style sheets here as well, since for some", "# themes (e.g. the sphinx_rtd_theme) it is not sufficient to include", "# the style sheets only via app.add_stylesheet", "bokeh_str", "=", "''", "if", "'bokeh'", "in", "raw_rst", "and", "self", ".", "insert_bokeh", ":", "bokeh_str", "+=", "self", ".", "BOKEH_TEMPLATE", ".", "format", "(", "version", "=", "self", ".", "insert_bokeh", ")", "if", "'bokeh'", "in", "raw_rst", "and", "self", ".", "insert_bokeh_widgets", ":", "bokeh_str", "+=", "self", ".", "BOKEH_WIDGETS_TEMPLATE", ".", "format", "(", "version", "=", "self", ".", "insert_bokeh_widgets", ")", "for", "m", "in", "code_blocks", ".", "finditer", "(", "raw_rst", ")", ":", "lines", "=", "m", ".", "group", "(", ")", ".", "splitlines", "(", "True", ")", "header", ",", "content", "=", "lines", "[", "0", "]", ",", "''", ".", "join", "(", "lines", "[", "1", ":", "]", ")", "no_magics", "=", "magic_patt", ".", "sub", "(", "'\\g<1>'", ",", "content", ")", "# if the code cell only contained magic commands, we skip it", "if", "no_magics", ".", "strip", "(", ")", ":", "rst_content", "+=", "(", "raw_rst", "[", "i0", ":", "m", ".", "start", "(", ")", "]", "+", "bokeh_str", "+", "header", "+", "no_magics", ")", "bokeh_str", "=", "''", "i0", "=", "m", ".", "end", "(", ")", "else", ":", "rst_content", "+=", "raw_rst", "[", "i0", ":", "m", ".", "start", "(", ")", "]", "i0", "=", "m", ".", "end", "(", ")", "if", "m", "is", "not", "None", ":", "rst_content", "+=", "bokeh_str", "+", "raw_rst", "[", "m", ".", "end", "(", ")", ":", "]", "else", ":", "rst_content", "=", "raw_rst", "rst_content", "=", "'.. 
_%s:\\n\\n'", "%", "self", ".", "reference", "+", "rst_content", "url", "=", "self", ".", "url", "if", "url", "is", "not", "None", ":", "rst_content", "+=", "self", ".", "CODE_DOWNLOAD_NBVIEWER", ".", "format", "(", "pyfile", "=", "os", ".", "path", ".", "basename", "(", "self", ".", "py_file", ")", ",", "nbfile", "=", "os", ".", "path", ".", "basename", "(", "self", ".", "outfile", ")", ",", "url", "=", "url", ")", "else", ":", "rst_content", "+=", "self", ".", "CODE_DOWNLOAD", ".", "format", "(", "pyfile", "=", "os", ".", "path", ".", "basename", "(", "self", ".", "py_file", ")", ",", "nbfile", "=", "os", ".", "path", ".", "basename", "(", "self", ".", "outfile", ")", ")", "supplementary_files", "=", "self", ".", "supplementary_files", "other_supplementary_files", "=", "self", ".", "other_supplementary_files", "if", "supplementary_files", "or", "other_supplementary_files", ":", "for", "f", "in", "(", "supplementary_files", "or", "[", "]", ")", "+", "(", "other_supplementary_files", "or", "[", "]", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "odir", ",", "f", ")", ")", ":", "copyfile", "(", "os", ".", "path", ".", "join", "(", "in_dir", ",", "f", ")", ",", "os", ".", "path", ".", "join", "(", "odir", ",", "f", ")", ")", "if", "supplementary_files", ":", "rst_content", "+=", "self", ".", "data_download", "(", "supplementary_files", ")", "rst_file", "=", "self", ".", "get_out_file", "(", ")", "outputs", "=", "sorted", "(", "resources", "[", "'outputs'", "]", ",", "key", "=", "rst_content", ".", "find", ")", "base", "=", "os", ".", "path", ".", "join", "(", "'images'", ",", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "self", ".", "infile", ")", ")", "[", "0", "]", "+", "'_%i.png'", ")", "out_map", "=", "{", "os", ".", "path", ".", "basename", "(", "original", ")", ":", "base", "%", "i", "for", "i", ",", "original", "in", "enumerate", "(", "outputs", ")", "}", "for", 
"original", ",", "final", "in", "six", ".", "iteritems", "(", "out_map", ")", ":", "rst_content", "=", "rst_content", ".", "replace", "(", "original", ",", "final", ")", "with", "open", "(", "rst_file", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "rst_content", ".", "rstrip", "(", ")", "+", "'\\n'", ")", "pictures", "=", "[", "]", "for", "original", "in", "outputs", ":", "fname", "=", "os", ".", "path", ".", "join", "(", "odir", ",", "out_map", "[", "os", ".", "path", ".", "basename", "(", "original", ")", "]", ")", "pictures", ".", "append", "(", "fname", ")", "if", "six", ".", "PY3", ":", "f", "=", "open", "(", "fname", ",", "'w+b'", ")", "else", ":", "f", "=", "open", "(", "fname", ",", "'w'", ")", "f", ".", "write", "(", "resources", "[", "'outputs'", "]", "[", "original", "]", ")", "f", ".", "close", "(", ")", "self", ".", "pictures", "=", "pictures" ]
44.038462
16.346154
def send_signature_request(self, test_mode=False, files=None, file_urls=None, title=None, subject=None, message=None, signing_redirect_url=None, signers=None, cc_email_addresses=None, form_fields_per_document=None, use_text_tags=False, hide_text_tags=False, metadata=None, ux_version=None, allow_decline=False): ''' Creates and sends a new SignatureRequest with the submitted documents Creates and sends a new SignatureRequest with the submitted documents. If form_fields_per_document is not specified, a signature page will be affixed where all signers will be required to add their signature, signifying their agreement to all contained documents. Args: test_mode (bool, optional): Whether this is a test, the signature request will not be legally binding if set to True. Defaults to False. files (list of str): The uploaded file(s) to send for signature file_urls (list of str): URLs of the file for HelloSign to download to send for signature. Use either `files` or `file_urls` title (str, optional): The title you want to assign to the SignatureRequest subject (str, optional): The subject in the email that will be sent to the signers message (str, optional): The custom message in the email that will be sent to the signers signing_redirect_url (str, optional): The URL you want the signer redirected to after they successfully sign. signers (list of dict): A list of signers, which each has the following attributes: name (str): The name of the signer email_address (str): Email address of the signer order (str, optional): The order the signer is required to sign in pin (str, optional): The 4- to 12-character access code that will secure this signer's signature page cc_email_addresses (list, optional): A list of email addresses that should be CC'd form_fields_per_document (str): The fields that should appear on the document, expressed as a serialized JSON data structure which is a list of lists of the form fields. 
Please refer to the API reference of HelloSign for more details (https://www.hellosign.com/api/reference#SignatureRequest) use_text_tags (bool, optional): Use text tags in the provided file(s) to create form fields hide_text_tags (bool, optional): Hide text tag areas metadata (dict, optional): Metadata to associate with the signature request ux_version (int): UX version, either 1 (default) or 2. allow_decline(bool, optional): Allows signers to decline to sign a document if set to 1. Defaults to 0. Returns: A SignatureRequest object ''' self._check_required_fields({ "signers": signers }, [{ "files": files, "file_urls": file_urls }] ) params = { 'test_mode': test_mode, 'files': files, 'file_urls': file_urls, 'title': title, 'subject': subject, 'message': message, 'signing_redirect_url': signing_redirect_url, 'signers': signers, 'cc_email_addresses': cc_email_addresses, 'form_fields_per_document': form_fields_per_document, 'use_text_tags': use_text_tags, 'hide_text_tags': hide_text_tags, 'metadata': metadata, 'allow_decline': allow_decline } if ux_version is not None: params['ux_version'] = ux_version return self._send_signature_request(**params)
[ "def", "send_signature_request", "(", "self", ",", "test_mode", "=", "False", ",", "files", "=", "None", ",", "file_urls", "=", "None", ",", "title", "=", "None", ",", "subject", "=", "None", ",", "message", "=", "None", ",", "signing_redirect_url", "=", "None", ",", "signers", "=", "None", ",", "cc_email_addresses", "=", "None", ",", "form_fields_per_document", "=", "None", ",", "use_text_tags", "=", "False", ",", "hide_text_tags", "=", "False", ",", "metadata", "=", "None", ",", "ux_version", "=", "None", ",", "allow_decline", "=", "False", ")", ":", "self", ".", "_check_required_fields", "(", "{", "\"signers\"", ":", "signers", "}", ",", "[", "{", "\"files\"", ":", "files", ",", "\"file_urls\"", ":", "file_urls", "}", "]", ")", "params", "=", "{", "'test_mode'", ":", "test_mode", ",", "'files'", ":", "files", ",", "'file_urls'", ":", "file_urls", ",", "'title'", ":", "title", ",", "'subject'", ":", "subject", ",", "'message'", ":", "message", ",", "'signing_redirect_url'", ":", "signing_redirect_url", ",", "'signers'", ":", "signers", ",", "'cc_email_addresses'", ":", "cc_email_addresses", ",", "'form_fields_per_document'", ":", "form_fields_per_document", ",", "'use_text_tags'", ":", "use_text_tags", ",", "'hide_text_tags'", ":", "hide_text_tags", ",", "'metadata'", ":", "metadata", ",", "'allow_decline'", ":", "allow_decline", "}", "if", "ux_version", "is", "not", "None", ":", "params", "[", "'ux_version'", "]", "=", "ux_version", "return", "self", ".", "_send_signature_request", "(", "*", "*", "params", ")" ]
49.227848
41.835443
def getRoom(self, _id): """ Retrieve a room from it's id """ if SockJSRoomHandler._room.has_key(self._gcls() + _id): return SockJSRoomHandler._room[self._gcls() + _id] return None
[ "def", "getRoom", "(", "self", ",", "_id", ")", ":", "if", "SockJSRoomHandler", ".", "_room", ".", "has_key", "(", "self", ".", "_gcls", "(", ")", "+", "_id", ")", ":", "return", "SockJSRoomHandler", ".", "_room", "[", "self", ".", "_gcls", "(", ")", "+", "_id", "]", "return", "None" ]
42.2
16.6
def _gtu8(ins): """ Compares & pops top 2 operands out of the stack, and checks if the 1st operand > 2nd operand (top of the stack). Pushes 0 if False, 1 if True. 8 bit unsigned version """ output = _8bit_oper(ins.quad[2], ins.quad[3], reversed_=True) output.append('cp h') output.append('sbc a, a') output.append('push af') return output
[ "def", "_gtu8", "(", "ins", ")", ":", "output", "=", "_8bit_oper", "(", "ins", ".", "quad", "[", "2", "]", ",", "ins", ".", "quad", "[", "3", "]", ",", "reversed_", "=", "True", ")", "output", ".", "append", "(", "'cp h'", ")", "output", ".", "append", "(", "'sbc a, a'", ")", "output", ".", "append", "(", "'push af'", ")", "return", "output" ]
29.230769
17.230769
def _extract_remote_exception(self, pantsd_pid, nailgun_error): """Given a NailgunError, returns a Terminated exception with additional info (where possible). This method will include the entire exception log for either the `pid` in the NailgunError, or failing that, the `pid` of the pantsd instance. """ sources = [pantsd_pid] if nailgun_error.pid is not None: sources = [abs(nailgun_error.pid)] + sources exception_text = None for source in sources: log_path = ExceptionSink.exceptions_log_path(for_pid=source) exception_text = maybe_read_file(log_path) if exception_text: break exception_suffix = '\nRemote exception:\n{}'.format(exception_text) if exception_text else '' return self.Terminated('abruptly lost active connection to pantsd runner: {!r}{}'.format( nailgun_error, exception_suffix))
[ "def", "_extract_remote_exception", "(", "self", ",", "pantsd_pid", ",", "nailgun_error", ")", ":", "sources", "=", "[", "pantsd_pid", "]", "if", "nailgun_error", ".", "pid", "is", "not", "None", ":", "sources", "=", "[", "abs", "(", "nailgun_error", ".", "pid", ")", "]", "+", "sources", "exception_text", "=", "None", "for", "source", "in", "sources", ":", "log_path", "=", "ExceptionSink", ".", "exceptions_log_path", "(", "for_pid", "=", "source", ")", "exception_text", "=", "maybe_read_file", "(", "log_path", ")", "if", "exception_text", ":", "break", "exception_suffix", "=", "'\\nRemote exception:\\n{}'", ".", "format", "(", "exception_text", ")", "if", "exception_text", "else", "''", "return", "self", ".", "Terminated", "(", "'abruptly lost active connection to pantsd runner: {!r}{}'", ".", "format", "(", "nailgun_error", ",", "exception_suffix", ")", ")" ]
43.05
22.8
def ping(self, id): """ Pings the motor with the specified id. .. note:: The motor id should always be included in [0, 253]. 254 is used for broadcast. """ pp = self._protocol.DxlPingPacket(id) try: self._send_packet(pp, error_handler=None) return True except DxlTimeoutError: return False
[ "def", "ping", "(", "self", ",", "id", ")", ":", "pp", "=", "self", ".", "_protocol", ".", "DxlPingPacket", "(", "id", ")", "try", ":", "self", ".", "_send_packet", "(", "pp", ",", "error_handler", "=", "None", ")", "return", "True", "except", "DxlTimeoutError", ":", "return", "False" ]
28.615385
22.230769
def write(self, fptr): """Write a Palette box to file. """ self._validate(writing=True) bytes_per_row = sum(self.bits_per_component) / 8 bytes_per_palette = bytes_per_row * self.palette.shape[0] box_length = 8 + 3 + self.palette.shape[1] + bytes_per_palette # Write the usual (L, T) header. write_buffer = struct.pack('>I4s', int(box_length), b'pclr') fptr.write(write_buffer) # NE, NPC write_buffer = struct.pack('>HB', self.palette.shape[0], self.palette.shape[1]) fptr.write(write_buffer) # Bits Per Sample. Signed components aren't supported. bps_signed = [x - 1 for x in self.bits_per_component] write_buffer = struct.pack('>' + 'B' * self.palette.shape[1], *bps_signed) fptr.write(write_buffer) # C(i,j) fptr.write(memoryview(self.palette))
[ "def", "write", "(", "self", ",", "fptr", ")", ":", "self", ".", "_validate", "(", "writing", "=", "True", ")", "bytes_per_row", "=", "sum", "(", "self", ".", "bits_per_component", ")", "/", "8", "bytes_per_palette", "=", "bytes_per_row", "*", "self", ".", "palette", ".", "shape", "[", "0", "]", "box_length", "=", "8", "+", "3", "+", "self", ".", "palette", ".", "shape", "[", "1", "]", "+", "bytes_per_palette", "# Write the usual (L, T) header.", "write_buffer", "=", "struct", ".", "pack", "(", "'>I4s'", ",", "int", "(", "box_length", ")", ",", "b'pclr'", ")", "fptr", ".", "write", "(", "write_buffer", ")", "# NE, NPC", "write_buffer", "=", "struct", ".", "pack", "(", "'>HB'", ",", "self", ".", "palette", ".", "shape", "[", "0", "]", ",", "self", ".", "palette", ".", "shape", "[", "1", "]", ")", "fptr", ".", "write", "(", "write_buffer", ")", "# Bits Per Sample. Signed components aren't supported.", "bps_signed", "=", "[", "x", "-", "1", "for", "x", "in", "self", ".", "bits_per_component", "]", "write_buffer", "=", "struct", ".", "pack", "(", "'>'", "+", "'B'", "*", "self", ".", "palette", ".", "shape", "[", "1", "]", ",", "*", "bps_signed", ")", "fptr", ".", "write", "(", "write_buffer", ")", "# C(i,j)", "fptr", ".", "write", "(", "memoryview", "(", "self", ".", "palette", ")", ")" ]
37.64
19.08
def _metrics_get_endpoints(options): """ Determine the start and end dates based on user-supplied options. """ if bool(options.start) ^ bool(options.end): log.error('--start and --end must be specified together') sys.exit(1) if options.start and options.end: start = options.start end = options.end else: end = datetime.utcnow() start = end - timedelta(options.days) return start, end
[ "def", "_metrics_get_endpoints", "(", "options", ")", ":", "if", "bool", "(", "options", ".", "start", ")", "^", "bool", "(", "options", ".", "end", ")", ":", "log", ".", "error", "(", "'--start and --end must be specified together'", ")", "sys", ".", "exit", "(", "1", ")", "if", "options", ".", "start", "and", "options", ".", "end", ":", "start", "=", "options", ".", "start", "end", "=", "options", ".", "end", "else", ":", "end", "=", "datetime", ".", "utcnow", "(", ")", "start", "=", "end", "-", "timedelta", "(", "options", ".", "days", ")", "return", "start", ",", "end" ]
31.5
16.428571
def as_dict(self): """ Returns a dictionary representation of the ChemicalEnvironments object :return: """ return {"@module": self.__class__.__module__, "@class": self.__class__.__name__, "coord_geoms": jsanitize(self.coord_geoms)}
[ "def", "as_dict", "(", "self", ")", ":", "return", "{", "\"@module\"", ":", "self", ".", "__class__", ".", "__module__", ",", "\"@class\"", ":", "self", ".", "__class__", ".", "__name__", ",", "\"coord_geoms\"", ":", "jsanitize", "(", "self", ".", "coord_geoms", ")", "}" ]
37
15.75
def get_rules_from_disk(self): ''' Recursively traverse the yara/rules directory for rules ''' # Try to find the yara rules directory relative to the worker my_dir = os.path.dirname(os.path.realpath(__file__)) yara_rule_path = os.path.join(my_dir, 'yara/rules') if not os.path.exists(yara_rule_path): raise RuntimeError('yara could not find yara rules directory under: %s' % my_dir) # Okay load in all the rules under the yara rule path self.rules = yara.load_rules(rules_rootpath=yara_rule_path) # Save rules to Workbench self.save_rules_to_workbench(self.rules) return self.rules
[ "def", "get_rules_from_disk", "(", "self", ")", ":", "# Try to find the yara rules directory relative to the worker", "my_dir", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", "yara_rule_path", "=", "os", ".", "path", ".", "join", "(", "my_dir", ",", "'yara/rules'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "yara_rule_path", ")", ":", "raise", "RuntimeError", "(", "'yara could not find yara rules directory under: %s'", "%", "my_dir", ")", "# Okay load in all the rules under the yara rule path", "self", ".", "rules", "=", "yara", ".", "load_rules", "(", "rules_rootpath", "=", "yara_rule_path", ")", "# Save rules to Workbench", "self", ".", "save_rules_to_workbench", "(", "self", ".", "rules", ")", "return", "self", ".", "rules" ]
41.375
25.375
def polyline(*points): """Converts a list of points to a Path composed of lines connecting those points (i.e. a linear spline or polyline). See also `polygon()`.""" return Path(*[Line(points[i], points[i+1]) for i in range(len(points) - 1)])
[ "def", "polyline", "(", "*", "points", ")", ":", "return", "Path", "(", "*", "[", "Line", "(", "points", "[", "i", "]", ",", "points", "[", "i", "+", "1", "]", ")", "for", "i", "in", "range", "(", "len", "(", "points", ")", "-", "1", ")", "]", ")" ]
53.8
7
def energy(q, v): """Compute the kinetic and potential energy of the planetary system""" # Number of points N: int = len(q) # Initialize arrays to zero of the correct size T: np.ndarray = np.zeros(N) U: np.ndarray = np.zeros(N) # Add up kinetic energy of each body for i in range(B): # Kinetic energy is 1/2 mv^2 m = mass[i] vi = v[:, slices[i]] T += 0.5 * m * np.sum(vi * vi, axis=1) # Add up potential energy of each pair of bodies for i in range(B): for j in range(i+1, B): # Masses of these two bodies mi = mass[i] mj = mass[j] # Positions of body i and j qi: np.ndarray = q[:, slices[i]] qj: np.ndarray = q[:, slices[j]] # Potential energy is -G m1 m2 / r dv_ij = qj - qi r_ij = np.linalg.norm(dv_ij, axis=1) U -= G * mi * mj * 1.0 / r_ij # Total energy H = T + U H = T + U return H, T, U
[ "def", "energy", "(", "q", ",", "v", ")", ":", "# Number of points", "N", ":", "int", "=", "len", "(", "q", ")", "# Initialize arrays to zero of the correct size", "T", ":", "np", ".", "ndarray", "=", "np", ".", "zeros", "(", "N", ")", "U", ":", "np", ".", "ndarray", "=", "np", ".", "zeros", "(", "N", ")", "# Add up kinetic energy of each body", "for", "i", "in", "range", "(", "B", ")", ":", "# Kinetic energy is 1/2 mv^2", "m", "=", "mass", "[", "i", "]", "vi", "=", "v", "[", ":", ",", "slices", "[", "i", "]", "]", "T", "+=", "0.5", "*", "m", "*", "np", ".", "sum", "(", "vi", "*", "vi", ",", "axis", "=", "1", ")", "# Add up potential energy of each pair of bodies", "for", "i", "in", "range", "(", "B", ")", ":", "for", "j", "in", "range", "(", "i", "+", "1", ",", "B", ")", ":", "# Masses of these two bodies", "mi", "=", "mass", "[", "i", "]", "mj", "=", "mass", "[", "j", "]", "# Positions of body i and j", "qi", ":", "np", ".", "ndarray", "=", "q", "[", ":", ",", "slices", "[", "i", "]", "]", "qj", ":", "np", ".", "ndarray", "=", "q", "[", ":", ",", "slices", "[", "j", "]", "]", "# Potential energy is -G m1 m2 / r", "dv_ij", "=", "qj", "-", "qi", "r_ij", "=", "np", ".", "linalg", ".", "norm", "(", "dv_ij", ",", "axis", "=", "1", ")", "U", "-=", "G", "*", "mi", "*", "mj", "*", "1.0", "/", "r_ij", "# Total energy H = T + U", "H", "=", "T", "+", "U", "return", "H", ",", "T", ",", "U" ]
27.621622
16.162162
def from_meta(cls, meta, meta_all=None): """Copy DocstringMeta from another instance.""" if len(meta.args) == 2: name = meta.args[1] meta_type = None for x in meta_all: if x.args[1] == name and x.args[0] == 'type': meta_type = x.description break return cls(args=meta.args, description=meta.description, type=meta_type) else: return cls(args=meta.args, description=meta.description)
[ "def", "from_meta", "(", "cls", ",", "meta", ",", "meta_all", "=", "None", ")", ":", "if", "len", "(", "meta", ".", "args", ")", "==", "2", ":", "name", "=", "meta", ".", "args", "[", "1", "]", "meta_type", "=", "None", "for", "x", "in", "meta_all", ":", "if", "x", ".", "args", "[", "1", "]", "==", "name", "and", "x", ".", "args", "[", "0", "]", "==", "'type'", ":", "meta_type", "=", "x", ".", "description", "break", "return", "cls", "(", "args", "=", "meta", ".", "args", ",", "description", "=", "meta", ".", "description", ",", "type", "=", "meta_type", ")", "else", ":", "return", "cls", "(", "args", "=", "meta", ".", "args", ",", "description", "=", "meta", ".", "description", ")" ]
39.307692
16.923077
def read_values(target_usage): """read feature report values""" # browse all devices all_devices = hid.HidDeviceFilter().get_devices() if not all_devices: print("Can't find any non system HID device connected") else: # search for our target usage usage_found = False for device in all_devices: try: device.open() # browse feature reports for report in device.find_feature_reports(): if target_usage in report: # we found our usage report.get() # print result print("The value:", list(report[target_usage])) print("All the report: {0}".format(report.get_raw_data())) usage_found = True finally: device.close() if not usage_found: print("The target device was found, but the requested usage does not exist!\n")
[ "def", "read_values", "(", "target_usage", ")", ":", "# browse all devices\r", "all_devices", "=", "hid", ".", "HidDeviceFilter", "(", ")", ".", "get_devices", "(", ")", "if", "not", "all_devices", ":", "print", "(", "\"Can't find any non system HID device connected\"", ")", "else", ":", "# search for our target usage\r", "usage_found", "=", "False", "for", "device", "in", "all_devices", ":", "try", ":", "device", ".", "open", "(", ")", "# browse feature reports\r", "for", "report", "in", "device", ".", "find_feature_reports", "(", ")", ":", "if", "target_usage", "in", "report", ":", "# we found our usage\r", "report", ".", "get", "(", ")", "# print result\r", "print", "(", "\"The value:\"", ",", "list", "(", "report", "[", "target_usage", "]", ")", ")", "print", "(", "\"All the report: {0}\"", ".", "format", "(", "report", ".", "get_raw_data", "(", ")", ")", ")", "usage_found", "=", "True", "finally", ":", "device", ".", "close", "(", ")", "if", "not", "usage_found", ":", "print", "(", "\"The target device was found, but the requested usage does not exist!\\n\"", ")" ]
39.730769
15.615385
def network_start(name, **kwargs): ''' Start a defined virtual network. :param name: virtual network name :param connection: libvirt connection URI, overriding defaults :param username: username to connect with, overriding defaults :param password: password to connect with, overriding defaults .. versionadded:: 2019.2.0 CLI Example: .. code-block:: bash salt '*' virt.network_start default ''' conn = __get_conn(**kwargs) try: net = conn.networkLookupByName(name) return not bool(net.create()) finally: conn.close()
[ "def", "network_start", "(", "name", ",", "*", "*", "kwargs", ")", ":", "conn", "=", "__get_conn", "(", "*", "*", "kwargs", ")", "try", ":", "net", "=", "conn", ".", "networkLookupByName", "(", "name", ")", "return", "not", "bool", "(", "net", ".", "create", "(", ")", ")", "finally", ":", "conn", ".", "close", "(", ")" ]
25.391304
22
def glsl_type(self): """ GLSL declaration strings required for a variable to hold this data. """ if self.dtype is None: return None dtshape = self.dtype[0].shape n = dtshape[0] if dtshape else 1 if n > 1: dtype = 'vec%d' % n else: dtype = 'float' if 'f' in self.dtype[0].base.kind else 'int' return 'attribute', dtype
[ "def", "glsl_type", "(", "self", ")", ":", "if", "self", ".", "dtype", "is", "None", ":", "return", "None", "dtshape", "=", "self", ".", "dtype", "[", "0", "]", ".", "shape", "n", "=", "dtshape", "[", "0", "]", "if", "dtshape", "else", "1", "if", "n", ">", "1", ":", "dtype", "=", "'vec%d'", "%", "n", "else", ":", "dtype", "=", "'float'", "if", "'f'", "in", "self", ".", "dtype", "[", "0", "]", ".", "base", ".", "kind", "else", "'int'", "return", "'attribute'", ",", "dtype" ]
33.833333
12.333333
def delete_cloud_integration(self, id, **kwargs): # noqa: E501 """Delete a specific cloud integration # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_cloud_integration(id, async_req=True) >>> result = thread.get() :param async_req bool :param str id: (required) :return: ResponseContainerCloudIntegration If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_cloud_integration_with_http_info(id, **kwargs) # noqa: E501 else: (data) = self.delete_cloud_integration_with_http_info(id, **kwargs) # noqa: E501 return data
[ "def", "delete_cloud_integration", "(", "self", ",", "id", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "delete_cloud_integration_with_http_info", "(", "id", ",", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "delete_cloud_integration_with_http_info", "(", "id", ",", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
43.238095
20.047619
def close(self): """ Closes connection to database. """ if getattr(self, '_connection', None): logger.debug('Closing postgresql connection.') self._connection.close() self._connection = None if getattr(self, '_engine', None): self._engine.dispose()
[ "def", "close", "(", "self", ")", ":", "if", "getattr", "(", "self", ",", "'_connection'", ",", "None", ")", ":", "logger", ".", "debug", "(", "'Closing postgresql connection.'", ")", "self", ".", "_connection", ".", "close", "(", ")", "self", ".", "_connection", "=", "None", "if", "getattr", "(", "self", ",", "'_engine'", ",", "None", ")", ":", "self", ".", "_engine", ".", "dispose", "(", ")" ]
39.125
8.125
def count(self, **kwargs): """ Performs a COUNT statement on the model's table in the replica database. :param select: Column to be counted. :type select: string :param where: WHERE clause of the SELECT statement. This can be a plain string, a dict or an array. :type where: string, dict, array :param db: Database name from your ``jardin_conf.py``, overrides the default database set in the model declaration. :type db: string :param role: One of ``('master', 'replica')`` to override the default. :type role: string :returns: integer """ if 'select' in kwargs: kwargs['select'] = {'cnt': 'COUNT(%s)' % kwargs['select']} else: kwargs['select'] = {'cnt': 'COUNT(*)'} res = self.db_adapter( db_name=kwargs.get('db'), role=kwargs.get('role', 'replica') ).select(**kwargs) return res.cnt[0]
[ "def", "count", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "'select'", "in", "kwargs", ":", "kwargs", "[", "'select'", "]", "=", "{", "'cnt'", ":", "'COUNT(%s)'", "%", "kwargs", "[", "'select'", "]", "}", "else", ":", "kwargs", "[", "'select'", "]", "=", "{", "'cnt'", ":", "'COUNT(*)'", "}", "res", "=", "self", ".", "db_adapter", "(", "db_name", "=", "kwargs", ".", "get", "(", "'db'", ")", ",", "role", "=", "kwargs", ".", "get", "(", "'role'", ",", "'replica'", ")", ")", ".", "select", "(", "*", "*", "kwargs", ")", "return", "res", ".", "cnt", "[", "0", "]" ]
39.75
21
def _fsync_files(filenames): """Call fsync() a list of file names The filenames should be absolute paths already. """ touched_directories = set() mode = os.O_RDONLY # Windows if hasattr(os, 'O_BINARY'): mode |= os.O_BINARY for filename in filenames: fd = os.open(filename, mode) os.fsync(fd) os.close(fd) touched_directories.add(os.path.dirname(filename)) # Some OSes also require us to fsync the directory where we've # created files or subdirectories. if hasattr(os, 'O_DIRECTORY'): for dirname in touched_directories: fd = os.open(dirname, os.O_RDONLY | os.O_DIRECTORY) os.fsync(fd) os.close(fd)
[ "def", "_fsync_files", "(", "filenames", ")", ":", "touched_directories", "=", "set", "(", ")", "mode", "=", "os", ".", "O_RDONLY", "# Windows", "if", "hasattr", "(", "os", ",", "'O_BINARY'", ")", ":", "mode", "|=", "os", ".", "O_BINARY", "for", "filename", "in", "filenames", ":", "fd", "=", "os", ".", "open", "(", "filename", ",", "mode", ")", "os", ".", "fsync", "(", "fd", ")", "os", ".", "close", "(", "fd", ")", "touched_directories", ".", "add", "(", "os", ".", "path", ".", "dirname", "(", "filename", ")", ")", "# Some OSes also require us to fsync the directory where we've", "# created files or subdirectories.", "if", "hasattr", "(", "os", ",", "'O_DIRECTORY'", ")", ":", "for", "dirname", "in", "touched_directories", ":", "fd", "=", "os", ".", "open", "(", "dirname", ",", "os", ".", "O_RDONLY", "|", "os", ".", "O_DIRECTORY", ")", "os", ".", "fsync", "(", "fd", ")", "os", ".", "close", "(", "fd", ")" ]
26.148148
18.62963
def iso_to_gregorian(iso_year, iso_week, iso_day): "Gregorian calendar date for the given ISO year, week and day" year_start = iso_year_start(iso_year) return year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1)
[ "def", "iso_to_gregorian", "(", "iso_year", ",", "iso_week", ",", "iso_day", ")", ":", "year_start", "=", "iso_year_start", "(", "iso_year", ")", "return", "year_start", "+", "datetime", ".", "timedelta", "(", "days", "=", "iso_day", "-", "1", ",", "weeks", "=", "iso_week", "-", "1", ")" ]
59.25
19.25
def project_from_files( files, func_wrapper=_astroid_wrapper, project_name="no name", black_list=("CVS",) ): """return a Project from a list of files or modules""" # build the project representation astroid_manager = manager.AstroidManager() project = Project(project_name) for something in files: if not os.path.exists(something): fpath = modutils.file_from_modpath(something.split(".")) elif os.path.isdir(something): fpath = os.path.join(something, "__init__.py") else: fpath = something ast = func_wrapper(astroid_manager.ast_from_file, fpath) if ast is None: continue # XXX why is first file defining the project.path ? project.path = project.path or ast.file project.add_module(ast) base_name = ast.name # recurse in package except if __init__ was explicitly given if ast.package and something.find("__init__") == -1: # recurse on others packages / modules if this is a package for fpath in modutils.get_module_files( os.path.dirname(ast.file), black_list ): ast = func_wrapper(astroid_manager.ast_from_file, fpath) if ast is None or ast.name == base_name: continue project.add_module(ast) return project
[ "def", "project_from_files", "(", "files", ",", "func_wrapper", "=", "_astroid_wrapper", ",", "project_name", "=", "\"no name\"", ",", "black_list", "=", "(", "\"CVS\"", ",", ")", ")", ":", "# build the project representation", "astroid_manager", "=", "manager", ".", "AstroidManager", "(", ")", "project", "=", "Project", "(", "project_name", ")", "for", "something", "in", "files", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "something", ")", ":", "fpath", "=", "modutils", ".", "file_from_modpath", "(", "something", ".", "split", "(", "\".\"", ")", ")", "elif", "os", ".", "path", ".", "isdir", "(", "something", ")", ":", "fpath", "=", "os", ".", "path", ".", "join", "(", "something", ",", "\"__init__.py\"", ")", "else", ":", "fpath", "=", "something", "ast", "=", "func_wrapper", "(", "astroid_manager", ".", "ast_from_file", ",", "fpath", ")", "if", "ast", "is", "None", ":", "continue", "# XXX why is first file defining the project.path ?", "project", ".", "path", "=", "project", ".", "path", "or", "ast", ".", "file", "project", ".", "add_module", "(", "ast", ")", "base_name", "=", "ast", ".", "name", "# recurse in package except if __init__ was explicitly given", "if", "ast", ".", "package", "and", "something", ".", "find", "(", "\"__init__\"", ")", "==", "-", "1", ":", "# recurse on others packages / modules if this is a package", "for", "fpath", "in", "modutils", ".", "get_module_files", "(", "os", ".", "path", ".", "dirname", "(", "ast", ".", "file", ")", ",", "black_list", ")", ":", "ast", "=", "func_wrapper", "(", "astroid_manager", ".", "ast_from_file", ",", "fpath", ")", "if", "ast", "is", "None", "or", "ast", ".", "name", "==", "base_name", ":", "continue", "project", ".", "add_module", "(", "ast", ")", "return", "project" ]
42.59375
16.65625
def _step(self, model: TrainingModel, batch: mx.io.DataBatch, checkpoint_interval: int, metric_train: mx.metric.EvalMetric, metric_loss: Optional[mx.metric.EvalMetric] = None): """ Performs an update to model given a batch and updates metrics. """ if model.monitor is not None: model.monitor.tic() #################### # Forward & Backward #################### model.run_forward_backward(batch, metric_train) # If using an extended optimizer, provide extra state information about the current batch optimizer = model.optimizer if metric_loss is not None and isinstance(optimizer, SockeyeOptimizer): # Loss for this batch metric_loss.reset() metric_loss.update(batch.label, model.module.get_outputs()) [(_, m_val)] = metric_loss.get_name_value() batch_state = BatchState(metric_val=m_val) optimizer.pre_update_batch(batch_state) ######## # UPDATE ######## if self.update_interval == 1 or self.state.batches % self.update_interval == 0: # Gradient rescaling gradient_norm = None if self.state.updates > 0 and (self.state.updates + 1) % checkpoint_interval == 0: # compute values for logging to metrics (before rescaling...) gradient_norm = self.state.gradient_norm = model.get_global_gradient_norm() self.state.gradients = model.get_gradients() # note: C.GRADIENT_CLIPPING_TYPE_ABS is handled by the mxnet optimizer directly if self.optimizer_config.gradient_clipping_type == C.GRADIENT_CLIPPING_TYPE_NORM: if gradient_norm is None: gradient_norm = model.get_global_gradient_norm() # clip gradients if gradient_norm > self.optimizer_config.gradient_clipping_threshold: ratio = self.optimizer_config.gradient_clipping_threshold / gradient_norm model.rescale_gradients(ratio) model.update() if self.update_interval > 1: model.zero_gradients() self.state.updates += 1 if model.monitor is not None: results = model.monitor.toc() if results: for _, k, v in results: logger.info('Monitor: Batch [{:d}] {:s} {:s}'.format(self.state.updates, k, v))
[ "def", "_step", "(", "self", ",", "model", ":", "TrainingModel", ",", "batch", ":", "mx", ".", "io", ".", "DataBatch", ",", "checkpoint_interval", ":", "int", ",", "metric_train", ":", "mx", ".", "metric", ".", "EvalMetric", ",", "metric_loss", ":", "Optional", "[", "mx", ".", "metric", ".", "EvalMetric", "]", "=", "None", ")", ":", "if", "model", ".", "monitor", "is", "not", "None", ":", "model", ".", "monitor", ".", "tic", "(", ")", "####################", "# Forward & Backward", "####################", "model", ".", "run_forward_backward", "(", "batch", ",", "metric_train", ")", "# If using an extended optimizer, provide extra state information about the current batch", "optimizer", "=", "model", ".", "optimizer", "if", "metric_loss", "is", "not", "None", "and", "isinstance", "(", "optimizer", ",", "SockeyeOptimizer", ")", ":", "# Loss for this batch", "metric_loss", ".", "reset", "(", ")", "metric_loss", ".", "update", "(", "batch", ".", "label", ",", "model", ".", "module", ".", "get_outputs", "(", ")", ")", "[", "(", "_", ",", "m_val", ")", "]", "=", "metric_loss", ".", "get_name_value", "(", ")", "batch_state", "=", "BatchState", "(", "metric_val", "=", "m_val", ")", "optimizer", ".", "pre_update_batch", "(", "batch_state", ")", "########", "# UPDATE", "########", "if", "self", ".", "update_interval", "==", "1", "or", "self", ".", "state", ".", "batches", "%", "self", ".", "update_interval", "==", "0", ":", "# Gradient rescaling", "gradient_norm", "=", "None", "if", "self", ".", "state", ".", "updates", ">", "0", "and", "(", "self", ".", "state", ".", "updates", "+", "1", ")", "%", "checkpoint_interval", "==", "0", ":", "# compute values for logging to metrics (before rescaling...)", "gradient_norm", "=", "self", ".", "state", ".", "gradient_norm", "=", "model", ".", "get_global_gradient_norm", "(", ")", "self", ".", "state", ".", "gradients", "=", "model", ".", "get_gradients", "(", ")", "# note: C.GRADIENT_CLIPPING_TYPE_ABS is handled by the mxnet 
optimizer directly", "if", "self", ".", "optimizer_config", ".", "gradient_clipping_type", "==", "C", ".", "GRADIENT_CLIPPING_TYPE_NORM", ":", "if", "gradient_norm", "is", "None", ":", "gradient_norm", "=", "model", ".", "get_global_gradient_norm", "(", ")", "# clip gradients", "if", "gradient_norm", ">", "self", ".", "optimizer_config", ".", "gradient_clipping_threshold", ":", "ratio", "=", "self", ".", "optimizer_config", ".", "gradient_clipping_threshold", "/", "gradient_norm", "model", ".", "rescale_gradients", "(", "ratio", ")", "model", ".", "update", "(", ")", "if", "self", ".", "update_interval", ">", "1", ":", "model", ".", "zero_gradients", "(", ")", "self", ".", "state", ".", "updates", "+=", "1", "if", "model", ".", "monitor", "is", "not", "None", ":", "results", "=", "model", ".", "monitor", ".", "toc", "(", ")", "if", "results", ":", "for", "_", ",", "k", ",", "v", "in", "results", ":", "logger", ".", "info", "(", "'Monitor: Batch [{:d}] {:s} {:s}'", ".", "format", "(", "self", ".", "state", ".", "updates", ",", "k", ",", "v", ")", ")" ]
40.95082
22.918033
def get_integrator(integrator): """Return the scipy.integrator indicated by an index, name, or integrator_function >> get_integrator(0) """ integrator_types = set(['trapz', 'cumtrapz', 'simps', 'romb']) integrator_funcs = [integrate.trapz, integrate.cumtrapz, integrate.simps, integrate.romb] if isinstance(integrator, int) and 0 <= integrator < len(integrator_types): integrator = integrator_types[integrator] if isinstance(integrator, basestring) and integrator in integrator_types: return getattr(integrate, integrator) elif integrator in integrator_funcs: return integrator else: print('Unsupported integration rule: {0}'.format(integrator)) print('Expecting one of these sample-based integration rules: %s' % (str(list(integrator_types)))) raise AttributeError return integrator
[ "def", "get_integrator", "(", "integrator", ")", ":", "integrator_types", "=", "set", "(", "[", "'trapz'", ",", "'cumtrapz'", ",", "'simps'", ",", "'romb'", "]", ")", "integrator_funcs", "=", "[", "integrate", ".", "trapz", ",", "integrate", ".", "cumtrapz", ",", "integrate", ".", "simps", ",", "integrate", ".", "romb", "]", "if", "isinstance", "(", "integrator", ",", "int", ")", "and", "0", "<=", "integrator", "<", "len", "(", "integrator_types", ")", ":", "integrator", "=", "integrator_types", "[", "integrator", "]", "if", "isinstance", "(", "integrator", ",", "basestring", ")", "and", "integrator", "in", "integrator_types", ":", "return", "getattr", "(", "integrate", ",", "integrator", ")", "elif", "integrator", "in", "integrator_funcs", ":", "return", "integrator", "else", ":", "print", "(", "'Unsupported integration rule: {0}'", ".", "format", "(", "integrator", ")", ")", "print", "(", "'Expecting one of these sample-based integration rules: %s'", "%", "(", "str", "(", "list", "(", "integrator_types", ")", ")", ")", ")", "raise", "AttributeError", "return", "integrator" ]
45
23.473684
def get_section_metrics(cls): """ Get the mapping between metrics and sections in Manuscripts report :return: a dict with the mapping between metrics and sections in Manuscripts report """ return { "overview": { "activity_metrics": [Commits], "author_metrics": [Authors], "bmi_metrics": [], "time_to_close_metrics": [], "projects_metrics": [Projects] }, "com_channels": { "activity_metrics": [], "author_metrics": [] }, "project_activity": { # TODO: Authors is not activity but we need two metrics here "metrics": [Commits, Authors] }, "project_community": { "author_metrics": [Authors], "people_top_metrics": [Authors], "orgs_top_metrics": [Organizations], }, "project_process": { "bmi_metrics": [], "time_to_close_metrics": [], "time_to_close_title": "", "time_to_close_review_metrics": [], "time_to_close_review_title": "", "patchsets_metrics": [] } }
[ "def", "get_section_metrics", "(", "cls", ")", ":", "return", "{", "\"overview\"", ":", "{", "\"activity_metrics\"", ":", "[", "Commits", "]", ",", "\"author_metrics\"", ":", "[", "Authors", "]", ",", "\"bmi_metrics\"", ":", "[", "]", ",", "\"time_to_close_metrics\"", ":", "[", "]", ",", "\"projects_metrics\"", ":", "[", "Projects", "]", "}", ",", "\"com_channels\"", ":", "{", "\"activity_metrics\"", ":", "[", "]", ",", "\"author_metrics\"", ":", "[", "]", "}", ",", "\"project_activity\"", ":", "{", "# TODO: Authors is not activity but we need two metrics here", "\"metrics\"", ":", "[", "Commits", ",", "Authors", "]", "}", ",", "\"project_community\"", ":", "{", "\"author_metrics\"", ":", "[", "Authors", "]", ",", "\"people_top_metrics\"", ":", "[", "Authors", "]", ",", "\"orgs_top_metrics\"", ":", "[", "Organizations", "]", ",", "}", ",", "\"project_process\"", ":", "{", "\"bmi_metrics\"", ":", "[", "]", ",", "\"time_to_close_metrics\"", ":", "[", "]", ",", "\"time_to_close_title\"", ":", "\"\"", ",", "\"time_to_close_review_metrics\"", ":", "[", "]", ",", "\"time_to_close_review_title\"", ":", "\"\"", ",", "\"patchsets_metrics\"", ":", "[", "]", "}", "}" ]
35.444444
13.833333
def cielab_to_msh (cielab): """Convert CIE L*a*b* to Moreland's Msh colorspace. *cielab* should be of shape (*, 3). Return value will have same shape. """ msh = np.empty_like (cielab) msh[...,M] = np.sqrt ((cielab**2).sum (axis=-1)) msh[...,S] = np.arccos (cielab[...,L] / msh[...,M]) msh[...,H] = np.arctan2 (cielab[...,B], cielab[...,A]) return msh
[ "def", "cielab_to_msh", "(", "cielab", ")", ":", "msh", "=", "np", ".", "empty_like", "(", "cielab", ")", "msh", "[", "...", ",", "M", "]", "=", "np", ".", "sqrt", "(", "(", "cielab", "**", "2", ")", ".", "sum", "(", "axis", "=", "-", "1", ")", ")", "msh", "[", "...", ",", "S", "]", "=", "np", ".", "arccos", "(", "cielab", "[", "...", ",", "L", "]", "/", "msh", "[", "...", ",", "M", "]", ")", "msh", "[", "...", ",", "H", "]", "=", "np", ".", "arctan2", "(", "cielab", "[", "...", ",", "B", "]", ",", "cielab", "[", "...", ",", "A", "]", ")", "return", "msh" ]
29
16.538462
def add_conditional_clause(self, clause): """ Adds a iff clause to this statement :param clause: The clause that will be added to the iff statement :type clause: ConditionalClause """ clause.set_context_id(self.context_counter) self.context_counter += clause.get_context_size() self.conditionals.append(clause)
[ "def", "add_conditional_clause", "(", "self", ",", "clause", ")", ":", "clause", ".", "set_context_id", "(", "self", ".", "context_counter", ")", "self", ".", "context_counter", "+=", "clause", ".", "get_context_size", "(", ")", "self", ".", "conditionals", ".", "append", "(", "clause", ")" ]
36.6
10.6
def _wrap_value_with_context(self, tokens: List[Token], start: int, end: int) -> Extraction: """Wraps the final result""" return Extraction(' '.join([x.orth_ if isinstance(x, Token) else x for x in tokens[start:end]]), self.name, start_token=start, end_token=end, start_char=tokens[start].idx if isinstance(tokens[start], Token) else -1, end_char=tokens[end - 1].idx + len(tokens[end - 1].orth_) if isinstance(tokens[end - 1], Token) else -1 )
[ "def", "_wrap_value_with_context", "(", "self", ",", "tokens", ":", "List", "[", "Token", "]", ",", "start", ":", "int", ",", "end", ":", "int", ")", "->", "Extraction", ":", "return", "Extraction", "(", "' '", ".", "join", "(", "[", "x", ".", "orth_", "if", "isinstance", "(", "x", ",", "Token", ")", "else", "x", "for", "x", "in", "tokens", "[", "start", ":", "end", "]", "]", ")", ",", "self", ".", "name", ",", "start_token", "=", "start", ",", "end_token", "=", "end", ",", "start_char", "=", "tokens", "[", "start", "]", ".", "idx", "if", "isinstance", "(", "tokens", "[", "start", "]", ",", "Token", ")", "else", "-", "1", ",", "end_char", "=", "tokens", "[", "end", "-", "1", "]", ".", "idx", "+", "len", "(", "tokens", "[", "end", "-", "1", "]", ".", "orth_", ")", "if", "isinstance", "(", "tokens", "[", "end", "-", "1", "]", ",", "Token", ")", "else", "-", "1", ")" ]
70.4
34.2
def angle(v1, v2): """Return the angle in radians between vectors 'v1' and 'v2'.""" v1_u = unit_vector(v1) v2_u = unit_vector(v2) return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))
[ "def", "angle", "(", "v1", ",", "v2", ")", ":", "v1_u", "=", "unit_vector", "(", "v1", ")", "v2_u", "=", "unit_vector", "(", "v2", ")", "return", "np", ".", "arccos", "(", "np", ".", "clip", "(", "np", ".", "dot", "(", "v1_u", ",", "v2_u", ")", ",", "-", "1.0", ",", "1.0", ")", ")" ]
39.6
14
def send(self, request, socket, context, *args): """ When an event is sent, run all relevant handlers. Relevant handlers are those without a channel pattern when the given socket is not subscribed to any particular channel, or the handlers with a channel pattern that matches any of the channels that the given socket is subscribed to. In the case of subscribe/unsubscribe, match the channel arg being sent to the channel pattern. """ for handler, pattern in self.handlers: no_channel = not pattern and not socket.channels if self.name.endswith("subscribe") and pattern: matches = [pattern.match(args[0])] else: matches = [pattern.match(c) for c in socket.channels if pattern] if no_channel or filter(None, matches): handler(request, socket, context, *args)
[ "def", "send", "(", "self", ",", "request", ",", "socket", ",", "context", ",", "*", "args", ")", ":", "for", "handler", ",", "pattern", "in", "self", ".", "handlers", ":", "no_channel", "=", "not", "pattern", "and", "not", "socket", ".", "channels", "if", "self", ".", "name", ".", "endswith", "(", "\"subscribe\"", ")", "and", "pattern", ":", "matches", "=", "[", "pattern", ".", "match", "(", "args", "[", "0", "]", ")", "]", "else", ":", "matches", "=", "[", "pattern", ".", "match", "(", "c", ")", "for", "c", "in", "socket", ".", "channels", "if", "pattern", "]", "if", "no_channel", "or", "filter", "(", "None", ",", "matches", ")", ":", "handler", "(", "request", ",", "socket", ",", "context", ",", "*", "args", ")" ]
48.210526
17.894737
def send_spyder_msg(self, spyder_msg_type, content=None, data=None): """ Publish custom messages to the Spyder frontend. Parameters ---------- spyder_msg_type: str The spyder message type content: dict The (JSONable) content of the message data: any Any object that is serializable by cloudpickle (should be most things). Will arrive as cloudpickled bytes in `.buffers[0]`. """ import cloudpickle if content is None: content = {} content['spyder_msg_type'] = spyder_msg_type msg = self.session.send( self.iopub_socket, 'spyder_msg', content=content, buffers=[cloudpickle.dumps(data, protocol=PICKLE_PROTOCOL)], parent=self._parent_header, ) self.log.debug(msg)
[ "def", "send_spyder_msg", "(", "self", ",", "spyder_msg_type", ",", "content", "=", "None", ",", "data", "=", "None", ")", ":", "import", "cloudpickle", "if", "content", "is", "None", ":", "content", "=", "{", "}", "content", "[", "'spyder_msg_type'", "]", "=", "spyder_msg_type", "msg", "=", "self", ".", "session", ".", "send", "(", "self", ".", "iopub_socket", ",", "'spyder_msg'", ",", "content", "=", "content", ",", "buffers", "=", "[", "cloudpickle", ".", "dumps", "(", "data", ",", "protocol", "=", "PICKLE_PROTOCOL", ")", "]", ",", "parent", "=", "self", ".", "_parent_header", ",", ")", "self", ".", "log", ".", "debug", "(", "msg", ")" ]
31
18.5
def _index_keys_for(self, idx_name, *ids_and_fcs): '''Returns a generator of index triples. Returns a generator of index keys for the ``ids_and_fcs`` pairs given. The index keys have the form ``(idx_name, idx_val, content_id)``. :type idx_name: unicode :type ids_and_fcs: ``[(content_id, FeatureCollection)]`` :rtype: generator of ``(str, str, str)`` ''' idx = self._index(idx_name) icreate, itrans = idx['create'], idx['transform'] if isinstance(idx_name, unicode): idx_name = idx_name.encode('utf-8') for cid_fc in ids_and_fcs: content_id = cid_fc[0] # Be sure to dedup index_values or else we may # suffer duplicate_pkey errors down the line. seen_values = set() for index_value in icreate(itrans, cid_fc): if index_value and index_value not in seen_values: yield (index_value, idx_name, content_id) seen_values.add(index_value)
[ "def", "_index_keys_for", "(", "self", ",", "idx_name", ",", "*", "ids_and_fcs", ")", ":", "idx", "=", "self", ".", "_index", "(", "idx_name", ")", "icreate", ",", "itrans", "=", "idx", "[", "'create'", "]", ",", "idx", "[", "'transform'", "]", "if", "isinstance", "(", "idx_name", ",", "unicode", ")", ":", "idx_name", "=", "idx_name", ".", "encode", "(", "'utf-8'", ")", "for", "cid_fc", "in", "ids_and_fcs", ":", "content_id", "=", "cid_fc", "[", "0", "]", "# Be sure to dedup index_values or else we may", "# suffer duplicate_pkey errors down the line.", "seen_values", "=", "set", "(", ")", "for", "index_value", "in", "icreate", "(", "itrans", ",", "cid_fc", ")", ":", "if", "index_value", "and", "index_value", "not", "in", "seen_values", ":", "yield", "(", "index_value", ",", "idx_name", ",", "content_id", ")", "seen_values", ".", "add", "(", "index_value", ")" ]
39.769231
18.384615
def optimal_reroot(self, force_positive=True, slope=None): """ determine the best root and reroot the tree to this value. Note that this can change the parent child relations of the tree and values associated with branches rather than nodes (e.g. confidence) might need to be re-evaluated afterwards Parameters ---------- force_positive : bool, optional if True, the search for a root will only consider positive rate estimates slope : float, optional if given, it will find the optimal root given a fixed rate. If slope==0, this corresponds to minimal root-to-tip variance rooting (min_dev) Returns ------- dict regression parameters """ best_root = self.find_best_root(force_positive=force_positive, slope=slope) best_node = best_root["node"] x = best_root["split"] if x<1e-5: new_node = best_node elif x>1.0-1e-5: new_node = best_node.up else: # create new node in the branch and root the tree to it new_node = Phylo.BaseTree.Clade() # insert the new node in the middle of the branch # by simple re-wiring the links on the both sides of the branch # and fix the branch lengths new_node.branch_length = best_node.branch_length*(1-x) new_node.up = best_node.up new_node.clades = [best_node] new_node.up.clades = [k if k!=best_node else new_node for k in best_node.up.clades] best_node.branch_length *= x best_node.up = new_node new_node.rtt_regression = best_root self.tree.root_with_outgroup(new_node) self.tree.ladderize() for n in self.tree.get_nonterminals(order='postorder'): for c in n: c.up=n return best_root
[ "def", "optimal_reroot", "(", "self", ",", "force_positive", "=", "True", ",", "slope", "=", "None", ")", ":", "best_root", "=", "self", ".", "find_best_root", "(", "force_positive", "=", "force_positive", ",", "slope", "=", "slope", ")", "best_node", "=", "best_root", "[", "\"node\"", "]", "x", "=", "best_root", "[", "\"split\"", "]", "if", "x", "<", "1e-5", ":", "new_node", "=", "best_node", "elif", "x", ">", "1.0", "-", "1e-5", ":", "new_node", "=", "best_node", ".", "up", "else", ":", "# create new node in the branch and root the tree to it", "new_node", "=", "Phylo", ".", "BaseTree", ".", "Clade", "(", ")", "# insert the new node in the middle of the branch", "# by simple re-wiring the links on the both sides of the branch", "# and fix the branch lengths", "new_node", ".", "branch_length", "=", "best_node", ".", "branch_length", "*", "(", "1", "-", "x", ")", "new_node", ".", "up", "=", "best_node", ".", "up", "new_node", ".", "clades", "=", "[", "best_node", "]", "new_node", ".", "up", ".", "clades", "=", "[", "k", "if", "k", "!=", "best_node", "else", "new_node", "for", "k", "in", "best_node", ".", "up", ".", "clades", "]", "best_node", ".", "branch_length", "*=", "x", "best_node", ".", "up", "=", "new_node", "new_node", ".", "rtt_regression", "=", "best_root", "self", ".", "tree", ".", "root_with_outgroup", "(", "new_node", ")", "self", ".", "tree", ".", "ladderize", "(", ")", "for", "n", "in", "self", ".", "tree", ".", "get_nonterminals", "(", "order", "=", "'postorder'", ")", ":", "for", "c", "in", "n", ":", "c", ".", "up", "=", "n", "return", "best_root" ]
36.433962
20.886792
def broadcast_event(self, event, *args): """ This is sent to all in the sockets in this particular Namespace, including itself. """ pkt = dict(type="event", name=event, args=args, endpoint=self.ns_name) for sessid, socket in six.iteritems(self.socket.server.sockets): socket.send_packet(pkt)
[ "def", "broadcast_event", "(", "self", ",", "event", ",", "*", "args", ")", ":", "pkt", "=", "dict", "(", "type", "=", "\"event\"", ",", "name", "=", "event", ",", "args", "=", "args", ",", "endpoint", "=", "self", ".", "ns_name", ")", "for", "sessid", ",", "socket", "in", "six", ".", "iteritems", "(", "self", ".", "socket", ".", "server", ".", "sockets", ")", ":", "socket", ".", "send_packet", "(", "pkt", ")" ]
33.166667
12.833333
def get(self, split_id: str) -> Split: """ load transaction by id """ query = ( self.query .filter(Split.guid == split_id) ) return query.one()
[ "def", "get", "(", "self", ",", "split_id", ":", "str", ")", "->", "Split", ":", "query", "=", "(", "self", ".", "query", ".", "filter", "(", "Split", ".", "guid", "==", "split_id", ")", ")", "return", "query", ".", "one", "(", ")" ]
27.571429
13
def _merge_wf_outputs(new, cur, parallel): """Merge outputs for a sub-workflow, replacing variables changed in later steps. ignore_ids are those used internally in a sub-workflow but not exposed to subsequent steps """ new_ids = set([]) out = [] for v in new: outv = {} outv["source"] = v["id"] outv["id"] = "%s" % get_base_id(v["id"]) outv["type"] = v["type"] if "secondaryFiles" in v: outv["secondaryFiles"] = v["secondaryFiles"] if tz.get_in(["outputBinding", "secondaryFiles"], v): outv["secondaryFiles"] = tz.get_in(["outputBinding", "secondaryFiles"], v) new_ids.add(outv["id"]) out.append(outv) for outv in cur: if outv["id"] not in new_ids: out.append(outv) return out
[ "def", "_merge_wf_outputs", "(", "new", ",", "cur", ",", "parallel", ")", ":", "new_ids", "=", "set", "(", "[", "]", ")", "out", "=", "[", "]", "for", "v", "in", "new", ":", "outv", "=", "{", "}", "outv", "[", "\"source\"", "]", "=", "v", "[", "\"id\"", "]", "outv", "[", "\"id\"", "]", "=", "\"%s\"", "%", "get_base_id", "(", "v", "[", "\"id\"", "]", ")", "outv", "[", "\"type\"", "]", "=", "v", "[", "\"type\"", "]", "if", "\"secondaryFiles\"", "in", "v", ":", "outv", "[", "\"secondaryFiles\"", "]", "=", "v", "[", "\"secondaryFiles\"", "]", "if", "tz", ".", "get_in", "(", "[", "\"outputBinding\"", ",", "\"secondaryFiles\"", "]", ",", "v", ")", ":", "outv", "[", "\"secondaryFiles\"", "]", "=", "tz", ".", "get_in", "(", "[", "\"outputBinding\"", ",", "\"secondaryFiles\"", "]", ",", "v", ")", "new_ids", ".", "add", "(", "outv", "[", "\"id\"", "]", ")", "out", ".", "append", "(", "outv", ")", "for", "outv", "in", "cur", ":", "if", "outv", "[", "\"id\"", "]", "not", "in", "new_ids", ":", "out", ".", "append", "(", "outv", ")", "return", "out" ]
36.181818
17.681818
def main(): """The main entry """ args = getArguments() try: if args.action == 'init': # Ask for init while True: print 'Initialize the path [%s] will cause any files or dirs be removed, continue?[y/n]' % args.basePath, text = raw_input() if text.lower() == 'n': print 'Will not initialize the path, exit' return 1 elif text.lower() == 'y': # Initialize manager = CertificateManager(args.basePath) manager.init() return 0 else: print 'Invalid input' elif args.action == 'verify': # Verify the certificate manager = CertificateManager(args.basePath) manager.verifyCertificate(args.name) return 0 elif args.action == 'createRootCert': # Create the root cert manager = CertificateManager(args.basePath) manager.createRootCertificate(args.noPass, int(args.keyLength)) return 0 elif args.action == 'createServerCert': # Create the server cert manager = CertificateManager(args.basePath) manager.createServerCertificate(args.name, not args.usePass, args.keyLength, args.days) return 0 elif args.action == 'createClientCert': # Create the client cert manager = CertificateManager(args.basePath) manager.createClientCertificate(args.name, not args.usePass, args.keyLength, args.days) return 0 else: logger.error('Unknown argument action [%s]', args.action) return 1 except ValueError as error: logger.error(error.message) return 1 except KeyboardInterrupt: logger.error('User interrupted') return 1 except: logger.exception('Unhandled exception occurred') return 1
[ "def", "main", "(", ")", ":", "args", "=", "getArguments", "(", ")", "try", ":", "if", "args", ".", "action", "==", "'init'", ":", "# Ask for init", "while", "True", ":", "print", "'Initialize the path [%s] will cause any files or dirs be removed, continue?[y/n]'", "%", "args", ".", "basePath", ",", "text", "=", "raw_input", "(", ")", "if", "text", ".", "lower", "(", ")", "==", "'n'", ":", "print", "'Will not initialize the path, exit'", "return", "1", "elif", "text", ".", "lower", "(", ")", "==", "'y'", ":", "# Initialize", "manager", "=", "CertificateManager", "(", "args", ".", "basePath", ")", "manager", ".", "init", "(", ")", "return", "0", "else", ":", "print", "'Invalid input'", "elif", "args", ".", "action", "==", "'verify'", ":", "# Verify the certificate", "manager", "=", "CertificateManager", "(", "args", ".", "basePath", ")", "manager", ".", "verifyCertificate", "(", "args", ".", "name", ")", "return", "0", "elif", "args", ".", "action", "==", "'createRootCert'", ":", "# Create the root cert", "manager", "=", "CertificateManager", "(", "args", ".", "basePath", ")", "manager", ".", "createRootCertificate", "(", "args", ".", "noPass", ",", "int", "(", "args", ".", "keyLength", ")", ")", "return", "0", "elif", "args", ".", "action", "==", "'createServerCert'", ":", "# Create the server cert", "manager", "=", "CertificateManager", "(", "args", ".", "basePath", ")", "manager", ".", "createServerCertificate", "(", "args", ".", "name", ",", "not", "args", ".", "usePass", ",", "args", ".", "keyLength", ",", "args", ".", "days", ")", "return", "0", "elif", "args", ".", "action", "==", "'createClientCert'", ":", "# Create the client cert", "manager", "=", "CertificateManager", "(", "args", ".", "basePath", ")", "manager", ".", "createClientCertificate", "(", "args", ".", "name", ",", "not", "args", ".", "usePass", ",", "args", ".", "keyLength", ",", "args", ".", "days", ")", "return", "0", "else", ":", "logger", ".", "error", "(", "'Unknown argument action 
[%s]'", ",", "args", ".", "action", ")", "return", "1", "except", "ValueError", "as", "error", ":", "logger", ".", "error", "(", "error", ".", "message", ")", "return", "1", "except", "KeyboardInterrupt", ":", "logger", ".", "error", "(", "'User interrupted'", ")", "return", "1", "except", ":", "logger", ".", "exception", "(", "'Unhandled exception occurred'", ")", "return", "1" ]
38.25
16.634615
def get_application(self, id=None, name=None): """ Get application object by name or id. """ log.info("Picking application: %s (%s)" % (name, id)) return self.applications[id or name]
[ "def", "get_application", "(", "self", ",", "id", "=", "None", ",", "name", "=", "None", ")", ":", "log", ".", "info", "(", "\"Picking application: %s (%s)\"", "%", "(", "name", ",", "id", ")", ")", "return", "self", ".", "applications", "[", "id", "or", "name", "]" ]
42.2
6.2
def GetCellStyle (self, column, row = None): """ get the "style" of a cell note: this merges default, column, row and the cell's style into the actual style for you returning a dictionary of styles The values directly from excel with the relavent XML element appended to the name. Examples of style key/value pairs --key--------------------value----------------------- Alignment_ss:Vertical : Bottom Font_ss:Color : #0000FF Font_x:Family : Decorative Font_ss:Size : 11 Font_ss:FontName : Algerian Interior_ss:Pattern : Solid Interior_ss:Color : #808000 """ if row == None: (row, column) = ParseCellSpec(column) style = { } # get style for cell if row in self: merge(style, self[row].GetCellStyle(column)) # merge with row style if row in self.rowStyles: merge(style, self.rowStyles[row]) # merge with column style if isinstance(column, str): column = ColumnToIndex(column) if column in self.columnStyles: merge(style, self.columnStyles[column]) # merge with default style merge(style, self.defaultStyle) return style
[ "def", "GetCellStyle", "(", "self", ",", "column", ",", "row", "=", "None", ")", ":", "if", "row", "==", "None", ":", "(", "row", ",", "column", ")", "=", "ParseCellSpec", "(", "column", ")", "style", "=", "{", "}", "# get style for cell\r", "if", "row", "in", "self", ":", "merge", "(", "style", ",", "self", "[", "row", "]", ".", "GetCellStyle", "(", "column", ")", ")", "# merge with row style\r", "if", "row", "in", "self", ".", "rowStyles", ":", "merge", "(", "style", ",", "self", ".", "rowStyles", "[", "row", "]", ")", "# merge with column style\r", "if", "isinstance", "(", "column", ",", "str", ")", ":", "column", "=", "ColumnToIndex", "(", "column", ")", "if", "column", "in", "self", ".", "columnStyles", ":", "merge", "(", "style", ",", "self", ".", "columnStyles", "[", "column", "]", ")", "# merge with default style\r", "merge", "(", "style", ",", "self", ".", "defaultStyle", ")", "return", "style" ]
52.935484
18.741935
def browserify_file(entry_point, output_file, babelify=False, export_as=None): """ Browserify a single javascript entry point plus non-external dependencies into a single javascript file. Generates source maps in debug mode. Minifies the output in release mode. By default, it is not possible to ``require()`` any exports from the entry point or included files. If ``export_as`` is specified, any module exports in the specified entry point are exposed for ``require()`` with the name specified by ``export_as``. """ from .modules import browserify if not isinstance(entry_point, str): raise RuntimeError('Browserify File compiler takes a single entry point as input.') return { 'dependencies_fn': browserify.browserify_deps_file, 'compiler_fn': browserify.browserify_compile_file, 'input': entry_point, 'output': output_file, 'kwargs': { 'babelify': babelify, 'export_as': export_as, }, }
[ "def", "browserify_file", "(", "entry_point", ",", "output_file", ",", "babelify", "=", "False", ",", "export_as", "=", "None", ")", ":", "from", ".", "modules", "import", "browserify", "if", "not", "isinstance", "(", "entry_point", ",", "str", ")", ":", "raise", "RuntimeError", "(", "'Browserify File compiler takes a single entry point as input.'", ")", "return", "{", "'dependencies_fn'", ":", "browserify", ".", "browserify_deps_file", ",", "'compiler_fn'", ":", "browserify", ".", "browserify_compile_file", ",", "'input'", ":", "entry_point", ",", "'output'", ":", "output_file", ",", "'kwargs'", ":", "{", "'babelify'", ":", "babelify", ",", "'export_as'", ":", "export_as", ",", "}", ",", "}" ]
38.423077
22.192308
def _generate_ffmpeg_cmd(
    self,
    cmd: List[str],
    input_source: Optional[str],
    output: Optional[str],
    extra_cmd: Optional[str] = None,
) -> None:
    """Generate ffmpeg command line.

    Rebuilds ``self._argv`` from scratch in ffmpeg's positional order:
    binary, input, base command, user extras, filters, output. Argument
    order is significant to ffmpeg, so the statement order below must not
    change.

    :param cmd: base ffmpeg arguments appended after the input.
    :param input_source: input source handed to ``self._put_input``
        (presumably becomes the ``-i`` argument -- defined elsewhere);
        skipped entirely when ``None``.
    :param output: output target handed to ``self._put_output``.
    :param extra_cmd: optional user-supplied argument string, shell-split
        with ``shlex`` and appended after the base command.
    """
    self._argv = [self._ffmpeg]  # start command init

    if input_source is not None:
        self._put_input(input_source)
    self._argv.extend(cmd)

    # exists a extra cmd from customer -- split like a shell would so
    # quoted arguments survive as single tokens
    if extra_cmd is not None:
        self._argv.extend(shlex.split(extra_cmd))
    self._merge_filters()
    self._put_output(output)
[ "def", "_generate_ffmpeg_cmd", "(", "self", ",", "cmd", ":", "List", "[", "str", "]", ",", "input_source", ":", "Optional", "[", "str", "]", ",", "output", ":", "Optional", "[", "str", "]", ",", "extra_cmd", ":", "Optional", "[", "str", "]", "=", "None", ",", ")", "->", "None", ":", "self", ".", "_argv", "=", "[", "self", ".", "_ffmpeg", "]", "# start command init", "if", "input_source", "is", "not", "None", ":", "self", ".", "_put_input", "(", "input_source", ")", "self", ".", "_argv", ".", "extend", "(", "cmd", ")", "# exists a extra cmd from customer", "if", "extra_cmd", "is", "not", "None", ":", "self", ".", "_argv", ".", "extend", "(", "shlex", ".", "split", "(", "extra_cmd", ")", ")", "self", ".", "_merge_filters", "(", ")", "self", ".", "_put_output", "(", "output", ")" ]
27.761905
13.904762
def _ensure_sparse_format(spmatrix, accept_sparse, dtype, order, copy,
                          force_all_finite):
    """Convert a sparse matrix to a given format.

    Checks the sparse format of spmatrix and converts if necessary.

    Parameters
    ----------
    spmatrix : scipy sparse matrix
        Input to validate and convert.

    accept_sparse : string, list of string or None (default=None)
        String[s] representing allowed sparse matrix formats ('csc',
        'csr', 'coo', 'dok', 'bsr', 'lil', 'dia'). None means that sparse
        matrix input will raise an error.  If the input is sparse but not in
        the allowed format, it will be converted to the first listed format.

    dtype : string, type or None (default=none)
        Data type of result. If None, the dtype of the input is preserved.

    order : 'F', 'C' or None (default=None)
        Whether an array will be forced to be fortran or c-style.

    copy : boolean (default=False)
        Whether a forced copy will be triggered. If copy=False, a copy might
        be triggered by a conversion.

    force_all_finite : boolean (default=True)
        Whether to raise an error on np.inf and np.nan in X.

    Returns
    -------
    spmatrix_converted : scipy sparse matrix.
        Matrix that is ensured to have an allowed type.
    """
    if accept_sparse is None:
        raise TypeError('A sparse matrix was passed, but dense '
                        'data is required. Use X.toarray() to '
                        'convert to a dense numpy array.')
    # NOTE(review): if accept_sparse is a plain string (e.g. 'csr'), the
    # `in` test below does substring matching -- confirm callers always
    # pass a list of format strings.
    sparse_type = spmatrix.format
    if dtype is None:
        dtype = spmatrix.dtype
    if sparse_type in accept_sparse:
        # correct type
        if dtype == spmatrix.dtype:
            # correct dtype
            if copy:
                spmatrix = spmatrix.copy()
        else:
            # convert dtype; astype returns a new matrix, so the `copy`
            # request is implicitly satisfied on this path
            spmatrix = spmatrix.astype(dtype)
    else:
        # create new matrix in the first accepted format
        spmatrix = spmatrix.asformat(accept_sparse[0]).astype(dtype)
    if force_all_finite:
        # some formats (e.g. dok) have no flat .data array to scan
        if not hasattr(spmatrix, "data"):
            warnings.warn("Can't check %s sparse matrix for nan or inf."
                          % spmatrix.format)
        else:
            _assert_all_finite(spmatrix.data)
    if hasattr(spmatrix, "data"):
        # copy=False: only re-layouts the data buffer if `order` demands it
        spmatrix.data = np.array(spmatrix.data, copy=False, order=order)
    return spmatrix
[ "def", "_ensure_sparse_format", "(", "spmatrix", ",", "accept_sparse", ",", "dtype", ",", "order", ",", "copy", ",", "force_all_finite", ")", ":", "if", "accept_sparse", "is", "None", ":", "raise", "TypeError", "(", "'A sparse matrix was passed, but dense '", "'data is required. Use X.toarray() to '", "'convert to a dense numpy array.'", ")", "sparse_type", "=", "spmatrix", ".", "format", "if", "dtype", "is", "None", ":", "dtype", "=", "spmatrix", ".", "dtype", "if", "sparse_type", "in", "accept_sparse", ":", "# correct type", "if", "dtype", "==", "spmatrix", ".", "dtype", ":", "# correct dtype", "if", "copy", ":", "spmatrix", "=", "spmatrix", ".", "copy", "(", ")", "else", ":", "# convert dtype", "spmatrix", "=", "spmatrix", ".", "astype", "(", "dtype", ")", "else", ":", "# create new", "spmatrix", "=", "spmatrix", ".", "asformat", "(", "accept_sparse", "[", "0", "]", ")", ".", "astype", "(", "dtype", ")", "if", "force_all_finite", ":", "if", "not", "hasattr", "(", "spmatrix", ",", "\"data\"", ")", ":", "warnings", ".", "warn", "(", "\"Can't check %s sparse matrix for nan or inf.\"", "%", "spmatrix", ".", "format", ")", "else", ":", "_assert_all_finite", "(", "spmatrix", ".", "data", ")", "if", "hasattr", "(", "spmatrix", ",", "\"data\"", ")", ":", "spmatrix", ".", "data", "=", "np", ".", "array", "(", "spmatrix", ".", "data", ",", "copy", "=", "False", ",", "order", "=", "order", ")", "return", "spmatrix" ]
36.873016
19.984127
def copy(self, name, exclude_from_current=False, deep_copy=False):
    """
    Create a copy of this cell.

    Parameters
    ----------
    name : string
        The name of the cell.
    exclude_from_current : bool
        If ``True``, the cell will not be included in the global list of
        cells maintained by ``gdspy``.
    deep_copy : bool
        If ``False``, the new cell will contain only references to the
        existing elements.  If ``True``, copies of all elements are also
        created.

    Returns
    -------
    out : ``Cell``
        The new copy of this cell.
    """
    cell_copy = Cell(name, exclude_from_current)
    if not deep_copy:
        # Shallow copy: new lists, but shared element/label objects.
        cell_copy.elements = list(self.elements)
        cell_copy.labels = list(self.labels)
        return cell_copy
    cell_copy.elements = libCopy.deepcopy(self.elements)
    cell_copy.labels = libCopy.deepcopy(self.labels)
    # Cached bounding boxes of dependencies are stale for the copies.
    for dependency in cell_copy.get_dependencies(True):
        if dependency._bb_valid:
            dependency._bb_valid = False
    return cell_copy
[ "def", "copy", "(", "self", ",", "name", ",", "exclude_from_current", "=", "False", ",", "deep_copy", "=", "False", ")", ":", "new_cell", "=", "Cell", "(", "name", ",", "exclude_from_current", ")", "if", "deep_copy", ":", "new_cell", ".", "elements", "=", "libCopy", ".", "deepcopy", "(", "self", ".", "elements", ")", "new_cell", ".", "labels", "=", "libCopy", ".", "deepcopy", "(", "self", ".", "labels", ")", "for", "ref", "in", "new_cell", ".", "get_dependencies", "(", "True", ")", ":", "if", "ref", ".", "_bb_valid", ":", "ref", ".", "_bb_valid", "=", "False", "else", ":", "new_cell", ".", "elements", "=", "list", "(", "self", ".", "elements", ")", "new_cell", ".", "labels", "=", "list", "(", "self", ".", "labels", ")", "return", "new_cell" ]
34.625
16.8125
def add_parser_arguments(parser, args, group=None, prefix=DATA_PREFIX):
    """
    Populate ``parser`` with one ``--`` option per entry of ``args``.

    ``args`` maps result keys to kwargs for ``parser.add_argument``.  The
    special ``arg`` kwarg overrides the generated option name (which is
    otherwise the key with underscores turned into dashes).  ``metavar``
    defaults to the upper-cased key, and every destination is prefixed with
    ``prefix`` so `extract_arguments` can find the values later.  When
    ``group`` is a string it becomes an argument-group header in the help
    output.
    """
    if group:
        parser = parser.add_argument_group(group)
    for arg, kwargs in iteritems(args):
        arg_name = kwargs.pop('arg', arg.replace('_', '-'))
        kwargs.setdefault('metavar', arg.upper())
        # Prefix the destination; fall back to the key when none is given.
        kwargs['dest'] = prefix + kwargs.get('dest', arg)
        parser.add_argument('--' + arg_name, **kwargs)
[ "def", "add_parser_arguments", "(", "parser", ",", "args", ",", "group", "=", "None", ",", "prefix", "=", "DATA_PREFIX", ")", ":", "if", "group", ":", "parser", "=", "parser", ".", "add_argument_group", "(", "group", ")", "for", "arg", ",", "kwargs", "in", "iteritems", "(", "args", ")", ":", "arg_name", "=", "kwargs", ".", "pop", "(", "'arg'", ",", "arg", ".", "replace", "(", "'_'", ",", "'-'", ")", ")", "if", "'metavar'", "not", "in", "kwargs", ":", "kwargs", "[", "'metavar'", "]", "=", "arg", ".", "upper", "(", ")", "if", "'dest'", "in", "kwargs", ":", "kwargs", "[", "'dest'", "]", "=", "prefix", "+", "kwargs", "[", "'dest'", "]", "else", ":", "kwargs", "[", "'dest'", "]", "=", "prefix", "+", "arg", "parser", ".", "add_argument", "(", "'--'", "+", "arg_name", ",", "*", "*", "kwargs", ")" ]
44.375
19.791667
def closeEvent(self, event):
    """Persist app state when the main window closes.

    Qt close-event hook: saves the recent-dataset list (trimmed to the
    configured maximum) and the window geometry/docking state into the
    module-level ``settings`` store, then accepts the event so the window
    actually closes.
    """
    max_dataset_history = self.value('max_dataset_history')
    # Trim and persist the recent-dataset list (helper defined elsewhere).
    keep_recent_datasets(max_dataset_history, self.info)

    settings.setValue('window/geometry', self.saveGeometry())
    settings.setValue('window/state', self.saveState())

    event.accept()
[ "def", "closeEvent", "(", "self", ",", "event", ")", ":", "max_dataset_history", "=", "self", ".", "value", "(", "'max_dataset_history'", ")", "keep_recent_datasets", "(", "max_dataset_history", ",", "self", ".", "info", ")", "settings", ".", "setValue", "(", "'window/geometry'", ",", "self", ".", "saveGeometry", "(", ")", ")", "settings", ".", "setValue", "(", "'window/state'", ",", "self", ".", "saveState", "(", ")", ")", "event", ".", "accept", "(", ")" ]
38.888889
21.888889
def api_server(connection, server_class):
    """
    Establishes an API Server on the supplied connection

    Arguments:
        - connection (xbahn.connection.Connection)
        - server_class (xbahn.api.Server)

    Returns:
        - server_class: server instance
    """
    # The same connection carries both incoming messages and our replies.
    link = xbahn.connection.link.Link(
        receive=connection,
        respond=connection,
    )
    # run api server on that bidirectional link
    return server_class(link=link)
[ "def", "api_server", "(", "connection", ",", "server_class", ")", ":", "# run api server on connection", "return", "server_class", "(", "link", "=", "xbahn", ".", "connection", ".", "link", ".", "Link", "(", "# use the connection to receive messages", "receive", "=", "connection", ",", "# use the connection to respond to received messages", "respond", "=", "connection", ")", ")" ]
26.47619
16.47619
def object_path(self, key):
    '''Return the absolute path for `key` under the store root.'''
    relative = self.relative_object_path(key)
    return os.path.join(self.root_path, relative)
[ "def", "object_path", "(", "self", ",", "key", ")", ":", "return", "os", ".", "path", ".", "join", "(", "self", ".", "root_path", ",", "self", ".", "relative_object_path", "(", "key", ")", ")" ]
47
15.666667
def _make_inputnode(self, frequency):
    """
    Generates an input node for the given frequency. It also adds implicit
    file format conversion nodes to the pipeline.

    Parameters
    ----------
    frequency : str
        The frequency (i.e. 'per_session', 'per_visit', 'per_subject' or
        'per_study') of the input node to retrieve

    Returns
    -------
    inputnode : IdentityInterface node
        The newly created input node, already connected to every consumer
        registered in ``self._input_conns`` / ``self._iterator_conns``.
    """
    # Check to see whether there are any outputs for the given frequency
    inputs = list(self.frequency_inputs(frequency))
    # Get list of input names for the requested frequency, addding fields
    # to hold iterator IDs
    input_names = [i.name for i in inputs]
    input_names.extend(self.study.FREQUENCIES[frequency])
    if not input_names:
        # NOTE(review): "freqency" typo below is user-visible; fix in a
        # behavior-changing commit, not here.
        raise ArcanaError(
            "No inputs to '{}' pipeline for requested freqency '{}'"
            .format(self.name, frequency))
    # Generate input node and connect it to appropriate nodes
    inputnode = self.add('{}_inputnode'.format(frequency),
                         IdentityInterface(fields=input_names))
    # Loop through list of nodes connected to study data specs and
    # connect them to the newly created input node
    for input in inputs:  # @ReservedAssignment
        # Keep track of previous conversion nodes to avoid replicating the
        # conversion for inputs that are used in multiple places
        prev_conv_nodes = {}
        for (node, node_in, format,  # @ReservedAssignment @IgnorePep8
             conv_kwargs) in self._input_conns[input.name]:
            # If fileset formats differ between study and pipeline
            # inputs create converter node (if one hasn't been already)
            # and connect input to that before connecting to inputnode
            if self.requires_conversion(input, format):
                try:
                    conv = format.converter_from(input.format,
                                                 **conv_kwargs)
                except ArcanaNoConverterError as e:
                    # Enrich the error with the exact connection that
                    # needed the missing converter, then re-raise.
                    e.msg += (
                        "which is required to convert '{}' from {} to {} "
                        "for '{}' input of '{}' node".format(
                            input.name, input.format, format, node_in,
                            node.name))
                    raise e
                try:
                    # Reuse an existing converter node for this format
                    in_node = prev_conv_nodes[format.name]
                except KeyError:
                    in_node = prev_conv_nodes[format.name] = self.add(
                        'conv_{}_to_{}_format'.format(input.name,
                                                      format.name),
                        conv.interface,
                        inputs={conv.input: (inputnode, input.name)},
                        requirements=conv.requirements,
                        mem_gb=conv.mem_gb,
                        wall_time=conv.wall_time)
                in_node_out = conv.output
            else:
                in_node = inputnode
                in_node_out = input.name
            self.connect(in_node, in_node_out, node, node_in)
    # Connect iterator inputs
    for iterator, conns in self._iterator_conns.items():
        # Check to see if this is the right frequency for the iterator
        # input, i.e. if it is the only iterator for this frequency
        if self.study.FREQUENCIES[frequency] == (iterator,):
            for (node, node_in, format) in conns:  # @ReservedAssignment
                self.connect(inputnode, iterator, node, node_in)
    return inputnode
[ "def", "_make_inputnode", "(", "self", ",", "frequency", ")", ":", "# Check to see whether there are any outputs for the given frequency", "inputs", "=", "list", "(", "self", ".", "frequency_inputs", "(", "frequency", ")", ")", "# Get list of input names for the requested frequency, addding fields", "# to hold iterator IDs", "input_names", "=", "[", "i", ".", "name", "for", "i", "in", "inputs", "]", "input_names", ".", "extend", "(", "self", ".", "study", ".", "FREQUENCIES", "[", "frequency", "]", ")", "if", "not", "input_names", ":", "raise", "ArcanaError", "(", "\"No inputs to '{}' pipeline for requested freqency '{}'\"", ".", "format", "(", "self", ".", "name", ",", "frequency", ")", ")", "# Generate input node and connect it to appropriate nodes", "inputnode", "=", "self", ".", "add", "(", "'{}_inputnode'", ".", "format", "(", "frequency", ")", ",", "IdentityInterface", "(", "fields", "=", "input_names", ")", ")", "# Loop through list of nodes connected to study data specs and", "# connect them to the newly created input node", "for", "input", "in", "inputs", ":", "# @ReservedAssignment", "# Keep track of previous conversion nodes to avoid replicating the", "# conversion for inputs that are used in multiple places", "prev_conv_nodes", "=", "{", "}", "for", "(", "node", ",", "node_in", ",", "format", ",", "# @ReservedAssignment @IgnorePep8", "conv_kwargs", ")", "in", "self", ".", "_input_conns", "[", "input", ".", "name", "]", ":", "# If fileset formats differ between study and pipeline", "# inputs create converter node (if one hasn't been already)", "# and connect input to that before connecting to inputnode", "if", "self", ".", "requires_conversion", "(", "input", ",", "format", ")", ":", "try", ":", "conv", "=", "format", ".", "converter_from", "(", "input", ".", "format", ",", "*", "*", "conv_kwargs", ")", "except", "ArcanaNoConverterError", "as", "e", ":", "e", ".", "msg", "+=", "(", "\"which is required to convert '{}' from {} to {} \"", "\"for '{}' 
input of '{}' node\"", ".", "format", "(", "input", ".", "name", ",", "input", ".", "format", ",", "format", ",", "node_in", ",", "node", ".", "name", ")", ")", "raise", "e", "try", ":", "in_node", "=", "prev_conv_nodes", "[", "format", ".", "name", "]", "except", "KeyError", ":", "in_node", "=", "prev_conv_nodes", "[", "format", ".", "name", "]", "=", "self", ".", "add", "(", "'conv_{}_to_{}_format'", ".", "format", "(", "input", ".", "name", ",", "format", ".", "name", ")", ",", "conv", ".", "interface", ",", "inputs", "=", "{", "conv", ".", "input", ":", "(", "inputnode", ",", "input", ".", "name", ")", "}", ",", "requirements", "=", "conv", ".", "requirements", ",", "mem_gb", "=", "conv", ".", "mem_gb", ",", "wall_time", "=", "conv", ".", "wall_time", ")", "in_node_out", "=", "conv", ".", "output", "else", ":", "in_node", "=", "inputnode", "in_node_out", "=", "input", ".", "name", "self", ".", "connect", "(", "in_node", ",", "in_node_out", ",", "node", ",", "node_in", ")", "# Connect iterator inputs", "for", "iterator", ",", "conns", "in", "self", ".", "_iterator_conns", ".", "items", "(", ")", ":", "# Check to see if this is the right frequency for the iterator", "# input, i.e. if it is the only iterator for this frequency", "if", "self", ".", "study", ".", "FREQUENCIES", "[", "frequency", "]", "==", "(", "iterator", ",", ")", ":", "for", "(", "node", ",", "node_in", ",", "format", ")", "in", "conns", ":", "# @ReservedAssignment", "self", ".", "connect", "(", "inputnode", ",", "iterator", ",", "node", ",", "node_in", ")", "return", "inputnode" ]
52.757143
20.414286
def smart_url(url, obj=None):
    """
    URLs that start with @ are reversed, using the passed in arguments.
    Otherwise a straight % substitution is applied.
    """
    if "@" not in url:
        # Plain URL: substitute the object's id, if an object was given.
        return url if obj is None else url % obj.id
    args, value = url.split('@')
    if not args:
        return reverse(value)
    # `args` names an attribute on obj whose value becomes the URL arg.
    return reverse(value, args=[getattr(obj, args, None)])
[ "def", "smart_url", "(", "url", ",", "obj", "=", "None", ")", ":", "if", "url", ".", "find", "(", "\"@\"", ")", ">=", "0", ":", "(", "args", ",", "value", ")", "=", "url", ".", "split", "(", "'@'", ")", "if", "args", ":", "val", "=", "getattr", "(", "obj", ",", "args", ",", "None", ")", "return", "reverse", "(", "value", ",", "args", "=", "[", "val", "]", ")", "else", ":", "return", "reverse", "(", "value", ")", "else", ":", "if", "obj", "is", "None", ":", "return", "url", "else", ":", "return", "url", "%", "obj", ".", "id" ]
25.052632
16.631579
async def response_writer(self, stream, response_queue):
    """
    :py:func:`asyncio.coroutine`

    Worker for write_response with current connection. Get data to response
    from queue, this is for right order of responses. Exits if received
    :py:class:`None`.

    :param stream: command connection stream
    :type connection: :py:class:`aioftp.StreamIO`

    :param response_queue:
    :type response_queue: :py:class:`asyncio.Queue`
    """
    # Runs until the surrounding task is cancelled; each queue item is a
    # tuple of write_response positional arguments.
    while True:
        args = await response_queue.get()
        try:
            await self.write_response(stream, *args)
        finally:
            # Always mark the item done -- even on write failure -- so
            # queue.join() callers are never blocked forever.
            response_queue.task_done()
[ "async", "def", "response_writer", "(", "self", ",", "stream", ",", "response_queue", ")", ":", "while", "True", ":", "args", "=", "await", "response_queue", ".", "get", "(", ")", "try", ":", "await", "self", ".", "write_response", "(", "stream", ",", "*", "args", ")", "finally", ":", "response_queue", ".", "task_done", "(", ")" ]
33.85
18.15
def __process_by_python(self):
    """!
    @brief Performs processing using python implementation.

    Runs k-means once for every candidate cluster count in
    [__kmin, __kmax), records each run's total within-cluster error (WCE),
    then derives the elbow curve and the optimal amount of clusters.
    """
    for amount in range(self.__kmin, self.__kmax):
        # Fresh initial centers for each candidate cluster count.
        centers = self.__initializer(self.__data, amount).initialize()
        # NOTE(review): ccore=True delegates kmeans to the C implementation
        # even though this is the "python" processing path -- confirm
        # intended.
        instance = kmeans(self.__data, centers, ccore=True)
        instance.process()
        self.__wce.append(instance.get_total_wce())

    self.__calculate_elbows()
    self.__find_optimal_kvalue()
[ "def", "__process_by_python", "(", "self", ")", ":", "for", "amount", "in", "range", "(", "self", ".", "__kmin", ",", "self", ".", "__kmax", ")", ":", "centers", "=", "self", ".", "__initializer", "(", "self", ".", "__data", ",", "amount", ")", ".", "initialize", "(", ")", "instance", "=", "kmeans", "(", "self", ".", "__data", ",", "centers", ",", "ccore", "=", "True", ")", "instance", ".", "process", "(", ")", "self", ".", "__wce", ".", "append", "(", "instance", ".", "get_total_wce", "(", ")", ")", "self", ".", "__calculate_elbows", "(", ")", "self", ".", "__find_optimal_kvalue", "(", ")" ]
33.857143
18.5
def post_create_app(cls, app, **settings):
    """Init the extension for our chosen ORM Backend, if possible.

    This method will ensure that the ``db`` proxy is set to the right
    extension and that that extension is properly created and configured.
    Since it needs to call ``init_app`` it MUST be a Post Create Hook.

    If the chosen backend is PeeWee and no ``DATABASE`` config value is
    provided, we will delay initializing the extension until one is.

    Args:
        app (flask.Flask): The Flask application that was just made through
            the :meth:`create_app` factory that we should bind extensions
            to.

    Kwargs:
        orm_backend (str): If you want to explicitly specify an ORM Backend
            to use, you should send it in this kwarg. Valid values are
            either: ``'peewee'`` or ``'sqlalchemy'``.
        peewee_database (str): An explicit database connection URI we should
            immeditately add to the configuration that should be used to
            configure the PeeWee ORM Backend. This will result in the
            ``DATABASE`` key being set to this value in the config and will
            result in the PeeWee Flask extension being initialized
            IMMEDIATELY and not delayed until the next call to
            :meth:`configure`.

    Returns:
        flask.Flask: Returns the app it was given once this is done.

    Raises:
        RuntimeError: This is raised if we are asked to create the PeeWee
            ORM, but are not given a database URI in either the
            ``DATABASE`` config value, or the explicit ``peewee_database``
            setting.
    """
    # Module-level sentinel records which backend the process committed to;
    # switching backends after that point is an error.
    global _SELECTED_BACKEND

    backend = settings.pop('orm_backend', None)
    backend = _discover_ideal_backend(backend)

    # did not specify a backend, bail early
    if backend is MISSING:
        return app

    _swap_backends_error = ('Cannot swap ORM backends after one is '
                            'declared!')

    if backend == _PEEWEE_BACKEND:
        if (_SELECTED_BACKEND is not MISSING and
                _SELECTED_BACKEND != _PEEWEE_EXT):
            raise RuntimeError(_swap_backends_error)

        # @TODO (orm): Does this really need to be ``peewee_database``? can
        # it be ``orm_database``?
        database_uri = settings.pop('peewee_database', None)
        if database_uri:
            app.config['DATABASE'] = database_uri

        if 'DATABASE' not in app.config:
            # since there is no DATABASE in the config, we need to wait
            # until we init this; so we'll just do it after configure is
            # called.
            try:
                app.add_post_configure_callback(
                    partial(cls._init_peewee_ext, app),
                    run_once=True
                )
            except NotImplementedError:
                # this composed app doesn't implement multi-stage
                # configuration, so there's no way we can proceed without
                # an explicit DB =/; yes it's possible this could swallow
                # another error, but if it does... the easiest fix is to do
                # the same
                # @TODO (docs): Multi Stage Configuration should be in
                # the docs
                err_msg = """\
                The app you are trying to construct does not support
                Multi Stage Configuration and no connection info for the
                database was given at creation! Please call `create_app`
                again and provide your database connection string as the
                `peewee_database` kwarg!\
                """
                raise RuntimeError(err_msg)
        else:
            # the DATABASE is already present, go ahead and just init now
            cls._init_peewee_ext(app)

        _SELECTED_BACKEND = _PEEWEE_EXT
    elif backend == _SQLALCHEMY_BACKEND:
        # @TODO (orm): Finish SQLA implementation
        # do sqla bootstrap code
        if (_SELECTED_BACKEND is not MISSING and
                _SELECTED_BACKEND != _SQLA_EXT):
            raise RuntimeError(_swap_backends_error)

        _SELECTED_BACKEND = _SQLA_EXT
        _SQLA_EXT.init_app(app)
    else:
        err_msg = ("Explicit ORM backend provided, but could not recognize"
                   " the value! Valid values are: '{}' and '{}';"
                   " received: '{}' instead!")
        err_msg = err_msg.format(_PEEWEE_BACKEND, _SQLALCHEMY_BACKEND,
                                 backend)
        raise RuntimeError(err_msg)

    return app
[ "def", "post_create_app", "(", "cls", ",", "app", ",", "*", "*", "settings", ")", ":", "global", "_SELECTED_BACKEND", "backend", "=", "settings", ".", "pop", "(", "'orm_backend'", ",", "None", ")", "backend", "=", "_discover_ideal_backend", "(", "backend", ")", "# did not specify a backend, bail early", "if", "backend", "is", "MISSING", ":", "return", "app", "_swap_backends_error", "=", "(", "'Cannot swap ORM backends after one is '", "'declared!'", ")", "if", "backend", "==", "_PEEWEE_BACKEND", ":", "if", "(", "_SELECTED_BACKEND", "is", "not", "MISSING", "and", "_SELECTED_BACKEND", "!=", "_PEEWEE_EXT", ")", ":", "raise", "RuntimeError", "(", "_swap_backends_error", ")", "# @TODO (orm): Does this really need to be ``peewee_database``? can", "# it be ``orm_database``?", "database_uri", "=", "settings", ".", "pop", "(", "'peewee_database'", ",", "None", ")", "if", "database_uri", ":", "app", ".", "config", "[", "'DATABASE'", "]", "=", "database_uri", "if", "'DATABASE'", "not", "in", "app", ".", "config", ":", "# since there is no DATABASE in the config, we need to wait", "# until we init this; so we'll just do it after configure is", "# called.", "try", ":", "app", ".", "add_post_configure_callback", "(", "partial", "(", "cls", ".", "_init_peewee_ext", ",", "app", ")", ",", "run_once", "=", "True", ")", "except", "NotImplementedError", ":", "# this composed app doesn't implement multi-stage", "# configuration, so there's no way we can proceed without", "# an explicit DB =/; yes it's possible this could swallow", "# another error, but if it does... the easiest fix is to do", "# the same", "# @TODO (docs): Multi Stage Configuration should be in", "# the docs", "err_msg", "=", "\"\"\"\\\n The app you are trying to construct does not support\n Multi Stage Configuration and no connection info for the\n database was given at creation! 
Please call `create_app`\n again and provide your database connection string as the\n `peewee_database` kwarg!\\\n \"\"\"", "raise", "RuntimeError", "(", "err_msg", ")", "else", ":", "# the DATABASE is already present, go ahead and just init now", "cls", ".", "_init_peewee_ext", "(", "app", ")", "_SELECTED_BACKEND", "=", "_PEEWEE_EXT", "elif", "backend", "==", "_SQLALCHEMY_BACKEND", ":", "# @TODO (orm): Finish SQLA implementation", "# do sqla bootstrap code", "if", "(", "_SELECTED_BACKEND", "is", "not", "MISSING", "and", "_SELECTED_BACKEND", "!=", "_SQLA_EXT", ")", ":", "raise", "RuntimeError", "(", "_swap_backends_error", ")", "_SELECTED_BACKEND", "=", "_SQLA_EXT", "_SQLA_EXT", ".", "init_app", "(", "app", ")", "else", ":", "err_msg", "=", "(", "\"Explicit ORM backend provided, but could not recognize\"", "\" the value! Valid values are: '{}' and '{}';\"", "\" received: '{}' instead!\"", ")", "err_msg", "=", "err_msg", ".", "format", "(", "_PEEWEE_BACKEND", ",", "_SQLALCHEMY_BACKEND", ",", "backend", ")", "raise", "RuntimeError", "(", "err_msg", ")", "return", "app" ]
44.583333
23.231481
def merge_cookies(cookiejar, cookies):
    """Merge ``cookies`` into ``cookiejar`` and return the merged jar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    :rtype: CookieJar
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')

    if isinstance(cookies, dict):
        # Existing entries win: overwrite=False keeps what's already there.
        return cookiejar_from_dict(
            cookies, cookiejar=cookiejar, overwrite=False)

    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain CookieJar has no update(); copy entries one at a time.
            for incoming in cookies:
                cookiejar.set_cookie(incoming)
    return cookiejar
[ "def", "merge_cookies", "(", "cookiejar", ",", "cookies", ")", ":", "if", "not", "isinstance", "(", "cookiejar", ",", "cookielib", ".", "CookieJar", ")", ":", "raise", "ValueError", "(", "'You can only merge into CookieJar'", ")", "if", "isinstance", "(", "cookies", ",", "dict", ")", ":", "cookiejar", "=", "cookiejar_from_dict", "(", "cookies", ",", "cookiejar", "=", "cookiejar", ",", "overwrite", "=", "False", ")", "elif", "isinstance", "(", "cookies", ",", "cookielib", ".", "CookieJar", ")", ":", "try", ":", "cookiejar", ".", "update", "(", "cookies", ")", "except", "AttributeError", ":", "for", "cookie_in_jar", "in", "cookies", ":", "cookiejar", ".", "set_cookie", "(", "cookie_in_jar", ")", "return", "cookiejar" ]
35.238095
15.619048
def check_restructuredtext(self): """ Checks if the long string fields are reST-compliant. """ # Warn that this command is deprecated # Don't use self.warn() because it will cause the check to fail. Command.warn( self, "This command has been deprecated. Use `twine check` instead: " "https://packaging.python.org/guides/making-a-pypi-friendly-readme" "#validating-restructuredtext-markup" ) data = self.distribution.get_long_description() content_type = getattr( self.distribution.metadata, 'long_description_content_type', None) if content_type: content_type, _ = cgi.parse_header(content_type) if content_type != 'text/x-rst': self.warn( "Not checking long description content type '%s', this " "command only checks 'text/x-rst'." % content_type) return # None or empty string should both trigger this branch. if not data or data == 'UNKNOWN': self.warn( "The project's long_description is either missing or empty.") return stream = _WarningStream() markup = render(data, stream=stream) if markup is None: self.warn( "The project's long_description has invalid markup which will " "not be rendered on PyPI. The following syntax errors were " "detected:\n%s" % stream) return self.announce( "The project's long description is valid RST.", level=distutils.log.INFO)
[ "def", "check_restructuredtext", "(", "self", ")", ":", "# Warn that this command is deprecated", "# Don't use self.warn() because it will cause the check to fail.", "Command", ".", "warn", "(", "self", ",", "\"This command has been deprecated. Use `twine check` instead: \"", "\"https://packaging.python.org/guides/making-a-pypi-friendly-readme\"", "\"#validating-restructuredtext-markup\"", ")", "data", "=", "self", ".", "distribution", ".", "get_long_description", "(", ")", "content_type", "=", "getattr", "(", "self", ".", "distribution", ".", "metadata", ",", "'long_description_content_type'", ",", "None", ")", "if", "content_type", ":", "content_type", ",", "_", "=", "cgi", ".", "parse_header", "(", "content_type", ")", "if", "content_type", "!=", "'text/x-rst'", ":", "self", ".", "warn", "(", "\"Not checking long description content type '%s', this \"", "\"command only checks 'text/x-rst'.\"", "%", "content_type", ")", "return", "# None or empty string should both trigger this branch.", "if", "not", "data", "or", "data", "==", "'UNKNOWN'", ":", "self", ".", "warn", "(", "\"The project's long_description is either missing or empty.\"", ")", "return", "stream", "=", "_WarningStream", "(", ")", "markup", "=", "render", "(", "data", ",", "stream", "=", "stream", ")", "if", "markup", "is", "None", ":", "self", ".", "warn", "(", "\"The project's long_description has invalid markup which will \"", "\"not be rendered on PyPI. The following syntax errors were \"", "\"detected:\\n%s\"", "%", "stream", ")", "return", "self", ".", "announce", "(", "\"The project's long description is valid RST.\"", ",", "level", "=", "distutils", ".", "log", ".", "INFO", ")" ]
37.454545
21.454545
def set_pointer0d(subseqs):
    """Set_pointer function for 0-dimensional link sequences."""
    print(' . set_pointer0d')
    code = Lines()
    header = ('cpdef inline set_pointer0d'
              '(self, str name, pointerutils.PDouble value):')
    code.add(1, header)
    # One name check per sequence, each binding the raw pointer value.
    for sequence in subseqs:
        code.add(2, 'if name == "%s":' % sequence.name)
        code.add(3, 'self.%s = value.p_value' % sequence.name)
    return code
[ "def", "set_pointer0d", "(", "subseqs", ")", ":", "print", "(", "' . set_pointer0d'", ")", "lines", "=", "Lines", "(", ")", "lines", ".", "add", "(", "1", ",", "'cpdef inline set_pointer0d'", "'(self, str name, pointerutils.PDouble value):'", ")", "for", "seq", "in", "subseqs", ":", "lines", ".", "add", "(", "2", ",", "'if name == \"%s\":'", "%", "seq", ".", "name", ")", "lines", ".", "add", "(", "3", ",", "'self.%s = value.p_value'", "%", "seq", ".", "name", ")", "return", "lines" ]
44.4
14.2
def store(self, value, l, dir_only):
    """Group patterns by literals and potential magic patterns."""
    # Skip empty segments except when nothing has been stored yet.
    if l and value in (b'', ''):
        return

    is_globstar = value in (b'**', '**') and self.globstar
    is_magic = self.is_magic(value)
    # Magic patterns are compiled; literals are kept verbatim.
    pattern = compile(value, self.flags) if is_magic else value
    l.append(WcGlob(pattern, is_magic, is_globstar, dir_only, False))
[ "def", "store", "(", "self", ",", "value", ",", "l", ",", "dir_only", ")", ":", "if", "l", "and", "value", "in", "(", "b''", ",", "''", ")", ":", "return", "globstar", "=", "value", "in", "(", "b'**'", ",", "'**'", ")", "and", "self", ".", "globstar", "magic", "=", "self", ".", "is_magic", "(", "value", ")", "if", "magic", ":", "value", "=", "compile", "(", "value", ",", "self", ".", "flags", ")", "l", ".", "append", "(", "WcGlob", "(", "value", ",", "magic", ",", "globstar", ",", "dir_only", ",", "False", ")", ")" ]
34.818182
17
def _parse_ip_stats_link_show(raw_result): """ Parse the 'ip -s link show dev <dev>' command raw output. :param str raw_result: vtysh raw result string. :rtype: dict :return: The parsed result of the show interface command in a \ dictionary of the form: :: { 'rx_bytes': 0, 'rx_packets': 0, 'rx_errors': 0, 'rx_dropped': 0, 'rx_overrun': 0, 'rx_mcast': 0, 'tx_bytes': 0, 'tx_packets': 0, 'tx_errors': 0, 'tx_dropped': 0, 'tx_carrier': 0, 'tx_collisions': 0, } """ show_re = ( r'.+?RX:.*?\n' r'\s*(?P<rx_bytes>\d+)\s+(?P<rx_packets>\d+)\s+(?P<rx_errors>\d+)\s+' r'(?P<rx_dropped>\d+)\s+(?P<rx_overrun>\d+)\s+(?P<rx_mcast>\d+)' r'.+?TX:.*?\n' r'\s*(?P<tx_bytes>\d+)\s+(?P<tx_packets>\d+)\s+(?P<tx_errors>\d+)\s+' r'(?P<tx_dropped>\d+)\s+(?P<tx_carrier>\d+)\s+(?P<tx_collisions>\d+)' ) re_result = match(show_re, raw_result, DOTALL) result = None if (re_result): result = re_result.groupdict() for key, value in result.items(): if value is not None: if value.isdigit(): result[key] = int(value) return result
[ "def", "_parse_ip_stats_link_show", "(", "raw_result", ")", ":", "show_re", "=", "(", "r'.+?RX:.*?\\n'", "r'\\s*(?P<rx_bytes>\\d+)\\s+(?P<rx_packets>\\d+)\\s+(?P<rx_errors>\\d+)\\s+'", "r'(?P<rx_dropped>\\d+)\\s+(?P<rx_overrun>\\d+)\\s+(?P<rx_mcast>\\d+)'", "r'.+?TX:.*?\\n'", "r'\\s*(?P<tx_bytes>\\d+)\\s+(?P<tx_packets>\\d+)\\s+(?P<tx_errors>\\d+)\\s+'", "r'(?P<tx_dropped>\\d+)\\s+(?P<tx_carrier>\\d+)\\s+(?P<tx_collisions>\\d+)'", ")", "re_result", "=", "match", "(", "show_re", ",", "raw_result", ",", "DOTALL", ")", "result", "=", "None", "if", "(", "re_result", ")", ":", "result", "=", "re_result", ".", "groupdict", "(", ")", "for", "key", ",", "value", "in", "result", ".", "items", "(", ")", ":", "if", "value", "is", "not", "None", ":", "if", "value", ".", "isdigit", "(", ")", ":", "result", "[", "key", "]", "=", "int", "(", "value", ")", "return", "result" ]
27.0625
20.6875
def inline_css(self, html):
    """Return *html* with CSS from external style sheets moved inline."""
    # Premailer rewrites linked/styled rules into style="" attributes.
    return Premailer(html).transform(pretty_print=True)
[ "def", "inline_css", "(", "self", ",", "html", ")", ":", "premailer", "=", "Premailer", "(", "html", ")", "inlined_html", "=", "premailer", ".", "transform", "(", "pretty_print", "=", "True", ")", "return", "inlined_html" ]
36.166667
8.666667
def create_toggle_view_action(self):
    """Create the show/hide action associated with this plugin."""
    title = self.get_plugin_title()
    if self.CONF_SECTION == 'editor':
        title = _('Editor')
    toggled = lambda checked: self.toggle_view(checked)
    if self.shortcut is None:
        action = create_action(self, title, toggled=toggled)
    else:
        # Bind the configured shortcut so the toggle works from the widget.
        action = create_action(self, title, toggled=toggled,
                               shortcut=QKeySequence(self.shortcut),
                               context=Qt.WidgetShortcut)
    self.toggle_view_action = action
[ "def", "create_toggle_view_action", "(", "self", ")", ":", "title", "=", "self", ".", "get_plugin_title", "(", ")", "if", "self", ".", "CONF_SECTION", "==", "'editor'", ":", "title", "=", "_", "(", "'Editor'", ")", "if", "self", ".", "shortcut", "is", "not", "None", ":", "action", "=", "create_action", "(", "self", ",", "title", ",", "toggled", "=", "lambda", "checked", ":", "self", ".", "toggle_view", "(", "checked", ")", ",", "shortcut", "=", "QKeySequence", "(", "self", ".", "shortcut", ")", ",", "context", "=", "Qt", ".", "WidgetShortcut", ")", "else", ":", "action", "=", "create_action", "(", "self", ",", "title", ",", "toggled", "=", "lambda", "checked", ":", "self", ".", "toggle_view", "(", "checked", ")", ")", "self", ".", "toggle_view_action", "=", "action" ]
49.285714
14.071429
def run(namespace=None, action_prefix='action_', args=None):
    """Run the script.  Participating actions are looked up in the caller's
    namespace if no namespace is given, otherwise in the dict provided.
    Only items that start with action_prefix are processed as actions.  If
    you want to use all items in the namespace provided as actions set
    action_prefix to an empty string.

    :param namespace: An optional dict where the functions are looked up in.
                      By default the local namespace of the caller is used.
    :param action_prefix: The prefix for the functions.  Everything else
                          is ignored.
    :param args: the arguments for the function.  If not specified
                 :data:`sys.argv` without the first argument is used.
    """
    if namespace is None:
        # Frame depth matters: this must remain directly inside run() so
        # that frame 1 is the caller's frame.
        namespace = sys._getframe(1).f_locals
    actions = find_actions(namespace, action_prefix)
    if args is None:
        args = sys.argv[1:]
    if not args or args[0] in ('-h', '--help'):
        return print_usage(actions)
    elif args[0] not in actions:
        fail('Unknown action \'%s\'' % args[0])

    arguments = {}
    types = {}
    key_to_arg = {}
    long_options = []
    formatstring = ''
    func, doc, arg_def = actions[args.pop(0)]
    # Build the getopt format string and long-option list, and map every
    # way an argument can be supplied (short option, long option or
    # positional index) to its canonical argument name.
    for idx, (arg, shortcut, default, option_type) in enumerate(arg_def):
        real_arg = arg.replace('-', '_')
        if shortcut:
            formatstring += shortcut
            if not isinstance(default, bool):
                formatstring += ':'
            key_to_arg['-' + shortcut] = real_arg
        long_options.append(isinstance(default, bool) and arg or arg + '=')
        key_to_arg['--' + arg] = real_arg
        key_to_arg[idx] = real_arg
        types[real_arg] = option_type
        arguments[real_arg] = default

    try:
        optlist, posargs = getopt.gnu_getopt(args, formatstring, long_options)
    except getopt.GetoptError as e:
        fail(str(e))

    specified_arguments = set()
    for key, value in enumerate(posargs):
        try:
            arg = key_to_arg[key]
        # BUG FIX: key_to_arg is a dict, so an out-of-range positional
        # index raises KeyError, not IndexError.  The original caught only
        # IndexError, so surplus positional parameters escaped with a raw
        # KeyError traceback instead of this error message.
        except (KeyError, IndexError):
            fail('Too many parameters')
        specified_arguments.add(arg)
        try:
            arguments[arg] = converters[types[arg]](value)
        except ValueError:
            fail('Invalid value for argument %s (%s): %s' % (key, arg, value))

    for key, value in optlist:
        arg = key_to_arg[key]
        if arg in specified_arguments:
            fail('Argument \'%s\' is specified twice' % arg)
        if types[arg] == 'boolean':
            # Boolean flags take no value: presence means yes, unless the
            # flag is the negating 'no_*' variant.
            if arg.startswith('no_'):
                value = 'no'
            else:
                value = 'yes'
        try:
            arguments[arg] = converters[types[arg]](value)
        except ValueError:
            fail('Invalid value for \'%s\': %s' % (key, value))

    # Strip the 'no_' prefix again before calling the action function.
    newargs = {}
    for k, v in iteritems(arguments):
        newargs[k.startswith('no_') and k[3:] or k] = v
    arguments = newargs
    return func(**arguments)
[ "def", "run", "(", "namespace", "=", "None", ",", "action_prefix", "=", "'action_'", ",", "args", "=", "None", ")", ":", "if", "namespace", "is", "None", ":", "namespace", "=", "sys", ".", "_getframe", "(", "1", ")", ".", "f_locals", "actions", "=", "find_actions", "(", "namespace", ",", "action_prefix", ")", "if", "args", "is", "None", ":", "args", "=", "sys", ".", "argv", "[", "1", ":", "]", "if", "not", "args", "or", "args", "[", "0", "]", "in", "(", "'-h'", ",", "'--help'", ")", ":", "return", "print_usage", "(", "actions", ")", "elif", "args", "[", "0", "]", "not", "in", "actions", ":", "fail", "(", "'Unknown action \\'%s\\''", "%", "args", "[", "0", "]", ")", "arguments", "=", "{", "}", "types", "=", "{", "}", "key_to_arg", "=", "{", "}", "long_options", "=", "[", "]", "formatstring", "=", "''", "func", ",", "doc", ",", "arg_def", "=", "actions", "[", "args", ".", "pop", "(", "0", ")", "]", "for", "idx", ",", "(", "arg", ",", "shortcut", ",", "default", ",", "option_type", ")", "in", "enumerate", "(", "arg_def", ")", ":", "real_arg", "=", "arg", ".", "replace", "(", "'-'", ",", "'_'", ")", "if", "shortcut", ":", "formatstring", "+=", "shortcut", "if", "not", "isinstance", "(", "default", ",", "bool", ")", ":", "formatstring", "+=", "':'", "key_to_arg", "[", "'-'", "+", "shortcut", "]", "=", "real_arg", "long_options", ".", "append", "(", "isinstance", "(", "default", ",", "bool", ")", "and", "arg", "or", "arg", "+", "'='", ")", "key_to_arg", "[", "'--'", "+", "arg", "]", "=", "real_arg", "key_to_arg", "[", "idx", "]", "=", "real_arg", "types", "[", "real_arg", "]", "=", "option_type", "arguments", "[", "real_arg", "]", "=", "default", "try", ":", "optlist", ",", "posargs", "=", "getopt", ".", "gnu_getopt", "(", "args", ",", "formatstring", ",", "long_options", ")", "except", "getopt", ".", "GetoptError", "as", "e", ":", "fail", "(", "str", "(", "e", ")", ")", "specified_arguments", "=", "set", "(", ")", "for", "key", ",", "value", "in", "enumerate", "(", 
"posargs", ")", ":", "try", ":", "arg", "=", "key_to_arg", "[", "key", "]", "except", "IndexError", ":", "fail", "(", "'Too many parameters'", ")", "specified_arguments", ".", "add", "(", "arg", ")", "try", ":", "arguments", "[", "arg", "]", "=", "converters", "[", "types", "[", "arg", "]", "]", "(", "value", ")", "except", "ValueError", ":", "fail", "(", "'Invalid value for argument %s (%s): %s'", "%", "(", "key", ",", "arg", ",", "value", ")", ")", "for", "key", ",", "value", "in", "optlist", ":", "arg", "=", "key_to_arg", "[", "key", "]", "if", "arg", "in", "specified_arguments", ":", "fail", "(", "'Argument \\'%s\\' is specified twice'", "%", "arg", ")", "if", "types", "[", "arg", "]", "==", "'boolean'", ":", "if", "arg", ".", "startswith", "(", "'no_'", ")", ":", "value", "=", "'no'", "else", ":", "value", "=", "'yes'", "try", ":", "arguments", "[", "arg", "]", "=", "converters", "[", "types", "[", "arg", "]", "]", "(", "value", ")", "except", "ValueError", ":", "fail", "(", "'Invalid value for \\'%s\\': %s'", "%", "(", "key", ",", "value", ")", ")", "newargs", "=", "{", "}", "for", "k", ",", "v", "in", "iteritems", "(", "arguments", ")", ":", "newargs", "[", "k", ".", "startswith", "(", "'no_'", ")", "and", "k", "[", "3", ":", "]", "or", "k", "]", "=", "v", "arguments", "=", "newargs", "return", "func", "(", "*", "*", "arguments", ")" ]
36.6625
17.4375
def delete(self):
    """Clear the session and remove its persisted file from disk."""
    self.clear()
    if not os.path.isfile(self._filename):
        LOGGER.debug('Session file did not exist: %s', self._filename)
    else:
        os.unlink(self._filename)
[ "def", "delete", "(", "self", ")", ":", "self", ".", "clear", "(", ")", "if", "os", ".", "path", ".", "isfile", "(", "self", ".", "_filename", ")", ":", "os", ".", "unlink", "(", "self", ".", "_filename", ")", "else", ":", "LOGGER", ".", "debug", "(", "'Session file did not exist: %s'", ",", "self", ".", "_filename", ")" ]
30
16.555556
def __conn_listener(self, state):
    """Track ZooKeeper connection state changes.

    :param state: The new connection state
    """
    if state == KazooState.CONNECTED:
        self.__online = True
        if self.__connected:
            self._logger.warning("Re-connected to ZooKeeper")
            self._queue.enqueue(self.on_client_reconnection)
        else:
            # First successful connection for this client.
            self.__connected = True
            self._logger.info("Connected to ZooKeeper")
            self._queue.enqueue(self.on_first_connection)
    elif state == KazooState.SUSPENDED:
        self._logger.warning("Connection suspended")
        self.__online = False
    elif state == KazooState.LOST:
        self.__online = False
        self.__connected = False
        # A requested stop also surfaces as LOST; only warn otherwise.
        if self.__stop:
            self._logger.info("Disconnected from ZooKeeper (requested)")
        else:
            self._logger.warning("Connection lost")
[ "def", "__conn_listener", "(", "self", ",", "state", ")", ":", "if", "state", "==", "KazooState", ".", "CONNECTED", ":", "self", ".", "__online", "=", "True", "if", "not", "self", ".", "__connected", ":", "self", ".", "__connected", "=", "True", "self", ".", "_logger", ".", "info", "(", "\"Connected to ZooKeeper\"", ")", "self", ".", "_queue", ".", "enqueue", "(", "self", ".", "on_first_connection", ")", "else", ":", "self", ".", "_logger", ".", "warning", "(", "\"Re-connected to ZooKeeper\"", ")", "self", ".", "_queue", ".", "enqueue", "(", "self", ".", "on_client_reconnection", ")", "elif", "state", "==", "KazooState", ".", "SUSPENDED", ":", "self", ".", "_logger", ".", "warning", "(", "\"Connection suspended\"", ")", "self", ".", "__online", "=", "False", "elif", "state", "==", "KazooState", ".", "LOST", ":", "self", ".", "__online", "=", "False", "self", ".", "__connected", "=", "False", "if", "self", ".", "__stop", ":", "self", ".", "_logger", ".", "info", "(", "\"Disconnected from ZooKeeper (requested)\"", ")", "else", ":", "self", ".", "_logger", ".", "warning", "(", "\"Connection lost\"", ")" ]
37
13.538462
def convertTime(self, time):
    """Render a datetime's time-of-day as a short human-readable string.

    Args:
        time (datetime.datetime): Object whose time portion is converted
            to text; day-related information is ignored.

    Returns:
        A string such as "7:30 PM" or "7 AM": minutes are omitted when
        zero, and a leading zero on the hour is stripped.
    """
    fmt = "%I:%M %p" if time.minute else "%I %p"
    text = time.strftime(fmt)
    # strftime zero-pads %I (e.g. '07'); drop the pad for readability.
    return text[1:] if text.startswith("0") else text
[ "def", "convertTime", "(", "self", ",", "time", ")", ":", "# if ':00', ignore reporting minutes", "m_format", "=", "\"\"", "if", "time", ".", "minute", ":", "m_format", "=", "\":%M\"", "timeString", "=", "time", ".", "strftime", "(", "\"%I\"", "+", "m_format", "+", "\" %p\"", ")", "# if '07:30', cast to '7:30'", "if", "not", "int", "(", "timeString", "[", "0", "]", ")", ":", "timeString", "=", "timeString", "[", "1", ":", "]", "return", "timeString" ]
29.73913
20.173913
def _on_watermark_notification(self, notif):
    """Handle a watermark notification."""
    # If we read the conversation ourselves, persist our own read state:
    if self.get_user(notif.user_id).is_self:
        logger.info('latest_read_timestamp for {} updated to {}'
                    .format(self.id_, notif.read_timestamp))
        own_state = self._conversation.self_conversation_state
        own_state.self_read_state.latest_read_timestamp = (
            parsers.to_timestamp(notif.read_timestamp)
        )

    # Advance the participant's watermark, but never move it backwards:
    epoch = datetime.datetime.min.replace(tzinfo=datetime.timezone.utc)
    if notif.read_timestamp > self._watermarks.get(notif.user_id, epoch):
        logger.info(
            'latest_read_timestamp for conv {} participant {} updated to {}'
            .format(self.id_, notif.user_id.chat_id, notif.read_timestamp))
        self._watermarks[notif.user_id] = notif.read_timestamp
[ "def", "_on_watermark_notification", "(", "self", ",", "notif", ")", ":", "# Update the conversation:", "if", "self", ".", "get_user", "(", "notif", ".", "user_id", ")", ".", "is_self", ":", "logger", ".", "info", "(", "'latest_read_timestamp for {} updated to {}'", ".", "format", "(", "self", ".", "id_", ",", "notif", ".", "read_timestamp", ")", ")", "self_conversation_state", "=", "(", "self", ".", "_conversation", ".", "self_conversation_state", ")", "self_conversation_state", ".", "self_read_state", ".", "latest_read_timestamp", "=", "(", "parsers", ".", "to_timestamp", "(", "notif", ".", "read_timestamp", ")", ")", "# Update the participants' watermarks:", "previous_timestamp", "=", "self", ".", "_watermarks", ".", "get", "(", "notif", ".", "user_id", ",", "datetime", ".", "datetime", ".", "min", ".", "replace", "(", "tzinfo", "=", "datetime", ".", "timezone", ".", "utc", ")", ")", "if", "notif", ".", "read_timestamp", ">", "previous_timestamp", ":", "logger", ".", "info", "(", "(", "'latest_read_timestamp for conv {} participant {}'", "+", "' updated to {}'", ")", ".", "format", "(", "self", ".", "id_", ",", "notif", ".", "user_id", ".", "chat_id", ",", "notif", ".", "read_timestamp", ")", ")", "self", ".", "_watermarks", "[", "notif", ".", "user_id", "]", "=", "notif", ".", "read_timestamp" ]
50.565217
19.521739
def gerrymanderNodeFilenames(self):
    '''
    Strip directory components from every node's generated file name.

    Node file names were created relative to ``conf.py`` and therefore
    include ``self.root_directory``.  The generated API files live in the
    same directory as the files being written, so only the base name is
    usable in an ``include`` or ``toctree``.  File nodes get the same
    treatment for their ``program_file``.
    '''
    for node in self.all_nodes:
        node.file_name = os.path.basename(node.file_name)
        if node.kind == "file":
            node.program_file = os.path.basename(node.program_file)
[ "def", "gerrymanderNodeFilenames", "(", "self", ")", ":", "for", "node", "in", "self", ".", "all_nodes", ":", "node", ".", "file_name", "=", "os", ".", "path", ".", "basename", "(", "node", ".", "file_name", ")", "if", "node", ".", "kind", "==", "\"file\"", ":", "node", ".", "program_file", "=", "os", ".", "path", ".", "basename", "(", "node", ".", "program_file", ")" ]
55.083333
27.25
def register_parser_callback(self, func):
    """
    Register a callback function invoked after self.iocs and
    self.ioc_name are populated, for subclasses with additional parsing
    requirements.

    :param func: A callable accepting a single input (an IOC instance).
    :raises TypeError: If *func* is not callable.
    :return:
    """
    # Reject non-callables up front with a clear error.
    if not hasattr(func, '__call__'):
        raise TypeError('Provided function is not callable: {}'.format(func))
    self.parser_callback = func
    log.debug('Set callback to {}'.format(func))
[ "def", "register_parser_callback", "(", "self", ",", "func", ")", ":", "if", "hasattr", "(", "func", ",", "'__call__'", ")", ":", "self", ".", "parser_callback", "=", "func", "log", ".", "debug", "(", "'Set callback to {}'", ".", "format", "(", "func", ")", ")", "else", ":", "raise", "TypeError", "(", "'Provided function is not callable: {}'", ".", "format", "(", "func", ")", ")" ]
43.071429
26.5
def requirements(self):
    """Expose the metadata requirements with dot-notation access."""
    class _DotDict(dict):  # pylint: disable=invalid-name
        """Dictionary whose keys are readable as attributes."""
        def __getattr__(self, attr):
            item = self.get(attr)
            # Wrap nested dicts so chained attribute access keeps working.
            if isinstance(item, dict):
                return _DotDict(item)
            return item

    return _DotDict(self._meta.metadata.requirements)
[ "def", "requirements", "(", "self", ")", ":", "class", "dotdict", "(", "dict", ")", ":", "# pylint: disable=invalid-name", "\"\"\"Dot notation access to dictionary attributes.\"\"\"", "def", "__getattr__", "(", "self", ",", "attr", ")", ":", "value", "=", "self", ".", "get", "(", "attr", ")", "return", "dotdict", "(", "value", ")", "if", "isinstance", "(", "value", ",", "dict", ")", "else", "value", "return", "dotdict", "(", "self", ".", "_meta", ".", "metadata", ".", "requirements", ")" ]
39
17
def cookbook_statement(cookbook_name, metadata=None):
    """Return a valid Ruby 'cookbook' statement for the Berksfile.

    *metadata*, when given, must be a dict of Berksfile options; the
    'constraint' entry (removed from the dict) is emitted positionally,
    all other entries as ``key: 'value'`` pairs.
    """
    parts = ["cookbook '%s'" % cookbook_name]
    if metadata:
        if not isinstance(metadata, dict):
            raise TypeError("Berksfile dependency hash for %s "
                            "should be a dict of options, not %s." % (
                                cookbook_name, metadata))
        # 'constraint' is positional in Berksfile syntax, unlike the rest.
        if 'constraint' in metadata:
            parts.append("'%s'" % metadata.pop('constraint'))
        for opt, spec in metadata.items():
            parts.append("%s: '%s'" % (opt, spec))
    return ", ".join(parts)
[ "def", "cookbook_statement", "(", "cookbook_name", ",", "metadata", "=", "None", ")", ":", "line", "=", "\"cookbook '%s'\"", "%", "cookbook_name", "if", "metadata", ":", "if", "not", "isinstance", "(", "metadata", ",", "dict", ")", ":", "raise", "TypeError", "(", "\"Berksfile dependency hash for %s \"", "\"should be a dict of options, not %s.\"", "%", "(", "cookbook_name", ",", "metadata", ")", ")", "# not like the others...", "if", "'constraint'", "in", "metadata", ":", "line", "+=", "\", '%s'\"", "%", "metadata", ".", "pop", "(", "'constraint'", ")", "for", "opt", ",", "spec", "in", "metadata", ".", "items", "(", ")", ":", "line", "+=", "\", %s: '%s'\"", "%", "(", "opt", ",", "spec", ")", "return", "line" ]
49.071429
13.142857
def get_all_chats(self):
    """
    Fetches all chats

    :return: List of chats
    :rtype: list[Chat]
    """
    raw_chats = self.wapi_functions.getAllChats()
    if not raw_chats:
        return []
    return [factory_chat(raw, self) for raw in raw_chats]
[ "def", "get_all_chats", "(", "self", ")", ":", "chats", "=", "self", ".", "wapi_functions", ".", "getAllChats", "(", ")", "if", "chats", ":", "return", "[", "factory_chat", "(", "chat", ",", "self", ")", "for", "chat", "in", "chats", "]", "else", ":", "return", "[", "]" ]
24.166667
16.333333
def _count_spaces_startswith(line): ''' Count the number of spaces before the first character ''' if line.split('#')[0].strip() == "": return None spaces = 0 for i in line: if i.isspace(): spaces += 1 else: return spaces
[ "def", "_count_spaces_startswith", "(", "line", ")", ":", "if", "line", ".", "split", "(", "'#'", ")", "[", "0", "]", ".", "strip", "(", ")", "==", "\"\"", ":", "return", "None", "spaces", "=", "0", "for", "i", "in", "line", ":", "if", "i", ".", "isspace", "(", ")", ":", "spaces", "+=", "1", "else", ":", "return", "spaces" ]
21.615385
21
def getExceptionMessage(exception, frame=-1, filename=None):
    """
    Return a short message based on an exception, useful for debugging.
    Tries to find where the exception was triggered.
    """
    stack = traceback.extract_tb(sys.exc_info()[2])
    if filename:
        stack = [entry for entry in stack if entry[0].find(filename) > -1]

    # badly raised exceptions can come without a stack
    if stack:
        filename, line, func, text = stack[frame]
    else:
        filename, line, func, text = 'no stack', 0, 'none', ''

    filename = scrubFilename(filename)
    exc = exception.__class__.__name__
    # str(exception) usually carries the most useful detail, when present
    msg = ": %s" % str(exception) if str(exception) else ""
    return "exception %(exc)s at %(filename)s:%(line)s: %(func)s()%(msg)s" \
        % locals()
[ "def", "getExceptionMessage", "(", "exception", ",", "frame", "=", "-", "1", ",", "filename", "=", "None", ")", ":", "stack", "=", "traceback", ".", "extract_tb", "(", "sys", ".", "exc_info", "(", ")", "[", "2", "]", ")", "if", "filename", ":", "stack", "=", "[", "f", "for", "f", "in", "stack", "if", "f", "[", "0", "]", ".", "find", "(", "filename", ")", ">", "-", "1", "]", "# badly raised exceptions can come without a stack", "if", "stack", ":", "(", "filename", ",", "line", ",", "func", ",", "text", ")", "=", "stack", "[", "frame", "]", "else", ":", "(", "filename", ",", "line", ",", "func", ",", "text", ")", "=", "(", "'no stack'", ",", "0", ",", "'none'", ",", "''", ")", "filename", "=", "scrubFilename", "(", "filename", ")", "exc", "=", "exception", ".", "__class__", ".", "__name__", "msg", "=", "\"\"", "# a shortcut to extract a useful message out of most exceptions", "# for now", "if", "str", "(", "exception", ")", ":", "msg", "=", "\": %s\"", "%", "str", "(", "exception", ")", "return", "\"exception %(exc)s at %(filename)s:%(line)s: %(func)s()%(msg)s\"", "%", "locals", "(", ")" ]
35
19.75
def to_gremlin(self):
    """Return a unicode object with the Gremlin representation of this block."""
    self.validate()

    start_classes = self.start_class
    if len(start_classes) == 1:
        # Single-class form; the Gremlin docs claim this is generally
        # faster since it makes using indexes easier:
        # http://gremlindocs.spmallette.documentup.com/#filter/has
        only_class = safe_quoted_string(list(start_classes)[0])
        return u'g.V({}, {})'.format('\'@class\'', only_class)

    quoted = ','.join(safe_quoted_string(cls) for cls in start_classes)
    return u'g.V.has(\'@class\', T.in, [{}])'.format(quoted)
[ "def", "to_gremlin", "(", "self", ")", ":", "self", ".", "validate", "(", ")", "if", "len", "(", "self", ".", "start_class", ")", "==", "1", ":", "# The official Gremlin documentation claims that this approach", "# is generally faster than the one below, since it makes using indexes easier.", "# http://gremlindocs.spmallette.documentup.com/#filter/has", "start_class", "=", "list", "(", "self", ".", "start_class", ")", "[", "0", "]", "return", "u'g.V({}, {})'", ".", "format", "(", "'\\'@class\\''", ",", "safe_quoted_string", "(", "start_class", ")", ")", "else", ":", "start_classes_list", "=", "','", ".", "join", "(", "safe_quoted_string", "(", "x", ")", "for", "x", "in", "self", ".", "start_class", ")", "return", "u'g.V.has(\\'@class\\', T.in, [{}])'", ".", "format", "(", "start_classes_list", ")" ]
60.083333
27.25
def process_certificates(service_name, relation_id, unit,
                         custom_hostname_link=None, user='root', group='root'):
    """Process the certificates supplied down the relation

    :param service_name: str Name of service the certificates are for.
    :param relation_id: str Relation id providing the certs
    :param unit: str Unit providing the certs
    :param custom_hostname_link: str Name of custom link to create
    :param user: (Optional) Owner of certificate files. Defaults to 'root'
    :type user: str
    :param group: (Optional) Group of certificate files. Defaults to 'root'
    :type group: str
    :returns: True if certificates processed for local unit or False
    :rtype: bool
    """
    # Certificates arrive as relation data from the providing unit.
    data = relation_get(rid=relation_id, unit=unit)
    ssl_dir = os.path.join('/etc/apache2/ssl/', service_name)
    mkdir(path=ssl_dir)
    # Processed requests are keyed by the local unit name, '/' -> '_'.
    name = local_unit().replace('/', '_')
    certs = data.get('{}.processed_requests'.format(name))
    chain = data.get('chain')
    ca = data.get('ca')
    if certs:
        # Certs are JSON-encoded on the relation.
        certs = json.loads(certs)
        # CA is installed before the unit certificates and links.
        install_ca_cert(ca.encode())
        install_certs(ssl_dir, certs, chain, user=user, group=group)
        create_ip_cert_links(
            ssl_dir, custom_hostname_link=custom_hostname_link)
        return True
    # No processed request for this unit on the relation yet.
    return False
[ "def", "process_certificates", "(", "service_name", ",", "relation_id", ",", "unit", ",", "custom_hostname_link", "=", "None", ",", "user", "=", "'root'", ",", "group", "=", "'root'", ")", ":", "data", "=", "relation_get", "(", "rid", "=", "relation_id", ",", "unit", "=", "unit", ")", "ssl_dir", "=", "os", ".", "path", ".", "join", "(", "'/etc/apache2/ssl/'", ",", "service_name", ")", "mkdir", "(", "path", "=", "ssl_dir", ")", "name", "=", "local_unit", "(", ")", ".", "replace", "(", "'/'", ",", "'_'", ")", "certs", "=", "data", ".", "get", "(", "'{}.processed_requests'", ".", "format", "(", "name", ")", ")", "chain", "=", "data", ".", "get", "(", "'chain'", ")", "ca", "=", "data", ".", "get", "(", "'ca'", ")", "if", "certs", ":", "certs", "=", "json", ".", "loads", "(", "certs", ")", "install_ca_cert", "(", "ca", ".", "encode", "(", ")", ")", "install_certs", "(", "ssl_dir", ",", "certs", ",", "chain", ",", "user", "=", "user", ",", "group", "=", "group", ")", "create_ip_cert_links", "(", "ssl_dir", ",", "custom_hostname_link", "=", "custom_hostname_link", ")", "return", "True", "return", "False" ]
41.483871
19
def has_permission(self, request):
    """Check if user has permission"""
    # Nothing to guard: no object and no explicit permission required.
    if not self.object and not self.permission:
        return True
    if self.permission:
        return request.user.has_perm(self.permission)
    # Fall back to an object-level model permission such as 'change_widget'.
    codename = '{}_{}'.format(self.model_permission,
                              self.object.__class__.__name__.lower())
    return request.user.has_perm(codename, self.object)
[ "def", "has_permission", "(", "self", ",", "request", ")", ":", "if", "not", "self", ".", "object", "and", "not", "self", ".", "permission", ":", "return", "True", "if", "not", "self", ".", "permission", ":", "return", "request", ".", "user", ".", "has_perm", "(", "'{}_{}'", ".", "format", "(", "self", ".", "model_permission", ",", "self", ".", "object", ".", "__class__", ".", "__name__", ".", "lower", "(", ")", ")", ",", "self", ".", "object", ")", "return", "request", ".", "user", ".", "has_perm", "(", "self", ".", "permission", ")" ]
34.083333
17.416667
def policy_definitions_list(hide_builtin=False, **kwargs):
    '''
    .. versionadded:: 2019.2.0

    List all policy definitions for a subscription.

    :param hide_builtin: Boolean which will filter out BuiltIn policy
        definitions from the result.

    CLI Example:

    .. code-block:: bash

        salt-call azurearm_resource.policy_definitions_list
    '''
    result = {}
    polconn = __utils__['azurearm.get_client']('policy', **kwargs)
    try:
        policy_defs = __utils__['azurearm.paged_object_to_list'](
            polconn.policy_definitions.list())
        for policy in policy_defs:
            # Optionally skip Azure's built-in definitions.
            skip = hide_builtin and policy['policy_type'] == 'BuiltIn'
            if not skip:
                result[policy['name']] = policy
    except CloudError as exc:
        __utils__['azurearm.log_cloud_error']('resource', str(exc), **kwargs)
        result = {'error': str(exc)}

    return result
[ "def", "policy_definitions_list", "(", "hide_builtin", "=", "False", ",", "*", "*", "kwargs", ")", ":", "result", "=", "{", "}", "polconn", "=", "__utils__", "[", "'azurearm.get_client'", "]", "(", "'policy'", ",", "*", "*", "kwargs", ")", "try", ":", "policy_defs", "=", "__utils__", "[", "'azurearm.paged_object_to_list'", "]", "(", "polconn", ".", "policy_definitions", ".", "list", "(", ")", ")", "for", "policy", "in", "policy_defs", ":", "if", "not", "(", "hide_builtin", "and", "policy", "[", "'policy_type'", "]", "==", "'BuiltIn'", ")", ":", "result", "[", "policy", "[", "'name'", "]", "]", "=", "policy", "except", "CloudError", "as", "exc", ":", "__utils__", "[", "'azurearm.log_cloud_error'", "]", "(", "'resource'", ",", "str", "(", "exc", ")", ",", "*", "*", "kwargs", ")", "result", "=", "{", "'error'", ":", "str", "(", "exc", ")", "}", "return", "result" ]
30.392857
28.75
def readline(self, size=-1):
    """Read and process one complete line; *size* is ignored."""
    raw = self.fin.readline()
    if raw:
        # Strip only the trailing newline before processing.
        return self.process_line(raw.rstrip('\n'))
    return ''
[ "def", "readline", "(", "self", ",", "size", "=", "-", "1", ")", ":", "line", "=", "self", ".", "fin", ".", "readline", "(", ")", "if", "not", "line", ":", "return", "''", "return", "self", ".", "process_line", "(", "line", ".", "rstrip", "(", "'\\n'", ")", ")" ]
36.5
15.5
def training_pass(self, input_data, targets): """ Perform a full forward and backward pass through the model. **Parameters:** input_data : GPUArray Data to train the model with. targets : GPUArray Training targets. **Returns:** loss : float Value of loss function as evaluated on the data and targets. gradients : list of GPUArray Gradients obtained from backpropagation in the backward pass. """ # Forward pass loss, hidden_cache, logistic_cache = self.evaluate( input_data, targets, return_cache=True, prediction=False) if not np.isfinite(loss): raise ValueError('Infinite activations!') # Backpropagation if self.hidden_layers: hidden_activations = hidden_cache[-1][0] else: hidden_activations = input_data df_top_layer = \ self.top_layer.backprop(hidden_activations, targets, cache=logistic_cache) gradients = list(df_top_layer[0][::-1]) df_hidden = df_top_layer[1] if self.hidden_layers: hidden_inputs = [input_data] + [c[0] for c in hidden_cache[:-1]] for hl, hc, hi in \ zip(self.hidden_layers[::-1], hidden_cache[::-1], hidden_inputs[::-1]): g, df_hidden = hl.backprop(hi, df_hidden, cache=hc) gradients.extend(g[::-1]) gradients.reverse() return loss, gradients
[ "def", "training_pass", "(", "self", ",", "input_data", ",", "targets", ")", ":", "# Forward pass", "loss", ",", "hidden_cache", ",", "logistic_cache", "=", "self", ".", "evaluate", "(", "input_data", ",", "targets", ",", "return_cache", "=", "True", ",", "prediction", "=", "False", ")", "if", "not", "np", ".", "isfinite", "(", "loss", ")", ":", "raise", "ValueError", "(", "'Infinite activations!'", ")", "# Backpropagation", "if", "self", ".", "hidden_layers", ":", "hidden_activations", "=", "hidden_cache", "[", "-", "1", "]", "[", "0", "]", "else", ":", "hidden_activations", "=", "input_data", "df_top_layer", "=", "self", ".", "top_layer", ".", "backprop", "(", "hidden_activations", ",", "targets", ",", "cache", "=", "logistic_cache", ")", "gradients", "=", "list", "(", "df_top_layer", "[", "0", "]", "[", ":", ":", "-", "1", "]", ")", "df_hidden", "=", "df_top_layer", "[", "1", "]", "if", "self", ".", "hidden_layers", ":", "hidden_inputs", "=", "[", "input_data", "]", "+", "[", "c", "[", "0", "]", "for", "c", "in", "hidden_cache", "[", ":", "-", "1", "]", "]", "for", "hl", ",", "hc", ",", "hi", "in", "zip", "(", "self", ".", "hidden_layers", "[", ":", ":", "-", "1", "]", ",", "hidden_cache", "[", ":", ":", "-", "1", "]", ",", "hidden_inputs", "[", ":", ":", "-", "1", "]", ")", ":", "g", ",", "df_hidden", "=", "hl", ".", "backprop", "(", "hi", ",", "df_hidden", ",", "cache", "=", "hc", ")", "gradients", ".", "extend", "(", "g", "[", ":", ":", "-", "1", "]", ")", "gradients", ".", "reverse", "(", ")", "return", "loss", ",", "gradients" ]
30.76
21
def signal(*args, **kwargs): from .core import Signal """A signal decorator designed to work both in the simpler way, like: .. code:: python @signal def validation_function(arg1, ...): '''Some doc''' and also as a double-called decorator, like .. code:: python @signal(SignalOptions.EXEC_CONCURRENT) def validation_function(arg1, ...): '''Some doc''' """ if len(args) == 1 and len(kwargs) == 0 and callable(args[0]): return Signal(fvalidation=args[0]) else: sig = Signal(*args, **kwargs) def wrapper(fvalidation): sig._set_fvalidation(fvalidation) return sig return wrapper
[ "def", "signal", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", ".", "core", "import", "Signal", "if", "len", "(", "args", ")", "==", "1", "and", "len", "(", "kwargs", ")", "==", "0", "and", "callable", "(", "args", "[", "0", "]", ")", ":", "return", "Signal", "(", "fvalidation", "=", "args", "[", "0", "]", ")", "else", ":", "sig", "=", "Signal", "(", "*", "args", ",", "*", "*", "kwargs", ")", "def", "wrapper", "(", "fvalidation", ")", ":", "sig", ".", "_set_fvalidation", "(", "fvalidation", ")", "return", "sig", "return", "wrapper" ]
25.37037
17.962963
def best_policy(mdp, U): """Given an MDP and a utility function U, determine the best policy, as a mapping from state to action. (Equation 17.4)""" pi = {} for s in mdp.states: pi[s] = argmax(mdp.actions(s), lambda a:expected_utility(a, s, U, mdp)) return pi
[ "def", "best_policy", "(", "mdp", ",", "U", ")", ":", "pi", "=", "{", "}", "for", "s", "in", "mdp", ".", "states", ":", "pi", "[", "s", "]", "=", "argmax", "(", "mdp", ".", "actions", "(", "s", ")", ",", "lambda", "a", ":", "expected_utility", "(", "a", ",", "s", ",", "U", ",", "mdp", ")", ")", "return", "pi" ]
40
18.142857
def get_settings(self, integration_id): """Return settings for given integration as a dictionary.""" try: integration = self.get(integration_id=integration_id) return json.loads(integration.settings) except (self.model.DoesNotExist, ValueError): return {}
[ "def", "get_settings", "(", "self", ",", "integration_id", ")", ":", "try", ":", "integration", "=", "self", ".", "get", "(", "integration_id", "=", "integration_id", ")", "return", "json", ".", "loads", "(", "integration", ".", "settings", ")", "except", "(", "self", ".", "model", ".", "DoesNotExist", ",", "ValueError", ")", ":", "return", "{", "}" ]
38.625
17.125