text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def untrain(self, category, text):
    """
    Untrains a category with a sample of text

    :param category: the name of the category we want to train
    :type category: str
    :param text: the text we want to untrain the category with
    :type text: str
    """
    try:
        target = self.categories.get_category(category)
    except KeyError:
        # Unknown category: nothing to untrain.
        return

    token_counts = self.count_token_occurrences(self.tokenizer(str(text)))
    for token, occurrences in token_counts.items():
        target.untrain_token(token, occurrences)

    # Refresh the per-category overall probabilities after removal.
    self.calculate_category_probability()
[ "def", "untrain", "(", "self", ",", "category", ",", "text", ")", ":", "try", ":", "bayes_category", "=", "self", ".", "categories", ".", "get_category", "(", "category", ")", "except", "KeyError", ":", "return", "tokens", "=", "self", ".", "tokenizer", "(", "str", "(", "text", ")", ")", "occurance_counts", "=", "self", ".", "count_token_occurrences", "(", "tokens", ")", "for", "word", ",", "count", "in", "occurance_counts", ".", "items", "(", ")", ":", "bayes_category", ".", "untrain_token", "(", "word", ",", "count", ")", "# Updating our per-category overall probabilities", "self", ".", "calculate_category_probability", "(", ")" ]
32.727273
19.181818
def channel_open(
        self,
        registry_address: PaymentNetworkID,
        token_address: TokenAddress,
        partner_address: Address,
        settle_timeout: BlockTimeout = None,
        retry_timeout: NetworkTimeout = DEFAULT_RETRY_TIMEOUT,
) -> ChannelID:
    """ Open a channel with the peer at `partner_address`
    with the given `token_address`.

    :param registry_address: the token network registry the channel lives in
    :param token_address: the token the channel will transfer
    :param partner_address: the peer to open the channel with
    :param settle_timeout: blocks until the channel can be settled after a
        close; defaults to the node's configured ``settle_timeout``
    :param retry_timeout: polling interval while waiting for a channel the
        partner opened first
    :returns: the identifier of the opened (or partner-opened) channel
    :raises InvalidSettleTimeout: if ``settle_timeout`` is smaller than
        double the configured ``reveal_timeout``
    :raises InvalidAddress: if any of the given addresses is not binary
    :raises DuplicatedChannelError: if a channel with the partner exists
    :raises TokenNotRegistered: if the token has no token network
    :raises InsufficientGasReserve: if the account balance cannot cover the
        estimated channel lifecycle costs
    """
    if settle_timeout is None:
        settle_timeout = self.raiden.config['settle_timeout']

    if settle_timeout < self.raiden.config['reveal_timeout'] * 2:
        raise InvalidSettleTimeout(
            'settle_timeout can not be smaller than double the reveal_timeout',
        )

    if not is_binary_address(registry_address):
        raise InvalidAddress('Expected binary address format for registry in channel open')

    if not is_binary_address(token_address):
        raise InvalidAddress('Expected binary address format for token in channel open')

    if not is_binary_address(partner_address):
        raise InvalidAddress('Expected binary address format for partner in channel open')

    chain_state = views.state_from_raiden(self.raiden)
    channel_state = views.get_channelstate_for(
        chain_state=chain_state,
        payment_network_id=registry_address,
        token_address=token_address,
        partner_address=partner_address,
    )

    if channel_state:
        raise DuplicatedChannelError('Channel with given partner address already exists')

    registry = self.raiden.chain.token_network_registry(registry_address)
    token_network_address = registry.get_token_network(token_address)

    if token_network_address is None:
        raise TokenNotRegistered(
            'Token network for token %s does not exist' % to_checksum_address(token_address),
        )

    # Reuse the address resolved above instead of issuing a second
    # `get_token_network` query against the registry.
    token_network = self.raiden.chain.token_network(token_network_address)

    with self.raiden.gas_reserve_lock:
        has_enough_reserve, estimated_required_reserve = has_enough_gas_reserve(
            self.raiden,
            channels_to_open=1,
        )

        if not has_enough_reserve:
            raise InsufficientGasReserve((
                'The account balance is below the estimated amount necessary to '
                'finish the lifecycles of all active channels. A balance of at '
                f'least {estimated_required_reserve} wei is required.'
            ))

        try:
            token_network.new_netting_channel(
                partner=partner_address,
                settle_timeout=settle_timeout,
                given_block_identifier=views.state_from_raiden(self.raiden).block_hash,
            )
        except DuplicatedChannelError:
            # The partner raced us and opened the channel first; fall through
            # and wait for it to appear in our state below.
            log.info('partner opened channel first')

    waiting.wait_for_newchannel(
        raiden=self.raiden,
        payment_network_id=registry_address,
        token_address=token_address,
        partner_address=partner_address,
        retry_timeout=retry_timeout,
    )

    chain_state = views.state_from_raiden(self.raiden)
    channel_state = views.get_channelstate_for(
        chain_state=chain_state,
        payment_network_id=registry_address,
        token_address=token_address,
        partner_address=partner_address,
    )

    assert channel_state, f'channel {channel_state} is gone'

    return channel_state.identifier
[ "def", "channel_open", "(", "self", ",", "registry_address", ":", "PaymentNetworkID", ",", "token_address", ":", "TokenAddress", ",", "partner_address", ":", "Address", ",", "settle_timeout", ":", "BlockTimeout", "=", "None", ",", "retry_timeout", ":", "NetworkTimeout", "=", "DEFAULT_RETRY_TIMEOUT", ",", ")", "->", "ChannelID", ":", "if", "settle_timeout", "is", "None", ":", "settle_timeout", "=", "self", ".", "raiden", ".", "config", "[", "'settle_timeout'", "]", "if", "settle_timeout", "<", "self", ".", "raiden", ".", "config", "[", "'reveal_timeout'", "]", "*", "2", ":", "raise", "InvalidSettleTimeout", "(", "'settle_timeout can not be smaller than double the reveal_timeout'", ",", ")", "if", "not", "is_binary_address", "(", "registry_address", ")", ":", "raise", "InvalidAddress", "(", "'Expected binary address format for registry in channel open'", ")", "if", "not", "is_binary_address", "(", "token_address", ")", ":", "raise", "InvalidAddress", "(", "'Expected binary address format for token in channel open'", ")", "if", "not", "is_binary_address", "(", "partner_address", ")", ":", "raise", "InvalidAddress", "(", "'Expected binary address format for partner in channel open'", ")", "chain_state", "=", "views", ".", "state_from_raiden", "(", "self", ".", "raiden", ")", "channel_state", "=", "views", ".", "get_channelstate_for", "(", "chain_state", "=", "chain_state", ",", "payment_network_id", "=", "registry_address", ",", "token_address", "=", "token_address", ",", "partner_address", "=", "partner_address", ",", ")", "if", "channel_state", ":", "raise", "DuplicatedChannelError", "(", "'Channel with given partner address already exists'", ")", "registry", "=", "self", ".", "raiden", ".", "chain", ".", "token_network_registry", "(", "registry_address", ")", "token_network_address", "=", "registry", ".", "get_token_network", "(", "token_address", ")", "if", "token_network_address", "is", "None", ":", "raise", "TokenNotRegistered", "(", "'Token network for token 
%s does not exist'", "%", "to_checksum_address", "(", "token_address", ")", ",", ")", "token_network", "=", "self", ".", "raiden", ".", "chain", ".", "token_network", "(", "registry", ".", "get_token_network", "(", "token_address", ")", ",", ")", "with", "self", ".", "raiden", ".", "gas_reserve_lock", ":", "has_enough_reserve", ",", "estimated_required_reserve", "=", "has_enough_gas_reserve", "(", "self", ".", "raiden", ",", "channels_to_open", "=", "1", ",", ")", "if", "not", "has_enough_reserve", ":", "raise", "InsufficientGasReserve", "(", "(", "'The account balance is below the estimated amount necessary to '", "'finish the lifecycles of all active channels. A balance of at '", "f'least {estimated_required_reserve} wei is required.'", ")", ")", "try", ":", "token_network", ".", "new_netting_channel", "(", "partner", "=", "partner_address", ",", "settle_timeout", "=", "settle_timeout", ",", "given_block_identifier", "=", "views", ".", "state_from_raiden", "(", "self", ".", "raiden", ")", ".", "block_hash", ",", ")", "except", "DuplicatedChannelError", ":", "log", ".", "info", "(", "'partner opened channel first'", ")", "waiting", ".", "wait_for_newchannel", "(", "raiden", "=", "self", ".", "raiden", ",", "payment_network_id", "=", "registry_address", ",", "token_address", "=", "token_address", ",", "partner_address", "=", "partner_address", ",", "retry_timeout", "=", "retry_timeout", ",", ")", "chain_state", "=", "views", ".", "state_from_raiden", "(", "self", ".", "raiden", ")", "channel_state", "=", "views", ".", "get_channelstate_for", "(", "chain_state", "=", "chain_state", ",", "payment_network_id", "=", "registry_address", ",", "token_address", "=", "token_address", ",", "partner_address", "=", "partner_address", ",", ")", "assert", "channel_state", ",", "f'channel {channel_state} is gone'", "return", "channel_state", ".", "identifier" ]
38.967033
21.274725
def representation_function_compiler(self, func_name):
    """Compile a representation dunder body.

    Generic helper usable for ``__repr__``, ``__unicode__`` or ``__str__``.
    """
    evaluators = ", ".join(
        ALCHEMY_TEMPLATES.col_evaluator.safe_substitute(col=key)
        for key in self.primary_keys
    )
    accessors = ", ".join(
        ALCHEMY_TEMPLATES.col_accessor.safe_substitute(col=key)
        for key in self.primary_keys
    )
    return ALCHEMY_TEMPLATES.representor_function.safe_substitute(
        func_name=func_name,
        col_accessors=accessors,
        col_evaluators=evaluators,
        class_name=self.class_name,
    )
[ "def", "representation_function_compiler", "(", "self", ",", "func_name", ")", ":", "def", "get_col_accessor", "(", "col", ")", ":", "return", "ALCHEMY_TEMPLATES", ".", "col_accessor", ".", "safe_substitute", "(", "col", "=", "col", ")", "def", "get_col_evaluator", "(", "col", ")", ":", "return", "ALCHEMY_TEMPLATES", ".", "col_evaluator", ".", "safe_substitute", "(", "col", "=", "col", ")", "col_evaluators", "=", "\", \"", ".", "join", "(", "[", "get_col_evaluator", "(", "n", ")", "for", "n", "in", "self", ".", "primary_keys", "]", ")", "col_accessors", "=", "\", \"", ".", "join", "(", "[", "get_col_accessor", "(", "n", ")", "for", "n", "in", "self", ".", "primary_keys", "]", ")", "return", "ALCHEMY_TEMPLATES", ".", "representor_function", ".", "safe_substitute", "(", "func_name", "=", "func_name", ",", "col_accessors", "=", "col_accessors", ",", "col_evaluators", "=", "col_evaluators", ",", "class_name", "=", "self", ".", "class_name", ")" ]
57.0625
35.4375
def trac_tickets(self):
    '''
    Looks for any of the following trac ticket formats in the description
    field: t12345, t 12345, T12345, T 12345, #12345, # 12345,
    ticket 12345, TICKET 12345
    '''
    ticket_numbers = re.findall(r"^[tT#]\s?[0-9]+", self.description)
    ticket_numbers += re.findall(r"[tT#][0-9]+", self.description)
    # Raw string fixes the invalid "\s" escape of the original (a
    # DeprecationWarning, SyntaxWarning on Python 3.12+); passing the flag
    # to findall avoids the redundant explicit compile.
    ticket_numbers += re.findall(r"ticket\s?[0-9]+", self.description, re.IGNORECASE)
    # Keep only the digits of each match, then de-duplicate.
    ticket_numbers = [re.sub('[^0-9]', '', t) for t in ticket_numbers]
    return list(set(ticket_numbers))
[ "def", "trac_tickets", "(", "self", ")", ":", "# ticket_numbers = re.findall(re.compile(\"([t#]\\s?[0-9]{2,})|([t#][0-9]{2,})\", re.IGNORECASE), self.description)", "ticket_numbers", "=", "re", ".", "findall", "(", "r\"^[tT#]\\s?[0-9]+\"", ",", "self", ".", "description", ")", "ticket_numbers", "+=", "re", ".", "findall", "(", "r\"[tT#][0-9]+\"", ",", "self", ".", "description", ")", "ticket_numbers", "+=", "re", ".", "findall", "(", "re", ".", "compile", "(", "\"ticket\\s?[0-9]+\"", ",", "re", ".", "IGNORECASE", ")", ",", "self", ".", "description", ")", "# Remove Duplicates", "ticket_numbers", "=", "[", "re", ".", "sub", "(", "'[^0-9]'", ",", "''", ",", "t", ")", "for", "t", "in", "ticket_numbers", "]", "return", "list", "(", "set", "(", "ticket_numbers", ")", ")" ]
51.5
35.5
def interpret_stats(results):
    """Generate the status string shown after executing a Cypher query.

    :param results: ``ResultSet`` with the raw results of the execution
        of the Cypher query
    """
    stats = results.stats
    has_updates = bool(stats) and stats.pop("contains_updates", False)

    if not has_updates:
        return '{} rows affected.'.format(len(results))

    lines = [
        "{} {}.".format(value, name.replace("_", " "))
        for name, value in stats.items()
        if value
    ]
    return "\n".join(lines).strip()
[ "def", "interpret_stats", "(", "results", ")", ":", "stats", "=", "results", ".", "stats", "contains_updates", "=", "stats", ".", "pop", "(", "\"contains_updates\"", ",", "False", ")", "if", "stats", "else", "False", "if", "not", "contains_updates", ":", "result", "=", "'{} rows affected.'", ".", "format", "(", "len", "(", "results", ")", ")", "else", ":", "result", "=", "''", "for", "stat", ",", "value", "in", "stats", ".", "items", "(", ")", ":", "if", "value", ":", "result", "=", "\"{}\\n{} {}.\"", ".", "format", "(", "result", ",", "value", ",", "stat", ".", "replace", "(", "\"_\"", ",", "\" \"", ")", ")", "return", "result", ".", "strip", "(", ")" ]
37.111111
18.333333
def pop_state(self, idx=None):
    """
    Pops off the most recent state.

    :param idx: If provided, specifies the index at which the next
        string begins.
    """
    self.state.pop()
    if idx is None:
        return
    self.str_begin = idx
[ "def", "pop_state", "(", "self", ",", "idx", "=", "None", ")", ":", "self", ".", "state", ".", "pop", "(", ")", "if", "idx", "is", "not", "None", ":", "self", ".", "str_begin", "=", "idx" ]
23.166667
17
def _get_client():
    '''
    Return a cloud client built from the ``cloud`` config next to the
    master configuration file.
    '''
    conf_dir = os.path.dirname(__opts__['conf_file'])
    return salt.cloud.CloudClient(os.path.join(conf_dir, 'cloud'))
[ "def", "_get_client", "(", ")", ":", "client", "=", "salt", ".", "cloud", ".", "CloudClient", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__opts__", "[", "'conf_file'", "]", ")", ",", "'cloud'", ")", ")", "return", "client" ]
24.25
24
def create_from_data_channel(cls, data_channel):
    """Scan the data tree on the given ``data_channel`` and build the
    corresponding InputSetGenerator tree.
    """
    depth = cls._get_gather_depth(data_channel)
    generator = InputSetGeneratorNode()

    for path, node in data_channel.get_ready_data_nodes([], depth):
        # Flatten (without persisting) before wrapping into an input item.
        item = InputItem(
            node.flattened_clone(save=False),
            data_channel.channel,
            data_channel.as_channel,
            mode=data_channel.mode,
        )
        generator._add_input_item(path, item)

    return generator
[ "def", "create_from_data_channel", "(", "cls", ",", "data_channel", ")", ":", "gather_depth", "=", "cls", ".", "_get_gather_depth", "(", "data_channel", ")", "generator", "=", "InputSetGeneratorNode", "(", ")", "for", "(", "data_path", ",", "data_node", ")", "in", "data_channel", ".", "get_ready_data_nodes", "(", "[", "]", ",", "gather_depth", ")", ":", "flat_data_node", "=", "data_node", ".", "flattened_clone", "(", "save", "=", "False", ")", "input_item", "=", "InputItem", "(", "flat_data_node", ",", "data_channel", ".", "channel", ",", "data_channel", ".", "as_channel", ",", "mode", "=", "data_channel", ".", "mode", ")", "generator", ".", "_add_input_item", "(", "data_path", ",", "input_item", ")", "return", "generator" ]
45.333333
14.666667
def get_set(self, project, articleset, **filters):
    """List the articlesets in a project"""
    # NOTE: the URL template is filled from the local names
    # (project, articleset, ...) via locals().
    return self.request(URL.articleset.format(**locals()), **filters)
[ "def", "get_set", "(", "self", ",", "project", ",", "articleset", ",", "*", "*", "filters", ")", ":", "url", "=", "URL", ".", "articleset", ".", "format", "(", "*", "*", "locals", "(", ")", ")", "return", "self", ".", "request", "(", "url", ",", "*", "*", "filters", ")" ]
46.75
5
def create_vs(lb, name, ip, port, protocol, profile, pool_name):
    '''
    Create a virtual server

    CLI Examples:

    .. code-block:: bash

        salt-run f5.create_vs lbalancer vs_name 10.0.0.1 80 tcp http poolname
    '''
    lb_config = __opts__['load_balancers'].get(lb, None)
    if not lb_config:
        raise Exception('Unable to find `{0}` load balancer'.format(lb))

    username, password = list(lb_config.values())
    mgmt = F5Mgmt(lb, username, password)
    mgmt.create_vs(name, ip, port, protocol, profile, pool_name)
    return True
[ "def", "create_vs", "(", "lb", ",", "name", ",", "ip", ",", "port", ",", "protocol", ",", "profile", ",", "pool_name", ")", ":", "if", "__opts__", "[", "'load_balancers'", "]", ".", "get", "(", "lb", ",", "None", ")", ":", "(", "username", ",", "password", ")", "=", "list", "(", "__opts__", "[", "'load_balancers'", "]", "[", "lb", "]", ".", "values", "(", ")", ")", "else", ":", "raise", "Exception", "(", "'Unable to find `{0}` load balancer'", ".", "format", "(", "lb", ")", ")", "F5", "=", "F5Mgmt", "(", "lb", ",", "username", ",", "password", ")", "F5", ".", "create_vs", "(", "name", ",", "ip", ",", "port", ",", "protocol", ",", "profile", ",", "pool_name", ")", "return", "True" ]
30.222222
27.444444
def load_params_from_file(self, fname: str):
    """
    Loads and sets model parameters from file.

    :param fname: Path to load parameters from.
    """
    utils.check_condition(
        os.path.exists(fname),
        "No model parameter file found under %s. "
        "This is either not a model directory or the first training "
        "checkpoint has not happened yet." % fname)

    self.params, self.aux_params = utils.load_params(fname)

    # All (auxiliary) parameter names must carry this model's prefix.
    params_prefixed = all(name.startswith(self.prefix) for name in self.params)
    utils.check_condition(
        params_prefixed,
        "Not all parameter names start with model prefix '%s'" % self.prefix)

    aux_prefixed = all(name.startswith(self.prefix) for name in self.aux_params)
    utils.check_condition(
        aux_prefixed,
        "Not all auxiliary parameter names start with model prefix '%s'" % self.prefix)

    logger.info('Loaded params from "%s"', fname)
[ "def", "load_params_from_file", "(", "self", ",", "fname", ":", "str", ")", ":", "utils", ".", "check_condition", "(", "os", ".", "path", ".", "exists", "(", "fname", ")", ",", "\"No model parameter file found under %s. \"", "\"This is either not a model directory or the first training \"", "\"checkpoint has not happened yet.\"", "%", "fname", ")", "self", ".", "params", ",", "self", ".", "aux_params", "=", "utils", ".", "load_params", "(", "fname", ")", "utils", ".", "check_condition", "(", "all", "(", "name", ".", "startswith", "(", "self", ".", "prefix", ")", "for", "name", "in", "self", ".", "params", ".", "keys", "(", ")", ")", ",", "\"Not all parameter names start with model prefix '%s'\"", "%", "self", ".", "prefix", ")", "utils", ".", "check_condition", "(", "all", "(", "name", ".", "startswith", "(", "self", ".", "prefix", ")", "for", "name", "in", "self", ".", "aux_params", ".", "keys", "(", ")", ")", ",", "\"Not all auxiliary parameter names start with model prefix '%s'\"", "%", "self", ".", "prefix", ")", "logger", ".", "info", "(", "'Loaded params from \"%s\"'", ",", "fname", ")" ]
66
35.2
def get_route_templates(self):
    """
    Generate Openshift route templates or playbook tasks. Each port on a
    service definition found in container.yml represents an externally
    exposed port.
    """
    def _published_ports(service_config):
        # Normalise 'ports' entries into {'port': host_port, 'protocol': p}.
        published = []
        for entry in service_config.get('ports', []):
            proto = 'TCP'
            if isinstance(entry, string_types) and '/' in entry:
                entry, proto = entry.split('/')
            if isinstance(entry, string_types) and ':' in entry:
                host_port, _container_port = entry.split(':')
            else:
                host_port = entry
            published.append({'port': host_port, 'protocol': proto.lower()})
        return published

    templates = []
    for svc_name, svc_config in self._services.items():
        options = svc_config.get(self.CONFIG_KEY, {})
        state = options.get('state', 'present')
        force = options.get('force', False)
        ports = _published_ports(svc_config)

        if state != 'present':
            continue

        for port_info in ports:
            route_name = "%s-%s" % (svc_name, port_info['port'])
            labels = dict(
                app=self._namespace_name,
                service=svc_name,
            )
            route = CommentedMap()
            route['apiVersion'] = self.DEFAULT_API_VERSION
            route['kind'] = 'Route'
            route['force'] = force
            route['metadata'] = CommentedMap([
                ('name', route_name),
                ('namespace', self._namespace_name),
                ('labels', labels.copy()),
            ])
            route['spec'] = CommentedMap([
                ('to', CommentedMap([
                    ('kind', 'Service'),
                    ('name', svc_name),
                ])),
                ('port', CommentedMap([
                    ('targetPort',
                     'port-{}-{}'.format(port_info['port'], port_info['protocol'])),
                ])),
            ])

            # Merge user-supplied route attributes for this port into 'spec'.
            for extra in options.get('routes') or []:
                if str(extra.get('port')) != str(port_info['port']):
                    continue
                for key, value in extra.items():
                    if key not in ('force', 'port'):
                        self.copy_attribute(route['spec'], key, value)

            templates.append(route)

    return templates
[ "def", "get_route_templates", "(", "self", ")", ":", "def", "_get_published_ports", "(", "service_config", ")", ":", "result", "=", "[", "]", "for", "port", "in", "service_config", ".", "get", "(", "'ports'", ",", "[", "]", ")", ":", "protocol", "=", "'TCP'", "if", "isinstance", "(", "port", ",", "string_types", ")", "and", "'/'", "in", "port", ":", "port", ",", "protocol", "=", "port", ".", "split", "(", "'/'", ")", "if", "isinstance", "(", "port", ",", "string_types", ")", "and", "':'", "in", "port", ":", "host", ",", "container", "=", "port", ".", "split", "(", "':'", ")", "else", ":", "host", "=", "port", "result", ".", "append", "(", "{", "'port'", ":", "host", ",", "'protocol'", ":", "protocol", ".", "lower", "(", ")", "}", ")", "return", "result", "templates", "=", "[", "]", "for", "name", ",", "service_config", "in", "self", ".", "_services", ".", "items", "(", ")", ":", "state", "=", "service_config", ".", "get", "(", "self", ".", "CONFIG_KEY", ",", "{", "}", ")", ".", "get", "(", "'state'", ",", "'present'", ")", "force", "=", "service_config", ".", "get", "(", "self", ".", "CONFIG_KEY", ",", "{", "}", ")", ".", "get", "(", "'force'", ",", "False", ")", "published_ports", "=", "_get_published_ports", "(", "service_config", ")", "if", "state", "!=", "'present'", ":", "continue", "for", "port", "in", "published_ports", ":", "route_name", "=", "\"%s-%s\"", "%", "(", "name", ",", "port", "[", "'port'", "]", ")", "labels", "=", "dict", "(", "app", "=", "self", ".", "_namespace_name", ",", "service", "=", "name", ")", "template", "=", "CommentedMap", "(", ")", "template", "[", "'apiVersion'", "]", "=", "self", ".", "DEFAULT_API_VERSION", "template", "[", "'kind'", "]", "=", "'Route'", "template", "[", "'force'", "]", "=", "force", "template", "[", "'metadata'", "]", "=", "CommentedMap", "(", "[", "(", "'name'", ",", "route_name", ")", ",", "(", "'namespace'", ",", "self", ".", "_namespace_name", ")", ",", "(", "'labels'", ",", "labels", ".", "copy", "(", ")", 
")", "]", ")", "template", "[", "'spec'", "]", "=", "CommentedMap", "(", "[", "(", "'to'", ",", "CommentedMap", "(", "[", "(", "'kind'", ",", "'Service'", ")", ",", "(", "'name'", ",", "name", ")", "]", ")", ")", ",", "(", "'port'", ",", "CommentedMap", "(", "[", "(", "'targetPort'", ",", "'port-{}-{}'", ".", "format", "(", "port", "[", "'port'", "]", ",", "port", "[", "'protocol'", "]", ")", ")", "]", ")", ")", "]", ")", "if", "service_config", ".", "get", "(", "self", ".", "CONFIG_KEY", ",", "{", "}", ")", ".", "get", "(", "'routes'", ")", ":", "for", "route", "in", "service_config", "[", "self", ".", "CONFIG_KEY", "]", "[", "'routes'", "]", ":", "if", "str", "(", "route", ".", "get", "(", "'port'", ")", ")", "==", "str", "(", "port", "[", "'port'", "]", ")", ":", "for", "key", ",", "value", "in", "route", ".", "items", "(", ")", ":", "if", "key", "not", "in", "(", "'force'", ",", "'port'", ")", ":", "self", ".", "copy_attribute", "(", "template", "[", "'spec'", "]", ",", "key", ",", "value", ")", "templates", ".", "append", "(", "template", ")", "return", "templates" ]
42.290323
19.16129
def _create_directory(cls, directory, loop=False):
    """
    Creates the given directory if it does not exists.

    :param directory: The directory to create.
    :type directory: str

    :param loop: Tell us if we are in the creation loop or not.
    :type loop: bool
    """
    separator = PyFunceble.directory_separator

    if not loop and separator in directory:
        # Build every intermediate path ("a/", "a/b/", ...) and create each
        # one recursively so all parents exist before the leaf.
        accumulated = ""
        for component in directory.split(separator):
            accumulated += component + separator
            cls._create_directory(accumulated, True)

    if not PyFunceble.path.isdir(directory):
        # Fix permissions before and after creation (Travis CI only).
        AutoSave.travis_permissions()
        PyFunceble.mkdir(directory)
        AutoSave.travis_permissions()
[ "def", "_create_directory", "(", "cls", ",", "directory", ",", "loop", "=", "False", ")", ":", "if", "not", "loop", "and", "PyFunceble", ".", "directory_separator", "in", "directory", ":", "# * We are not in the loop.", "# and", "# * The directory separator in the given directory.", "# We split the directories separator.", "splited_directory", "=", "directory", ".", "split", "(", "PyFunceble", ".", "directory_separator", ")", "# We initiate a variable which will save the full path to create.", "full_path_to_create", "=", "\"\"", "for", "single_directory", "in", "splited_directory", ":", "# We loop through each directory.", "# We append the currently read directory to the full path.", "full_path_to_create", "+=", "single_directory", "+", "PyFunceble", ".", "directory_separator", "# And we create the directory if it does not exist.", "cls", ".", "_create_directory", "(", "full_path_to_create", ",", "True", ")", "if", "not", "PyFunceble", ".", "path", ".", "isdir", "(", "directory", ")", ":", "# The given directory does not exist.", "# We update the permission.", "# (Only if we are under Travis CI.)", "AutoSave", ".", "travis_permissions", "(", ")", "# We create the directory.", "PyFunceble", ".", "mkdir", "(", "directory", ")", "# We update the permission.", "# (Only if we are under Travis CI.)", "AutoSave", ".", "travis_permissions", "(", ")" ]
35.25
21.159091
def save(self, commit=True):
    """ Saves the instance. """
    if not self.instance.pk:
        # Creating a brand-new post.
        post = Post(
            topic=self.topic,
            subject=self.cleaned_data['subject'],
            approved=self.perm_handler.can_post_without_approval(self.forum, self.user),
            content=self.cleaned_data['content'],
            enable_signature=self.cleaned_data['enable_signature'])
        if self.user.is_anonymous:
            post.username = self.cleaned_data['username']
            post.anonymous_key = get_anonymous_user_forum_key(self.user)
        else:
            post.poster = self.user
    else:
        # Updating an existing post.
        post = super().save(commit=False)
        post.updated_by = self.user
        post.updates_count = F('updates_count') + 1

    # Locks the topic if appropriate.
    if self.cleaned_data.get('lock_topic', False):
        self.topic.status = Topic.TOPIC_LOCKED
        self.topic.save()

    if commit:
        post.save()

    return post
[ "def", "save", "(", "self", ",", "commit", "=", "True", ")", ":", "if", "self", ".", "instance", ".", "pk", ":", "# First handle updates", "post", "=", "super", "(", ")", ".", "save", "(", "commit", "=", "False", ")", "post", ".", "updated_by", "=", "self", ".", "user", "post", ".", "updates_count", "=", "F", "(", "'updates_count'", ")", "+", "1", "else", ":", "post", "=", "Post", "(", "topic", "=", "self", ".", "topic", ",", "subject", "=", "self", ".", "cleaned_data", "[", "'subject'", "]", ",", "approved", "=", "self", ".", "perm_handler", ".", "can_post_without_approval", "(", "self", ".", "forum", ",", "self", ".", "user", ")", ",", "content", "=", "self", ".", "cleaned_data", "[", "'content'", "]", ",", "enable_signature", "=", "self", ".", "cleaned_data", "[", "'enable_signature'", "]", ")", "if", "not", "self", ".", "user", ".", "is_anonymous", ":", "post", ".", "poster", "=", "self", ".", "user", "else", ":", "post", ".", "username", "=", "self", ".", "cleaned_data", "[", "'username'", "]", "post", ".", "anonymous_key", "=", "get_anonymous_user_forum_key", "(", "self", ".", "user", ")", "# Locks the topic if appropriate.", "lock_topic", "=", "self", ".", "cleaned_data", ".", "get", "(", "'lock_topic'", ",", "False", ")", "if", "lock_topic", ":", "self", ".", "topic", ".", "status", "=", "Topic", ".", "TOPIC_LOCKED", "self", ".", "topic", ".", "save", "(", ")", "if", "commit", ":", "post", ".", "save", "(", ")", "return", "post" ]
36.766667
17.866667
def fit_interval_censoring( self, lower_bound, upper_bound, event_observed=None, timeline=None, label=None, alpha=None, ci_labels=None, show_progress=False, entry=None, weights=None, ): # pylint: disable=too-many-arguments """ Fit the model to an interval censored dataset. Parameters ---------- lower_bound: an array, or pd.Series length n, the start of the period the subject experienced the event in. upper_bound: an array, or pd.Series length n, the end of the period the subject experienced the event in. If the value is equal to the corresponding value in lower_bound, then the individual's event was observed (not censored). event_observed: numpy array or pd.Series, optional length n, if left optional, infer from ``lower_bound`` and ``upper_cound`` (if lower_bound==upper_bound then event observed, if lower_bound < upper_bound, then event censored) timeline: list, optional return the estimate at the values in timeline (positively increasing) label: string, optional a string to name the column of the estimate. alpha: float, optional the alpha value in the confidence intervals. Overrides the initializing alpha for this call to fit only. ci_labels: list, optional add custom column names to the generated confidence intervals as a length-2 list: [<lower-bound name>, <upper-bound name>]. Default: <label>_lower_<alpha> show_progress: boolean, optional since this is an iterative fitting algorithm, switching this to True will display some iteration details. entry: an array, or pd.Series, of length n relative time when a subject entered the study. This is useful for left-truncated (not left-censored) observations. If None, all members of the population entered study when they were "born": time zero. 
weights: an array, or pd.Series, of length n integer weights per observation Returns ------- self self with new properties like ``cumulative_hazard_``, ``survival_function_`` """ check_nans_or_infs(lower_bound) check_positivity(upper_bound) self.upper_bound = np.asarray(pass_for_numeric_dtypes_or_raise_array(upper_bound)) self.lower_bound = np.asarray(pass_for_numeric_dtypes_or_raise_array(lower_bound)) if (self.upper_bound < self.lower_bound).any(): raise ValueError("All upper_bound times must be greater than or equal to lower_bound times.") if event_observed is None: event_observed = self.upper_bound == self.lower_bound if ((self.lower_bound == self.upper_bound) != event_observed).any(): raise ValueError( "For all rows, lower_bound == upper_bound if and only if event observed = 1 (uncensored). Likewise, lower_bound < upper_bound if and only if event observed = 0 (censored)" ) self._censoring_type = CensoringType.INTERVAL return self._fit( (np.clip(self.lower_bound, 1e-20, 1e25), np.clip(self.upper_bound, 1e-20, 1e25)), event_observed=event_observed, timeline=timeline, label=label, alpha=alpha, ci_labels=ci_labels, show_progress=show_progress, entry=entry, weights=weights, )
[ "def", "fit_interval_censoring", "(", "self", ",", "lower_bound", ",", "upper_bound", ",", "event_observed", "=", "None", ",", "timeline", "=", "None", ",", "label", "=", "None", ",", "alpha", "=", "None", ",", "ci_labels", "=", "None", ",", "show_progress", "=", "False", ",", "entry", "=", "None", ",", "weights", "=", "None", ",", ")", ":", "# pylint: disable=too-many-arguments", "check_nans_or_infs", "(", "lower_bound", ")", "check_positivity", "(", "upper_bound", ")", "self", ".", "upper_bound", "=", "np", ".", "asarray", "(", "pass_for_numeric_dtypes_or_raise_array", "(", "upper_bound", ")", ")", "self", ".", "lower_bound", "=", "np", ".", "asarray", "(", "pass_for_numeric_dtypes_or_raise_array", "(", "lower_bound", ")", ")", "if", "(", "self", ".", "upper_bound", "<", "self", ".", "lower_bound", ")", ".", "any", "(", ")", ":", "raise", "ValueError", "(", "\"All upper_bound times must be greater than or equal to lower_bound times.\"", ")", "if", "event_observed", "is", "None", ":", "event_observed", "=", "self", ".", "upper_bound", "==", "self", ".", "lower_bound", "if", "(", "(", "self", ".", "lower_bound", "==", "self", ".", "upper_bound", ")", "!=", "event_observed", ")", ".", "any", "(", ")", ":", "raise", "ValueError", "(", "\"For all rows, lower_bound == upper_bound if and only if event observed = 1 (uncensored). 
Likewise, lower_bound < upper_bound if and only if event observed = 0 (censored)\"", ")", "self", ".", "_censoring_type", "=", "CensoringType", ".", "INTERVAL", "return", "self", ".", "_fit", "(", "(", "np", ".", "clip", "(", "self", ".", "lower_bound", ",", "1e-20", ",", "1e25", ")", ",", "np", ".", "clip", "(", "self", ".", "upper_bound", ",", "1e-20", ",", "1e25", ")", ")", ",", "event_observed", "=", "event_observed", ",", "timeline", "=", "timeline", ",", "label", "=", "label", ",", "alpha", "=", "alpha", ",", "ci_labels", "=", "ci_labels", ",", "show_progress", "=", "show_progress", ",", "entry", "=", "entry", ",", "weights", "=", "weights", ",", ")" ]
45.363636
28.636364
def grep(source, regex, stop_on_first=False): """Grep the constant pool of all classes in source.""" loader = ClassLoader(source, max_cache=-1) r = re.compile(regex) def _matches(constant): return r.match(constant.value) for klass in loader.classes: it = loader.search_constant_pool(path=klass, type_=UTF8, f=_matches) if next(it, None): print(klass) if stop_on_first: break
[ "def", "grep", "(", "source", ",", "regex", ",", "stop_on_first", "=", "False", ")", ":", "loader", "=", "ClassLoader", "(", "source", ",", "max_cache", "=", "-", "1", ")", "r", "=", "re", ".", "compile", "(", "regex", ")", "def", "_matches", "(", "constant", ")", ":", "return", "r", ".", "match", "(", "constant", ".", "value", ")", "for", "klass", "in", "loader", ".", "classes", ":", "it", "=", "loader", ".", "search_constant_pool", "(", "path", "=", "klass", ",", "type_", "=", "UTF8", ",", "f", "=", "_matches", ")", "if", "next", "(", "it", ",", "None", ")", ":", "print", "(", "klass", ")", "if", "stop_on_first", ":", "break" ]
31.928571
16.071429
def split_evenly(n, chunks): """Split an integer into evenly distributed list >>> split_evenly(7, 3) [3, 2, 2] >>> split_evenly(12, 3) [4, 4, 4] >>> split_evenly(35, 10) [4, 4, 4, 4, 4, 3, 3, 3, 3, 3] >>> split_evenly(1, 2) Traceback (most recent call last): ... ChunkingError: Number of chunks is greater than number """ if n < chunks: raise ChunkingError("Number of chunks is greater than number") if n % chunks == 0: # Either we can evenly split or only 1 chunk left return [n / chunks] * chunks # otherwise the current chunk should be a bit larger max_size = n / chunks + 1 return [max_size] + split_evenly(n - max_size, chunks - 1)
[ "def", "split_evenly", "(", "n", ",", "chunks", ")", ":", "if", "n", "<", "chunks", ":", "raise", "ChunkingError", "(", "\"Number of chunks is greater than number\"", ")", "if", "n", "%", "chunks", "==", "0", ":", "# Either we can evenly split or only 1 chunk left", "return", "[", "n", "/", "chunks", "]", "*", "chunks", "# otherwise the current chunk should be a bit larger", "max_size", "=", "n", "/", "chunks", "+", "1", "return", "[", "max_size", "]", "+", "split_evenly", "(", "n", "-", "max_size", ",", "chunks", "-", "1", ")" ]
27.384615
19.730769
def spielman_wr(self, norm=True): """Returns a list of site-specific omega values calculated from the `ExpCM`. Args: `norm` (bool) If `True`, normalize the `omega_r` values by the ExpCM gene-wide `omega`. Returns: `wr` (list) list of `omega_r` values of length `nsites` Following `Spielman and Wilke, MBE, 32:1097-1108 <https://doi.org/10.1093/molbev/msv003>`_, we can predict the `dN/dS` value for each site `r`, :math:`\\rm{spielman}\\omega_r`, from the `ExpCM`. When `norm` is `False`, the `omega_r` values are defined as :math:`\\rm{spielman}\\omega_r = \\frac{\\sum_x \\sum_{y \\in N_x}p_{r,x}\ P_{r,xy}}{\\sum_x \\sum_{y \\in Nx}p_{r,x}Q_{xy}}`, where `r,x,y`, :math:`p_{r,x}`, :math:`P_{r,xy}`, and :math:`Q_{x,y}` have the same definitions as in the main `ExpCM` doc string and :math:`N_{x}` is the set of codons which are non-synonymous to codon `x` and differ from `x` by one nucleotide. When `norm` is `True`, the `omega_r` values above are divided by the ExpCM `omega` value.""" wr = [] for r in range(self.nsites): num = 0 den = 0 for i in range(N_CODON): j = scipy.intersect1d(scipy.where(CODON_SINGLEMUT[i]==True)[0], scipy.where(CODON_NONSYN[i]==True)[0]) p_i = self.stationarystate[r][i] P_xy = self.Prxy[r][i][j].sum() if norm: P_xy = P_xy/self.omega Q_xy = self.Qxy[i][j].sum() num += (p_i * P_xy) den += (p_i * Q_xy) result = num/den wr.append(result) return wr
[ "def", "spielman_wr", "(", "self", ",", "norm", "=", "True", ")", ":", "wr", "=", "[", "]", "for", "r", "in", "range", "(", "self", ".", "nsites", ")", ":", "num", "=", "0", "den", "=", "0", "for", "i", "in", "range", "(", "N_CODON", ")", ":", "j", "=", "scipy", ".", "intersect1d", "(", "scipy", ".", "where", "(", "CODON_SINGLEMUT", "[", "i", "]", "==", "True", ")", "[", "0", "]", ",", "scipy", ".", "where", "(", "CODON_NONSYN", "[", "i", "]", "==", "True", ")", "[", "0", "]", ")", "p_i", "=", "self", ".", "stationarystate", "[", "r", "]", "[", "i", "]", "P_xy", "=", "self", ".", "Prxy", "[", "r", "]", "[", "i", "]", "[", "j", "]", ".", "sum", "(", ")", "if", "norm", ":", "P_xy", "=", "P_xy", "/", "self", ".", "omega", "Q_xy", "=", "self", ".", "Qxy", "[", "i", "]", "[", "j", "]", ".", "sum", "(", ")", "num", "+=", "(", "p_i", "*", "P_xy", ")", "den", "+=", "(", "p_i", "*", "Q_xy", ")", "result", "=", "num", "/", "den", "wr", ".", "append", "(", "result", ")", "return", "wr" ]
40.244444
21.511111
def get_configuration(dev): r"""Get the current active configuration of the device. dev is the Device object to which the request will be sent to. This function differs from the Device.get_active_configuration method because the later may use cached data, while this function always does a device request. """ bmRequestType = util.build_request_type( util.CTRL_IN, util.CTRL_TYPE_STANDARD, util.CTRL_RECIPIENT_DEVICE) return dev.ctrl_transfer( bmRequestType, bRequest = 0x08, data_or_wLength = 1)[0]
[ "def", "get_configuration", "(", "dev", ")", ":", "bmRequestType", "=", "util", ".", "build_request_type", "(", "util", ".", "CTRL_IN", ",", "util", ".", "CTRL_TYPE_STANDARD", ",", "util", ".", "CTRL_RECIPIENT_DEVICE", ")", "return", "dev", ".", "ctrl_transfer", "(", "bmRequestType", ",", "bRequest", "=", "0x08", ",", "data_or_wLength", "=", "1", ")", "[", "0", "]" ]
34.315789
15.157895
def clamp_within_range(self, x, y): """ Clamp x and y so that they fall within range of the tilemap. """ x = int(x) y = int(y) if x < 0: x = 0 if y < 0: y = 0 if x > self.size_in_tiles.X: x = self.size_in_tiles.X if y > self.size_in_tiles.Y: y = self.size_in_tiles.Y return x, y
[ "def", "clamp_within_range", "(", "self", ",", "x", ",", "y", ")", ":", "x", "=", "int", "(", "x", ")", "y", "=", "int", "(", "y", ")", "if", "x", "<", "0", ":", "x", "=", "0", "if", "y", "<", "0", ":", "y", "=", "0", "if", "x", ">", "self", ".", "size_in_tiles", ".", "X", ":", "x", "=", "self", ".", "size_in_tiles", ".", "X", "if", "y", ">", "self", ".", "size_in_tiles", ".", "Y", ":", "y", "=", "self", ".", "size_in_tiles", ".", "Y", "return", "x", ",", "y" ]
23.058824
16.823529
def get_rudder_scores_vs_background(self): ''' Returns ------- pd.DataFrame of rudder scores vs background ''' df = self.get_term_and_background_counts() corpus_percentiles = self._get_percentiles_from_freqs(df['corpus']) background_percentiles = self._get_percentiles_from_freqs(df['background']) df['Rudder'] = (self._get_rudder_scores_for_percentile_pair(corpus_percentiles, background_percentiles)) df = df.sort_values(by='Rudder', ascending=True) return df
[ "def", "get_rudder_scores_vs_background", "(", "self", ")", ":", "df", "=", "self", ".", "get_term_and_background_counts", "(", ")", "corpus_percentiles", "=", "self", ".", "_get_percentiles_from_freqs", "(", "df", "[", "'corpus'", "]", ")", "background_percentiles", "=", "self", ".", "_get_percentiles_from_freqs", "(", "df", "[", "'background'", "]", ")", "df", "[", "'Rudder'", "]", "=", "(", "self", ".", "_get_rudder_scores_for_percentile_pair", "(", "corpus_percentiles", ",", "background_percentiles", ")", ")", "df", "=", "df", ".", "sort_values", "(", "by", "=", "'Rudder'", ",", "ascending", "=", "True", ")", "return", "df" ]
46.538462
26.692308
def _get_seqprop_to_seqprop_alignment(self, seqprop1, seqprop2): """Return the alignment stored in self.sequence_alignments given a seqprop + another seqprop""" if isinstance(seqprop1, str): seqprop1_id = seqprop1 else: seqprop1_id = seqprop1.id if isinstance(seqprop2, str): seqprop2_id = seqprop2 else: seqprop2_id = seqprop2.id aln_id = '{}_{}'.format(seqprop1_id, seqprop2_id) if self.sequence_alignments.has_id(aln_id): alignment = self.sequence_alignments.get_by_id(aln_id) return alignment else: raise ValueError('{}: sequence alignment not found, please run the alignment first'.format(aln_id))
[ "def", "_get_seqprop_to_seqprop_alignment", "(", "self", ",", "seqprop1", ",", "seqprop2", ")", ":", "if", "isinstance", "(", "seqprop1", ",", "str", ")", ":", "seqprop1_id", "=", "seqprop1", "else", ":", "seqprop1_id", "=", "seqprop1", ".", "id", "if", "isinstance", "(", "seqprop2", ",", "str", ")", ":", "seqprop2_id", "=", "seqprop2", "else", ":", "seqprop2_id", "=", "seqprop2", ".", "id", "aln_id", "=", "'{}_{}'", ".", "format", "(", "seqprop1_id", ",", "seqprop2_id", ")", "if", "self", ".", "sequence_alignments", ".", "has_id", "(", "aln_id", ")", ":", "alignment", "=", "self", ".", "sequence_alignments", ".", "get_by_id", "(", "aln_id", ")", "return", "alignment", "else", ":", "raise", "ValueError", "(", "'{}: sequence alignment not found, please run the alignment first'", ".", "format", "(", "aln_id", ")", ")" ]
40.833333
19.222222
def handle_basic_executor_options(options, parser): """Handle the options specified by add_basic_executor_options().""" # setup logging logLevel = logging.INFO if options.debug: logLevel = logging.DEBUG elif options.quiet: logLevel = logging.WARNING util.setup_logging(level=logLevel)
[ "def", "handle_basic_executor_options", "(", "options", ",", "parser", ")", ":", "# setup logging", "logLevel", "=", "logging", ".", "INFO", "if", "options", ".", "debug", ":", "logLevel", "=", "logging", ".", "DEBUG", "elif", "options", ".", "quiet", ":", "logLevel", "=", "logging", ".", "WARNING", "util", ".", "setup_logging", "(", "level", "=", "logLevel", ")" ]
35.111111
10.777778
def format(self, record): """ Format the record using the corresponding formatter. """ if record.levelno == DEBUG: return self.debug_formatter.format(record) if record.levelno == INFO: return self.info_formatter.format(record) if record.levelno == ERROR: return self.error_formatter.format(record) if record.levelno == WARNING: return self.warning_formatter.format(record) if record.levelno == CRITICAL: return self.critical_formatter.format(record)
[ "def", "format", "(", "self", ",", "record", ")", ":", "if", "record", ".", "levelno", "==", "DEBUG", ":", "return", "self", ".", "debug_formatter", ".", "format", "(", "record", ")", "if", "record", ".", "levelno", "==", "INFO", ":", "return", "self", ".", "info_formatter", ".", "format", "(", "record", ")", "if", "record", ".", "levelno", "==", "ERROR", ":", "return", "self", ".", "error_formatter", ".", "format", "(", "record", ")", "if", "record", ".", "levelno", "==", "WARNING", ":", "return", "self", ".", "warning_formatter", ".", "format", "(", "record", ")", "if", "record", ".", "levelno", "==", "CRITICAL", ":", "return", "self", ".", "critical_formatter", ".", "format", "(", "record", ")" ]
40
9.285714
def _client_send(self, msg): """Sends an Rpc message through the connection. Args: msg: string, the message to send. Raises: Error: a socket error occurred during the send. """ try: self._client.write(msg.encode("utf8") + b'\n') self._client.flush() self.log.debug('Snippet sent %s.', msg) except socket.error as e: raise Error( self._ad, 'Encountered socket error "%s" sending RPC message "%s"' % (e, msg))
[ "def", "_client_send", "(", "self", ",", "msg", ")", ":", "try", ":", "self", ".", "_client", ".", "write", "(", "msg", ".", "encode", "(", "\"utf8\"", ")", "+", "b'\\n'", ")", "self", ".", "_client", ".", "flush", "(", ")", "self", ".", "log", ".", "debug", "(", "'Snippet sent %s.'", ",", "msg", ")", "except", "socket", ".", "error", "as", "e", ":", "raise", "Error", "(", "self", ".", "_ad", ",", "'Encountered socket error \"%s\" sending RPC message \"%s\"'", "%", "(", "e", ",", "msg", ")", ")" ]
31.111111
17.777778
def GetPixelColor(x: int, y: int, handle: int = 0) -> int: """ Get pixel color of a native window. x: int. y: int. handle: int, the handle of a native window. Return int, the bgr value of point (x,y). r = bgr & 0x0000FF g = (bgr & 0x00FF00) >> 8 b = (bgr & 0xFF0000) >> 16 If handle is 0, get pixel from Desktop window(root control). Note: Not all devices support GetPixel. An application should call GetDeviceCaps to determine whether a specified device supports this function. For example, console window doesn't support. """ hdc = ctypes.windll.user32.GetWindowDC(ctypes.c_void_p(handle)) bgr = ctypes.windll.gdi32.GetPixel(hdc, x, y) ctypes.windll.user32.ReleaseDC(ctypes.c_void_p(handle), hdc) return bgr
[ "def", "GetPixelColor", "(", "x", ":", "int", ",", "y", ":", "int", ",", "handle", ":", "int", "=", "0", ")", "->", "int", ":", "hdc", "=", "ctypes", ".", "windll", ".", "user32", ".", "GetWindowDC", "(", "ctypes", ".", "c_void_p", "(", "handle", ")", ")", "bgr", "=", "ctypes", ".", "windll", ".", "gdi32", ".", "GetPixel", "(", "hdc", ",", "x", ",", "y", ")", "ctypes", ".", "windll", ".", "user32", ".", "ReleaseDC", "(", "ctypes", ".", "c_void_p", "(", "handle", ")", ",", "hdc", ")", "return", "bgr" ]
38.3
17.4
def _unlock(self, name, client_id, request_id): """Handles unlocking Complains if a non-existent lock should be released or if a lock should be released that was acquired by another client before. """ if name in self._locks: other_client_id, other_request_id = self._locks[name] if other_client_id != client_id: response = (self.RELEASE_ERROR + self.DELIMITER + 'Lock `%s` was acquired by `%s` (old request id `%s`) and not by ' '`%s` (request id `%s`)' % (name, other_client_id, other_request_id, client_id, request_id)) self._logger.error(response) return response else: del self._locks[name] return self.RELEASED else: response = (self.RELEASE_ERROR + self.DELIMITER + 'Lock `%s` cannot be found in database (client id `%s`, ' 'request id `%s`)' % (name, client_id, request_id)) self._logger.error(response) return response
[ "def", "_unlock", "(", "self", ",", "name", ",", "client_id", ",", "request_id", ")", ":", "if", "name", "in", "self", ".", "_locks", ":", "other_client_id", ",", "other_request_id", "=", "self", ".", "_locks", "[", "name", "]", "if", "other_client_id", "!=", "client_id", ":", "response", "=", "(", "self", ".", "RELEASE_ERROR", "+", "self", ".", "DELIMITER", "+", "'Lock `%s` was acquired by `%s` (old request id `%s`) and not by '", "'`%s` (request id `%s`)'", "%", "(", "name", ",", "other_client_id", ",", "other_request_id", ",", "client_id", ",", "request_id", ")", ")", "self", ".", "_logger", ".", "error", "(", "response", ")", "return", "response", "else", ":", "del", "self", ".", "_locks", "[", "name", "]", "return", "self", ".", "RELEASED", "else", ":", "response", "=", "(", "self", ".", "RELEASE_ERROR", "+", "self", ".", "DELIMITER", "+", "'Lock `%s` cannot be found in database (client id `%s`, '", "'request id `%s`)'", "%", "(", "name", ",", "client_id", ",", "request_id", ")", ")", "self", ".", "_logger", ".", "error", "(", "response", ")", "return", "response" ]
46.068966
19.758621
def _assign_method(self, resource_class, method_type): """ Using reflection, assigns a new method to this class. Args: resource_class: A resource class method_type: The HTTP method type """ """ If we assigned the same method to each method, it's the same method in memory, so we need one for each acceptable HTTP method. """ method_name = resource_class.get_method_name( resource_class, method_type) valid_status_codes = getattr( resource_class.Meta, 'valid_status_codes', DEFAULT_VALID_STATUS_CODES ) # I know what you're going to say, and I'd love help making this nicer # reflection assigns the same memory addr to each method otherwise. def get(self, method_type=method_type, method_name=method_name, valid_status_codes=valid_status_codes, resource=resource_class, data=None, uid=None, **kwargs): return self.call_api( method_type, method_name, valid_status_codes, resource, data, uid=uid, **kwargs) def put(self, method_type=method_type, method_name=method_name, valid_status_codes=valid_status_codes, resource=resource_class, data=None, uid=None, **kwargs): return self.call_api( method_type, method_name, valid_status_codes, resource, data, uid=uid, **kwargs) def post(self, method_type=method_type, method_name=method_name, valid_status_codes=valid_status_codes, resource=resource_class, data=None, uid=None, **kwargs): return self.call_api( method_type, method_name, valid_status_codes, resource, data, uid=uid, **kwargs) def patch(self, method_type=method_type, method_name=method_name, valid_status_codes=valid_status_codes, resource=resource_class, data=None, uid=None, **kwargs): return self.call_api( method_type, method_name, valid_status_codes, resource, data, uid=uid, **kwargs) def delete(self, method_type=method_type, method_name=method_name, valid_status_codes=valid_status_codes, resource=resource_class, data=None, uid=None, **kwargs): return self.call_api( method_type, method_name, valid_status_codes, resource, data, uid=uid, **kwargs) method_map = { 'GET': get, 'PUT': 
put, 'POST': post, 'PATCH': patch, 'DELETE': delete } setattr( self, method_name, types.MethodType(method_map[method_type], self) )
[ "def", "_assign_method", "(", "self", ",", "resource_class", ",", "method_type", ")", ":", "\"\"\"\n If we assigned the same method to each method, it's the same\n method in memory, so we need one for each acceptable HTTP method.\n \"\"\"", "method_name", "=", "resource_class", ".", "get_method_name", "(", "resource_class", ",", "method_type", ")", "valid_status_codes", "=", "getattr", "(", "resource_class", ".", "Meta", ",", "'valid_status_codes'", ",", "DEFAULT_VALID_STATUS_CODES", ")", "# I know what you're going to say, and I'd love help making this nicer", "# reflection assigns the same memory addr to each method otherwise.", "def", "get", "(", "self", ",", "method_type", "=", "method_type", ",", "method_name", "=", "method_name", ",", "valid_status_codes", "=", "valid_status_codes", ",", "resource", "=", "resource_class", ",", "data", "=", "None", ",", "uid", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "call_api", "(", "method_type", ",", "method_name", ",", "valid_status_codes", ",", "resource", ",", "data", ",", "uid", "=", "uid", ",", "*", "*", "kwargs", ")", "def", "put", "(", "self", ",", "method_type", "=", "method_type", ",", "method_name", "=", "method_name", ",", "valid_status_codes", "=", "valid_status_codes", ",", "resource", "=", "resource_class", ",", "data", "=", "None", ",", "uid", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "call_api", "(", "method_type", ",", "method_name", ",", "valid_status_codes", ",", "resource", ",", "data", ",", "uid", "=", "uid", ",", "*", "*", "kwargs", ")", "def", "post", "(", "self", ",", "method_type", "=", "method_type", ",", "method_name", "=", "method_name", ",", "valid_status_codes", "=", "valid_status_codes", ",", "resource", "=", "resource_class", ",", "data", "=", "None", ",", "uid", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "call_api", "(", "method_type", ",", "method_name", ",", "valid_status_codes", ",", "resource", ",", 
"data", ",", "uid", "=", "uid", ",", "*", "*", "kwargs", ")", "def", "patch", "(", "self", ",", "method_type", "=", "method_type", ",", "method_name", "=", "method_name", ",", "valid_status_codes", "=", "valid_status_codes", ",", "resource", "=", "resource_class", ",", "data", "=", "None", ",", "uid", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "call_api", "(", "method_type", ",", "method_name", ",", "valid_status_codes", ",", "resource", ",", "data", ",", "uid", "=", "uid", ",", "*", "*", "kwargs", ")", "def", "delete", "(", "self", ",", "method_type", "=", "method_type", ",", "method_name", "=", "method_name", ",", "valid_status_codes", "=", "valid_status_codes", ",", "resource", "=", "resource_class", ",", "data", "=", "None", ",", "uid", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "call_api", "(", "method_type", ",", "method_name", ",", "valid_status_codes", ",", "resource", ",", "data", ",", "uid", "=", "uid", ",", "*", "*", "kwargs", ")", "method_map", "=", "{", "'GET'", ":", "get", ",", "'PUT'", ":", "put", ",", "'POST'", ":", "post", ",", "'PATCH'", ":", "patch", ",", "'DELETE'", ":", "delete", "}", "setattr", "(", "self", ",", "method_name", ",", "types", ".", "MethodType", "(", "method_map", "[", "method_type", "]", ",", "self", ")", ")" ]
38.213333
17.386667
def normalize_dates(): """Experiment to make sense of TLG dates. TODO: start here, parse everything with pass """ _dict = get_date_author() for tlg_date in _dict: date = {} if tlg_date == 'Varia': #give a homer-to-byz date for 'varia' pass elif tlg_date == 'Incertum': #? pass else: tmp_date = _handle_splits(tlg_date) date.update(tmp_date) print(date)
[ "def", "normalize_dates", "(", ")", ":", "_dict", "=", "get_date_author", "(", ")", "for", "tlg_date", "in", "_dict", ":", "date", "=", "{", "}", "if", "tlg_date", "==", "'Varia'", ":", "#give a homer-to-byz date for 'varia'", "pass", "elif", "tlg_date", "==", "'Incertum'", ":", "#?", "pass", "else", ":", "tmp_date", "=", "_handle_splits", "(", "tlg_date", ")", "date", ".", "update", "(", "tmp_date", ")", "print", "(", "date", ")" ]
26
15.111111
def help_center_category_translations_missing(self, category_id, **kwargs): "https://developer.zendesk.com/rest_api/docs/help_center/translations#list-missing-translations" api_path = "/api/v2/help_center/categories/{category_id}/translations/missing.json" api_path = api_path.format(category_id=category_id) return self.call(api_path, **kwargs)
[ "def", "help_center_category_translations_missing", "(", "self", ",", "category_id", ",", "*", "*", "kwargs", ")", ":", "api_path", "=", "\"/api/v2/help_center/categories/{category_id}/translations/missing.json\"", "api_path", "=", "api_path", ".", "format", "(", "category_id", "=", "category_id", ")", "return", "self", ".", "call", "(", "api_path", ",", "*", "*", "kwargs", ")" ]
74.6
34.6
def tokenize_number(val, line): """Parse val correctly into int or float.""" try: num = int(val) typ = TokenType.int except ValueError: num = float(val) typ = TokenType.float return {'type': typ, 'value': num, 'line': line}
[ "def", "tokenize_number", "(", "val", ",", "line", ")", ":", "try", ":", "num", "=", "int", "(", "val", ")", "typ", "=", "TokenType", ".", "int", "except", "ValueError", ":", "num", "=", "float", "(", "val", ")", "typ", "=", "TokenType", ".", "float", "return", "{", "'type'", ":", "typ", ",", "'value'", ":", "num", ",", "'line'", ":", "line", "}" ]
26.3
16.9
def number_of_nodes(self, t=None): """Return the number of nodes in the t snpashot of a dynamic graph. Parameters ---------- t : snapshot id (default=None) If None return the number of nodes in the flattened graph. Returns ------- nnodes : int The number of nodes in the graph. See Also -------- order which is identical Examples -------- >>> G = dn.DynGraph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2], t=0) >>> G.number_of_nodes(0) 3 """ if t is None: return len(self._node) else: nds = sum([1 for n in self.degree(t=t).values() if n > 0]) return nds
[ "def", "number_of_nodes", "(", "self", ",", "t", "=", "None", ")", ":", "if", "t", "is", "None", ":", "return", "len", "(", "self", ".", "_node", ")", "else", ":", "nds", "=", "sum", "(", "[", "1", "for", "n", "in", "self", ".", "degree", "(", "t", "=", "t", ")", ".", "values", "(", ")", "if", "n", ">", "0", "]", ")", "return", "nds" ]
25.7
21.366667
def get_name(self): """Get name based on 4 class attributes Each attribute is substituted by '' if attribute does not exist :return: dependent_host_name/dependent_service_description..host_name/service_description :rtype: str TODO: Clean this function (use format for string) """ return getattr(self, 'dependent_host_name', '') + '/'\ + getattr(self, 'dependent_service_description', '') \ + '..' + getattr(self, 'host_name', '') + '/' \ + getattr(self, 'service_description', '')
[ "def", "get_name", "(", "self", ")", ":", "return", "getattr", "(", "self", ",", "'dependent_host_name'", ",", "''", ")", "+", "'/'", "+", "getattr", "(", "self", ",", "'dependent_service_description'", ",", "''", ")", "+", "'..'", "+", "getattr", "(", "self", ",", "'host_name'", ",", "''", ")", "+", "'/'", "+", "getattr", "(", "self", ",", "'service_description'", ",", "''", ")" ]
46.833333
22.333333
def get_fastq_files(data): """Retrieve fastq files for the given lane, ready to process. """ assert "files" in data, "Did not find `files` in input; nothing to process" ready_files = [] should_gzip = True # Bowtie does not accept gzipped fastq if 'bowtie' in data['reference'].keys(): should_gzip = False for fname in data["files"]: if fname.endswith(".bam"): if _pipeline_needs_fastq(data["config"], data): ready_files = convert_bam_to_fastq(fname, data["dirs"]["work"], data, data["dirs"], data["config"]) else: ready_files = [fname] elif objectstore.is_remote(fname): ready_files.append(fname) # Trimming does quality conversion, so if not doing that, do an explicit conversion elif not(dd.get_trim_reads(data)) and dd.get_quality_format(data) != "standard": out_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), "fastq_convert")) ready_files.append(fastq.groom(fname, data, out_dir=out_dir)) else: ready_files.append(fname) ready_files = [x for x in ready_files if x is not None] if should_gzip: out_dir = utils.safe_makedir(os.path.join(dd.get_work_dir(data), "fastq")) ready_files = [_gzip_fastq(x, out_dir) for x in ready_files] for in_file in ready_files: if not objectstore.is_remote(in_file): assert os.path.exists(in_file), "%s does not exist." % in_file return ready_files
[ "def", "get_fastq_files", "(", "data", ")", ":", "assert", "\"files\"", "in", "data", ",", "\"Did not find `files` in input; nothing to process\"", "ready_files", "=", "[", "]", "should_gzip", "=", "True", "# Bowtie does not accept gzipped fastq", "if", "'bowtie'", "in", "data", "[", "'reference'", "]", ".", "keys", "(", ")", ":", "should_gzip", "=", "False", "for", "fname", "in", "data", "[", "\"files\"", "]", ":", "if", "fname", ".", "endswith", "(", "\".bam\"", ")", ":", "if", "_pipeline_needs_fastq", "(", "data", "[", "\"config\"", "]", ",", "data", ")", ":", "ready_files", "=", "convert_bam_to_fastq", "(", "fname", ",", "data", "[", "\"dirs\"", "]", "[", "\"work\"", "]", ",", "data", ",", "data", "[", "\"dirs\"", "]", ",", "data", "[", "\"config\"", "]", ")", "else", ":", "ready_files", "=", "[", "fname", "]", "elif", "objectstore", ".", "is_remote", "(", "fname", ")", ":", "ready_files", ".", "append", "(", "fname", ")", "# Trimming does quality conversion, so if not doing that, do an explicit conversion", "elif", "not", "(", "dd", ".", "get_trim_reads", "(", "data", ")", ")", "and", "dd", ".", "get_quality_format", "(", "data", ")", "!=", "\"standard\"", ":", "out_dir", "=", "utils", ".", "safe_makedir", "(", "os", ".", "path", ".", "join", "(", "dd", ".", "get_work_dir", "(", "data", ")", ",", "\"fastq_convert\"", ")", ")", "ready_files", ".", "append", "(", "fastq", ".", "groom", "(", "fname", ",", "data", ",", "out_dir", "=", "out_dir", ")", ")", "else", ":", "ready_files", ".", "append", "(", "fname", ")", "ready_files", "=", "[", "x", "for", "x", "in", "ready_files", "if", "x", "is", "not", "None", "]", "if", "should_gzip", ":", "out_dir", "=", "utils", ".", "safe_makedir", "(", "os", ".", "path", ".", "join", "(", "dd", ".", "get_work_dir", "(", "data", ")", ",", "\"fastq\"", ")", ")", "ready_files", "=", "[", "_gzip_fastq", "(", "x", ",", "out_dir", ")", "for", "x", "in", "ready_files", "]", "for", "in_file", "in", "ready_files", ":", "if", "not", 
"objectstore", ".", "is_remote", "(", "in_file", ")", ":", "assert", "os", ".", "path", ".", "exists", "(", "in_file", ")", ",", "\"%s does not exist.\"", "%", "in_file", "return", "ready_files" ]
47
21
def merge_from(self, other): """Merge information from another PhoneNumber object into this one.""" if other.country_code is not None: self.country_code = other.country_code if other.national_number is not None: self.national_number = other.national_number if other.extension is not None: self.extension = other.extension if other.italian_leading_zero is not None: self.italian_leading_zero = other.italian_leading_zero if other.number_of_leading_zeros is not None: self.number_of_leading_zeros = other.number_of_leading_zeros if other.raw_input is not None: self.raw_input = other.raw_input if other.country_code_source is not CountryCodeSource.UNSPECIFIED: self.country_code_source = other.country_code_source if other.preferred_domestic_carrier_code is not None: self.preferred_domestic_carrier_code = other.preferred_domestic_carrier_code
[ "def", "merge_from", "(", "self", ",", "other", ")", ":", "if", "other", ".", "country_code", "is", "not", "None", ":", "self", ".", "country_code", "=", "other", ".", "country_code", "if", "other", ".", "national_number", "is", "not", "None", ":", "self", ".", "national_number", "=", "other", ".", "national_number", "if", "other", ".", "extension", "is", "not", "None", ":", "self", ".", "extension", "=", "other", ".", "extension", "if", "other", ".", "italian_leading_zero", "is", "not", "None", ":", "self", ".", "italian_leading_zero", "=", "other", ".", "italian_leading_zero", "if", "other", ".", "number_of_leading_zeros", "is", "not", "None", ":", "self", ".", "number_of_leading_zeros", "=", "other", ".", "number_of_leading_zeros", "if", "other", ".", "raw_input", "is", "not", "None", ":", "self", ".", "raw_input", "=", "other", ".", "raw_input", "if", "other", ".", "country_code_source", "is", "not", "CountryCodeSource", ".", "UNSPECIFIED", ":", "self", ".", "country_code_source", "=", "other", ".", "country_code_source", "if", "other", ".", "preferred_domestic_carrier_code", "is", "not", "None", ":", "self", ".", "preferred_domestic_carrier_code", "=", "other", ".", "preferred_domestic_carrier_code" ]
55.166667
14.611111
def detect_stream_mode(stream): ''' detect_stream_mode - Detect the mode on a given stream @param stream <object> - A stream object If "mode" is present, that will be used. @return <type> - "Bytes" type or "str" type ''' # If "Mode" is present, pull from that if hasattr(stream, 'mode'): if 'b' in stream.mode: return bytes elif 't' in stream.mode: return str # Read a zero-length string off the device if hasattr(stream, 'read'): zeroStr = stream.read(0) if type(zeroStr) is str: return str return bytes elif hasattr(stream, 'recv'): zeroStr = stream.recv(0) if type(zeroStr) is str: return str return bytes # Cannot figure it out, assume bytes. return bytes
[ "def", "detect_stream_mode", "(", "stream", ")", ":", "# If \"Mode\" is present, pull from that", "if", "hasattr", "(", "stream", ",", "'mode'", ")", ":", "if", "'b'", "in", "stream", ".", "mode", ":", "return", "bytes", "elif", "'t'", "in", "stream", ".", "mode", ":", "return", "str", "# Read a zero-length string off the device ", "if", "hasattr", "(", "stream", ",", "'read'", ")", ":", "zeroStr", "=", "stream", ".", "read", "(", "0", ")", "if", "type", "(", "zeroStr", ")", "is", "str", ":", "return", "str", "return", "bytes", "elif", "hasattr", "(", "stream", ",", "'recv'", ")", ":", "zeroStr", "=", "stream", ".", "recv", "(", "0", ")", "if", "type", "(", "zeroStr", ")", "is", "str", ":", "return", "str", "return", "bytes", "# Cannot figure it out, assume bytes.", "return", "bytes" ]
26.548387
17.774194
def create_git_commit(self, message, tree, parents, author=github.GithubObject.NotSet, committer=github.GithubObject.NotSet): """ :calls: `POST /repos/:owner/:repo/git/commits <http://developer.github.com/v3/git/commits>`_ :param message: string :param tree: :class:`github.GitTree.GitTree` :param parents: list of :class:`github.GitCommit.GitCommit` :param author: :class:`github.InputGitAuthor.InputGitAuthor` :param committer: :class:`github.InputGitAuthor.InputGitAuthor` :rtype: :class:`github.GitCommit.GitCommit` """ assert isinstance(message, (str, unicode)), message assert isinstance(tree, github.GitTree.GitTree), tree assert all(isinstance(element, github.GitCommit.GitCommit) for element in parents), parents assert author is github.GithubObject.NotSet or isinstance(author, github.InputGitAuthor), author assert committer is github.GithubObject.NotSet or isinstance(committer, github.InputGitAuthor), committer post_parameters = { "message": message, "tree": tree._identity, "parents": [element._identity for element in parents], } if author is not github.GithubObject.NotSet: post_parameters["author"] = author._identity if committer is not github.GithubObject.NotSet: post_parameters["committer"] = committer._identity headers, data = self._requester.requestJsonAndCheck( "POST", self.url + "/git/commits", input=post_parameters ) return github.GitCommit.GitCommit(self._requester, headers, data, completed=True)
[ "def", "create_git_commit", "(", "self", ",", "message", ",", "tree", ",", "parents", ",", "author", "=", "github", ".", "GithubObject", ".", "NotSet", ",", "committer", "=", "github", ".", "GithubObject", ".", "NotSet", ")", ":", "assert", "isinstance", "(", "message", ",", "(", "str", ",", "unicode", ")", ")", ",", "message", "assert", "isinstance", "(", "tree", ",", "github", ".", "GitTree", ".", "GitTree", ")", ",", "tree", "assert", "all", "(", "isinstance", "(", "element", ",", "github", ".", "GitCommit", ".", "GitCommit", ")", "for", "element", "in", "parents", ")", ",", "parents", "assert", "author", "is", "github", ".", "GithubObject", ".", "NotSet", "or", "isinstance", "(", "author", ",", "github", ".", "InputGitAuthor", ")", ",", "author", "assert", "committer", "is", "github", ".", "GithubObject", ".", "NotSet", "or", "isinstance", "(", "committer", ",", "github", ".", "InputGitAuthor", ")", ",", "committer", "post_parameters", "=", "{", "\"message\"", ":", "message", ",", "\"tree\"", ":", "tree", ".", "_identity", ",", "\"parents\"", ":", "[", "element", ".", "_identity", "for", "element", "in", "parents", "]", ",", "}", "if", "author", "is", "not", "github", ".", "GithubObject", ".", "NotSet", ":", "post_parameters", "[", "\"author\"", "]", "=", "author", ".", "_identity", "if", "committer", "is", "not", "github", ".", "GithubObject", ".", "NotSet", ":", "post_parameters", "[", "\"committer\"", "]", "=", "committer", ".", "_identity", "headers", ",", "data", "=", "self", ".", "_requester", ".", "requestJsonAndCheck", "(", "\"POST\"", ",", "self", ".", "url", "+", "\"/git/commits\"", ",", "input", "=", "post_parameters", ")", "return", "github", ".", "GitCommit", ".", "GitCommit", "(", "self", ".", "_requester", ",", "headers", ",", "data", ",", "completed", "=", "True", ")" ]
55.433333
25.966667
def to_match(self): """Return a unicode object with the MATCH representation of this BetweenClause.""" template = u'({field_name} BETWEEN {lower_bound} AND {upper_bound})' return template.format( field_name=self.field.to_match(), lower_bound=self.lower_bound.to_match(), upper_bound=self.upper_bound.to_match())
[ "def", "to_match", "(", "self", ")", ":", "template", "=", "u'({field_name} BETWEEN {lower_bound} AND {upper_bound})'", "return", "template", ".", "format", "(", "field_name", "=", "self", ".", "field", ".", "to_match", "(", ")", ",", "lower_bound", "=", "self", ".", "lower_bound", ".", "to_match", "(", ")", ",", "upper_bound", "=", "self", ".", "upper_bound", ".", "to_match", "(", ")", ")" ]
52.142857
13.571429
def readInfoElement(self, infoElement, instanceObject): """ Read the info element. :: <info/> <info"> <location/> </info> """ infoLocation = self.locationFromElement(infoElement) instanceObject.addInfo(infoLocation, copySourceName=self.infoSource)
[ "def", "readInfoElement", "(", "self", ",", "infoElement", ",", "instanceObject", ")", ":", "infoLocation", "=", "self", ".", "locationFromElement", "(", "infoElement", ")", "instanceObject", ".", "addInfo", "(", "infoLocation", ",", "copySourceName", "=", "self", ".", "infoSource", ")" ]
25
22.928571
def add_cert(self, cert): """ Explicitely adds certificate to set of trusted in the store @param cert - X509 object to add """ if not isinstance(cert, X509): raise TypeError("cert should be X509") libcrypto.X509_STORE_add_cert(self.store, cert.cert)
[ "def", "add_cert", "(", "self", ",", "cert", ")", ":", "if", "not", "isinstance", "(", "cert", ",", "X509", ")", ":", "raise", "TypeError", "(", "\"cert should be X509\"", ")", "libcrypto", ".", "X509_STORE_add_cert", "(", "self", ".", "store", ",", "cert", ".", "cert", ")" ]
37.75
9.25
def gene_by_protein_id(self, protein_id): """ Get the gene ID associated with the given protein ID, return its Gene object """ gene_id = self.gene_id_of_protein_id(protein_id) return self.gene_by_id(gene_id)
[ "def", "gene_by_protein_id", "(", "self", ",", "protein_id", ")", ":", "gene_id", "=", "self", ".", "gene_id_of_protein_id", "(", "protein_id", ")", "return", "self", ".", "gene_by_id", "(", "gene_id", ")" ]
35.571429
7
def _config_parser_constrained(self, read_only): """:return: Config Parser constrained to our submodule in read or write mode""" try: pc = self.parent_commit except ValueError: pc = None # end handle empty parent repository parser = self._config_parser(self.repo, pc, read_only) parser.set_submodule(self) return SectionConstraint(parser, sm_section(self.name))
[ "def", "_config_parser_constrained", "(", "self", ",", "read_only", ")", ":", "try", ":", "pc", "=", "self", ".", "parent_commit", "except", "ValueError", ":", "pc", "=", "None", "# end handle empty parent repository", "parser", "=", "self", ".", "_config_parser", "(", "self", ".", "repo", ",", "pc", ",", "read_only", ")", "parser", ".", "set_submodule", "(", "self", ")", "return", "SectionConstraint", "(", "parser", ",", "sm_section", "(", "self", ".", "name", ")", ")" ]
43.2
12.9
def add_vts(self, task_name, targets, cache_key, valid, phase): """ Add a single VersionedTargetSet entry to the report. :param InvalidationCacheManager cache_manager: :param CacheKey cache_key: :param bool valid: :param string phase: """ if task_name not in self._task_reports: self.add_task(task_name) self._task_reports[task_name].add(targets, cache_key, valid, phase)
[ "def", "add_vts", "(", "self", ",", "task_name", ",", "targets", ",", "cache_key", ",", "valid", ",", "phase", ")", ":", "if", "task_name", "not", "in", "self", ".", "_task_reports", ":", "self", ".", "add_task", "(", "task_name", ")", "self", ".", "_task_reports", "[", "task_name", "]", ".", "add", "(", "targets", ",", "cache_key", ",", "valid", ",", "phase", ")" ]
40
12.1
def _onNavigate(self, index): '''Handle selection of path segment.''' if index > 0: self.setLocation( self._locationWidget.itemData(index), interactive=True )
[ "def", "_onNavigate", "(", "self", ",", "index", ")", ":", "if", "index", ">", "0", ":", "self", ".", "setLocation", "(", "self", ".", "_locationWidget", ".", "itemData", "(", "index", ")", ",", "interactive", "=", "True", ")" ]
34.833333
17.5
def _get_from_cache(self, sector, scale, eft, basis): """Try to load a set of Wilson coefficients from the cache, else return None.""" try: return self._cache[eft][scale][basis][sector] except KeyError: return None
[ "def", "_get_from_cache", "(", "self", ",", "sector", ",", "scale", ",", "eft", ",", "basis", ")", ":", "try", ":", "return", "self", ".", "_cache", "[", "eft", "]", "[", "scale", "]", "[", "basis", "]", "[", "sector", "]", "except", "KeyError", ":", "return", "None" ]
37.714286
13
def betweenness_centrality(self, normalized=True): """ Calculates betweenness centrality and returns an node id -> weight dictionary. Node betweenness weights are updated in the process. """ bc = proximity.brandes_betweenness_centrality(self, normalized) for id, w in bc.iteritems(): self[id]._betweenness = w return bc
[ "def", "betweenness_centrality", "(", "self", ",", "normalized", "=", "True", ")", ":", "bc", "=", "proximity", ".", "brandes_betweenness_centrality", "(", "self", ",", "normalized", ")", "for", "id", ",", "w", "in", "bc", ".", "iteritems", "(", ")", ":", "self", "[", "id", "]", ".", "_betweenness", "=", "w", "return", "bc" ]
51.571429
15.142857
def print_entitlements(opts, data, page_info=None, show_list_info=True): """Print entitlements as a table or output in another format.""" if utils.maybe_print_as_json(opts, data, page_info): return headers = ["Name", "Token", "Created / Updated", "Identifier"] rows = [] for entitlement in sorted(data, key=itemgetter("name")): rows.append( [ click.style( "%(name)s (%(type)s)" % { "name": click.style(entitlement["name"], fg="cyan"), "type": "user" if entitlement["user"] else "token", } ), click.style(entitlement["token"], fg="yellow"), click.style(entitlement["updated_at"], fg="blue"), click.style(entitlement["slug_perm"], fg="green"), ] ) if data: click.echo() utils.pretty_print_table(headers, rows) if not show_list_info: return click.echo() num_results = len(data) list_suffix = "entitlement%s" % ("s" if num_results != 1 else "") utils.pretty_print_list_info(num_results=num_results, suffix=list_suffix)
[ "def", "print_entitlements", "(", "opts", ",", "data", ",", "page_info", "=", "None", ",", "show_list_info", "=", "True", ")", ":", "if", "utils", ".", "maybe_print_as_json", "(", "opts", ",", "data", ",", "page_info", ")", ":", "return", "headers", "=", "[", "\"Name\"", ",", "\"Token\"", ",", "\"Created / Updated\"", ",", "\"Identifier\"", "]", "rows", "=", "[", "]", "for", "entitlement", "in", "sorted", "(", "data", ",", "key", "=", "itemgetter", "(", "\"name\"", ")", ")", ":", "rows", ".", "append", "(", "[", "click", ".", "style", "(", "\"%(name)s (%(type)s)\"", "%", "{", "\"name\"", ":", "click", ".", "style", "(", "entitlement", "[", "\"name\"", "]", ",", "fg", "=", "\"cyan\"", ")", ",", "\"type\"", ":", "\"user\"", "if", "entitlement", "[", "\"user\"", "]", "else", "\"token\"", ",", "}", ")", ",", "click", ".", "style", "(", "entitlement", "[", "\"token\"", "]", ",", "fg", "=", "\"yellow\"", ")", ",", "click", ".", "style", "(", "entitlement", "[", "\"updated_at\"", "]", ",", "fg", "=", "\"blue\"", ")", ",", "click", ".", "style", "(", "entitlement", "[", "\"slug_perm\"", "]", ",", "fg", "=", "\"green\"", ")", ",", "]", ")", "if", "data", ":", "click", ".", "echo", "(", ")", "utils", ".", "pretty_print_table", "(", "headers", ",", "rows", ")", "if", "not", "show_list_info", ":", "return", "click", ".", "echo", "(", ")", "num_results", "=", "len", "(", "data", ")", "list_suffix", "=", "\"entitlement%s\"", "%", "(", "\"s\"", "if", "num_results", "!=", "1", "else", "\"\"", ")", "utils", ".", "pretty_print_list_info", "(", "num_results", "=", "num_results", ",", "suffix", "=", "list_suffix", ")" ]
33.027778
25.194444
def _create_co_virtual_idp(self, context): """ Create a virtual IdP to represent the CO. :type context: The current context :rtype: saml.server.Server :param context: :return: An idp server """ co_name = self._get_co_name(context) context.decorate(self.KEY_CO_NAME, co_name) # Verify that we are configured for this CO. If the CO was not # configured most likely the endpoint used was not registered and # SATOSA core code threw an exception before getting here, but we # include this check in case later the regex used to register the # endpoints is relaxed. co_names = self._co_names_from_config() if co_name not in co_names: msg = "CO {} not in configured list of COs {}".format(co_name, co_names) satosa_logging(logger, logging.WARN, msg, context.state) raise SATOSAError(msg) # Make a copy of the general IdP config that we will then overwrite # with mappings between SAML bindings and CO specific URL endpoints, # and the entityID for the CO virtual IdP. backend_name = context.target_backend idp_config = copy.deepcopy(self.idp_config) idp_config = self._add_endpoints_to_config(idp_config, co_name, backend_name) idp_config = self._add_entity_id(idp_config, co_name) # Use the overwritten IdP config to generate a pysaml2 config object # and from it a server object. pysaml2_idp_config = IdPConfig().load(idp_config, metadata_construction=False) server = Server(config=pysaml2_idp_config) return server
[ "def", "_create_co_virtual_idp", "(", "self", ",", "context", ")", ":", "co_name", "=", "self", ".", "_get_co_name", "(", "context", ")", "context", ".", "decorate", "(", "self", ".", "KEY_CO_NAME", ",", "co_name", ")", "# Verify that we are configured for this CO. If the CO was not", "# configured most likely the endpoint used was not registered and", "# SATOSA core code threw an exception before getting here, but we", "# include this check in case later the regex used to register the", "# endpoints is relaxed.", "co_names", "=", "self", ".", "_co_names_from_config", "(", ")", "if", "co_name", "not", "in", "co_names", ":", "msg", "=", "\"CO {} not in configured list of COs {}\"", ".", "format", "(", "co_name", ",", "co_names", ")", "satosa_logging", "(", "logger", ",", "logging", ".", "WARN", ",", "msg", ",", "context", ".", "state", ")", "raise", "SATOSAError", "(", "msg", ")", "# Make a copy of the general IdP config that we will then overwrite", "# with mappings between SAML bindings and CO specific URL endpoints,", "# and the entityID for the CO virtual IdP.", "backend_name", "=", "context", ".", "target_backend", "idp_config", "=", "copy", ".", "deepcopy", "(", "self", ".", "idp_config", ")", "idp_config", "=", "self", ".", "_add_endpoints_to_config", "(", "idp_config", ",", "co_name", ",", "backend_name", ")", "idp_config", "=", "self", ".", "_add_entity_id", "(", "idp_config", ",", "co_name", ")", "# Use the overwritten IdP config to generate a pysaml2 config object", "# and from it a server object.", "pysaml2_idp_config", "=", "IdPConfig", "(", ")", ".", "load", "(", "idp_config", ",", "metadata_construction", "=", "False", ")", "server", "=", "Server", "(", "config", "=", "pysaml2_idp_config", ")", "return", "server" ]
43
20.813953
def parse_list_header(value): """Parse lists as described by RFC 2068 Section 2. In particular, parse comma-separated lists where the elements of the list may include quoted-strings. A quoted-string could contain a comma. A non-quoted string could have quotes in the middle. Quotes are removed automatically after parsing. It basically works like :func:`parse_set_header` just that items may appear multiple times and case sensitivity is preserved. The return value is a standard :class:`list`: >>> parse_list_header('token, "quoted value"') ['token', 'quoted value'] To create a header from the :class:`list` again, use the :func:`dump_header` function. :param value: a string with a list header. :return: :class:`list` :rtype: list """ result = [] for item in _parse_list_header(value): if item[:1] == item[-1:] == '"': item = unquote_header_value(item[1:-1]) result.append(item) return result
[ "def", "parse_list_header", "(", "value", ")", ":", "result", "=", "[", "]", "for", "item", "in", "_parse_list_header", "(", "value", ")", ":", "if", "item", "[", ":", "1", "]", "==", "item", "[", "-", "1", ":", "]", "==", "'\"'", ":", "item", "=", "unquote_header_value", "(", "item", "[", "1", ":", "-", "1", "]", ")", "result", ".", "append", "(", "item", ")", "return", "result" ]
33.793103
19.827586
def echo_error(root_resource, message): """Generate an error, but we get to set the error message.""" params = dict(message=message) return root_resource.get(ECHO_ERROR_PATH, params)
[ "def", "echo_error", "(", "root_resource", ",", "message", ")", ":", "params", "=", "dict", "(", "message", "=", "message", ")", "return", "root_resource", ".", "get", "(", "ECHO_ERROR_PATH", ",", "params", ")" ]
46.25
5
def compare_vm_configs(new_config, current_config): ''' Compares virtual machine current and new configuration, the current is the one which is deployed now, and the new is the target config. Returns the differences between the objects in a dictionary, the keys are the configuration parameter keys and the values are differences objects: either list or recursive difference new_config: New config dictionary with every available parameter current_config Currently deployed configuration ''' diffs = {} keys = set(new_config.keys()) # These values identify the virtual machine, comparison is unnecessary keys.discard('name') keys.discard('datacenter') keys.discard('datastore') for property_key in ('version', 'image'): if property_key in keys: single_value_diff = recursive_diff( {property_key: current_config[property_key]}, {property_key: new_config[property_key]}) if single_value_diff.diffs: diffs[property_key] = single_value_diff keys.discard(property_key) if 'cpu' in keys: keys.remove('cpu') cpu_diff = recursive_diff(current_config['cpu'], new_config['cpu']) if cpu_diff.diffs: diffs['cpu'] = cpu_diff if 'memory' in keys: keys.remove('memory') _convert_units([current_config['memory']]) _convert_units([new_config['memory']]) memory_diff = recursive_diff(current_config['memory'], new_config['memory']) if memory_diff.diffs: diffs['memory'] = memory_diff if 'advanced_configs' in keys: keys.remove('advanced_configs') key = 'advanced_configs' advanced_diff = recursive_diff(current_config[key], new_config[key]) if advanced_diff.diffs: diffs[key] = advanced_diff if 'disks' in keys: keys.remove('disks') _convert_units(current_config['disks']) _convert_units(new_config['disks']) disk_diffs = list_diff(current_config['disks'], new_config['disks'], 'address') # REMOVE UNSUPPORTED DIFFERENCES/CHANGES # If the disk already exist, the backing properties like eagerly scrub # and thin provisioning # cannot be updated, and should not be identified as differences 
disk_diffs.remove_diff(diff_key='eagerly_scrub') # Filename updates are not supported yet, on VSAN datastores the # backing.fileName points to a uid + the vmdk name disk_diffs.remove_diff(diff_key='filename') # The adapter name shouldn't be changed disk_diffs.remove_diff(diff_key='adapter') if disk_diffs.diffs: diffs['disks'] = disk_diffs if 'interfaces' in keys: keys.remove('interfaces') interface_diffs = list_diff(current_config['interfaces'], new_config['interfaces'], 'mac') # The adapter name shouldn't be changed interface_diffs.remove_diff(diff_key='adapter') if interface_diffs.diffs: diffs['interfaces'] = interface_diffs # For general items where the identification can be done by adapter for key in keys: if key not in current_config or key not in new_config: raise ValueError('A general device {0} configuration was ' 'not supplied or it was not retrieved from ' 'remote configuration'.format(key)) device_diffs = list_diff(current_config[key], new_config[key], 'adapter') if device_diffs.diffs: diffs[key] = device_diffs return diffs
[ "def", "compare_vm_configs", "(", "new_config", ",", "current_config", ")", ":", "diffs", "=", "{", "}", "keys", "=", "set", "(", "new_config", ".", "keys", "(", ")", ")", "# These values identify the virtual machine, comparison is unnecessary", "keys", ".", "discard", "(", "'name'", ")", "keys", ".", "discard", "(", "'datacenter'", ")", "keys", ".", "discard", "(", "'datastore'", ")", "for", "property_key", "in", "(", "'version'", ",", "'image'", ")", ":", "if", "property_key", "in", "keys", ":", "single_value_diff", "=", "recursive_diff", "(", "{", "property_key", ":", "current_config", "[", "property_key", "]", "}", ",", "{", "property_key", ":", "new_config", "[", "property_key", "]", "}", ")", "if", "single_value_diff", ".", "diffs", ":", "diffs", "[", "property_key", "]", "=", "single_value_diff", "keys", ".", "discard", "(", "property_key", ")", "if", "'cpu'", "in", "keys", ":", "keys", ".", "remove", "(", "'cpu'", ")", "cpu_diff", "=", "recursive_diff", "(", "current_config", "[", "'cpu'", "]", ",", "new_config", "[", "'cpu'", "]", ")", "if", "cpu_diff", ".", "diffs", ":", "diffs", "[", "'cpu'", "]", "=", "cpu_diff", "if", "'memory'", "in", "keys", ":", "keys", ".", "remove", "(", "'memory'", ")", "_convert_units", "(", "[", "current_config", "[", "'memory'", "]", "]", ")", "_convert_units", "(", "[", "new_config", "[", "'memory'", "]", "]", ")", "memory_diff", "=", "recursive_diff", "(", "current_config", "[", "'memory'", "]", ",", "new_config", "[", "'memory'", "]", ")", "if", "memory_diff", ".", "diffs", ":", "diffs", "[", "'memory'", "]", "=", "memory_diff", "if", "'advanced_configs'", "in", "keys", ":", "keys", ".", "remove", "(", "'advanced_configs'", ")", "key", "=", "'advanced_configs'", "advanced_diff", "=", "recursive_diff", "(", "current_config", "[", "key", "]", ",", "new_config", "[", "key", "]", ")", "if", "advanced_diff", ".", "diffs", ":", "diffs", "[", "key", "]", "=", "advanced_diff", "if", "'disks'", "in", "keys", ":", "keys", ".", 
"remove", "(", "'disks'", ")", "_convert_units", "(", "current_config", "[", "'disks'", "]", ")", "_convert_units", "(", "new_config", "[", "'disks'", "]", ")", "disk_diffs", "=", "list_diff", "(", "current_config", "[", "'disks'", "]", ",", "new_config", "[", "'disks'", "]", ",", "'address'", ")", "# REMOVE UNSUPPORTED DIFFERENCES/CHANGES", "# If the disk already exist, the backing properties like eagerly scrub", "# and thin provisioning", "# cannot be updated, and should not be identified as differences", "disk_diffs", ".", "remove_diff", "(", "diff_key", "=", "'eagerly_scrub'", ")", "# Filename updates are not supported yet, on VSAN datastores the", "# backing.fileName points to a uid + the vmdk name", "disk_diffs", ".", "remove_diff", "(", "diff_key", "=", "'filename'", ")", "# The adapter name shouldn't be changed", "disk_diffs", ".", "remove_diff", "(", "diff_key", "=", "'adapter'", ")", "if", "disk_diffs", ".", "diffs", ":", "diffs", "[", "'disks'", "]", "=", "disk_diffs", "if", "'interfaces'", "in", "keys", ":", "keys", ".", "remove", "(", "'interfaces'", ")", "interface_diffs", "=", "list_diff", "(", "current_config", "[", "'interfaces'", "]", ",", "new_config", "[", "'interfaces'", "]", ",", "'mac'", ")", "# The adapter name shouldn't be changed", "interface_diffs", ".", "remove_diff", "(", "diff_key", "=", "'adapter'", ")", "if", "interface_diffs", ".", "diffs", ":", "diffs", "[", "'interfaces'", "]", "=", "interface_diffs", "# For general items where the identification can be done by adapter", "for", "key", "in", "keys", ":", "if", "key", "not", "in", "current_config", "or", "key", "not", "in", "new_config", ":", "raise", "ValueError", "(", "'A general device {0} configuration was '", "'not supplied or it was not retrieved from '", "'remote configuration'", ".", "format", "(", "key", ")", ")", "device_diffs", "=", "list_diff", "(", "current_config", "[", "key", "]", ",", "new_config", "[", "key", "]", ",", "'adapter'", ")", "if", "device_diffs", ".", 
"diffs", ":", "diffs", "[", "key", "]", "=", "device_diffs", "return", "diffs" ]
40.06383
18.191489
def merge_upwards_if_smaller_than(self, small_size, a_or_u): """After prune_if_smaller_than is run, we may still have excess nodes. For example, with a small_size of 609710690: 7 /* 28815419 /data/* 32 /data/srv/* 925746 /data/srv/docker.bak/* 12 /data/srv/docker.bak/shared/* 682860348 /data/srv/docker.bak/shared/standalone/* This is reduced to: 31147487 /* 682860355 /data/srv/docker.bak/shared/standalone/* Run this only when done with the scanning.""" # Assert that we're not messing things up. prev_app_size = self.app_size() prev_use_size = self.use_size() small_nodes = self._find_small_nodes(small_size, (), a_or_u) for node, parents in small_nodes: # Check immediate grandparent for isdir=None and if it # exists, move this there. The isdir=None node is always # last. if len(parents) >= 2: tail = parents[-2]._nodes[-1] if tail._isdir is None: assert tail._app_size is not None, tail tail._add_size(node.app_size(), node.use_size()) parents[-1]._nodes.remove(node) assert len(parents[-1]._nodes) # The actual assertion. assert prev_app_size == self.app_size(), ( prev_app_size, self.app_size()) assert prev_use_size == self.use_size(), ( prev_use_size, self.use_size())
[ "def", "merge_upwards_if_smaller_than", "(", "self", ",", "small_size", ",", "a_or_u", ")", ":", "# Assert that we're not messing things up.", "prev_app_size", "=", "self", ".", "app_size", "(", ")", "prev_use_size", "=", "self", ".", "use_size", "(", ")", "small_nodes", "=", "self", ".", "_find_small_nodes", "(", "small_size", ",", "(", ")", ",", "a_or_u", ")", "for", "node", ",", "parents", "in", "small_nodes", ":", "# Check immediate grandparent for isdir=None and if it", "# exists, move this there. The isdir=None node is always", "# last.", "if", "len", "(", "parents", ")", ">=", "2", ":", "tail", "=", "parents", "[", "-", "2", "]", ".", "_nodes", "[", "-", "1", "]", "if", "tail", ".", "_isdir", "is", "None", ":", "assert", "tail", ".", "_app_size", "is", "not", "None", ",", "tail", "tail", ".", "_add_size", "(", "node", ".", "app_size", "(", ")", ",", "node", ".", "use_size", "(", ")", ")", "parents", "[", "-", "1", "]", ".", "_nodes", ".", "remove", "(", "node", ")", "assert", "len", "(", "parents", "[", "-", "1", "]", ".", "_nodes", ")", "# The actual assertion.", "assert", "prev_app_size", "==", "self", ".", "app_size", "(", ")", ",", "(", "prev_app_size", ",", "self", ".", "app_size", "(", ")", ")", "assert", "prev_use_size", "==", "self", ".", "use_size", "(", ")", ",", "(", "prev_use_size", ",", "self", ".", "use_size", "(", ")", ")" ]
37.47619
17.428571
def expire(self, current_time=None): """Expire any old entries `current_time` Optional time to be used to clean up queue (can be used in unit tests) """ if not self._queue: return if current_time is None: current_time = time() while self._queue: # Get top most item top = self._queue[0] # Early exit if item was not promoted and its expiration time # is greater than now. if top.promoted is None and top.expiry_date > current_time: break # Pop item from the stack top = heappop(self._queue) need_reschedule = (top.promoted is not None and top.promoted > current_time) # Give chance to reschedule if not need_reschedule: top.promoted = None top.on_delete(False) need_reschedule = (top.promoted is not None and top.promoted > current_time) # If item is promoted and expiration time somewhere in future # just reschedule it if need_reschedule: top.expiry_date = top.promoted top.promoted = None heappush(self._queue, top) else: del self._items[top.session_id]
[ "def", "expire", "(", "self", ",", "current_time", "=", "None", ")", ":", "if", "not", "self", ".", "_queue", ":", "return", "if", "current_time", "is", "None", ":", "current_time", "=", "time", "(", ")", "while", "self", ".", "_queue", ":", "# Get top most item", "top", "=", "self", ".", "_queue", "[", "0", "]", "# Early exit if item was not promoted and its expiration time", "# is greater than now.", "if", "top", ".", "promoted", "is", "None", "and", "top", ".", "expiry_date", ">", "current_time", ":", "break", "# Pop item from the stack", "top", "=", "heappop", "(", "self", ".", "_queue", ")", "need_reschedule", "=", "(", "top", ".", "promoted", "is", "not", "None", "and", "top", ".", "promoted", ">", "current_time", ")", "# Give chance to reschedule", "if", "not", "need_reschedule", ":", "top", ".", "promoted", "=", "None", "top", ".", "on_delete", "(", "False", ")", "need_reschedule", "=", "(", "top", ".", "promoted", "is", "not", "None", "and", "top", ".", "promoted", ">", "current_time", ")", "# If item is promoted and expiration time somewhere in future", "# just reschedule it", "if", "need_reschedule", ":", "top", ".", "expiry_date", "=", "top", ".", "promoted", "top", ".", "promoted", "=", "None", "heappush", "(", "self", ".", "_queue", ",", "top", ")", "else", ":", "del", "self", ".", "_items", "[", "top", ".", "session_id", "]" ]
31.837209
18.395349
def enforce_filetype_file(form, field): '''Only allowed domains in resource.url when filetype is file''' if form._fields.get('filetype').data != RESOURCE_FILETYPE_FILE: return domain = urlparse(field.data).netloc allowed_domains = current_app.config['RESOURCES_FILE_ALLOWED_DOMAINS'] allowed_domains += [current_app.config.get('SERVER_NAME')] if current_app.config.get('CDN_DOMAIN'): allowed_domains.append(current_app.config['CDN_DOMAIN']) if '*' in allowed_domains: return if domain and domain not in allowed_domains: message = _('Domain "{domain}" not allowed for filetype "{filetype}"') raise validators.ValidationError(message.format( domain=domain, filetype=RESOURCE_FILETYPE_FILE ))
[ "def", "enforce_filetype_file", "(", "form", ",", "field", ")", ":", "if", "form", ".", "_fields", ".", "get", "(", "'filetype'", ")", ".", "data", "!=", "RESOURCE_FILETYPE_FILE", ":", "return", "domain", "=", "urlparse", "(", "field", ".", "data", ")", ".", "netloc", "allowed_domains", "=", "current_app", ".", "config", "[", "'RESOURCES_FILE_ALLOWED_DOMAINS'", "]", "allowed_domains", "+=", "[", "current_app", ".", "config", ".", "get", "(", "'SERVER_NAME'", ")", "]", "if", "current_app", ".", "config", ".", "get", "(", "'CDN_DOMAIN'", ")", ":", "allowed_domains", ".", "append", "(", "current_app", ".", "config", "[", "'CDN_DOMAIN'", "]", ")", "if", "'*'", "in", "allowed_domains", ":", "return", "if", "domain", "and", "domain", "not", "in", "allowed_domains", ":", "message", "=", "_", "(", "'Domain \"{domain}\" not allowed for filetype \"{filetype}\"'", ")", "raise", "validators", ".", "ValidationError", "(", "message", ".", "format", "(", "domain", "=", "domain", ",", "filetype", "=", "RESOURCE_FILETYPE_FILE", ")", ")" ]
47.875
19.5
def clean(file_, imports): """Remove modules that aren't imported in project from file.""" modules_not_imported = compare_modules(file_, imports) re_remove = re.compile("|".join(modules_not_imported)) to_write = [] try: f = open_func(file_, "r+") except OSError: logging.error("Failed on file: {}".format(file_)) raise else: for i in f.readlines(): if re_remove.match(i) is None: to_write.append(i) f.seek(0) f.truncate() for i in to_write: f.write(i) finally: f.close() logging.info("Successfully cleaned up requirements in " + file_)
[ "def", "clean", "(", "file_", ",", "imports", ")", ":", "modules_not_imported", "=", "compare_modules", "(", "file_", ",", "imports", ")", "re_remove", "=", "re", ".", "compile", "(", "\"|\"", ".", "join", "(", "modules_not_imported", ")", ")", "to_write", "=", "[", "]", "try", ":", "f", "=", "open_func", "(", "file_", ",", "\"r+\"", ")", "except", "OSError", ":", "logging", ".", "error", "(", "\"Failed on file: {}\"", ".", "format", "(", "file_", ")", ")", "raise", "else", ":", "for", "i", "in", "f", ".", "readlines", "(", ")", ":", "if", "re_remove", ".", "match", "(", "i", ")", "is", "None", ":", "to_write", ".", "append", "(", "i", ")", "f", ".", "seek", "(", "0", ")", "f", ".", "truncate", "(", ")", "for", "i", "in", "to_write", ":", "f", ".", "write", "(", "i", ")", "finally", ":", "f", ".", "close", "(", ")", "logging", ".", "info", "(", "\"Successfully cleaned up requirements in \"", "+", "file_", ")" ]
27.291667
20.75
def find_by_localpath(self, path): """ Returns the repo with the specified local <path> """ # note that the paths in self.jsondata were already _homepath2real()'d # in the class' __init__() resolved = _homepath2real(path) for row in self.jsondata: if resolved == os.path.realpath(row["localpath"]): return self._infofromdict(row)
[ "def", "find_by_localpath", "(", "self", ",", "path", ")", ":", "# note that the paths in self.jsondata were already _homepath2real()'d", "# in the class' __init__()", "resolved", "=", "_homepath2real", "(", "path", ")", "for", "row", "in", "self", ".", "jsondata", ":", "if", "resolved", "==", "os", ".", "path", ".", "realpath", "(", "row", "[", "\"localpath\"", "]", ")", ":", "return", "self", ".", "_infofromdict", "(", "row", ")" ]
40.4
10.2
def get_new_working_set(self): """ Get a new list of IPs to work with from the queue. This returns None if there is no update. Read all the messages from the queue on which we get the IP addresses that we have to monitor. We will ignore all of them, except the last one, since maybe we received two updates in a row, but each update is a full state, so only the last one matters. Raises the StopReceived exception if the stop signal ("None") was received on the notification queue. """ new_list_of_ips = None while True: try: new_list_of_ips = self.q_monitor_ips.get_nowait() self.q_monitor_ips.task_done() if type(new_list_of_ips) is MonitorPluginStopSignal: raise StopReceived() except Queue.Empty: # No more messages, all done reading monitor list for now break if new_list_of_ips is not None: CURRENT_STATE.working_set = new_list_of_ips return new_list_of_ips
[ "def", "get_new_working_set", "(", "self", ")", ":", "new_list_of_ips", "=", "None", "while", "True", ":", "try", ":", "new_list_of_ips", "=", "self", ".", "q_monitor_ips", ".", "get_nowait", "(", ")", "self", ".", "q_monitor_ips", ".", "task_done", "(", ")", "if", "type", "(", "new_list_of_ips", ")", "is", "MonitorPluginStopSignal", ":", "raise", "StopReceived", "(", ")", "except", "Queue", ".", "Empty", ":", "# No more messages, all done reading monitor list for now", "break", "if", "new_list_of_ips", "is", "not", "None", ":", "CURRENT_STATE", ".", "working_set", "=", "new_list_of_ips", "return", "new_list_of_ips" ]
38.857143
19.785714
def qualified_name(self): '''return the fully qualified name (`<module>.<struct>#<field>`)''' return '{0}.{1}#{2}'.format(self.module.name, self.struct.name, self.name)
[ "def", "qualified_name", "(", "self", ")", ":", "return", "'{0}.{1}#{2}'", ".", "format", "(", "self", ".", "module", ".", "name", ",", "self", ".", "struct", ".", "name", ",", "self", ".", "name", ")" ]
60.666667
30.666667
def _relay_message(self, message): """ Relay messages from the forum on the server to the client represented by this actor. """ info("relaying message: {message}") if not message.was_sent_by(self._id_factory): self.pipe.send(message) self.pipe.deliver()
[ "def", "_relay_message", "(", "self", ",", "message", ")", ":", "info", "(", "\"relaying message: {message}\"", ")", "if", "not", "message", ".", "was_sent_by", "(", "self", ".", "_id_factory", ")", ":", "self", ".", "pipe", ".", "send", "(", "message", ")", "self", ".", "pipe", ".", "deliver", "(", ")" ]
31.8
13.2
def resp_set_wififirmware(self, resp): """Default callback for get_wififirmware """ if resp: self.wifi_firmware_version = float(str(str(resp.version >> 16) + "." + str(resp.version & 0xff))) self.wifi_firmware_build_timestamp = resp.build
[ "def", "resp_set_wififirmware", "(", "self", ",", "resp", ")", ":", "if", "resp", ":", "self", ".", "wifi_firmware_version", "=", "float", "(", "str", "(", "str", "(", "resp", ".", "version", ">>", "16", ")", "+", "\".\"", "+", "str", "(", "resp", ".", "version", "&", "0xff", ")", ")", ")", "self", ".", "wifi_firmware_build_timestamp", "=", "resp", ".", "build" ]
46.833333
19
def calculate_sync_order(oscillator_phases): """! @brief Calculates level of global synchronization (order parameter) for input phases. @details This parameter is tend 1.0 when the oscillatory network close to global synchronization and it tend to 0.0 when desynchronization is observed in the network. @param[in] oscillator_phases (list): List of oscillator phases that are used for level of global synchronization. @return (double) Level of global synchronization (order parameter). @see calculate_order_parameter() """ exp_amount = 0.0; average_phase = 0.0; for phase in oscillator_phases: exp_amount += math.expm1( abs(1j * phase) ); average_phase += phase; exp_amount /= len(oscillator_phases); average_phase = math.expm1( abs(1j * (average_phase / len(oscillator_phases))) ); return abs(average_phase) / abs(exp_amount);
[ "def", "calculate_sync_order", "(", "oscillator_phases", ")", ":", "exp_amount", "=", "0.0", "average_phase", "=", "0.0", "for", "phase", "in", "oscillator_phases", ":", "exp_amount", "+=", "math", ".", "expm1", "(", "abs", "(", "1j", "*", "phase", ")", ")", "average_phase", "+=", "phase", "exp_amount", "/=", "len", "(", "oscillator_phases", ")", "average_phase", "=", "math", ".", "expm1", "(", "abs", "(", "1j", "*", "(", "average_phase", "/", "len", "(", "oscillator_phases", ")", ")", ")", ")", "return", "abs", "(", "average_phase", ")", "/", "abs", "(", "exp_amount", ")" ]
41.48
26.48
def cast(cls, value_type, value, visitor=None, **kwargs): """Cast is for visitors where you are visiting some random data structure (perhaps returned by a previous ``VisitorPattern.visit()`` operation), and you want to convert back to the value type. This function also takes positional arguments: ``value_type=``\ *RecordType* The type to cast to. ``value=``\ *object* ``visitor=``\ *Visitor.Options* Specifies the visitor options, which customizes the descent and reduction. """ if visitor is None: visitor = cls.Visitor( cls.grok, cls.reverse, cls.collect, cls.produce, **kwargs) return cls.map(visitor, value, value_type)
[ "def", "cast", "(", "cls", ",", "value_type", ",", "value", ",", "visitor", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "visitor", "is", "None", ":", "visitor", "=", "cls", ".", "Visitor", "(", "cls", ".", "grok", ",", "cls", ".", "reverse", ",", "cls", ".", "collect", ",", "cls", ".", "produce", ",", "*", "*", "kwargs", ")", "return", "cls", ".", "map", "(", "visitor", ",", "value", ",", "value_type", ")" ]
36.045455
19.227273
def secure_required(view_func): """ Decorator to switch an url from http to https. If a view is accessed through http and this decorator is applied to that view, than it will return a permanent redirect to the secure (https) version of the same view. The decorator also must check that ``USERENA_USE_HTTPS`` is enabled. If disabled, it should not redirect to https because the project doesn't support it. """ def _wrapped_view(request, *args, **kwargs): if not request.is_secure(): if getattr(settings, 'USERENA_USE_HTTPS', userena_settings.DEFAULT_USERENA_USE_HTTPS): request_url = request.build_absolute_uri(request.get_full_path()) secure_url = request_url.replace('http://', 'https://') return HttpResponsePermanentRedirect(secure_url) return view_func(request, *args, **kwargs) return wraps(view_func, assigned=available_attrs(view_func))(_wrapped_view)
[ "def", "secure_required", "(", "view_func", ")", ":", "def", "_wrapped_view", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "request", ".", "is_secure", "(", ")", ":", "if", "getattr", "(", "settings", ",", "'USERENA_USE_HTTPS'", ",", "userena_settings", ".", "DEFAULT_USERENA_USE_HTTPS", ")", ":", "request_url", "=", "request", ".", "build_absolute_uri", "(", "request", ".", "get_full_path", "(", ")", ")", "secure_url", "=", "request_url", ".", "replace", "(", "'http://'", ",", "'https://'", ")", "return", "HttpResponsePermanentRedirect", "(", "secure_url", ")", "return", "view_func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wraps", "(", "view_func", ",", "assigned", "=", "available_attrs", "(", "view_func", ")", ")", "(", "_wrapped_view", ")" ]
45.761905
25.095238
def fetch(self): """ Fetch a ExecutionInstance :returns: Fetched ExecutionInstance :rtype: twilio.rest.studio.v1.flow.execution.ExecutionInstance """ params = values.of({}) payload = self._version.fetch( 'GET', self._uri, params=params, ) return ExecutionInstance( self._version, payload, flow_sid=self._solution['flow_sid'], sid=self._solution['sid'], )
[ "def", "fetch", "(", "self", ")", ":", "params", "=", "values", ".", "of", "(", "{", "}", ")", "payload", "=", "self", ".", "_version", ".", "fetch", "(", "'GET'", ",", "self", ".", "_uri", ",", "params", "=", "params", ",", ")", "return", "ExecutionInstance", "(", "self", ".", "_version", ",", "payload", ",", "flow_sid", "=", "self", ".", "_solution", "[", "'flow_sid'", "]", ",", "sid", "=", "self", ".", "_solution", "[", "'sid'", "]", ",", ")" ]
23.857143
17.285714
def _path_factory(check): """Create a function that checks paths.""" @functools.wraps(check) def validator(paths): if isinstance(paths, str): check(paths) elif isinstance(paths, collections.Sequence): for path in paths: check(path) else: raise Exception('expected either basestr or sequenc of basstr') return validator
[ "def", "_path_factory", "(", "check", ")", ":", "@", "functools", ".", "wraps", "(", "check", ")", "def", "validator", "(", "paths", ")", ":", "if", "isinstance", "(", "paths", ",", "str", ")", ":", "check", "(", "paths", ")", "elif", "isinstance", "(", "paths", ",", "collections", ".", "Sequence", ")", ":", "for", "path", "in", "paths", ":", "check", "(", "path", ")", "else", ":", "raise", "Exception", "(", "'expected either basestr or sequenc of basstr'", ")", "return", "validator" ]
28.5
18.785714
def get_meta(self, name, meta_key=None): '''Get the ``content`` attribute of a meta tag ``name``. For example:: head.get_meta('decription') returns the ``content`` attribute of the meta tag with attribute ``name`` equal to ``description`` or ``None``. If a different meta key needs to be matched, it can be specified via the ``meta_key`` parameter:: head.get_meta('og:title', meta_key='property') ''' meta_key = meta_key or 'name' for child in self.meta._children: if isinstance(child, Html) and child.attr(meta_key) == name: return child.attr('content')
[ "def", "get_meta", "(", "self", ",", "name", ",", "meta_key", "=", "None", ")", ":", "meta_key", "=", "meta_key", "or", "'name'", "for", "child", "in", "self", ".", "meta", ".", "_children", ":", "if", "isinstance", "(", "child", ",", "Html", ")", "and", "child", ".", "attr", "(", "meta_key", ")", "==", "name", ":", "return", "child", ".", "attr", "(", "'content'", ")" ]
36.944444
20.944444
def _update(self, rect, delta_y, force_update_margins=False): """ Updates panels """ helper = TextHelper(self.editor) if not self: return for zones_id, zone in self._panels.items(): if zones_id == Panel.Position.TOP or \ zones_id == Panel.Position.BOTTOM: continue panels = list(zone.values()) for panel in panels: if panel.scrollable and delta_y: panel.scroll(0, delta_y) line, col = helper.cursor_position() oline, ocol = self._cached_cursor_pos if line != oline or col != ocol or panel.scrollable: panel.update(0, rect.y(), panel.width(), rect.height()) self._cached_cursor_pos = helper.cursor_position() if (rect.contains(self.editor.viewport().rect()) or force_update_margins): self._update_viewport_margins()
[ "def", "_update", "(", "self", ",", "rect", ",", "delta_y", ",", "force_update_margins", "=", "False", ")", ":", "helper", "=", "TextHelper", "(", "self", ".", "editor", ")", "if", "not", "self", ":", "return", "for", "zones_id", ",", "zone", "in", "self", ".", "_panels", ".", "items", "(", ")", ":", "if", "zones_id", "==", "Panel", ".", "Position", ".", "TOP", "or", "zones_id", "==", "Panel", ".", "Position", ".", "BOTTOM", ":", "continue", "panels", "=", "list", "(", "zone", ".", "values", "(", ")", ")", "for", "panel", "in", "panels", ":", "if", "panel", ".", "scrollable", "and", "delta_y", ":", "panel", ".", "scroll", "(", "0", ",", "delta_y", ")", "line", ",", "col", "=", "helper", ".", "cursor_position", "(", ")", "oline", ",", "ocol", "=", "self", ".", "_cached_cursor_pos", "if", "line", "!=", "oline", "or", "col", "!=", "ocol", "or", "panel", ".", "scrollable", ":", "panel", ".", "update", "(", "0", ",", "rect", ".", "y", "(", ")", ",", "panel", ".", "width", "(", ")", ",", "rect", ".", "height", "(", ")", ")", "self", ".", "_cached_cursor_pos", "=", "helper", ".", "cursor_position", "(", ")", "if", "(", "rect", ".", "contains", "(", "self", ".", "editor", ".", "viewport", "(", ")", ".", "rect", "(", ")", ")", "or", "force_update_margins", ")", ":", "self", ".", "_update_viewport_margins", "(", ")" ]
45.761905
12.714286
def marshal_with(schema, code='default', description='', inherit=None, apply=None): """Marshal the return value of the decorated view function using the specified schema. Usage: .. code-block:: python class PetSchema(Schema): class Meta: fields = ('name', 'category') @marshal_with(PetSchema) def get_pet(pet_id): return Pet.query.filter(Pet.id == pet_id).one() :param schema: :class:`Schema <marshmallow.Schema>` class or instance, or `None` :param code: Optional HTTP response code :param description: Optional response description :param inherit: Inherit schemas from parent classes :param apply: Marshal response with specified schema """ def wrapper(func): options = { code: { 'schema': schema or {}, 'description': description, }, } annotate(func, 'schemas', [options], inherit=inherit, apply=apply) return activate(func) return wrapper
[ "def", "marshal_with", "(", "schema", ",", "code", "=", "'default'", ",", "description", "=", "''", ",", "inherit", "=", "None", ",", "apply", "=", "None", ")", ":", "def", "wrapper", "(", "func", ")", ":", "options", "=", "{", "code", ":", "{", "'schema'", ":", "schema", "or", "{", "}", ",", "'description'", ":", "description", ",", "}", ",", "}", "annotate", "(", "func", ",", "'schemas'", ",", "[", "options", "]", ",", "inherit", "=", "inherit", ",", "apply", "=", "apply", ")", "return", "activate", "(", "func", ")", "return", "wrapper" ]
31.75
20.46875
def isInstrumentAllowed(self, instrument): """Checks if the specified instrument can be set for this analysis, either if the instrument was assigned directly (by using "Allows instrument entry of results") or indirectly via Method ("Allows manual entry of results") in Analysis Service Edit view. Param instrument can be either an uid or an object :param instrument: string,Instrument :return: True if the assignment of the passed in instrument is allowed :rtype: bool """ if isinstance(instrument, str): uid = instrument else: uid = instrument.UID() return uid in self.getAllowedInstrumentUIDs()
[ "def", "isInstrumentAllowed", "(", "self", ",", "instrument", ")", ":", "if", "isinstance", "(", "instrument", ",", "str", ")", ":", "uid", "=", "instrument", "else", ":", "uid", "=", "instrument", ".", "UID", "(", ")", "return", "uid", "in", "self", ".", "getAllowedInstrumentUIDs", "(", ")" ]
43.875
16.75
def safe_execute_script(self, script): """ When executing a script that contains a jQuery command, it's important that the jQuery library has been loaded first. This method will load jQuery if it wasn't already loaded. """ try: self.execute_script(script) except Exception: # The likely reason this fails is because: "jQuery is not defined" self.activate_jquery() # It's a good thing we can define it here self.execute_script(script)
[ "def", "safe_execute_script", "(", "self", ",", "script", ")", ":", "try", ":", "self", ".", "execute_script", "(", "script", ")", "except", "Exception", ":", "# The likely reason this fails is because: \"jQuery is not defined\"", "self", ".", "activate_jquery", "(", ")", "# It's a good thing we can define it here", "self", ".", "execute_script", "(", "script", ")" ]
52.1
15.5
def remove_router_interface(self, context, router_info): """Removes previously configured interface from router on Arista HW. This deals with both IPv6 and IPv4 configurations. """ if router_info: router_name = self._arista_router_name(router_info['id'], router_info['name']) mlag_peer_failed = False for s in self._servers: try: self.delete_interface_from_router(router_info['seg_id'], router_name, s) if self._mlag_configured: mlag_peer_failed = False except Exception: if self._mlag_configured and not mlag_peer_failed: mlag_peer_failed = True else: msg = (_('Failed to add interface to router ' '%s on EOS') % router_name) LOG.exception(msg) raise arista_exc.AristaServicePluginRpcError(msg=msg)
[ "def", "remove_router_interface", "(", "self", ",", "context", ",", "router_info", ")", ":", "if", "router_info", ":", "router_name", "=", "self", ".", "_arista_router_name", "(", "router_info", "[", "'id'", "]", ",", "router_info", "[", "'name'", "]", ")", "mlag_peer_failed", "=", "False", "for", "s", "in", "self", ".", "_servers", ":", "try", ":", "self", ".", "delete_interface_from_router", "(", "router_info", "[", "'seg_id'", "]", ",", "router_name", ",", "s", ")", "if", "self", ".", "_mlag_configured", ":", "mlag_peer_failed", "=", "False", "except", "Exception", ":", "if", "self", ".", "_mlag_configured", "and", "not", "mlag_peer_failed", ":", "mlag_peer_failed", "=", "True", "else", ":", "msg", "=", "(", "_", "(", "'Failed to add interface to router '", "'%s on EOS'", ")", "%", "router_name", ")", "LOG", ".", "exception", "(", "msg", ")", "raise", "arista_exc", ".", "AristaServicePluginRpcError", "(", "msg", "=", "msg", ")" ]
48.521739
17.608696
def search_edges_with_bel(self, bel: str) -> List[Edge]: """Search edges with given BEL. :param bel: A BEL string to use as a search """ return self.session.query(Edge).filter(Edge.bel.like(bel))
[ "def", "search_edges_with_bel", "(", "self", ",", "bel", ":", "str", ")", "->", "List", "[", "Edge", "]", ":", "return", "self", ".", "session", ".", "query", "(", "Edge", ")", ".", "filter", "(", "Edge", ".", "bel", ".", "like", "(", "bel", ")", ")" ]
37.166667
15.5
def has_false(self, e, extra_constraints=(), solver=None, model_callback=None): #pylint:disable=unused-argument """ Should return False if `e` can possibly be False. :param e: The AST. :param extra_constraints: Extra constraints (as ASTs) to add to the solver for this solve. :param solver: A solver, for backends that require it. :param model_callback: a function that will be executed with recovered models (if any) :return: A boolean. """ #if self._solver_required and solver is None: # raise BackendError("%s requires a solver for evaluation" % self.__class__.__name__) return self._has_false(self.convert(e), extra_constraints=extra_constraints, solver=solver, model_callback=model_callback)
[ "def", "has_false", "(", "self", ",", "e", ",", "extra_constraints", "=", "(", ")", ",", "solver", "=", "None", ",", "model_callback", "=", "None", ")", ":", "#pylint:disable=unused-argument", "#if self._solver_required and solver is None:", "# raise BackendError(\"%s requires a solver for evaluation\" % self.__class__.__name__)", "return", "self", ".", "_has_false", "(", "self", ".", "convert", "(", "e", ")", ",", "extra_constraints", "=", "extra_constraints", ",", "solver", "=", "solver", ",", "model_callback", "=", "model_callback", ")" ]
55.4
35.266667
def _config_params(base_config, assoc_files, region, out_file, items): """Add parameters based on configuration variables, associated files and genomic regions. """ params = [] dbsnp = assoc_files.get("dbsnp") if dbsnp: params += ["--dbsnp", dbsnp] cosmic = assoc_files.get("cosmic") if cosmic: params += ["--cosmic", cosmic] variant_regions = bedutils.population_variant_regions(items) region = subset_variant_regions(variant_regions, region, out_file, items) if region: params += ["-L", bamprep.region_to_gatk(region), "--interval_set_rule", "INTERSECTION"] # set low frequency calling parameter if adjusted # to set other MuTect parameters on contamination, pass options to resources for mutect # --fraction_contamination --minimum_normal_allele_fraction min_af = tz.get_in(["algorithm", "min_allele_fraction"], base_config) if min_af: params += ["--minimum_mutation_cell_fraction", "%.2f" % (min_af / 100.0)] resources = config_utils.get_resources("mutect", base_config) if resources.get("options") is not None: params += [str(x) for x in resources.get("options", [])] # Output quality scores if "--enable_qscore_output" not in params: params.append("--enable_qscore_output") # drf not currently supported in MuTect to turn off duplicateread filter # params += gatk.standard_cl_params(items) return params
[ "def", "_config_params", "(", "base_config", ",", "assoc_files", ",", "region", ",", "out_file", ",", "items", ")", ":", "params", "=", "[", "]", "dbsnp", "=", "assoc_files", ".", "get", "(", "\"dbsnp\"", ")", "if", "dbsnp", ":", "params", "+=", "[", "\"--dbsnp\"", ",", "dbsnp", "]", "cosmic", "=", "assoc_files", ".", "get", "(", "\"cosmic\"", ")", "if", "cosmic", ":", "params", "+=", "[", "\"--cosmic\"", ",", "cosmic", "]", "variant_regions", "=", "bedutils", ".", "population_variant_regions", "(", "items", ")", "region", "=", "subset_variant_regions", "(", "variant_regions", ",", "region", ",", "out_file", ",", "items", ")", "if", "region", ":", "params", "+=", "[", "\"-L\"", ",", "bamprep", ".", "region_to_gatk", "(", "region", ")", ",", "\"--interval_set_rule\"", ",", "\"INTERSECTION\"", "]", "# set low frequency calling parameter if adjusted", "# to set other MuTect parameters on contamination, pass options to resources for mutect", "# --fraction_contamination --minimum_normal_allele_fraction", "min_af", "=", "tz", ".", "get_in", "(", "[", "\"algorithm\"", ",", "\"min_allele_fraction\"", "]", ",", "base_config", ")", "if", "min_af", ":", "params", "+=", "[", "\"--minimum_mutation_cell_fraction\"", ",", "\"%.2f\"", "%", "(", "min_af", "/", "100.0", ")", "]", "resources", "=", "config_utils", ".", "get_resources", "(", "\"mutect\"", ",", "base_config", ")", "if", "resources", ".", "get", "(", "\"options\"", ")", "is", "not", "None", ":", "params", "+=", "[", "str", "(", "x", ")", "for", "x", "in", "resources", ".", "get", "(", "\"options\"", ",", "[", "]", ")", "]", "# Output quality scores", "if", "\"--enable_qscore_output\"", "not", "in", "params", ":", "params", ".", "append", "(", "\"--enable_qscore_output\"", ")", "# drf not currently supported in MuTect to turn off duplicateread filter", "# params += gatk.standard_cl_params(items)", "return", "params" ]
47.833333
19.433333
def render(pass_info, saltenv='base', sls='', argline='', **kwargs): ''' Fetch secret from pass based on pass_path ''' try: _get_pass_exec() except SaltRenderError: raise # Make sure environment variable HOME is set, since Pass looks for the # password-store under ~/.password-store. os.environ['HOME'] = expanduser('~') return _decrypt_object(pass_info)
[ "def", "render", "(", "pass_info", ",", "saltenv", "=", "'base'", ",", "sls", "=", "''", ",", "argline", "=", "''", ",", "*", "*", "kwargs", ")", ":", "try", ":", "_get_pass_exec", "(", ")", "except", "SaltRenderError", ":", "raise", "# Make sure environment variable HOME is set, since Pass looks for the", "# password-store under ~/.password-store.", "os", ".", "environ", "[", "'HOME'", "]", "=", "expanduser", "(", "'~'", ")", "return", "_decrypt_object", "(", "pass_info", ")" ]
30.384615
20.692308
def signature_unsafe(m: bytes, sk: bytes, pk: bytes) -> bytes: """ Not safe to use with secret keys or secret data. See module docstring. This function should be used for testing only. """ h = H(sk) a = decodecoord(h) r = Hint(h[b // 8 : b // 4] + m) R = scalarmult_B(r) S = (r + Hint(encodepoint(R) + pk + m) * a) % l return encodepoint(R) + encodeint(S)
[ "def", "signature_unsafe", "(", "m", ":", "bytes", ",", "sk", ":", "bytes", ",", "pk", ":", "bytes", ")", "->", "bytes", ":", "h", "=", "H", "(", "sk", ")", "a", "=", "decodecoord", "(", "h", ")", "r", "=", "Hint", "(", "h", "[", "b", "//", "8", ":", "b", "//", "4", "]", "+", "m", ")", "R", "=", "scalarmult_B", "(", "r", ")", "S", "=", "(", "r", "+", "Hint", "(", "encodepoint", "(", "R", ")", "+", "pk", "+", "m", ")", "*", "a", ")", "%", "l", "return", "encodepoint", "(", "R", ")", "+", "encodeint", "(", "S", ")" ]
32.166667
15.333333
def list(): """ List blink(1) devices connected, by serial number :return: List of blink(1) device serial numbers """ try: devs = hid.enumerate(VENDOR_ID,PRODUCT_ID) serials = list(map(lambda d:d.get('serial_number'), devs)) return serials except IOError as e: return []
[ "def", "list", "(", ")", ":", "try", ":", "devs", "=", "hid", ".", "enumerate", "(", "VENDOR_ID", ",", "PRODUCT_ID", ")", "serials", "=", "list", "(", "map", "(", "lambda", "d", ":", "d", ".", "get", "(", "'serial_number'", ")", ",", "devs", ")", ")", "return", "serials", "except", "IOError", "as", "e", ":", "return", "[", "]" ]
32.363636
16.181818
def ordered_async_call(func_list): """ Runs the list of function asynchronously, returns the response maintaining the order :param func_list: Expects list of lists to be of format [[func1, args1, kwargs1], [func2, args2, kwargs2], ...] :return: List of output of the functions [output1, output2, ...] """ def worker(function, f_args, f_kwargs, queue, index): """ Runs the function and appends the output to list, and the Exception in the case of error """ response = { 'index': index, # For tracking the index of each function in actual list. # Since, this function is called asynchronously, order in # queue may differ 'data': None, 'error': None } # Handle error in the function call try: response['data'] = function(*f_args, **f_kwargs) except Exception as e: response['error'] = e # send back the exception along with the queue queue.put(response) queue = Queue() # For preserving state across threads processes = [Process(target=worker, args=(func, args, kwargs, queue, i)) \ for i, (func, args, kwargs) in enumerate(func_list)] for process in processes: process.start() response_list = [] for process in processes: # Wait for process to finish process.join() # Get back the response from the queue response = queue.get() if response['error']: raise response['error'] # Raise exception if the function call failed response_list.append(response) return [content['data'] for content in sorted(response_list, key=lambda x: x['index'])]
[ "def", "ordered_async_call", "(", "func_list", ")", ":", "def", "worker", "(", "function", ",", "f_args", ",", "f_kwargs", ",", "queue", ",", "index", ")", ":", "\"\"\"\n Runs the function and appends the output to list, and the Exception in the case of error\n \"\"\"", "response", "=", "{", "'index'", ":", "index", ",", "# For tracking the index of each function in actual list.", "# Since, this function is called asynchronously, order in", "# queue may differ", "'data'", ":", "None", ",", "'error'", ":", "None", "}", "# Handle error in the function call", "try", ":", "response", "[", "'data'", "]", "=", "function", "(", "*", "f_args", ",", "*", "*", "f_kwargs", ")", "except", "Exception", "as", "e", ":", "response", "[", "'error'", "]", "=", "e", "# send back the exception along with the queue", "queue", ".", "put", "(", "response", ")", "queue", "=", "Queue", "(", ")", "# For preserving state across threads", "processes", "=", "[", "Process", "(", "target", "=", "worker", ",", "args", "=", "(", "func", ",", "args", ",", "kwargs", ",", "queue", ",", "i", ")", ")", "for", "i", ",", "(", "func", ",", "args", ",", "kwargs", ")", "in", "enumerate", "(", "func_list", ")", "]", "for", "process", "in", "processes", ":", "process", ".", "start", "(", ")", "response_list", "=", "[", "]", "for", "process", "in", "processes", ":", "# Wait for process to finish", "process", ".", "join", "(", ")", "# Get back the response from the queue", "response", "=", "queue", ".", "get", "(", ")", "if", "response", "[", "'error'", "]", ":", "raise", "response", "[", "'error'", "]", "# Raise exception if the function call failed", "response_list", ".", "append", "(", "response", ")", "return", "[", "content", "[", "'data'", "]", "for", "content", "in", "sorted", "(", "response_list", ",", "key", "=", "lambda", "x", ":", "x", "[", "'index'", "]", ")", "]" ]
34.857143
22.653061
def MakePmfFromItems(t, name=''): """Makes a PMF from a sequence of value-probability pairs Args: t: sequence of value-probability pairs name: string name for this PMF Returns: Pmf object """ pmf = Pmf(dict(t), name) pmf.Normalize() return pmf
[ "def", "MakePmfFromItems", "(", "t", ",", "name", "=", "''", ")", ":", "pmf", "=", "Pmf", "(", "dict", "(", "t", ")", ",", "name", ")", "pmf", ".", "Normalize", "(", ")", "return", "pmf" ]
21.923077
18.076923
def is_node_highlighted(graph: BELGraph, node: BaseEntity) -> bool: """Returns if the given node is highlighted. :param graph: A BEL graph :param node: A BEL node :type node: tuple :return: Does the node contain highlight information? :rtype: bool """ return NODE_HIGHLIGHT in graph.node[node]
[ "def", "is_node_highlighted", "(", "graph", ":", "BELGraph", ",", "node", ":", "BaseEntity", ")", "->", "bool", ":", "return", "NODE_HIGHLIGHT", "in", "graph", ".", "node", "[", "node", "]" ]
31.7
15.6
def persist_block(self, block: 'BaseBlock' ) -> Tuple[Tuple[Hash32, ...], Tuple[Hash32, ...]]: """ Persist the given block's header and uncles. Assumes all block transactions have been persisted already. """ with self.db.atomic_batch() as db: return self._persist_block(db, block)
[ "def", "persist_block", "(", "self", ",", "block", ":", "'BaseBlock'", ")", "->", "Tuple", "[", "Tuple", "[", "Hash32", ",", "...", "]", ",", "Tuple", "[", "Hash32", ",", "...", "]", "]", ":", "with", "self", ".", "db", ".", "atomic_batch", "(", ")", "as", "db", ":", "return", "self", ".", "_persist_block", "(", "db", ",", "block", ")" ]
36.8
14
def exists(self, task_name): """ Determines if task directory exists. `task_name` Task name. Returns ``True`` if task exists. """ try: return os.path.exists(self._get_task_dir(task_name)) except OSError: return False
[ "def", "exists", "(", "self", ",", "task_name", ")", ":", "try", ":", "return", "os", ".", "path", ".", "exists", "(", "self", ".", "_get_task_dir", "(", "task_name", ")", ")", "except", "OSError", ":", "return", "False" ]
21.928571
20.857143
def _gather_group_members(group, groups, users): ''' Gather group members ''' _group = __salt__['group.info'](group) if not _group: log.warning('Group %s does not exist, ignoring.', group) return for member in _group['members']: if member not in users: users[member] = groups[group]
[ "def", "_gather_group_members", "(", "group", ",", "groups", ",", "users", ")", ":", "_group", "=", "__salt__", "[", "'group.info'", "]", "(", "group", ")", "if", "not", "_group", ":", "log", ".", "warning", "(", "'Group %s does not exist, ignoring.'", ",", "group", ")", "return", "for", "member", "in", "_group", "[", "'members'", "]", ":", "if", "member", "not", "in", "users", ":", "users", "[", "member", "]", "=", "groups", "[", "group", "]" ]
25.538462
19.846154
def add_metadata_sectors(self, vtoc, sector_list, header): """Add track/sector list """ tslist = BaseSectorList(header) for start in range(0, len(sector_list), header.ts_pairs): end = min(start + header.ts_pairs, len(sector_list)) if _xd: log.debug("ts: %d-%d" % (start, end)) s = Dos33TSSector(header, sector_list, start, end) s.ts_start, s.ts_end = start, end tslist.append(s) self.num_tslists = len(tslist) vtoc.assign_sector_numbers(self, tslist) sector_list.extend(tslist) self.track, self.sector = header.track_from_sector(tslist[0].sector_num) if _xd: log.debug("track/sector lists:\n%s" % str(tslist))
[ "def", "add_metadata_sectors", "(", "self", ",", "vtoc", ",", "sector_list", ",", "header", ")", ":", "tslist", "=", "BaseSectorList", "(", "header", ")", "for", "start", "in", "range", "(", "0", ",", "len", "(", "sector_list", ")", ",", "header", ".", "ts_pairs", ")", ":", "end", "=", "min", "(", "start", "+", "header", ".", "ts_pairs", ",", "len", "(", "sector_list", ")", ")", "if", "_xd", ":", "log", ".", "debug", "(", "\"ts: %d-%d\"", "%", "(", "start", ",", "end", ")", ")", "s", "=", "Dos33TSSector", "(", "header", ",", "sector_list", ",", "start", ",", "end", ")", "s", ".", "ts_start", ",", "s", ".", "ts_end", "=", "start", ",", "end", "tslist", ".", "append", "(", "s", ")", "self", ".", "num_tslists", "=", "len", "(", "tslist", ")", "vtoc", ".", "assign_sector_numbers", "(", "self", ",", "tslist", ")", "sector_list", ".", "extend", "(", "tslist", ")", "self", ".", "track", ",", "self", ".", "sector", "=", "header", ".", "track_from_sector", "(", "tslist", "[", "0", "]", ".", "sector_num", ")", "if", "_xd", ":", "log", ".", "debug", "(", "\"track/sector lists:\\n%s\"", "%", "str", "(", "tslist", ")", ")" ]
48.466667
13.733333
def load_hashes(filename): """Load the hashes dict from the hashfile""" # { filename : (sha1 of header if available or 'NA', # sha1 of input, # sha1 of output) } hashes = {} try: with open(filename, 'r') as cython_hash_file: for hash_record in cython_hash_file: (filename, header_hash, cython_hash, gen_file_hash) = hash_record.split() hashes[filename] = (header_hash, cython_hash, gen_file_hash) except (KeyError, ValueError, AttributeError, IOError): hashes = {} return hashes
[ "def", "load_hashes", "(", "filename", ")", ":", "# { filename : (sha1 of header if available or 'NA',", "# sha1 of input,", "# sha1 of output) }", "hashes", "=", "{", "}", "try", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "cython_hash_file", ":", "for", "hash_record", "in", "cython_hash_file", ":", "(", "filename", ",", "header_hash", ",", "cython_hash", ",", "gen_file_hash", ")", "=", "hash_record", ".", "split", "(", ")", "hashes", "[", "filename", "]", "=", "(", "header_hash", ",", "cython_hash", ",", "gen_file_hash", ")", "except", "(", "KeyError", ",", "ValueError", ",", "AttributeError", ",", "IOError", ")", ":", "hashes", "=", "{", "}", "return", "hashes" ]
37.5625
17.6875
def detached_signature_for(plaintext_str, keys): """ Signs the given plaintext string and returns the detached signature. A detached signature in GPG speak is a separate blob of data containing a signature for the specified plaintext. :param bytes plaintext_str: bytestring to sign :param keys: list of one or more key to sign with. :type keys: list[gpg.gpgme._gpgme_key] :returns: A list of signature and the signed blob of data :rtype: tuple[list[gpg.results.NewSignature], str] """ ctx = gpg.core.Context(armor=True) ctx.signers = keys (sigblob, sign_result) = ctx.sign(plaintext_str, mode=gpg.constants.SIG_MODE_DETACH) return sign_result.signatures, sigblob
[ "def", "detached_signature_for", "(", "plaintext_str", ",", "keys", ")", ":", "ctx", "=", "gpg", ".", "core", ".", "Context", "(", "armor", "=", "True", ")", "ctx", ".", "signers", "=", "keys", "(", "sigblob", ",", "sign_result", ")", "=", "ctx", ".", "sign", "(", "plaintext_str", ",", "mode", "=", "gpg", ".", "constants", ".", "SIG_MODE_DETACH", ")", "return", "sign_result", ".", "signatures", ",", "sigblob" ]
41.166667
15.944444
def subscribe(self, peer_jid): """ Request presence subscription with the given `peer_jid`. This is deliberately not a coroutine; we don’t know whether the peer is online (usually) and they may defer the confirmation very long, if they confirm at all. Use :meth:`on_subscribed` to get notified when a peer accepted a subscription request. """ self.client.enqueue( stanza.Presence(type_=structs.PresenceType.SUBSCRIBE, to=peer_jid) )
[ "def", "subscribe", "(", "self", ",", "peer_jid", ")", ":", "self", ".", "client", ".", "enqueue", "(", "stanza", ".", "Presence", "(", "type_", "=", "structs", ".", "PresenceType", ".", "SUBSCRIBE", ",", "to", "=", "peer_jid", ")", ")" ]
41
19.769231
def rectify_pi(self): """ rectify the prior information equation with the current state of the parameter_data dataframe. Equations that list fixed, tied or missing parameters are removed. This method is called during Pst.write() """ if self.prior_information.shape[0] == 0: return self._parse_pi_par_names() adj_names = self.adj_par_names def is_good(names): for n in names: if n not in adj_names: return False return True keep_idx = self.prior_information.names.\ apply(lambda x: is_good(x)) self.prior_information = self.prior_information.loc[keep_idx,:]
[ "def", "rectify_pi", "(", "self", ")", ":", "if", "self", ".", "prior_information", ".", "shape", "[", "0", "]", "==", "0", ":", "return", "self", ".", "_parse_pi_par_names", "(", ")", "adj_names", "=", "self", ".", "adj_par_names", "def", "is_good", "(", "names", ")", ":", "for", "n", "in", "names", ":", "if", "n", "not", "in", "adj_names", ":", "return", "False", "return", "True", "keep_idx", "=", "self", ".", "prior_information", ".", "names", ".", "apply", "(", "lambda", "x", ":", "is_good", "(", "x", ")", ")", "self", ".", "prior_information", "=", "self", ".", "prior_information", ".", "loc", "[", "keep_idx", ",", ":", "]" ]
39.166667
14.444444
def repp(file, config=None, module=None, active=None, format=None, trace_level=0): """ Tokenize with a Regular Expression PreProcessor (REPP). Results are printed directly to stdout. If more programmatic access is desired, the :mod:`delphin.repp` module provides a similar interface. Args: file (str, file): filename, open file, or stream of sentence inputs config (str): path to a PET REPP configuration (.set) file module (str): path to a top-level REPP module; other modules are found by external group calls active (list): select which modules are active; if `None`, all are used; incompatible with *config* (default: `None`) format (str): the output format (`"yy"`, `"string"`, `"line"`, or `"triple"`; default: `"yy"`) trace_level (int): if `0` no trace info is printed; if `1`, applied rules are printed, if greather than `1`, both applied and unapplied rules (in order) are printed (default: `0`) """ from delphin.repp import REPP if config is not None and module is not None: raise ValueError("cannot specify both 'config' and 'module'") if config is not None and active: raise ValueError("'active' cannot be used with 'config'") if config: r = REPP.from_config(config) elif module: r = REPP.from_file(module, active=active) else: r = REPP() # just tokenize if hasattr(file, 'read'): for line in file: _repp(r, line, format, trace_level) else: with io.open(file, encoding='utf-8') as fh: for line in fh: _repp(r, line, format, trace_level)
[ "def", "repp", "(", "file", ",", "config", "=", "None", ",", "module", "=", "None", ",", "active", "=", "None", ",", "format", "=", "None", ",", "trace_level", "=", "0", ")", ":", "from", "delphin", ".", "repp", "import", "REPP", "if", "config", "is", "not", "None", "and", "module", "is", "not", "None", ":", "raise", "ValueError", "(", "\"cannot specify both 'config' and 'module'\"", ")", "if", "config", "is", "not", "None", "and", "active", ":", "raise", "ValueError", "(", "\"'active' cannot be used with 'config'\"", ")", "if", "config", ":", "r", "=", "REPP", ".", "from_config", "(", "config", ")", "elif", "module", ":", "r", "=", "REPP", ".", "from_file", "(", "module", ",", "active", "=", "active", ")", "else", ":", "r", "=", "REPP", "(", ")", "# just tokenize", "if", "hasattr", "(", "file", ",", "'read'", ")", ":", "for", "line", "in", "file", ":", "_repp", "(", "r", ",", "line", ",", "format", ",", "trace_level", ")", "else", ":", "with", "io", ".", "open", "(", "file", ",", "encoding", "=", "'utf-8'", ")", "as", "fh", ":", "for", "line", "in", "fh", ":", "_repp", "(", "r", ",", "line", ",", "format", ",", "trace_level", ")" ]
38.795455
19.295455
def make_env_key(app_name, key): """Creates an environment key-equivalent for the given key""" key = key.replace('-', '_').replace(' ', '_') return str("_".join((x.upper() for x in (app_name, key))))
[ "def", "make_env_key", "(", "app_name", ",", "key", ")", ":", "key", "=", "key", ".", "replace", "(", "'-'", ",", "'_'", ")", ".", "replace", "(", "' '", ",", "'_'", ")", "return", "str", "(", "\"_\"", ".", "join", "(", "(", "x", ".", "upper", "(", ")", "for", "x", "in", "(", "app_name", ",", "key", ")", ")", ")", ")" ]
52
9.75
def mergesort(list_of_lists, key=None): """ Perform an N-way merge operation on sorted lists. @param list_of_lists: (really iterable of iterable) of sorted elements (either by naturally or by C{key}) @param key: specify sort key function (like C{sort()}, C{sorted()}) Yields tuples of the form C{(item, iterator)}, where the iterator is the built-in list iterator or something you pass in, if you pre-generate the iterators. This is a stable merge; complexity O(N lg N) Examples:: >>> print list(mergesort([[1,2,3,4], ... [2,3.25,3.75,4.5,6,7], ... [2.625,3.625,6.625,9]])) [1, 2, 2, 2.625, 3, 3.25, 3.625, 3.75, 4, 4.5, 6, 6.625, 7, 9] # note stability >>> print list(mergesort([[1,2,3,4], ... [2,3.25,3.75,4.5,6,7], ... [2.625,3.625,6.625,9]], ... key=int)) [1, 2, 2, 2.625, 3, 3.25, 3.75, 3.625, 4, 4.5, 6, 6.625, 7, 9] >>> print list(mergesort([[4, 3, 2, 1], ... [7, 6, 4.5, 3.75, 3.25, 2], ... [9, 6.625, 3.625, 2.625]], ... key=lambda x: -x)) [9, 7, 6.625, 6, 4.5, 4, 3.75, 3.625, 3.25, 3, 2.625, 2, 2, 1] """ heap = [] for i, itr in enumerate(iter(pl) for pl in list_of_lists): try: item = itr.next() if key: toadd = (key(item), i, item, itr) else: toadd = (item, i, itr) heap.append(toadd) except StopIteration: pass heapq.heapify(heap) if key: while heap: _, idx, item, itr = heap[0] yield item try: item = itr.next() heapq.heapreplace(heap, (key(item), idx, item, itr) ) except StopIteration: heapq.heappop(heap) else: while heap: item, idx, itr = heap[0] yield item try: heapq.heapreplace(heap, (itr.next(), idx, itr)) except StopIteration: heapq.heappop(heap)
[ "def", "mergesort", "(", "list_of_lists", ",", "key", "=", "None", ")", ":", "heap", "=", "[", "]", "for", "i", ",", "itr", "in", "enumerate", "(", "iter", "(", "pl", ")", "for", "pl", "in", "list_of_lists", ")", ":", "try", ":", "item", "=", "itr", ".", "next", "(", ")", "if", "key", ":", "toadd", "=", "(", "key", "(", "item", ")", ",", "i", ",", "item", ",", "itr", ")", "else", ":", "toadd", "=", "(", "item", ",", "i", ",", "itr", ")", "heap", ".", "append", "(", "toadd", ")", "except", "StopIteration", ":", "pass", "heapq", ".", "heapify", "(", "heap", ")", "if", "key", ":", "while", "heap", ":", "_", ",", "idx", ",", "item", ",", "itr", "=", "heap", "[", "0", "]", "yield", "item", "try", ":", "item", "=", "itr", ".", "next", "(", ")", "heapq", ".", "heapreplace", "(", "heap", ",", "(", "key", "(", "item", ")", ",", "idx", ",", "item", ",", "itr", ")", ")", "except", "StopIteration", ":", "heapq", ".", "heappop", "(", "heap", ")", "else", ":", "while", "heap", ":", "item", ",", "idx", ",", "itr", "=", "heap", "[", "0", "]", "yield", "item", "try", ":", "heapq", ".", "heapreplace", "(", "heap", ",", "(", "itr", ".", "next", "(", ")", ",", "idx", ",", "itr", ")", ")", "except", "StopIteration", ":", "heapq", ".", "heappop", "(", "heap", ")" ]
31.893939
19.80303
def create_custom_resource_definition(self, body, **kwargs): """ create a CustomResourceDefinition This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.create_custom_resource_definition(body, async_req=True) >>> result = thread.get() :param async_req bool :param V1beta1CustomResourceDefinition body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. :return: V1beta1CustomResourceDefinition If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_custom_resource_definition_with_http_info(body, **kwargs) else: (data) = self.create_custom_resource_definition_with_http_info(body, **kwargs) return data
[ "def", "create_custom_resource_definition", "(", "self", ",", "body", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "create_custom_resource_definition_with_http_info", "(", "body", ",", "*", "*", "kwargs", ")", "else", ":", "(", "data", ")", "=", "self", ".", "create_custom_resource_definition_with_http_info", "(", "body", ",", "*", "*", "kwargs", ")", "return", "data" ]
66.26087
38.086957
def compare_graph_searchers(): """Prints a table of results like this: >>> compare_graph_searchers() Searcher Romania(A, B) Romania(O, N) Australia breadth_first_tree_search < 21/ 22/ 59/B> <1158/1159/3288/N> < 7/ 8/ 22/WA> breadth_first_search < 7/ 11/ 18/B> < 19/ 20/ 45/N> < 2/ 6/ 8/WA> depth_first_graph_search < 8/ 9/ 20/B> < 16/ 17/ 38/N> < 4/ 5/ 11/WA> iterative_deepening_search < 11/ 33/ 31/B> < 656/1815/1812/N> < 3/ 11/ 11/WA> depth_limited_search < 54/ 65/ 185/B> < 387/1012/1125/N> < 50/ 54/ 200/WA> recursive_best_first_search < 5/ 6/ 15/B> <5887/5888/16532/N> < 11/ 12/ 43/WA>""" compare_searchers(problems=[GraphProblem('A', 'B', romania), GraphProblem('O', 'N', romania), GraphProblem('Q', 'WA', australia)], header=['Searcher', 'Romania(A, B)', 'Romania(O, N)', 'Australia'])
[ "def", "compare_graph_searchers", "(", ")", ":", "compare_searchers", "(", "problems", "=", "[", "GraphProblem", "(", "'A'", ",", "'B'", ",", "romania", ")", ",", "GraphProblem", "(", "'O'", ",", "'N'", ",", "romania", ")", ",", "GraphProblem", "(", "'Q'", ",", "'WA'", ",", "australia", ")", "]", ",", "header", "=", "[", "'Searcher'", ",", "'Romania(A, B)'", ",", "'Romania(O, N)'", ",", "'Australia'", "]", ")" ]
75.428571
33.428571
def _get_configured_module(option_name, known_modules=None): """Get the module specified by the value of option_name. The value of the configuration option will be used to load the module by name from the known module list or treated as a path if not found in known_modules. Args: option_name: name of persistence module known_modules: dictionary of module names and module paths, ie: {'ndb':'furious.extras.appengine.ndb_persistence'} Returns: module of the module path matching the name in known_modules """ from furious.job_utils import path_to_reference config = get_config() option_value = config[option_name] # If no known_modules were give, make it an empty dict. if not known_modules: known_modules = {} module_path = known_modules.get(option_value) or option_value return path_to_reference(module_path)
[ "def", "_get_configured_module", "(", "option_name", ",", "known_modules", "=", "None", ")", ":", "from", "furious", ".", "job_utils", "import", "path_to_reference", "config", "=", "get_config", "(", ")", "option_value", "=", "config", "[", "option_name", "]", "# If no known_modules were give, make it an empty dict.", "if", "not", "known_modules", ":", "known_modules", "=", "{", "}", "module_path", "=", "known_modules", ".", "get", "(", "option_value", ")", "or", "option_value", "return", "path_to_reference", "(", "module_path", ")" ]
40.409091
20.681818
def get_no_record_response(self, request): """ Get an HTTPResponse that can be used when there's no related EnterpriseCustomer. """ username, course_id, program_uuid, enterprise_customer_uuid = self.get_required_query_params(request) data = { self.REQUIRED_PARAM_USERNAME: username, self.REQUIRED_PARAM_ENTERPRISE_CUSTOMER: enterprise_customer_uuid, self.CONSENT_EXISTS: False, self.CONSENT_GRANTED: False, self.CONSENT_REQUIRED: False, } if course_id: data[self.REQUIRED_PARAM_COURSE_ID] = course_id if program_uuid: data[self.REQUIRED_PARAM_PROGRAM_UUID] = program_uuid return Response(data, status=HTTP_200_OK)
[ "def", "get_no_record_response", "(", "self", ",", "request", ")", ":", "username", ",", "course_id", ",", "program_uuid", ",", "enterprise_customer_uuid", "=", "self", ".", "get_required_query_params", "(", "request", ")", "data", "=", "{", "self", ".", "REQUIRED_PARAM_USERNAME", ":", "username", ",", "self", ".", "REQUIRED_PARAM_ENTERPRISE_CUSTOMER", ":", "enterprise_customer_uuid", ",", "self", ".", "CONSENT_EXISTS", ":", "False", ",", "self", ".", "CONSENT_GRANTED", ":", "False", ",", "self", ".", "CONSENT_REQUIRED", ":", "False", ",", "}", "if", "course_id", ":", "data", "[", "self", ".", "REQUIRED_PARAM_COURSE_ID", "]", "=", "course_id", "if", "program_uuid", ":", "data", "[", "self", ".", "REQUIRED_PARAM_PROGRAM_UUID", "]", "=", "program_uuid", "return", "Response", "(", "data", ",", "status", "=", "HTTP_200_OK", ")" ]
39.631579
20.684211
def readFromProto(cls, proto): """ Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.readFromProto`. Read state from proto object. :param proto: SPRegionProto capnproto object """ instance = cls(proto.columnCount, proto.inputWidth) instance.spatialImp = proto.spatialImp instance.learningMode = proto.learningMode instance.inferenceMode = proto.inferenceMode instance.anomalyMode = proto.anomalyMode instance.topDownMode = proto.topDownMode spatialImp = proto.spatialImp instance._sfdr = getSPClass(spatialImp).read(proto.spatialPooler) return instance
[ "def", "readFromProto", "(", "cls", ",", "proto", ")", ":", "instance", "=", "cls", "(", "proto", ".", "columnCount", ",", "proto", ".", "inputWidth", ")", "instance", ".", "spatialImp", "=", "proto", ".", "spatialImp", "instance", ".", "learningMode", "=", "proto", ".", "learningMode", "instance", ".", "inferenceMode", "=", "proto", ".", "inferenceMode", "instance", ".", "anomalyMode", "=", "proto", ".", "anomalyMode", "instance", ".", "topDownMode", "=", "proto", ".", "topDownMode", "spatialImp", "=", "proto", ".", "spatialImp", "instance", ".", "_sfdr", "=", "getSPClass", "(", "spatialImp", ")", ".", "read", "(", "proto", ".", "spatialPooler", ")", "return", "instance" ]
30.35
17.75
def _activation_summary(x): """Helper to create summaries for activations. Creates a summary that provides a histogram of activations. Creates a summary that measure the sparsity of activations. Args: x: Tensor Returns: nothing """ # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training # session. This helps the clarity of presentation on tensorboard. tf.histogram_summary(x.name + '/activations', x) tf.scalar_summary(x.name + '/sparsity', tf.nn.zero_fraction(x))
[ "def", "_activation_summary", "(", "x", ")", ":", "# Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training", "# session. This helps the clarity of presentation on tensorboard.", "tf", ".", "histogram_summary", "(", "x", ".", "name", "+", "'/activations'", ",", "x", ")", "tf", ".", "scalar_summary", "(", "x", ".", "name", "+", "'/sparsity'", ",", "tf", ".", "nn", ".", "zero_fraction", "(", "x", ")", ")" ]
38.538462
20.923077
def seek_to_position(position): """Seek to an absolute position in stream.""" message = command(protobuf.CommandInfo_pb2.SeekToPlaybackPosition) send_command = message.inner() send_command.options.playbackPosition = position return message
[ "def", "seek_to_position", "(", "position", ")", ":", "message", "=", "command", "(", "protobuf", ".", "CommandInfo_pb2", ".", "SeekToPlaybackPosition", ")", "send_command", "=", "message", ".", "inner", "(", ")", "send_command", ".", "options", ".", "playbackPosition", "=", "position", "return", "message" ]
42.333333
13.166667
def with_spark_context(application_name, conf=None): """Context manager for a spark context Parameters ---------- application_name : string conf : string, optional Returns ------- sc : SparkContext Examples -------- Used within a context manager >>> with with_spark_context("MyApplication") as sc: ... # Your Code here ... pass """ if conf is None: conf = default_configuration assert isinstance(conf, SparkConfiguration) sc = conf.spark_context(application_name) try: yield sc finally: sc.stop()
[ "def", "with_spark_context", "(", "application_name", ",", "conf", "=", "None", ")", ":", "if", "conf", "is", "None", ":", "conf", "=", "default_configuration", "assert", "isinstance", "(", "conf", ",", "SparkConfiguration", ")", "sc", "=", "conf", ".", "spark_context", "(", "application_name", ")", "try", ":", "yield", "sc", "finally", ":", "sc", ".", "stop", "(", ")" ]
20.172414
21.448276
def remove_elements(target, indices): """Remove multiple elements from a list and return result. This implementation is faster than the alternative below. Also note the creation of a new list to avoid altering the original. We don't have any current use for the original intact list, but may in the future...""" copied = list(target) for index in reversed(indices): del copied[index] return copied
[ "def", "remove_elements", "(", "target", ",", "indices", ")", ":", "copied", "=", "list", "(", "target", ")", "for", "index", "in", "reversed", "(", "indices", ")", ":", "del", "copied", "[", "index", "]", "return", "copied" ]
35.666667
17