text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def scheduled_sample_count(ground_truth_x, generated_x, batch_size,
                           scheduled_sample_var):
  """Mix ground-truth and generated data points into a single batch.

  Args:
    ground_truth_x: tensor of ground-truth data points.
    generated_x: tensor of generated data points.
    batch_size: batch size
    scheduled_sample_var: number of ground-truth examples to include in batch.

  Returns:
    New batch with num_ground_truth sampled from ground_truth_x and the rest
    from generated_x.
  """
  num_ground_truth = scheduled_sample_var
  # Randomly assign each batch position to either the ground-truth pool
  # or the generated pool.
  shuffled_positions = tf.random_shuffle(tf.range(batch_size))
  gt_positions = tf.gather(shuffled_positions, tf.range(num_ground_truth))
  gen_positions = tf.gather(shuffled_positions,
                            tf.range(num_ground_truth, batch_size))
  gt_rows = tf.gather(ground_truth_x, gt_positions)
  gen_rows = tf.gather(generated_x, gen_positions)
  # Re-interleave the rows back into their original batch positions.
  output = tf.dynamic_stitch([gt_positions, gen_positions],
                             [gt_rows, gen_rows])
  # If batch size is statically known, propagate it into the output shape.
  if isinstance(batch_size, int):
    output.set_shape([batch_size] + common_layers.shape_list(output)[1:])
  return output
[ "def", "scheduled_sample_count", "(", "ground_truth_x", ",", "generated_x", ",", "batch_size", ",", "scheduled_sample_var", ")", ":", "num_ground_truth", "=", "scheduled_sample_var", "idx", "=", "tf", ".", "random_shuffle", "(", "tf", ".", "range", "(", "batch_size"...
41.655172
19.275862
def QA_util_send_mail(msg, title, from_user, from_password, to_addr, smtp):
    """Send a plain-text e-mail through an SMTP server.

    Arguments:
        msg {str} -- message body.
        title {str} -- mail subject.
        from_user {str} -- sender account used for SMTP login.
        from_password {str} -- sender password used for SMTP login.
        to_addr {str} -- recipient address.
        smtp {str} -- SMTP server host name.
    """
    mail = MIMEText(msg, 'plain', 'utf-8')
    mail['Subject'] = Header(title, 'utf-8').encode()
    server = smtplib.SMTP(smtp, 25)  # 25 is the default SMTP port
    try:
        server.set_debuglevel(1)
        server.login(from_user, from_password)
        server.sendmail(from_user, [to_addr], mail.as_string())
    finally:
        # Always close the SMTP session, even if login/send fails;
        # the original leaked the connection.
        server.quit()
[ "def", "QA_util_send_mail", "(", "msg", ",", "title", ",", "from_user", ",", "from_password", ",", "to_addr", ",", "smtp", ")", ":", "msg", "=", "MIMEText", "(", "msg", ",", "'plain'", ",", "'utf-8'", ")", "msg", "[", "'Subject'", "]", "=", "Header", "...
33.105263
13.210526
def install_importer():
    """
    If in a virtualenv then load spec files to decide which modules can
    be imported from system site-packages and install path hook.
    """
    logging.debug('install_importer')
    if not in_venv():
        logging.debug('No virtualenv active py:[%s]', sys.executable)
        return False

    if disable_vext:
        logging.debug('Vext disabled by environment variable')
        return False

    hook_missing = GatekeeperFinder.PATH_TRIGGER not in sys.path
    if hook_missing:
        try:
            load_specs()
            sys.path.append(GatekeeperFinder.PATH_TRIGGER)
            sys.path_hooks.append(GatekeeperFinder)
        except Exception as e:
            # Don't kill other programmes because of a vext error.
            logger.info(str(e))
            if logger.getEffectiveLevel() == logging.DEBUG:
                raise

    logging.debug("importer installed")
    return True
[ "def", "install_importer", "(", ")", ":", "logging", ".", "debug", "(", "'install_importer'", ")", "if", "not", "in_venv", "(", ")", ":", "logging", ".", "debug", "(", "'No virtualenv active py:[%s]'", ",", "sys", ".", "executable", ")", "return", "False", "...
30.566667
17.1
def _rule_block(self):
    """
    Parses the production rule::

        block : NAME '{' option* '}'

    Returns list [name, options_list].
    """
    name = self._get_token(self.RE_NAME)
    self._expect_token('{')
    options = []
    # Keep consuming options until the closing brace is ahead.
    while True:
        if self._lookahead_token() == '}':
            break
        options.append(self._rule_option())
    self._expect_token('}')
    return [name, options]
[ "def", "_rule_block", "(", "self", ")", ":", "name", "=", "self", ".", "_get_token", "(", "self", ".", "RE_NAME", ")", "self", ".", "_expect_token", "(", "'{'", ")", "# consume additional options if available", "options", "=", "[", "]", "while", "self", ".",...
27.352941
15.411765
def to_grayscale(img, num_output_channels=1):
    """Convert image to grayscale version of image.

    Args:
        img (PIL Image): Image to be converted to grayscale.
        num_output_channels (int): 1 returns a single-channel image;
            3 returns a 3-channel image with r == g == b.

    Returns:
        PIL Image: Grayscale version of the image.

    Raises:
        TypeError: if ``img`` is not a PIL Image.
        ValueError: if ``num_output_channels`` is neither 1 nor 3.
    """
    if not _is_pil_image(img):
        raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

    if num_output_channels not in (1, 3):
        raise ValueError('num_output_channels should be either 1 or 3')

    img = img.convert('L')
    if num_output_channels == 3:
        # Replicate the single luminance channel into r, g and b.
        luminance = np.array(img, dtype=np.uint8)
        img = Image.fromarray(np.dstack([luminance] * 3), 'RGB')
    return img
[ "def", "to_grayscale", "(", "img", ",", "num_output_channels", "=", "1", ")", ":", "if", "not", "_is_pil_image", "(", "img", ")", ":", "raise", "TypeError", "(", "'img should be PIL Image. Got {}'", ".", "format", "(", "type", "(", "img", ")", ")", ")", "i...
33
21.538462
def getargnames(argspecs, with_unbox=False):
    """Resembles list of arg-names as would be seen in a function signature,
    including var-args, var-keywords and keyword-only args.

    :param argspecs: result of ``inspect.getargspec`` (Python 2) or
        ``inspect.getfullargspec`` (Python 3).
    :param with_unbox: if True, prefix var-args with '*' and
        var-keywords with '**' as they would appear in a signature.
    :return: list of argument names in signature order.
    """
    args = argspecs.args
    vargs = argspecs.varargs
    # Python 2's getargspec exposes 'keywords'; Python 3's
    # getfullargspec uses 'varkw' and adds 'kwonlyargs'.
    try:
        kw = argspecs.keywords
    except AttributeError:
        kw = argspecs.varkw
    kwonly = getattr(argspecs, 'kwonlyargs', None)
    res = []
    if args is not None:
        res.extend(args)
    if vargs is not None:
        res.append('*' + vargs if with_unbox else vargs)
    if kwonly is not None:
        res.extend(kwonly)
    if kw is not None:
        res.append('**' + kw if with_unbox else kw)
    return res
[ "def", "getargnames", "(", "argspecs", ",", "with_unbox", "=", "False", ")", ":", "# todo: We can maybe make use of inspect.formatargspec", "args", "=", "argspecs", ".", "args", "vargs", "=", "argspecs", ".", "varargs", "try", ":", "kw", "=", "argspecs", ".", "k...
30.16
14.68
def TriArea(v_init, f, normalize):
    """ Returns a Ch object whose only attribute "v" represents the
    flattened vertices."""
    # Optionally normalize vertices before computing the edge vectors.
    if normalize:
        nm = lambda x: NormalizedNx3(x)
    else:
        nm = lambda x: x
    # Area of each triangle = half the magnitude of the cross product
    # of two of its edges.
    cross = lambda v: CrossProduct(TriEdges(f, 1, 0, nm(v)),
                                   TriEdges(f, 2, 0, nm(v)))
    result = Ch(lambda v: (Sum3xN(cross(v) ** 2.) ** 0.5) * 0.5)
    result.v = v_init
    return result
[ "def", "TriArea", "(", "v_init", ",", "f", ",", "normalize", ")", ":", "if", "normalize", ":", "nm", "=", "lambda", "x", ":", "NormalizedNx3", "(", "x", ")", "else", ":", "nm", "=", "lambda", "x", ":", "x", "result", "=", "Ch", "(", "lambda", "v"...
37.1
22.7
def _build_tree_by_level(self, time_qualifier, collection_name, since):
    """ method iterated thru all documents in all job collections
    and builds a tree of known system state"""
    invalid_tree_records = dict()
    invalid_tq_records = dict()

    try:
        for job_record in self.job_dao.get_all(collection_name, since):
            process_name = job_record.process_name
            tree = self.get_tree(process_name)
            if tree is None:
                # No tree is responsible for this process; count and skip.
                utils.increment_family_property(process_name, invalid_tree_records)
                continue

            job_time_qualifier = context.process_context[process_name].time_qualifier
            if time_qualifier != job_time_qualifier:
                # Record belongs to a different time grouping; count and skip.
                utils.increment_family_property(process_name, invalid_tq_records)
                continue

            tree.update_node(job_record)
    except LookupError:
        self.logger.warning('No job records in {0}.'.format(collection_name))

    # Summarize what was skipped so operators can spot misconfiguration.
    for name, counter in invalid_tree_records.items():
        self.logger.warning('Skipping {0} job records for {1} since no tree is handling it.'
                            .format(counter, name))
    for name, counter in invalid_tq_records.items():
        self.logger.warning('Skipping {0} job records for {1} since the process has different time qualifier.'
                            .format(counter, name))
[ "def", "_build_tree_by_level", "(", "self", ",", "time_qualifier", ",", "collection_name", ",", "since", ")", ":", "invalid_tree_records", "=", "dict", "(", ")", "invalid_tq_records", "=", "dict", "(", ")", "try", ":", "job_records", "=", "self", ".", "job_dao...
48.8
27.8
def build_arch(self, arch):
    """simple shared compile"""
    env = self.get_recipe_env(arch, with_flags_in_cc=False)
    build_paths = (
        self.get_build_dir(arch.arch),
        join(self.ctx.python_recipe.get_build_dir(arch.arch), 'Lib'),
        join(self.ctx.python_recipe.get_build_dir(arch.arch), 'Include'),
    )
    for path in build_paths:
        if not exists(path):
            info("creating {}".format(path))
            shprint(sh.mkdir, '-p', path)
    cli = env['CC'].split()[0]
    # makes sure first CC command is the compiler rather than ccache, refs:
    # https://github.com/kivy/python-for-android/issues/1398
    if 'ccache' in cli:
        cli = env['CC'].split()[1]
    cc = sh.Command(cli)

    with current_directory(self.get_build_dir(arch.arch)):
        # Step 1: compile the translation unit to an object file.
        compile_flags = env['CFLAGS'].split()
        compile_flags.extend(['-I.', '-c', '-l.', 'ifaddrs.c', '-I.'])
        shprint(cc, *compile_flags, _env=env)
        # Step 2: link the object file into a shared library.
        link_flags = env['CFLAGS'].split()
        link_flags.extend(['-shared', '-I.', 'ifaddrs.o', '-o', 'libifaddrs.so'])
        link_flags.extend(env['LDFLAGS'].split())
        shprint(cc, *link_flags, _env=env)
        shprint(sh.cp, 'libifaddrs.so', self.ctx.get_libs_dir(arch.arch))
[ "def", "build_arch", "(", "self", ",", "arch", ")", ":", "env", "=", "self", ".", "get_recipe_env", "(", "arch", ",", "with_flags_in_cc", "=", "False", ")", "for", "path", "in", "(", "self", ".", "get_build_dir", "(", "arch", ".", "arch", ")", ",", "...
48.076923
16.961538
def calculate_marginal_likelihoods(tree, feature, frequencies):
    """
    Calculates marginal likelihoods for each tree node
    by multiplying state frequencies with their bottom-up and top-down likelihoods.

    :param tree: ete3.Tree, the tree of interest
    :param feature: str, character for which the likelihood is calculated
    :param frequencies: numpy array of state frequencies
    :return: void, stores the node marginal likelihoods in the
        get_personalized_feature_name(feature, LH) feature.
    """
    feature_name = lambda suffix: get_personalized_feature_name(feature, suffix)
    bu_lh_feature = feature_name(BU_LH)
    bu_lh_sf_feature = feature_name(BU_LH_SF)
    td_lh_feature = feature_name(TD_LH)
    td_lh_sf_feature = feature_name(TD_LH_SF)
    lh_feature = feature_name(LH)
    lh_sf_feature = feature_name(LH_SF)
    allowed_state_feature = feature_name(ALLOWED_STATES)

    for node in tree.traverse('preorder'):
        # Marginal likelihood = bottom-up * top-down * prior * state mask.
        likelihood = (getattr(node, bu_lh_feature)
                      * getattr(node, td_lh_feature)
                      * frequencies
                      * getattr(node, allowed_state_feature))
        node.add_feature(lh_feature, likelihood)
        node.add_feature(lh_sf_feature,
                         getattr(node, td_lh_sf_feature) + getattr(node, bu_lh_sf_feature))
        # The intermediate bottom-up/top-down features are no longer needed.
        for stale in (bu_lh_feature, bu_lh_sf_feature,
                      td_lh_feature, td_lh_sf_feature):
            node.del_feature(stale)
[ "def", "calculate_marginal_likelihoods", "(", "tree", ",", "feature", ",", "frequencies", ")", ":", "bu_lh_feature", "=", "get_personalized_feature_name", "(", "feature", ",", "BU_LH", ")", "bu_lh_sf_feature", "=", "get_personalized_feature_name", "(", "feature", ",", ...
53.428571
24.5
def get_command(arguments):
    """Extract the first argument from arguments parsed by docopt.

    :param arguments parsed by docopt:
    :return: command
    """
    # Commands are the True-valued entries whose key is not an option flag.
    commands = [key for key, value in arguments.items()
                if not key.startswith('-') and value is True]
    return commands[0]
[ "def", "get_command", "(", "arguments", ")", ":", "return", "[", "k", "for", "k", ",", "v", "in", "arguments", ".", "items", "(", ")", "if", "not", "k", ".", "startswith", "(", "'-'", ")", "and", "v", "is", "True", "]", "[", "0", "]" ]
31.875
11.5
def _protobuf_value_type(value):
    """Returns the type of the google.protobuf.Value message as an api.DataType.

    Returns None if the type of 'value' is not one of the types supported in
    api_pb2.DataType.

    Args:
      value: google.protobuf.Value message.
    """
    # Probe each supported oneof field in turn.
    field_to_type = (
        ("number_value", api_pb2.DATA_TYPE_FLOAT64),
        ("string_value", api_pb2.DATA_TYPE_STRING),
        ("bool_value", api_pb2.DATA_TYPE_BOOL),
    )
    for field, data_type in field_to_type:
        if value.HasField(field):
            return data_type
    return None
[ "def", "_protobuf_value_type", "(", "value", ")", ":", "if", "value", ".", "HasField", "(", "\"number_value\"", ")", ":", "return", "api_pb2", ".", "DATA_TYPE_FLOAT64", "if", "value", ".", "HasField", "(", "\"string_value\"", ")", ":", "return", "api_pb2", "."...
29.9375
14.625
def create_from_template(self, client_id, subject, name, from_name,
                         from_email, reply_to, list_ids, segment_ids,
                         template_id, template_content):
    """Creates a new campaign for a client, from a template.

    :param client_id: String ID of the client for whom the campaign will
        be created.
    :param subject: String subject of the campaign.
    :param name: String name of the campaign.
    :param from_name: String from name for the campaign.
    :param from_email: String from address for the campaign.
    :param reply_to: String reply-to address for the campaign.
    :param list_ids: Array of String IDs of the lists to which the
        campaign will be sent.
    :param segment_ids: Array of String IDs of the segments to which the
        campaign will be sent.
    :param template_id: String ID of the template on which the campaign
        will be based.
    :param template_content: Hash of content for the editable areas of
        the template. See documentation at
        campaignmonitor.com/api/campaigns/#creating_a_campaign_from_template
        for full details of template content format.
    :returns String representing the ID of the newly created campaign.
    """
    payload = json.dumps({
        "Subject": subject,
        "Name": name,
        "FromName": from_name,
        "FromEmail": from_email,
        "ReplyTo": reply_to,
        "ListIDs": list_ids,
        "SegmentIDs": segment_ids,
        "TemplateID": template_id,
        "TemplateContent": template_content,
    })
    response = self._post("/campaigns/%s/fromtemplate.json" % client_id, payload)
    self.campaign_id = json_to_py(response)
    return self.campaign_id
[ "def", "create_from_template", "(", "self", ",", "client_id", ",", "subject", ",", "name", ",", "from_name", ",", "from_email", ",", "reply_to", ",", "list_ids", ",", "segment_ids", ",", "template_id", ",", "template_content", ")", ":", "body", "=", "{", "\"...
53.567568
21
def getViews(self, path, year=None, month=None, day=None, hour=None):
    """Use this method to get the number of views for a Telegraph article.

    :param path: Required. Path to the Telegraph page (in the format
        Title-12-31, where 12 is the month and 31 the day the article was
        first published).
    :type path: str
    :param year: Required if month is passed. If passed, the number of
        page views for the requested year will be returned.
    :type year: int
    :param month: Required if day is passed. If passed, the number of
        page views for the requested month will be returned.
    :type month: int
    :param day: Required if hour is passed. If passed, the number of page
        views for the requested day will be returned.
    :type day: int
    :param hour: If passed, the number of page views for the requested
        hour will be returned.
    :type hour: int
    :return:
    """
    if path is None:
        raise TelegraphAPIException("Error while executing getViews: "
                                    "PAGE_NOT_FOUND")

    response = requests.post(BASE_URL + "getViews/" + path, data={
        "year": year,
        "month": month,
        "day": day,
        "hour": hour,
    })
    # Parse the body once instead of re-parsing per access.
    payload = response.json()
    if payload['ok'] is not True:
        raise TelegraphAPIException("Error while executing getViews: " +
                                    payload['error'])
    return payload['result']
[ "def", "getViews", "(", "self", ",", "path", ",", "year", "=", "None", ",", "month", "=", "None", ",", "day", "=", "None", ",", "hour", "=", "None", ")", ":", "if", "path", "is", "None", ":", "raise", "TelegraphAPIException", "(", "\"Error while execut...
40.918919
24.675676
def node_label_folder_absent(name, node, **kwargs):
    '''
    Ensures the label folder doesn't exist on the specified node.

    name
        The name of label folder

    node
        The name of the node
    '''
    ret = {'name': name,
           'changes': {},
           'result': False,
           'comment': ''}

    labels = __salt__['kubernetes.node_labels'](node, **kwargs)
    folder = name.strip("/") + "/"

    # Partition existing labels into those inside the folder and the rest.
    labels_to_drop = [label for label in labels if label.startswith(folder)]
    new_labels = [label for label in labels if not label.startswith(folder)]

    if not labels_to_drop:
        ret['result'] = True if not __opts__['test'] else None
        ret['comment'] = 'The label folder does not exist'
        return ret

    if __opts__['test']:
        ret['comment'] = 'The label folder is going to be deleted'
        ret['result'] = None
        return ret

    for label in labels_to_drop:
        __salt__['kubernetes.node_remove_label'](
            node_name=node,
            label_name=label,
            **kwargs)

    ret['result'] = True
    ret['changes'] = {
        'kubernetes.node_label_folder_absent': {
            'old': list(labels),
            'new': new_labels,
        }
    }
    ret['comment'] = 'Label folder removed from node'
    return ret
[ "def", "node_label_folder_absent", "(", "name", ",", "node", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "False", ",", "'comment'", ":", "''", "}", "labels", "=", ...
24.807692
21.153846
def _get_doc_by_raw_offset(self, doc_id):
    """
    Load document from xml using bytes offset information.
    XXX: this is not tested under Windows.
    """
    # Byte bounds are recorded in the metadata, keyed by the stringified id.
    doc_bounds = self._get_meta()[str(doc_id)].bounds
    return xml_utils.load_chunk(self.filename, doc_bounds)
[ "def", "_get_doc_by_raw_offset", "(", "self", ",", "doc_id", ")", ":", "bounds", "=", "self", ".", "_get_meta", "(", ")", "[", "str", "(", "doc_id", ")", "]", ".", "bounds", "return", "xml_utils", ".", "load_chunk", "(", "self", ".", "filename", ",", "...
40.285714
8.571429
def _get_site_amplification(self, C_AMP, vs30, pga_rock):
    """
    Gets the site amplification term based on equations 7 and 8 of
    Atkinson & Boore (2006)
    """
    # Nonlinear site term
    bnl = self._get_bnl(C_AMP, vs30)
    # Below 60 cm/s^2 the nonlinear coefficient saturates at ln(60/100);
    # above it, it scales with ln(pga_rock/100).
    f_nl_coeff = np.log(60.0 / 100.0) * np.ones_like(vs30)
    high_pga = pga_rock > 60.0
    f_nl_coeff[high_pga] = np.log(pga_rock[high_pga] / 100.0)
    return np.log(np.exp(
        C_AMP["blin"] * np.log(vs30 / self.CONSTS["Vref"]) + bnl * f_nl_coeff))
[ "def", "_get_site_amplification", "(", "self", ",", "C_AMP", ",", "vs30", ",", "pga_rock", ")", ":", "# Get nonlinear term", "bnl", "=", "self", ".", "_get_bnl", "(", "C_AMP", ",", "vs30", ")", "#", "f_nl_coeff", "=", "np", ".", "log", "(", "60.0", "/", ...
37.571429
13.714286
def __load_asset_class(self, ac_id: int):
    """ Loads Asset Class entity """
    session = self.__get_session()
    # Fetch the single asset class with the given id (None if missing).
    return session.query(dal.AssetClass).filter(dal.AssetClass.id == ac_id).first()
[ "def", "__load_asset_class", "(", "self", ",", "ac_id", ":", "int", ")", ":", "# open database", "db", "=", "self", ".", "__get_session", "(", ")", "entity", "=", "db", ".", "query", "(", "dal", ".", "AssetClass", ")", ".", "filter", "(", "dal", ".", ...
40.333333
14.666667
def _read_routine_metadata(self): """ Returns the metadata of stored routines. :rtype: dict """ metadata = {} if os.path.isfile(self._metadata_filename): with open(self._metadata_filename, 'r') as file: metadata = json.load(file) return metadata
[ "def", "_read_routine_metadata", "(", "self", ")", ":", "metadata", "=", "{", "}", "if", "os", ".", "path", ".", "isfile", "(", "self", ".", "_metadata_filename", ")", ":", "with", "open", "(", "self", ".", "_metadata_filename", ",", "'r'", ")", "as", ...
26.666667
15.333333
def get_sized_root_folder(self):
    """Return the location where sized images are stored."""
    folder = os.path.split(self.name)[0]
    # The trailing '' makes the joined path end with a separator.
    return os.path.join(VERSATILEIMAGEFIELD_SIZED_DIRNAME, folder, '')
[ "def", "get_sized_root_folder", "(", "self", ")", ":", "folder", ",", "filename", "=", "os", ".", "path", ".", "split", "(", "self", ".", "name", ")", "return", "os", ".", "path", ".", "join", "(", "VERSATILEIMAGEFIELD_SIZED_DIRNAME", ",", "folder", ",", ...
55.25
13.25
def setAttribute(values, value):
    """
    Takes the values of an attribute value list and attempts to append
    attributes of the proper type, inferred from their Python type.

    :param values: repeated protobuf value list; ``values.add()`` appends
        and returns a new entry.
    :param value: value to store. Lists/tuples/arrays recurse per element;
        dicts recurse per key into nested attributes; anything else is
        stringified.
    """
    try:
        long_type = long  # Python 2 has a distinct long type
    except NameError:
        long_type = int  # Python 3: no separate long; branch becomes a no-op
    # BUGFIX: bool must be tested before int — bool is a subclass of int,
    # so the original stored booleans as int32 and the bool branch was
    # unreachable.
    if isinstance(value, bool):
        values.add().bool_value = value
    elif isinstance(value, int):
        values.add().int32_value = value
    elif isinstance(value, float):
        values.add().double_value = value
    elif isinstance(value, long_type):
        values.add().int64_value = value
    elif isinstance(value, str):
        values.add().string_value = value
    elif isinstance(value, (list, tuple, array.array)):
        for v in value:
            setAttribute(values, v)
    elif isinstance(value, dict):
        for key in value:
            setAttribute(
                values.add().attributes.attr[key].values, value[key])
    else:
        values.add().string_value = str(value)
[ "def", "setAttribute", "(", "values", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "int", ")", ":", "values", ".", "add", "(", ")", ".", "int32_value", "=", "value", "elif", "isinstance", "(", "value", ",", "float", ")", ":", "value...
36.083333
10.25
def create_and_start_migration(
        self, resource_group_name, namespace_name, target_namespace,
        post_migration_name, custom_headers=None, raw=False, polling=True,
        **operation_config):
    """Creates Migration configuration and starts migration of entities
    from Standard to Premium namespace.

    :param resource_group_name: Name of the Resource group within the
     Azure subscription.
    :type resource_group_name: str
    :param namespace_name: The namespace name
    :type namespace_name: str
    :param target_namespace: Existing premium Namespace ARM Id name which
     has no entities, will be used for migration
    :type target_namespace: str
    :param post_migration_name: Name to access Standard Namespace after
     migration
    :type post_migration_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: The poller return type is ClientRawResponse, the
     direct response alongside the deserialized response
    :param polling: True for ARMPolling, False for no polling, or a
     polling object for personal polling strategy
    :return: An instance of LROPoller that returns
     MigrationConfigProperties or
     ClientRawResponse<MigrationConfigProperties> if raw==True
    :raises:
     :class:`ErrorResponseException<azure.mgmt.servicebus.models.ErrorResponseException>`
    """
    # Kick off the long-running operation; raw=True keeps the initial
    # response so the poller can track it.
    raw_result = self._create_and_start_migration_initial(
        resource_group_name=resource_group_name,
        namespace_name=namespace_name,
        target_namespace=target_namespace,
        post_migration_name=post_migration_name,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        # Deserialize the terminal response into the model type.
        deserialized = self._deserialize('MigrationConfigProperties', response)
        if raw:
            return ClientRawResponse(deserialized, response)
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)

    # Select the polling strategy: default ARM polling, none, or custom.
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling

    return LROPoller(self._client, raw_result, get_long_running_output,
                     polling_method)
[ "def", "create_and_start_migration", "(", "self", ",", "resource_group_name", ",", "namespace_name", ",", "target_namespace", ",", "post_migration_name", ",", "custom_headers", "=", "None", ",", "raw", "=", "False", ",", "polling", "=", "True", ",", "*", "*", "o...
49.105263
24.140351
def _aix_loadavg():
    '''
    Return the load average on AIX
    '''
    # 03:42PM up 9 days, 20:41, 2 users, load average: 0.28, 0.47, 0.69
    uptime = __salt__['cmd.run']('uptime')
    # Everything after 'load average' looks like ': 0.28, 0.47, 0.69'.
    fields = uptime.split('load average')[1].split()
    return {'1-min': fields[1].strip(','),
            '5-min': fields[2].strip(','),
            '15-min': fields[3]}
[ "def", "_aix_loadavg", "(", ")", ":", "# 03:42PM up 9 days, 20:41, 2 users, load average: 0.28, 0.47, 0.69", "uptime", "=", "__salt__", "[", "'cmd.run'", "]", "(", "'uptime'", ")", "ldavg", "=", "uptime", ".", "split", "(", "'load average'", ")", "load_avg", "="...
34.454545
14.090909
def _handle_list_marker(self):
    """Handle a list marker at the head (``#``, ``*``, ``;``, ``:``)."""
    marker = self._read()
    # A ';' additionally opens a definition-list term context.
    if marker == ";":
        self._context |= contexts.DL_TERM
    self._emit(tokens.TagOpenOpen(wiki_markup=marker))
    self._emit_text(get_html_tag(marker))
    self._emit(tokens.TagCloseSelfclose())
[ "def", "_handle_list_marker", "(", "self", ")", ":", "markup", "=", "self", ".", "_read", "(", ")", "if", "markup", "==", "\";\"", ":", "self", ".", "_context", "|=", "contexts", ".", "DL_TERM", "self", ".", "_emit", "(", "tokens", ".", "TagOpenOpen", ...
44.25
8.75
def nextCmd(snmpEngine, authData, transportTarget, contextData,
            *varBinds, **options):
    """Creates a generator to perform one or more SNMP GETNEXT queries.

    On each iteration, new SNMP GETNEXT request is send
    (:RFC:`1905#section-4.2.2`). The iterator blocks waiting for response
    to arrive or error to occur.

    Parameters
    ----------
    snmpEngine : :py:class:`~pysnmp.hlapi.SnmpEngine`
        Class instance representing SNMP engine.
    authData : :py:class:`~pysnmp.hlapi.CommunityData` or :py:class:`~pysnmp.hlapi.UsmUserData`
        Class instance representing SNMP credentials.
    transportTarget : :py:class:`~pysnmp.hlapi.asyncore.UdpTransportTarget` or :py:class:`~pysnmp.hlapi.asyncore.Udp6TransportTarget`
        Class instance representing transport type along with SNMP peer
        address.
    contextData : :py:class:`~pysnmp.hlapi.ContextData`
        Class instance representing SNMP ContextEngineId and ContextName
        values.
    \*varBinds : :py:class:`~pysnmp.smi.rfc1902.ObjectType`
        One or more class instances representing MIB variables to place
        into SNMP request.

    Other Parameters
    ----------------
    \*\*options :
        Request options:

        * `lookupMib` - load MIB and resolve response MIB variables at
          the cost of slightly reduced performance. Default is `True`.
        * `lexicographicMode` - walk SNMP agent's MIB till the end (if
          `True`), otherwise (if `False`) stop iteration when all
          response MIB variables leave the scope of initial MIB variables
          in `varBinds`. Default is `True`.
        * `ignoreNonIncreasingOid` - continue iteration even if response
          MIB variables (OIDs) are not greater then request MIB
          variables. Be aware that setting it to `True` may cause
          infinite loop between SNMP management and agent applications.
          Default is `False`.
        * `maxRows` - stop iteration once this generator instance
          processed `maxRows` of SNMP conceptual table. Default is `0`
          (no limit).
        * `maxCalls` - stop iteration once this generator instance
          processed `maxCalls` responses. Default is 0 (no limit).

    Yields
    ------
    errorIndication : str
        True value indicates SNMP engine error.
    errorStatus : str
        True value indicates SNMP PDU error.
    errorIndex : int
        Non-zero value refers to `varBinds[errorIndex-1]`
    varBinds : tuple
        A sequence of :py:class:`~pysnmp.smi.rfc1902.ObjectType` class
        instances representing MIB variables returned in SNMP response.

    Raises
    ------
    PySnmpError
        Or its derivative indicating that an error occurred while
        performing SNMP operation.

    Notes
    -----
    The `nextCmd` generator will be exhausted on any of the following
    conditions:

    * SNMP engine error occurs thus `errorIndication` is `True`
    * SNMP PDU `errorStatus` is reported as `True`
    * SNMP :py:class:`~pysnmp.proto.rfc1905.EndOfMibView` values (also
      known as *SNMP exception values*) are reported for all MIB
      variables in `varBinds`
    * *lexicographicMode* option is `True` and SNMP agent reports
      end-of-mib or *lexicographicMode* is `False` and all response MIB
      variables leave the scope of `varBinds`

    At any moment a new sequence of `varBinds` could be send back into
    running generator (supported since Python 2.6).

    Examples
    --------
    >>> from pysnmp.hlapi import *
    >>> g = nextCmd(SnmpEngine(),
    ...             CommunityData('public'),
    ...             UdpTransportTarget(('demo.snmplabs.com', 161)),
    ...             ContextData(),
    ...             ObjectType(ObjectIdentity('SNMPv2-MIB', 'sysDescr')))
    >>> next(g)
    (None, 0, 0, [ObjectType(ObjectIdentity(ObjectName('1.3.6.1.2.1.1.1.0')), DisplayString('SunOS zeus.snmplabs.com 4.1.3_U1 1 sun4m'))])
    >>> g.send( [ ObjectType(ObjectIdentity('IF-MIB', 'ifInOctets')) ] )
    (None, 0, 0, [(ObjectName('1.3.6.1.2.1.2.2.1.10.1'), Counter32(284817787))])
    """
    # Callback invoked by the low-level dispatcher: stashes the response
    # pieces into the shared cbCtx dict for the loop below to pick up.
    # noinspection PyShadowingNames
    def cbFun(snmpEngine, sendRequestHandle,
              errorIndication, errorStatus, errorIndex,
              varBindTable, cbCtx):
        cbCtx['errorIndication'] = errorIndication
        cbCtx['errorStatus'] = errorStatus
        cbCtx['errorIndex'] = errorIndex
        cbCtx['varBindTable'] = varBindTable

    lexicographicMode = options.get('lexicographicMode', True)
    ignoreNonIncreasingOid = options.get('ignoreNonIncreasingOid', False)
    maxRows = options.get('maxRows', 0)
    maxCalls = options.get('maxCalls', 0)

    cbCtx = {}

    vbProcessor = CommandGeneratorVarBinds()

    # OIDs of the initial request; used to detect when a walk leaves the
    # requested subtree (non-lexicographic mode).
    initialVars = [x[0] for x in vbProcessor.makeVarBinds(snmpEngine.cache, varBinds)]

    totalRows = totalCalls = 0

    while True:
        previousVarBinds = varBinds
        if varBinds:
            # Issue one synchronous GETNEXT round-trip.
            cmdgen.nextCmd(snmpEngine, authData, transportTarget,
                           contextData,
                           *[(x[0], Null('')) for x in varBinds],
                           cbFun=cbFun, cbCtx=cbCtx,
                           lookupMib=options.get('lookupMib', True))

            snmpEngine.transportDispatcher.runDispatcher()

            errorIndication = cbCtx['errorIndication']
            errorStatus = cbCtx['errorStatus']
            errorIndex = cbCtx['errorIndex']

            if ignoreNonIncreasingOid and errorIndication and isinstance(errorIndication, errind.OidNotIncreasing):
                errorIndication = None

            if errorIndication:
                yield (errorIndication, errorStatus, errorIndex, varBinds)
                return

            elif errorStatus:
                if errorStatus == 2:
                    # Hide SNMPv1 noSuchName error which leaks in here
                    # from SNMPv1 Agent through internal pysnmp proxy.
                    errorStatus = errorStatus.clone(0)
                    errorIndex = errorIndex.clone(0)

                yield (errorIndication, errorStatus, errorIndex, varBinds)
                return

            else:
                # Successful response: normalize end-of-mib / out-of-scope
                # columns to endOfMibView and decide whether to stop.
                stopFlag = True

                varBinds = cbCtx['varBindTable'] and cbCtx['varBindTable'][0]

                for col, varBind in enumerate(varBinds):
                    name, val = varBind
                    if isinstance(val, Null):
                        varBinds[col] = previousVarBinds[col][0], endOfMibView

                    if not lexicographicMode and not initialVars[col].isPrefixOf(name):
                        varBinds[col] = previousVarBinds[col][0], endOfMibView

                    if stopFlag and varBinds[col][1] is not endOfMibView:
                        stopFlag = False

                if stopFlag:
                    return

                totalRows += 1
                totalCalls += 1
        else:
            errorIndication = errorStatus = errorIndex = None
            varBinds = []

        # The consumer may send() a replacement varBinds sequence to
        # redirect the walk.
        initialVarBinds = (yield errorIndication, errorStatus, errorIndex, varBinds)

        if initialVarBinds:
            varBinds = initialVarBinds
            initialVars = [x[0] for x in vbProcessor.makeVarBinds(snmpEngine.cache, varBinds)]

        if maxRows and totalRows >= maxRows:
            return

        if maxCalls and totalCalls >= maxCalls:
            return
[ "def", "nextCmd", "(", "snmpEngine", ",", "authData", ",", "transportTarget", ",", "contextData", ",", "*", "varBinds", ",", "*", "*", "options", ")", ":", "# noinspection PyShadowingNames", "def", "cbFun", "(", "snmpEngine", ",", "sendRequestHandle", ",", "erro...
39.081081
25.513514
def check_arguments(self):
    """Sanity check the arguments passed in.

    Uses the boolean functions specified in the subclasses in the
    _valid_arguments dictionary to determine if an argument is valid
    or invalid.

    :raises InvalidArgumentApplicationError: if a switched-on parameter
        fails its validator.
    """
    # .items() works on both Python 2 and 3; the original .iteritems()
    # is Python 2 only.
    for k, v in self.Parameters.items():
        if not v.isOn():
            continue
        validator = self._valid_arguments.get(k)
        if validator is not None and not validator(v.Value):
            error_message = 'Invalid argument (%s) ' % v.Value
            error_message += 'for parameter %s\n' % k
            raise InvalidArgumentApplicationError(error_message)
[ "def", "check_arguments", "(", "self", ")", ":", "for", "k", ",", "v", "in", "self", ".", "Parameters", ".", "iteritems", "(", ")", ":", "if", "self", ".", "Parameters", "[", "k", "]", ".", "isOn", "(", ")", ":", "if", "k", "in", "self", ".", "...
46.857143
19.071429
def retrieve_connection(self, session=None): """ Retrieves the dynamically created connection from the Connection table. :param session: Session of the SQL Alchemy ORM (automatically generated with decorator). """ self.log.info("Retrieving connection %s", self.db_conn_id) connections = session.query(Connection).filter( Connection.conn_id == self.db_conn_id) if connections.count(): return connections[0] return None
[ "def", "retrieve_connection", "(", "self", ",", "session", "=", "None", ")", ":", "self", ".", "log", ".", "info", "(", "\"Retrieving connection %s\"", ",", "self", ".", "db_conn_id", ")", "connections", "=", "session", ".", "query", "(", "Connection", ")", ...
39.846154
16.923077
def get_default_config(self): """ Returns default configuration options. """ config = super(SmartCollector, self).get_default_config() config.update({ 'path': 'smart', 'bin': 'smartctl', 'use_sudo': False, 'sudo_cmd': '/usr/bin/sudo', 'devices': '^disk[0-9]$|^sd[a-z]$|^hd[a-z]$', }) return config
[ "def", "get_default_config", "(", "self", ")", ":", "config", "=", "super", "(", "SmartCollector", ",", "self", ")", ".", "get_default_config", "(", ")", "config", ".", "update", "(", "{", "'path'", ":", "'smart'", ",", "'bin'", ":", "'smartctl'", ",", "...
32.076923
12.076923
def on_success(self, retval, task_id, args, kwargs): """on_success http://docs.celeryproject.org/en/latest/reference/celery.app.task.html :param retval: return value :param task_id: celery task id :param args: arguments passed into task :param kwargs: keyword arguments passed into task """ log.info(("{} SUCCESS - retval={} task_id={} " "args={} kwargs={}") .format( self.log_label, retval, task_id, args, kwargs))
[ "def", "on_success", "(", "self", ",", "retval", ",", "task_id", ",", "args", ",", "kwargs", ")", ":", "log", ".", "info", "(", "(", "\"{} SUCCESS - retval={} task_id={} \"", "\"args={} kwargs={}\"", ")", ".", "format", "(", "self", ".", "log_label", ",", "r...
31.789474
14.947368
def new(self, lits=[], ubound=1, top_id=None): """ The actual constructor of :class:`ITotalizer`. Invoked from ``self.__init__()``. Creates an object of :class:`ITotalizer` given a list of literals in the sum, the largest potential bound to consider, as well as the top variable identifier used so far. See the description of :class:`ITotalizer` for details. """ self.lits = list(lits) self.ubound = ubound self.top_id = max(map(lambda x: abs(x), self.lits + [top_id if top_id != None else 0])) # saving default SIGINT handler def_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_DFL) # creating the object self.tobj, clauses, self.rhs, self.top_id = pycard.itot_new(self.lits, self.ubound, self.top_id) # recovering default SIGINT handler def_sigint_handler = signal.signal(signal.SIGINT, def_sigint_handler) # saving the result self.cnf.clauses = clauses self.cnf.nv = self.top_id # for convenience, keeping the number of clauses self.nof_new = len(clauses)
[ "def", "new", "(", "self", ",", "lits", "=", "[", "]", ",", "ubound", "=", "1", ",", "top_id", "=", "None", ")", ":", "self", ".", "lits", "=", "list", "(", "lits", ")", "self", ".", "ubound", "=", "ubound", "self", ".", "top_id", "=", "max", ...
39.62069
22.655172
def write_membership(filename,config,srcfile,section=None): """ Top level interface to write the membership from a config and source model. """ source = Source() source.load(srcfile,section=section) loglike = createLoglike(config,source) loglike.write_membership(filename)
[ "def", "write_membership", "(", "filename", ",", "config", ",", "srcfile", ",", "section", "=", "None", ")", ":", "source", "=", "Source", "(", ")", "source", ".", "load", "(", "srcfile", ",", "section", "=", "section", ")", "loglike", "=", "createLoglik...
36.625
10.125
def stonith_create(stonith_id, stonith_device_type, stonith_device_options=None, cibfile=None): ''' Create a stonith resource via pcs command stonith_id name for the stonith resource stonith_device_type name of the stonith agent fence_eps, fence_xvm f.e. stonith_device_options additional options for creating the stonith resource cibfile use cibfile instead of the live CIB for manipulation CLI Example: .. code-block:: bash salt '*' pcs.stonith_create stonith_id='eps_fence' stonith_device_type='fence_eps' stonith_device_options="['pcmk_host_map=node1.example.org:01;node2.example.org:02', 'ipaddr=myepsdevice.example.org', 'action=reboot', 'power_wait=5', 'verbose=1', 'debug=/var/log/pcsd/eps_fence.log', 'login=hidden', 'passwd=hoonetorg']" cibfile='/tmp/cib_for_stonith.cib' ''' return item_create(item='stonith', item_id=stonith_id, item_type=stonith_device_type, extra_args=stonith_device_options, cibfile=cibfile)
[ "def", "stonith_create", "(", "stonith_id", ",", "stonith_device_type", ",", "stonith_device_options", "=", "None", ",", "cibfile", "=", "None", ")", ":", "return", "item_create", "(", "item", "=", "'stonith'", ",", "item_id", "=", "stonith_id", ",", "item_type"...
44.44
33.08
def lock(self, request, *args, **kwargs): """ Locks the considered topic and retirects the user to the success URL. """ self.object = self.get_object() success_url = self.get_success_url() self.object.status = Topic.TOPIC_LOCKED self.object.save() messages.success(self.request, self.success_message) return HttpResponseRedirect(success_url)
[ "def", "lock", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "object", "=", "self", ".", "get_object", "(", ")", "success_url", "=", "self", ".", "get_success_url", "(", ")", "self", ".", "object", "...
48.75
6.875
def hidden_tags(self): """ Returns a list of tags to be hidden from the 'ls' output. """ hidden_tags = self.cp.get('ls', 'hide_tags') # pylint: disable=no-member return [] if hidden_tags == '' else [tag.strip() for tag in hidden_tags.split(',')]
[ "def", "hidden_tags", "(", "self", ")", ":", "hidden_tags", "=", "self", ".", "cp", ".", "get", "(", "'ls'", ",", "'hide_tags'", ")", "# pylint: disable=no-member", "return", "[", "]", "if", "hidden_tags", "==", "''", "else", "[", "tag", ".", "strip", "(...
52.833333
15
def get_anchor_labels(anchors, gt_boxes, crowd_boxes): """ Label each anchor as fg/bg/ignore. Args: anchors: Ax4 float gt_boxes: Bx4 float, non-crowd crowd_boxes: Cx4 float Returns: anchor_labels: (A,) int. Each element is {-1, 0, 1} anchor_boxes: Ax4. Contains the target gt_box for each anchor when the anchor is fg. """ # This function will modify labels and return the filtered inds def filter_box_label(labels, value, max_num): curr_inds = np.where(labels == value)[0] if len(curr_inds) > max_num: disable_inds = np.random.choice( curr_inds, size=(len(curr_inds) - max_num), replace=False) labels[disable_inds] = -1 # ignore them curr_inds = np.where(labels == value)[0] return curr_inds NA, NB = len(anchors), len(gt_boxes) assert NB > 0 # empty images should have been filtered already box_ious = np_iou(anchors, gt_boxes) # NA x NB ious_argmax_per_anchor = box_ious.argmax(axis=1) # NA, ious_max_per_anchor = box_ious.max(axis=1) ious_max_per_gt = np.amax(box_ious, axis=0, keepdims=True) # 1xNB # for each gt, find all those anchors (including ties) that has the max ious with it anchors_with_max_iou_per_gt = np.where(box_ious == ious_max_per_gt)[0] # Setting NA labels: 1--fg 0--bg -1--ignore anchor_labels = -np.ones((NA,), dtype='int32') # NA, # the order of setting neg/pos labels matter anchor_labels[anchors_with_max_iou_per_gt] = 1 anchor_labels[ious_max_per_anchor >= cfg.RPN.POSITIVE_ANCHOR_THRESH] = 1 anchor_labels[ious_max_per_anchor < cfg.RPN.NEGATIVE_ANCHOR_THRESH] = 0 # label all non-ignore candidate boxes which overlap crowd as ignore if crowd_boxes.size > 0: cand_inds = np.where(anchor_labels >= 0)[0] cand_anchors = anchors[cand_inds] ioas = np_ioa(crowd_boxes, cand_anchors) overlap_with_crowd = cand_inds[ioas.max(axis=0) > cfg.RPN.CROWD_OVERLAP_THRESH] anchor_labels[overlap_with_crowd] = -1 # Subsample fg labels: ignore some fg if fg is too many target_num_fg = int(cfg.RPN.BATCH_PER_IM * cfg.RPN.FG_RATIO) fg_inds = filter_box_label(anchor_labels, 1, 
target_num_fg) # Keep an image even if there is no foreground anchors # if len(fg_inds) == 0: # raise MalformedData("No valid foreground for RPN!") # Subsample bg labels. num_bg is not allowed to be too many old_num_bg = np.sum(anchor_labels == 0) if old_num_bg == 0: # No valid bg in this image, skip. raise MalformedData("No valid background for RPN!") target_num_bg = cfg.RPN.BATCH_PER_IM - len(fg_inds) filter_box_label(anchor_labels, 0, target_num_bg) # ignore return values # Set anchor boxes: the best gt_box for each fg anchor anchor_boxes = np.zeros((NA, 4), dtype='float32') fg_boxes = gt_boxes[ious_argmax_per_anchor[fg_inds], :] anchor_boxes[fg_inds, :] = fg_boxes # assert len(fg_inds) + np.sum(anchor_labels == 0) == cfg.RPN.BATCH_PER_IM return anchor_labels, anchor_boxes
[ "def", "get_anchor_labels", "(", "anchors", ",", "gt_boxes", ",", "crowd_boxes", ")", ":", "# This function will modify labels and return the filtered inds", "def", "filter_box_label", "(", "labels", ",", "value", ",", "max_num", ")", ":", "curr_inds", "=", "np", ".",...
44.594203
19.521739
def create(configs): """Creates AndroidDevice controller objects. Args: configs: A list of dicts, each representing a configuration for an Android device. Returns: A list of AndroidDevice objects. """ if not configs: raise Error(ANDROID_DEVICE_EMPTY_CONFIG_MSG) elif configs == ANDROID_DEVICE_PICK_ALL_TOKEN: ads = get_all_instances() elif not isinstance(configs, list): raise Error(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG) elif isinstance(configs[0], dict): # Configs is a list of dicts. ads = get_instances_with_configs(configs) elif isinstance(configs[0], basestring): # Configs is a list of strings representing serials. ads = get_instances(configs) else: raise Error('No valid config found in: %s' % configs) valid_ad_identifiers = list_adb_devices() + list_adb_devices_by_usb_id() for ad in ads: if ad.serial not in valid_ad_identifiers: raise DeviceError(ad, 'Android device is specified in config but' ' is not attached.') _start_services_on_ads(ads) return ads
[ "def", "create", "(", "configs", ")", ":", "if", "not", "configs", ":", "raise", "Error", "(", "ANDROID_DEVICE_EMPTY_CONFIG_MSG", ")", "elif", "configs", "==", "ANDROID_DEVICE_PICK_ALL_TOKEN", ":", "ads", "=", "get_all_instances", "(", ")", "elif", "not", "isins...
35.4375
17.34375
def run_job(job_ini, log_level='info', log_file=None, exports='', username=getpass.getuser(), **kw): """ Run a job using the specified config file and other options. :param str job_ini: Path to calculation config (INI-style) files. :param str log_level: 'debug', 'info', 'warn', 'error', or 'critical' :param str log_file: Path to log file. :param exports: A comma-separated string of export types requested by the user. :param username: Name of the user running the job :param kw: Extra parameters like hazard_calculation_id and calculation_mode """ job_id = logs.init('job', getattr(logging, log_level.upper())) with logs.handle(job_id, log_level, log_file): job_ini = os.path.abspath(job_ini) oqparam = eng.job_from_file(job_ini, job_id, username, **kw) kw['username'] = username eng.run_calc(job_id, oqparam, exports, **kw) for line in logs.dbcmd('list_outputs', job_id, False): safeprint(line) return job_id
[ "def", "run_job", "(", "job_ini", ",", "log_level", "=", "'info'", ",", "log_file", "=", "None", ",", "exports", "=", "''", ",", "username", "=", "getpass", ".", "getuser", "(", ")", ",", "*", "*", "kw", ")", ":", "job_id", "=", "logs", ".", "init"...
38.777778
17
def lock(self): """Returns a JSON representation of the Pipfile.""" data = self.data data['_meta']['hash'] = {"sha256": self.hash} data['_meta']['pipfile-spec'] = 6 return json.dumps(data, indent=4, separators=(',', ': '))
[ "def", "lock", "(", "self", ")", ":", "data", "=", "self", ".", "data", "data", "[", "'_meta'", "]", "[", "'hash'", "]", "=", "{", "\"sha256\"", ":", "self", ".", "hash", "}", "data", "[", "'_meta'", "]", "[", "'pipfile-spec'", "]", "=", "6", "re...
42.833333
13.333333
def check_message(message, **kwargs): """Check the message format. Rules: - the first line must start by a component name - and a short description (52 chars), - then bullet points are expected - and finally signatures. :param components: compontents, e.g. ``('auth', 'utils', 'misc')`` :type components: `list` :param signatures: signatures, e.g. ``('Signed-off-by', 'Reviewed-by')`` :type signatures: `list` :param alt_signatures: alternative signatures, e.g. ``('Tested-by',)`` :type alt_signatures: `list` :param trusted: optional list of reviewers, e.g. ``('john.doe@foo.org',)`` :type trusted: `list` :param max_length: optional maximum line length (by default: 72) :type max_length: int :param max_first_line: optional maximum first line length (by default: 50) :type max_first_line: int :param allow_empty: optional way to allow empty message (by default: False) :type allow_empty: bool :return: errors sorted by line number :rtype: `list` """ if kwargs.pop("allow_empty", False): if not message or message.isspace(): return [] lines = re.split(r"\r\n|\r|\n", message) errors = _check_1st_line(lines[0], **kwargs) err, signature_lines = _check_bullets(lines, **kwargs) errors += err errors += _check_signatures(signature_lines, **kwargs) def _format(code, lineno, args): return "{0}: {1} {2}".format(lineno, code, _messages_codes[code].format(*args)) return list(map(lambda x: _format(x[0], x[1], x[2:]), sorted(errors, key=lambda x: x[0])))
[ "def", "check_message", "(", "message", ",", "*", "*", "kwargs", ")", ":", "if", "kwargs", ".", "pop", "(", "\"allow_empty\"", ",", "False", ")", ":", "if", "not", "message", "or", "message", ".", "isspace", "(", ")", ":", "return", "[", "]", "lines"...
37.681818
18.659091
def _format(self, format): """ Performs strftime(), always returning a unicode string :param format: A strftime() format string :return: A unicode string of the formatted datetime """ format = format.replace('%Y', '0000') # Year 0 is 1BC and a leap year. Leap years repeat themselves # every 28 years. Because of adjustments and the proleptic gregorian # calendar, the simplest way to format is to substitute year 2000. temp = datetime( 2000, self.month, self.day, self.hour, self.minute, self.second, self.microsecond, self.tzinfo ) if '%c' in format: c_out = temp.strftime('%c') # Handle full years c_out = c_out.replace('2000', '0000') c_out = c_out.replace('%', '%%') format = format.replace('%c', c_out) if '%x' in format: x_out = temp.strftime('%x') # Handle formats such as 08/16/2000 or 16.08.2000 x_out = x_out.replace('2000', '0000') x_out = x_out.replace('%', '%%') format = format.replace('%x', x_out) return temp.strftime(format)
[ "def", "_format", "(", "self", ",", "format", ")", ":", "format", "=", "format", ".", "replace", "(", "'%Y'", ",", "'0000'", ")", "# Year 0 is 1BC and a leap year. Leap years repeat themselves", "# every 28 years. Because of adjustments and the proleptic gregorian", "# calend...
33.157895
16
def _storage_get_key_names(bucket, pattern): """ Get names of all storage keys in a specified bucket that match a pattern. """ return [item.metadata.name for item in _storage_get_keys(bucket, pattern)]
[ "def", "_storage_get_key_names", "(", "bucket", ",", "pattern", ")", ":", "return", "[", "item", ".", "metadata", ".", "name", "for", "item", "in", "_storage_get_keys", "(", "bucket", ",", "pattern", ")", "]" ]
67.666667
13.333333
def killProcess(self, pid): """ Kills the process with the specified PID (if possible) """ SYNCHRONIZE = 0x00100000 PROCESS_TERMINATE = 0x0001 hProcess = self._kernel32.OpenProcess(SYNCHRONIZE|PROCESS_TERMINATE, True, pid) result = self._kernel32.TerminateProcess(hProcess, 0) self._kernel32.CloseHandle(hProcess)
[ "def", "killProcess", "(", "self", ",", "pid", ")", ":", "SYNCHRONIZE", "=", "0x00100000", "PROCESS_TERMINATE", "=", "0x0001", "hProcess", "=", "self", ".", "_kernel32", ".", "OpenProcess", "(", "SYNCHRONIZE", "|", "PROCESS_TERMINATE", ",", "True", ",", "pid",...
50.714286
14.142857
def allocate_map_coloring(self, tolerance, threshold_steps = 10): """! @brief Returns list of color indexes that are assigned to each object from input data space accordingly. @param[in] tolerance (double): Tolerance level that define maximal difference between outputs of oscillators in one synchronous ensemble. @param[in] threshold_steps (uint): Number of steps from the end of simulation that should be analysed for ensemble allocation. If amount of simulation steps has been less than threshold steps than amount of steps will be reduced to amount of simulation steps. @remark Results can be obtained only after network simulation (graph processing by the network). @return (list) Color indexes that are assigned to each object from input data space accordingly. @see allocate_clusters() """ clusters = self.allocate_clusters(tolerance, threshold_steps) coloring_map = [0] * len(self._dynamic[0]) for color_index in range(len(clusters)): for node_index in clusters[color_index]: coloring_map[node_index] = color_index return coloring_map
[ "def", "allocate_map_coloring", "(", "self", ",", "tolerance", ",", "threshold_steps", "=", "10", ")", ":", "clusters", "=", "self", ".", "allocate_clusters", "(", "tolerance", ",", "threshold_steps", ")", "coloring_map", "=", "[", "0", "]", "*", "len", "(",...
50.48
34.28
def photo(self, args): """ Retrieves metadata for a specific photo. flickr:(credsfile),photo,(photo_id) """ rsp = self._load_rsp(self.flickr.photos_getInfo(photo_id=args[0])) p = rsp['photo'] yield self._prep(p)
[ "def", "photo", "(", "self", ",", "args", ")", ":", "rsp", "=", "self", ".", "_load_rsp", "(", "self", ".", "flickr", ".", "photos_getInfo", "(", "photo_id", "=", "args", "[", "0", "]", ")", ")", "p", "=", "rsp", "[", "'photo'", "]", "yield", "se...
28.888889
14.666667
def get_callee_address( global_state: GlobalState, dynamic_loader: DynLoader, symbolic_to_address: Expression, ): """Gets the address of the callee. :param global_state: state to look in :param dynamic_loader: dynamic loader to use :param symbolic_to_address: The (symbolic) callee address :return: Address of the callee """ environment = global_state.environment try: callee_address = hex(util.get_concrete_int(symbolic_to_address)) except TypeError: log.debug("Symbolic call encountered") match = re.search(r"storage_(\d+)", str(simplify(symbolic_to_address))) log.debug("CALL to: " + str(simplify(symbolic_to_address))) if match is None or dynamic_loader is None: raise ValueError() index = int(match.group(1)) log.debug("Dynamic contract address at storage index {}".format(index)) # attempt to read the contract address from instance storage try: callee_address = dynamic_loader.read_storage( environment.active_account.address, index ) # TODO: verify whether this happens or not except: log.debug("Error accessing contract storage.") raise ValueError # testrpc simply returns the address, geth response is more elaborate. if not re.match(r"^0x[0-9a-f]{40}$", callee_address): callee_address = "0x" + callee_address[26:] return callee_address
[ "def", "get_callee_address", "(", "global_state", ":", "GlobalState", ",", "dynamic_loader", ":", "DynLoader", ",", "symbolic_to_address", ":", "Expression", ",", ")", ":", "environment", "=", "global_state", ".", "environment", "try", ":", "callee_address", "=", ...
33.906977
21.604651
def version(bin_env=None): ''' .. versionadded:: 0.17.0 Returns the version of pip. Use ``bin_env`` to specify the path to a virtualenv and get the version of pip in that virtualenv. If unable to detect the pip version, returns ``None``. CLI Example: .. code-block:: bash salt '*' pip.version ''' contextkey = 'pip.version' if bin_env is not None: contextkey = '{0}.{1}'.format(contextkey, bin_env) if contextkey in __context__: return __context__[contextkey] cmd = _get_pip_bin(bin_env)[:] cmd.append('--version') ret = __salt__['cmd.run_all'](cmd, python_shell=False) if ret['retcode']: raise CommandNotFoundError('Could not find a `pip` binary') try: pip_version = re.match(r'^pip (\S+)', ret['stdout']).group(1) except AttributeError: pip_version = None __context__[contextkey] = pip_version return pip_version
[ "def", "version", "(", "bin_env", "=", "None", ")", ":", "contextkey", "=", "'pip.version'", "if", "bin_env", "is", "not", "None", ":", "contextkey", "=", "'{0}.{1}'", ".", "format", "(", "contextkey", ",", "bin_env", ")", "if", "contextkey", "in", "__cont...
25.361111
23.75
def _add_partition(self, connection, partition): """ Creates FDW for the partition. Args: connection: partition (orm.Partition): """ logger.debug('Creating foreign table for partition.\n partition: {}'.format(partition.name)) with connection.cursor() as cursor: postgres_med.add_partition(cursor, partition.datafile, partition.vid)
[ "def", "_add_partition", "(", "self", ",", "connection", ",", "partition", ")", ":", "logger", ".", "debug", "(", "'Creating foreign table for partition.\\n partition: {}'", ".", "format", "(", "partition", ".", "name", ")", ")", "with", "connection", ".", "cur...
36.545455
21.909091
def spot_from_dummy(self, dummy): """Make a real place and its spot from a dummy spot. Create a new :class:`board.Spot` instance, along with the underlying :class:`LiSE.Place` instance, and give it the name, position, and imagery of the provided dummy. """ (x, y) = self.to_local(*dummy.pos_up) x /= self.board.width y /= self.board.height self.board.spotlayout.add_widget( self.board.make_spot( self.board.character.new_place( dummy.name, _x=x, _y=y, _image_paths=list(dummy.paths) ) ) ) dummy.num += 1
[ "def", "spot_from_dummy", "(", "self", ",", "dummy", ")", ":", "(", "x", ",", "y", ")", "=", "self", ".", "to_local", "(", "*", "dummy", ".", "pos_up", ")", "x", "/=", "self", ".", "board", ".", "width", "y", "/=", "self", ".", "board", ".", "h...
32.181818
15.590909
def _valid_table_name(self, table_name): """Check if the table name is obviously invalid. """ if table_name is None or not len(table_name.strip()): raise ValueError("Invalid table name: %r" % table_name) return table_name.strip()
[ "def", "_valid_table_name", "(", "self", ",", "table_name", ")", ":", "if", "table_name", "is", "None", "or", "not", "len", "(", "table_name", ".", "strip", "(", ")", ")", ":", "raise", "ValueError", "(", "\"Invalid table name: %r\"", "%", "table_name", ")",...
44.666667
9.166667
def get_playlist_songs(self, playlist_id, limit=1000): """Get a playlists's all songs. :params playlist_id: playlist id. :params limit: length of result returned by weapi. :return: a list of Song object. """ url = 'http://music.163.com/weapi/v3/playlist/detail?csrf_token=' csrf = '' params = {'id': playlist_id, 'offset': 0, 'total': True, 'limit': limit, 'n': 1000, 'csrf_token': csrf} result = self.post_request(url, params) songs = result['playlist']['tracks'] songs = [Song(song['id'], song['name']) for song in songs] return songs
[ "def", "get_playlist_songs", "(", "self", ",", "playlist_id", ",", "limit", "=", "1000", ")", ":", "url", "=", "'http://music.163.com/weapi/v3/playlist/detail?csrf_token='", "csrf", "=", "''", "params", "=", "{", "'id'", ":", "playlist_id", ",", "'offset'", ":", ...
37.470588
18.529412
def collect_by_typename(obj_sequence, cache=None): """ collects objects from obj_sequence and stores them into buckets by type name. cache is an optional dict into which we collect the results. """ if cache is None: cache = {} for val in obj_sequence: key = type(val).__name__ bucket = cache.get(key, None) if bucket is not None: bucket.append(val) else: cache[key] = [val] return cache
[ "def", "collect_by_typename", "(", "obj_sequence", ",", "cache", "=", "None", ")", ":", "if", "cache", "is", "None", ":", "cache", "=", "{", "}", "for", "val", "in", "obj_sequence", ":", "key", "=", "type", "(", "val", ")", ".", "__name__", "bucket", ...
23.35
19.95
def SNR_hinken(imgs, bg=0, roi=None): ''' signal-to-noise ratio (SNR) as mean(images) / std(images) as defined in Hinken et.al. 2011 (DOI: 10.1063/1.3541766) works on unloaded images no memory overload if too many images are given ''' mean = None M = len(imgs) if bg is not 0: bg = imread(bg)[roi] if roi is not None: bg = bg[roi] #calc mean: for i in imgs: img = imread(i).asfarray() if roi is not None: img = img[roi] img -= bg if mean is None: #init mean = np.zeros_like(img) std = np.zeros_like(img) mean += img del img mean /= M #calc std of mean: for i in imgs: img = imread(i).asfarray() if roi is not None: img = img[roi] img -= bg std += (mean - img)**2 del img std = (std / M)**0.5 return mean.mean() / std.mean()
[ "def", "SNR_hinken", "(", "imgs", ",", "bg", "=", "0", ",", "roi", "=", "None", ")", ":", "mean", "=", "None", "M", "=", "len", "(", "imgs", ")", "if", "bg", "is", "not", "0", ":", "bg", "=", "imread", "(", "bg", ")", "[", "roi", "]", "if",...
26.162162
16.864865
def connect(servers=None, framed_transport=False, timeout=None, retry_time=60, recycle=None, round_robin=None, max_retries=3): """ Constructs a single ElasticSearch connection. Connects to a randomly chosen server on the list. If the connection fails, it will attempt to connect to each server on the list in turn until one succeeds. If it is unable to find an active server, it will throw a NoServerAvailable exception. Failing servers are kept on a separate list and eventually retried, no sooner than `retry_time` seconds after failure. :keyword servers: [server] List of ES servers with format: "hostname:port" Default: [("127.0.0.1",9500)] :keyword framed_transport: If True, use a TFramedTransport instead of a TBufferedTransport :keyword timeout: Timeout in seconds (e.g. 0.5) Default: None (it will stall forever) :keyword retry_time: Minimum time in seconds until a failed server is reinstated. (e.g. 0.5) Default: 60 :keyword recycle: Max time in seconds before an open connection is closed and returned to the pool. Default: None (Never recycle) :keyword max_retries: Max retry time on connection down :keyword round_robin: *DEPRECATED* :return ES client """ if servers is None: servers = [DEFAULT_SERVER] return ThreadLocalConnection(servers, framed_transport, timeout, retry_time, recycle, max_retries=max_retries)
[ "def", "connect", "(", "servers", "=", "None", ",", "framed_transport", "=", "False", ",", "timeout", "=", "None", ",", "retry_time", "=", "60", ",", "recycle", "=", "None", ",", "round_robin", "=", "None", ",", "max_retries", "=", "3", ")", ":", "if",...
38.948718
27.051282
def _shallow_copy_with_infer(self, values, **kwargs): """ Create a new Index inferring the class with passed value, don't copy the data, use the same object attributes with passed in attributes taking precedence. *this is an internal non-public method* Parameters ---------- values : the values to create the new Index, optional kwargs : updates the default attributes for this Index """ attributes = self._get_attributes_dict() attributes.update(kwargs) attributes['copy'] = False if not len(values) and 'dtype' not in kwargs: attributes['dtype'] = self.dtype if self._infer_as_myclass: try: return self._constructor(values, **attributes) except (TypeError, ValueError): pass return Index(values, **attributes)
[ "def", "_shallow_copy_with_infer", "(", "self", ",", "values", ",", "*", "*", "kwargs", ")", ":", "attributes", "=", "self", ".", "_get_attributes_dict", "(", ")", "attributes", ".", "update", "(", "kwargs", ")", "attributes", "[", "'copy'", "]", "=", "Fal...
36.916667
16.083333
def size(ctx, dataset, kwargs): "Show dataset size" kwargs = parse_kwargs(kwargs) (print)(data(dataset, **ctx.obj).get(**kwargs).complete_set.size)
[ "def", "size", "(", "ctx", ",", "dataset", ",", "kwargs", ")", ":", "kwargs", "=", "parse_kwargs", "(", "kwargs", ")", "(", "print", ")", "(", "data", "(", "dataset", ",", "*", "*", "ctx", ".", "obj", ")", ".", "get", "(", "*", "*", "kwargs", "...
31.2
20.4
def _complete(self): """ Performs completion at the current cursor location. """ context = self._get_context() if context: # Send the completion request to the kernel msg_id = self.kernel_manager.shell_channel.complete( '.'.join(context), # text self._get_input_buffer_cursor_line(), # line self._get_input_buffer_cursor_column(), # cursor_pos self.input_buffer) # block pos = self._get_cursor().position() info = self._CompletionRequest(msg_id, pos) self._request_info['complete'] = info
[ "def", "_complete", "(", "self", ")", ":", "context", "=", "self", ".", "_get_context", "(", ")", "if", "context", ":", "# Send the completion request to the kernel", "msg_id", "=", "self", ".", "kernel_manager", ".", "shell_channel", ".", "complete", "(", "'.'"...
48.5
15.214286
def run(data): """Proxy function to run the tool""" sample = data[0][0] work_dir = dd.get_work_dir(sample) out_dir = os.path.join(work_dir, "mirge") lib = _find_lib(sample) mirge = _find_mirge(sample) bowtie = _find_bowtie(sample) sps = dd.get_species(sample) species = SPS.get(sps, "") if not species: raise ValueError("species not supported (hsa, mmu, rno, dre, cel, dme): %s" % sps) if not lib: raise ValueError("-lib option is not set up in resources for mirge tool." " Read above warnings lines.") if not utils.file_exists(out_dir): with tx_tmpdir() as tmp_dir: sample_file = _create_sample_file(data, tmp_dir) do.run(_cmd().format(**locals()), "Running miRge2.0.") shutil.move(tmp_dir, out_dir) return [os.path.abspath(fn) for fn in glob.glob(os.path.join(out_dir, "*", "*"))]
[ "def", "run", "(", "data", ")", ":", "sample", "=", "data", "[", "0", "]", "[", "0", "]", "work_dir", "=", "dd", ".", "get_work_dir", "(", "sample", ")", "out_dir", "=", "os", ".", "path", ".", "join", "(", "work_dir", ",", "\"mirge\"", ")", "lib...
40.863636
17.590909
def init_bn_weight(layer): '''initilize batch norm layer weight. ''' n_filters = layer.num_features new_weights = [ add_noise(np.ones(n_filters, dtype=np.float32), np.array([0, 1])), add_noise(np.zeros(n_filters, dtype=np.float32), np.array([0, 1])), add_noise(np.zeros(n_filters, dtype=np.float32), np.array([0, 1])), add_noise(np.ones(n_filters, dtype=np.float32), np.array([0, 1])), ] layer.set_weights(new_weights)
[ "def", "init_bn_weight", "(", "layer", ")", ":", "n_filters", "=", "layer", ".", "num_features", "new_weights", "=", "[", "add_noise", "(", "np", ".", "ones", "(", "n_filters", ",", "dtype", "=", "np", ".", "float32", ")", ",", "np", ".", "array", "(",...
42.181818
23.090909
def service_info(self, short_name): """Get static information about a service. Args: short_name (string): The short name of the service to query Returns: dict: A dictionary with the long_name and preregistered info on this service. """ if short_name not in self.services: raise ArgumentError("Unknown service name", short_name=short_name) info = {} info['short_name'] = short_name info['long_name'] = self.services[short_name]['state'].long_name info['preregistered'] = self.services[short_name]['state'].preregistered return info
[ "def", "service_info", "(", "self", ",", "short_name", ")", ":", "if", "short_name", "not", "in", "self", ".", "services", ":", "raise", "ArgumentError", "(", "\"Unknown service name\"", ",", "short_name", "=", "short_name", ")", "info", "=", "{", "}", "info...
32.4
24.25
def _enqueue_capture(build, release, run, url, config_data, baseline=False): """Enqueues a task to run a capture process.""" # Validate the JSON config parses. try: config_dict = json.loads(config_data) except Exception, e: abort(utils.jsonify_error(e)) # Rewrite the config JSON to include the URL specified in this request. # Blindly overwrite anything that was there. config_dict['targetUrl'] = url config_data = json.dumps(config_dict) config_artifact = _save_artifact(build, config_data, 'application/json') db.session.add(config_artifact) db.session.flush() suffix = '' if baseline: suffix = ':baseline' task_id = '%s:%s%s' % (run.id, hashlib.sha1(url).hexdigest(), suffix) logging.info('Enqueueing capture task=%r, baseline=%r', task_id, baseline) work_queue.add( constants.CAPTURE_QUEUE_NAME, payload=dict( build_id=build.id, release_name=release.name, release_number=release.number, run_name=run.name, url=url, config_sha1sum=config_artifact.id, baseline=baseline, ), build_id=build.id, release_id=release.id, run_id=run.id, source='request_run', task_id=task_id) # Set the URL and config early to indicate to report_run that there is # still data pending even if 'image' and 'ref_image' are unset. if baseline: run.ref_url = url run.ref_config = config_artifact.id else: run.url = url run.config = config_artifact.id
[ "def", "_enqueue_capture", "(", "build", ",", "release", ",", "run", ",", "url", ",", "config_data", ",", "baseline", "=", "False", ")", ":", "# Validate the JSON config parses.", "try", ":", "config_dict", "=", "json", ".", "loads", "(", "config_data", ")", ...
32
19
def MultimodeCombine(pupils):
    """Return the instantaneous photometric fluxes and pairwise coherent
    fluxes for a multiway multimode combiner (no spatial filtering)."""
    # photometric flux of each beam: |<p, p>|
    fluxes = []
    for pupil in pupils:
        fluxes.append(np.vdot(pupil, pupil).real)
    # coherent flux for every unordered pair (i > j)
    coherentFluxes = []
    for i in range(1, len(pupils)):
        for j in range(i):
            coherentFluxes.append(np.vdot(pupils[i], pupils[j]))
    return fluxes, coherentFluxes
[ "def", "MultimodeCombine", "(", "pupils", ")", ":", "fluxes", "=", "[", "np", ".", "vdot", "(", "pupils", "[", "i", "]", ",", "pupils", "[", "i", "]", ")", ".", "real", "for", "i", "in", "range", "(", "len", "(", "pupils", ")", ")", "]", "coher...
41.1
11.9
def delete_token(): ''' Delete current token, file & CouchDB admin user ''' username = get_admin()[0] admins = get_couchdb_admins() # Delete current admin if exist if username in admins: print 'I delete {} CouchDB user'.format(username) delete_couchdb_admin(username) # Delete token file if exist if os.path.isfile(LOGIN_FILENAME): print 'I delete {} token file'.format(LOGIN_FILENAME) os.remove(LOGIN_FILENAME)
[ "def", "delete_token", "(", ")", ":", "username", "=", "get_admin", "(", ")", "[", "0", "]", "admins", "=", "get_couchdb_admins", "(", ")", "# Delete current admin if exist", "if", "username", "in", "admins", ":", "print", "'I delete {} CouchDB user'", ".", "for...
29.375
17.25
def generate(env):
    """Add Builders and construction variables for cyglink to an Environment."""
    gnulink.generate(env)

    env['LINKFLAGS'] = SCons.Util.CLVar('-Wl,-no-undefined')

    env['SHLINKCOM'] = shlib_action
    env['LDMODULECOM'] = ldmod_action
    env.Append(SHLIBEMITTER=[shlib_emitter])
    env.Append(LDMODULEEMITTER=[ldmod_emitter])

    # cygwin naming conventions plus versioned-library flag passthrough
    # (SHLIBVERSIONFLAGS and LDMODULEVERSIONFLAGS are the same as in gnulink)
    for key, value in (('SHLIBPREFIX', 'cyg'),
                       ('SHLIBSUFFIX', '.dll'),
                       ('IMPLIBPREFIX', 'lib'),
                       ('IMPLIBSUFFIX', '.dll.a'),
                       ('_SHLIBVERSIONFLAGS', '$SHLIBVERSIONFLAGS'),
                       ('_LDMODULEVERSIONFLAGS', '$LDMODULEVERSIONFLAGS')):
        env[key] = value

    # LINKCALLBACKS are NOT inherited from gnulink
    env['LINKCALLBACKS'] = {
        'VersionedShLibSuffix'   : _versioned_lib_suffix,
        'VersionedLdModSuffix'   : _versioned_lib_suffix,
        'VersionedImpLibSuffix'  : _versioned_lib_suffix,
        'VersionedShLibName'     : link._versioned_shlib_name,
        'VersionedLdModName'     : link._versioned_ldmod_name,
        'VersionedShLibImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='ShLib'),
        'VersionedLdModImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='LdMod'),
        'VersionedShLibImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='ShLib'),
        'VersionedLdModImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='LdMod'),
    }

    # these variables were set by gnulink but are not used in cyglink
    for stale in ('_SHLIBSONAME', '_LDMODULESONAME'):
        try:
            del env[stale]
        except KeyError:
            pass
[ "def", "generate", "(", "env", ")", ":", "gnulink", ".", "generate", "(", "env", ")", "env", "[", "'LINKFLAGS'", "]", "=", "SCons", ".", "Util", ".", "CLVar", "(", "'-Wl,-no-undefined'", ")", "env", "[", "'SHLINKCOM'", "]", "=", "shlib_action", "env", ...
43.121951
24.585366
def indication(self, pdu):
    """Client requests are queued for delivery via the peer actor."""
    if _debug: UDPDirector._debug("indication %r", pdu)

    # find (or lazily create) the actor for this destination address
    addr = pdu.pduDestination
    peer = self.peers.get(addr, None)
    if not peer:
        peer = self.actorClass(self, addr)

    # hand the PDU to the actor
    peer.indication(pdu)
[ "def", "indication", "(", "self", ",", "pdu", ")", ":", "if", "_debug", ":", "UDPDirector", ".", "_debug", "(", "\"indication %r\"", ",", "pdu", ")", "# get the destination", "addr", "=", "pdu", ".", "pduDestination", "# get the peer", "peer", "=", "self", "...
27.428571
17.285714
def elixir_decode(elixir_filename):
    """
    Takes an elixir style file name and decodes its content; the values
    are returned as a pyfits Header.

    Elixir filenames have the format
    RUNID.TYPE.FILTER/EXPTIME.CHIPID.VERSION.fits

    Raises:
        ValueError: if the name does not split into at least 5 dot-separated
            parts.
    """
    import re, pyfits

    parts_RE = re.compile(r'([^\.\s]+)')
    dataset_name = parts_RE.findall(elixir_filename)

    # check that this was a valid elixir_filename
    if not dataset_name or len(dataset_name) < 5:
        raise ValueError('String %s does not parse as elixir filename' % elixir_filename)

    comments = {'exptime': 'Integration time (seconds)',
                'filter': 'Name of filter in position ',
                'crunid': 'CFHT Q RunID',
                'obstype': 'Observation or Exposure type',
                'imageid': 'CCD chip number',
                'filename': 'file name at creation of this MEF file'}

    keywords = {}
    keywords['filename'] = elixir_filename
    keywords['runid'] = dataset_name[0]
    keywords['obstype'] = dataset_name[1]
    keywords['exptime'] = None
    keywords['filter'] = None
    # if the third part of the name is all numbers we assume exposure time
    if re.match(r'\d+', dataset_name[2]):
        # BUG FIX: was 'keyword[...]' -- an undefined name at this point
        # (NameError); store into the keywords dict instead.
        keywords['exptime'] = int(dataset_name[2])
    else:
        # BUG FIX: same 'keyword' -> 'keywords' typo on this branch.
        keywords['filter'] = dataset_name[2]
    keywords['imageid'] = dataset_name[3]
    keywords['version'] = dataset_name[4]

    header = pyfits.Header()
    for keyword in keywords.keys():
        if keywords[keyword]:
            # BUG FIX: was 'comment[keyword]' (undefined name).  Use the
            # comments dict; keys with no comment text (e.g. 'runid',
            # 'version') fall back to an empty comment.
            header.update(keyword, keywords[keyword],
                          comment=comments.get(keyword, ''))
    return header
[ "def", "elixir_decode", "(", "elixir_filename", ")", ":", "import", "re", ",", "pyfits", "parts_RE", "=", "re", ".", "compile", "(", "r'([^\\.\\s]+)'", ")", "dataset_name", "=", "parts_RE", ".", "findall", "(", "elixir_filename", ")", "### check that this was a va...
33.319149
17.191489
def lines_from_tree(tree, nodes_and_set:bool=False) -> iter:
    """Yield lines of bubble format describing the given BubbleTree."""
    node_tpl = 'NODE\t{}'
    incl_tpl = 'IN\t{}\t{}'
    edge_tpl = 'EDGE\t{}\t{}\t1.0'
    set_tpl = 'SET\t{}'

    if nodes_and_set:
        # emit node declarations first, then powernode (set) declarations
        yield from (node_tpl.format(node) for node in tree.nodes())
        yield from (set_tpl.format(node) for node in tree.powernodes())
    # containment relations: each included element inside its container
    for container, includeds in tree.inclusions.items():
        for included in includeds:
            yield incl_tpl.format(included, container)
    # edges, all with weight 1.0
    for source, targets in tree.edges.items():
        for target in targets:
            yield edge_tpl.format(source, target)
[ "def", "lines_from_tree", "(", "tree", ",", "nodes_and_set", ":", "bool", "=", "False", ")", "->", "iter", ":", "NODE", "=", "'NODE\\t{}'", "INCL", "=", "'IN\\t{}\\t{}'", "EDGE", "=", "'EDGE\\t{}\\t{}\\t1.0'", "SET", "=", "'SET\\t{}'", "if", "nodes_and_set", "...
29.190476
15.428571
def get_pages(url):
    """
    Yield the starting url and each following page.

    Technically: yield the url, download it, look for an anchor whose text
    starts with "next " and follow it; stop when no such link exists.
    """
    while True:
        yield url
        body = html.parse(url).find("body")
        next_links = [a for a in body.findall(".//a")
                      if a.text and a.text.startswith("next ")]
        if not next_links:
            return
        url = urljoin(url, next_links[0].get('href'))
[ "def", "get_pages", "(", "url", ")", ":", "while", "True", ":", "yield", "url", "doc", "=", "html", ".", "parse", "(", "url", ")", ".", "find", "(", "\"body\"", ")", "links", "=", "[", "a", "for", "a", "in", "doc", ".", "findall", "(", "\".//a\""...
33.5
17.5
def get_safe_redirect_target(arg='next'):
    """Get URL to redirect to and ensure that it is local."""
    # prefer the explicit request argument, then fall back to the referrer
    for candidate in (request.args.get(arg), request.referrer):
        if candidate and is_local_url(candidate):
            return candidate
    return None
[ "def", "get_safe_redirect_target", "(", "arg", "=", "'next'", ")", ":", "for", "target", "in", "request", ".", "args", ".", "get", "(", "arg", ")", ",", "request", ".", "referrer", ":", "if", "not", "target", ":", "continue", "if", "is_local_url", "(", ...
34.25
13.125
def site_symbols(self):
    """
    Sequence of symbols associated with the Xdatcar, with consecutive
    duplicates collapsed.  Similar to 6th line in vasp 5+ Xdatcar.
    """
    symbols = (site.specie.symbol for site in self.structures[0])
    return [symbol for symbol, _ in itertools.groupby(symbols)]
[ "def", "site_symbols", "(", "self", ")", ":", "syms", "=", "[", "site", ".", "specie", ".", "symbol", "for", "site", "in", "self", ".", "structures", "[", "0", "]", "]", "return", "[", "a", "[", "0", "]", "for", "a", "in", "itertools", ".", "grou...
38.285714
16
def set_attribute(self, name, value):
    """ Default handler for attributes not explicitly defined: booleans
    toggle the attribute's presence, other values are stringified. """
    if value is True:
        # boolean attributes are set to their own name (e.g. checked="checked")
        self.widget.set(name, name)
        return
    if value is False:
        del self.widget.attrib[name]
        return
    self.widget.set(name, str(value))
[ "def", "set_attribute", "(", "self", ",", "name", ",", "value", ")", ":", "if", "value", "is", "True", ":", "self", ".", "widget", ".", "set", "(", "name", ",", "name", ")", "elif", "value", "is", "False", ":", "del", "self", ".", "widget", ".", ...
36.375
7.875
def get_default_configs(self):
    """ returns default configs list, from /etc, home dir and package_data"""
    # start with the bundled base config
    configs = [resource_filename(__name__, 'config/00-base.ini')]
    try:
        for filename in sorted(os.listdir(self.baseconfigs_location)):
            if fnmatch.fnmatch(filename, '*.ini'):
                configs.append(os.path.realpath(
                    self.baseconfigs_location + os.sep + filename))
    except OSError:
        self.log.warn(
            self.baseconfigs_location + ' is not accessible to get configs list')
    # per-user config comes last so it wins
    configs.append(os.path.expanduser('~/.yandex-tank'))
    return configs
[ "def", "get_default_configs", "(", "self", ")", ":", "# initialize basic defaults", "configs", "=", "[", "resource_filename", "(", "__name__", ",", "'config/00-base.ini'", ")", "]", "try", ":", "conf_files", "=", "sorted", "(", "os", ".", "listdir", "(", "self",...
42.833333
18.333333
def load(self, service_name, api_version=None, cached=True):
    """
    Loads the desired JSON for a service.

    This will fall back through all the ``data_dirs`` provided to the
    constructor, returning the **first** one it finds.

    :param service_name: The name of the desired service
    :type service_name: string

    :param api_version: (Optional) The desired API version to load
    :type service_name: string

    :param cached: (Optional) Whether or not the cache should be used
        when attempting to load the data. Default is ``True``.
    :type cached: boolean

    :returns: The loaded JSON as a dict
    """
    # Fetch from the cache first if it's there (EAFP instead of nested 'in').
    if cached:
        try:
            return self._loaded_data[service_name][api_version]
        except KeyError:
            pass

    options = self.get_available_options(service_name)
    match, version = self.get_best_match(
        options, service_name, api_version=api_version
    )

    with open(match, 'r') as json_file:
        data = json.load(json_file)

    # Embed where we found it from for debugging purposes.
    data['__file__'] = match
    data['api_version'] = version

    if cached:
        self._loaded_data.setdefault(service_name, {})[api_version] = data

    return data
[ "def", "load", "(", "self", ",", "service_name", ",", "api_version", "=", "None", ",", "cached", "=", "True", ")", ":", "# Fetch from the cache first if it's there.", "if", "cached", ":", "if", "service_name", "in", "self", ".", "_loaded_data", ":", "if", "api...
34.659091
20.840909
def state(self):
    """Compute and return the device state.

    :returns: Device state, derived from the first matching condition
        (disconnected, off, screen saver, launcher/settings, wake lock),
        defaulting to paused.
    """
    if not self.available:
        return STATE_UNKNOWN          # disconnected
    if not self.screen_on:
        return STATE_OFF              # powered off
    if not self.awake:
        return STATE_IDLE             # screen saver active
    if self.launcher or self.settings:
        return STATE_STANDBY          # launcher/settings in foreground
    if self.wake_lock:
        return STATE_PLAYING          # wake lock held while playing
    return STATE_PAUSED
[ "def", "state", "(", "self", ")", ":", "# Check if device is disconnected.", "if", "not", "self", ".", "available", ":", "return", "STATE_UNKNOWN", "# Check if device is off.", "if", "not", "self", ".", "screen_on", ":", "return", "STATE_OFF", "# Check if screen saver...
31.090909
9.545455
def forward_committor(T, A, B):
    r"""Forward committor between given sets.

    The forward committor u(x) between sets A and B is the probability for
    the chain starting in x to reach B before reaching A.

    Parameters
    ----------
    T : (M, M) scipy.sparse matrix
        Transition matrix
    A : array_like
        List of integer state labels for set A
    B : array_like
        List of integer state labels for set B

    Returns
    -------
    u : (M, ) ndarray
        Vector of forward committor probabilities

    Notes
    -----
    The forward committor is a solution to the following
    boundary-value problem

    .. math::

        \sum_j L_{ij} u_{j}=0    for i in X\(A u B) (I)
                      u_{i}=0    for i \in A        (II)
                      u_{i}=1    for i \in B        (III)

    with generator matrix L=(P-I).
    """
    A = set(A)
    B = set(B)
    if A.intersection(B):
        raise ValueError("Sets A and B have to be disjoint")

    L = T - eye(T.shape[0], T.shape[0])

    # Assemble left-hand side W for the linear system.
    # Equation (I): transition states keep their generator rows.
    W = 1.0 * L
    # Equation (II): zero out rows in A, then put 1 on their diagonal.
    # BUG FIX: the original called W.tocsr() here and after the B block and
    # discarded the result (sparse conversions are not in-place); those
    # no-op statements are removed.  The unused X/notAB locals are dropped.
    W = W.todok()
    W[list(A), :] = 0.0
    W = W + coo_matrix((np.ones(len(A)), (list(A), list(A))), shape=W.shape).tocsr()
    # Equation (III): same identity rows for states in B.
    W = W.todok()
    W[list(B), :] = 0.0
    W = W + coo_matrix((np.ones(len(B)), (list(B), list(B))), shape=W.shape).tocsr()

    # Right-hand side r: zeros (equations I and II), ones on B (equation III).
    r = np.zeros(T.shape[0])
    r[list(B)] = 1.0

    return spsolve(W, r)
[ "def", "forward_committor", "(", "T", ",", "A", ",", "B", ")", ":", "X", "=", "set", "(", "range", "(", "T", ".", "shape", "[", "0", "]", ")", ")", "A", "=", "set", "(", "A", ")", "B", "=", "set", "(", "B", ")", "AB", "=", "A", ".", "in...
24.985075
21.089552
def setRti(self, rti):
    """ Updates the current VisItem from the contents of the repo tree item.

        Declared as a slot, but the connected Collector usually calls it
        directly rather than via a signal.
    """
    check_class(rti, BaseRti)  # TODO: perhaps also require rti.isSliceable later
    self._rti = rti
    self._updateWidgets()
    self._updateRtiInfo()
[ "def", "setRti", "(", "self", ",", "rti", ")", ":", "check_class", "(", "rti", ",", "BaseRti", ")", "#assert rti.isSliceable, \"RTI must be sliceable\" # TODO: maybe later", "self", ".", "_rti", "=", "rti", "self", ".", "_updateWidgets", "(", ")", "self", ".", "...
35.833333
19.75
def initialize(  # type: ignore
    self,
    max_clients: int = 10,
    hostname_mapping: Dict[str, str] = None,
    max_buffer_size: int = 104857600,
    resolver: Resolver = None,
    defaults: Dict[str, Any] = None,
    max_header_size: int = None,
    max_body_size: int = None,
) -> None:
    """Creates a AsyncHTTPClient.

    Only a single AsyncHTTPClient instance exists per IOLoop in
    order to provide limitations on the number of pending connections.
    ``force_instance=True`` may be used to suppress this behavior.

    Note that because of this implicit reuse, unless ``force_instance``
    is used, only the first call to the constructor actually uses
    its arguments. It is recommended to use the ``configure`` method
    instead of the constructor to ensure that arguments take effect.

    ``max_clients`` is the number of concurrent requests that can be
    in progress; when this limit is reached additional requests will be
    queued. Note that time spent waiting in this queue still counts
    against the ``request_timeout``.

    ``hostname_mapping`` is a dictionary mapping hostnames to IP addresses.
    It can be used to make local DNS changes when modifying system-wide
    settings like ``/etc/hosts`` is not possible or desirable (e.g. in
    unittests).

    ``max_buffer_size`` (default 100MB) is the number of bytes
    that can be read into memory at once. ``max_body_size``
    (defaults to ``max_buffer_size``) is the largest response body
    that the client will accept.  Without a
    ``streaming_callback``, the smaller of these two limits
    applies; with a ``streaming_callback`` only ``max_body_size``
    does.

    .. versionchanged:: 4.2
       Added the ``max_body_size`` argument.
    """
    super(SimpleAsyncHTTPClient, self).initialize(defaults=defaults)
    self.max_clients = max_clients
    # requests beyond max_clients wait in this FIFO
    self.queue = (
        collections.deque()
    )  # type: Deque[Tuple[object, HTTPRequest, Callable[[HTTPResponse], None]]]
    # requests currently in flight, keyed by an opaque key object
    self.active = (
        {}
    )  # type: Dict[object, Tuple[HTTPRequest, Callable[[HTTPResponse], None]]]
    # queued requests together with their queue-timeout handles
    self.waiting = (
        {}
    )  # type: Dict[object, Tuple[HTTPRequest, Callable[[HTTPResponse], None], object]]
    self.max_buffer_size = max_buffer_size
    self.max_header_size = max_header_size
    self.max_body_size = max_body_size
    # TCPClient could create a Resolver for us, but we have to do it
    # ourselves to support hostname_mapping.
    if resolver:
        self.resolver = resolver
        # caller owns the resolver, so we must not close it
        self.own_resolver = False
    else:
        self.resolver = Resolver()
        self.own_resolver = True
    if hostname_mapping is not None:
        # wrap (possibly caller-provided) resolver with the static mapping
        self.resolver = OverrideResolver(
            resolver=self.resolver, mapping=hostname_mapping
        )
    self.tcp_client = TCPClient(resolver=self.resolver)
[ "def", "initialize", "(", "# type: ignore", "self", ",", "max_clients", ":", "int", "=", "10", ",", "hostname_mapping", ":", "Dict", "[", "str", ",", "str", "]", "=", "None", ",", "max_buffer_size", ":", "int", "=", "104857600", ",", "resolver", ":", "Re...
43.391304
20.84058
def reload_(name):
    '''
    Reload the named service

    CLI Example:

    .. code-block:: bash

        salt '*' service.reload <service name>
    '''
    cmd = '/usr/sbin/svcadm refresh {0}'.format(name)
    if __salt__['cmd.retcode'](cmd, python_shell=False):
        # refresh itself failed
        return False
    # refresh succeeded, but it neither clears maintenance state nor reports
    # the service as 'online', so issue a start to finish the job
    return start(name)
[ "def", "reload_", "(", "name", ")", ":", "cmd", "=", "'/usr/sbin/svcadm refresh {0}'", ".", "format", "(", "name", ")", "if", "not", "__salt__", "[", "'cmd.retcode'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", ":", "# calling reload doesn't clear...
25.8125
23.0625
def on_user_init(target, args, kwargs):
    """Provide hook on :class:`~invenio_accounts.models.User` initialization.

    Automatically convert a dict to a :class:`~.UserProfile` instance. This
    is needed during e.g. user registration, where Flask-Security
    initializes the user model with all the form data (which, when
    Invenio-UserProfiles is enabled, includes a ``profile`` key).  Without
    the conversion, user creation would fail.
    """
    profile = kwargs.pop('profile', None)
    # pass through when nothing to convert (None or already a UserProfile)
    if profile is None or isinstance(profile, UserProfile):
        kwargs['profile'] = profile
        return
    converted = UserProfile(**profile)
    user_id = kwargs.get('id')
    if user_id:
        converted.user_id = user_id
    kwargs['profile'] = converted
[ "def", "on_user_init", "(", "target", ",", "args", ",", "kwargs", ")", ":", "profile", "=", "kwargs", ".", "pop", "(", "'profile'", ",", "None", ")", "if", "profile", "is", "not", "None", "and", "not", "isinstance", "(", "profile", ",", "UserProfile", ...
47.875
15.25
def hide_tool(self, context_name, tool_name):
    """Hide a tool so that it is not exposed in the suite.

    Args:
        context_name (str): Context containing the tool.
        tool_name (str): Name of tool to hide.
    """
    hidden_tools = self._context(context_name)["hidden_tools"]
    if tool_name in hidden_tools:
        return  # already hidden; nothing to do
    self._validate_tool(context_name, tool_name)
    hidden_tools.add(tool_name)
    self._flush_tools()
[ "def", "hide_tool", "(", "self", ",", "context_name", ",", "tool_name", ")", ":", "data", "=", "self", ".", "_context", "(", "context_name", ")", "hidden_tools", "=", "data", "[", "\"hidden_tools\"", "]", "if", "tool_name", "not", "in", "hidden_tools", ":", ...
37.923077
10.307692
def set_meta(mcs, bases, attr):
    """
    Get all of the ``Meta`` classes from bases and combine them with this
    class.

    Pops or creates ``Meta`` from attributes, combines all bases, adds
    ``_meta`` to attributes with all meta.

    :param bases: bases of this class
    :param attr: class attributes
    :return: attributes with ``Meta`` class from combined parents
    """
    # pop the meta class from the attributes (create an empty one if absent)
    meta = attr.pop(mcs._meta_cls, types.ClassType(mcs._meta_cls, (), {}))
    # names already defined directly on this class's meta take precedence
    own_names = get_public_attributes(meta)
    for base in bases:
        base_meta = getattr(base, mcs._meta_cls, None)
        if base_meta is None:
            continue  # this base contributes no meta
        for name in get_public_attributes(base_meta, as_list=False):
            if name in own_names:
                continue  # already set; do not overwrite
            setattr(meta, name, getattr(base_meta, name))
    # expose the combined meta under the private attribute
    attr[mcs._meta_attr] = meta
    return attr
[ "def", "set_meta", "(", "mcs", ",", "bases", ",", "attr", ")", ":", "# pop the meta class from the attributes", "meta", "=", "attr", ".", "pop", "(", "mcs", ".", "_meta_cls", ",", "types", ".", "ClassType", "(", "mcs", ".", "_meta_cls", ",", "(", ")", ",...
40.483871
15.774194
def get_loginclass(name):
    '''
    Get the login class of the user

    name
        User to get the information

    .. note::
        This function only applies to OpenBSD systems.

    CLI Example:

    .. code-block:: bash

        salt '*' user.get_loginclass foo
    '''
    if __grains__['kernel'] != 'OpenBSD':
        return False
    userinfo = __salt__['cmd.run_stdout'](
        ['userinfo', name], python_shell=False)
    # the class is the second field of the first line starting with 'class'
    for line in userinfo.splitlines():
        if line.startswith('class'):
            fields = line.split(None, 1)
            if len(fields) > 1:
                return fields[1]
    return ''
[ "def", "get_loginclass", "(", "name", ")", ":", "if", "__grains__", "[", "'kernel'", "]", "!=", "'OpenBSD'", ":", "return", "False", "userinfo", "=", "__salt__", "[", "'cmd.run_stdout'", "]", "(", "[", "'userinfo'", ",", "name", "]", ",", "python_shell", "...
22.032258
19.580645
def evaluate(data_eval, model, nsp_loss, mlm_loss, vocab_size, ctx, log_interval, dtype):
    """Evaluation function.

    Runs the BERT pretraining forward pass over every batch of every
    dataloader in data_eval, accumulating masked-LM and next-sentence
    losses/accuracies, and logs the aggregate at the end.

    Args:
        data_eval: iterable of dataloaders to evaluate on.
        model: the model passed through to forward().
        nsp_loss, mlm_loss: loss functions for the two pretraining tasks.
        vocab_size: vocabulary size, forwarded to forward().
        ctx: list of mxnet contexts the batch is split across.
        log_interval: steps between intermediate log lines.
        dtype: compute dtype forwarded to forward().
    """
    mlm_metric = MaskedAccuracy()
    nsp_metric = MaskedAccuracy()
    mlm_metric.reset()
    nsp_metric.reset()
    eval_begin_time = time.time()
    begin_time = time.time()
    step_num = 0
    running_mlm_loss = running_nsp_loss = 0
    total_mlm_loss = total_nsp_loss = 0
    running_num_tks = 0
    for _, dataloader in enumerate(data_eval):
        for _, data_batch in enumerate(dataloader):
            step_num += 1
            # shard the batch across devices
            data_list = split_and_load(data_batch, ctx)
            loss_list = []
            ns_label_list, ns_pred_list = [], []
            mask_label_list, mask_pred_list, mask_weight_list = [], [], []
            for data in data_list:
                out = forward(data, model, mlm_loss, nsp_loss, vocab_size, dtype)
                (ls, next_sentence_label, classified, masked_id, decoded, masked_weight, ls1, ls2, valid_length) = out
                loss_list.append(ls)
                ns_label_list.append(next_sentence_label)
                ns_pred_list.append(classified)
                mask_label_list.append(masked_id)
                mask_pred_list.append(decoded)
                mask_weight_list.append(masked_weight)
                # pull per-shard losses/token counts back to CPU for accumulation
                running_mlm_loss += ls1.as_in_context(mx.cpu())
                running_nsp_loss += ls2.as_in_context(mx.cpu())
                running_num_tks += valid_length.sum().as_in_context(mx.cpu())
            nsp_metric.update(ns_label_list, ns_pred_list)
            mlm_metric.update(mask_label_list, mask_pred_list, mask_weight_list)
            # logging
            # NOTE(review): totals are only folded in at log boundaries; any
            # running loss accumulated since the last log line is not added
            # to total_*_loss before the final division -- confirm intended.
            if (step_num + 1) % (log_interval) == 0:
                total_mlm_loss += running_mlm_loss
                total_nsp_loss += running_nsp_loss
                log(begin_time, running_num_tks, running_mlm_loss, running_nsp_loss, step_num, mlm_metric, nsp_metric, None, log_interval)
                begin_time = time.time()
                running_mlm_loss = running_nsp_loss = running_num_tks = 0
                mlm_metric.reset_local()
                nsp_metric.reset_local()
    # block until all async mxnet work is finished before timing/reporting
    mx.nd.waitall()
    eval_end_time = time.time()
    total_mlm_loss /= step_num
    total_nsp_loss /= step_num
    logging.info('mlm_loss={:.3f}\tmlm_acc={:.1f}\tnsp_loss={:.3f}\tnsp_acc={:.1f}\t'
                 .format(total_mlm_loss.asscalar(), mlm_metric.get_global()[1] * 100,
                         total_nsp_loss.asscalar(), nsp_metric.get_global()[1] * 100))
    logging.info('Eval cost={:.1f}s'.format(eval_end_time - eval_begin_time))
[ "def", "evaluate", "(", "data_eval", ",", "model", ",", "nsp_loss", ",", "mlm_loss", ",", "vocab_size", ",", "ctx", ",", "log_interval", ",", "dtype", ")", ":", "mlm_metric", "=", "MaskedAccuracy", "(", ")", "nsp_metric", "=", "MaskedAccuracy", "(", ")", "...
45.280702
20.157895
def maps_get_default_rules_output_rules_rbridgeid(self, **kwargs):
    """Auto Generated Code.

    Builds the <maps_get_default_rules>/<output>/<rules>/<rbridgeid>
    XML payload and hands it to the callback (kwargs['callback'] or
    self._callback).
    """
    config = ET.Element("maps_get_default_rules")
    rules = ET.SubElement(ET.SubElement(config, "output"), "rules")
    ET.SubElement(rules, "rbridgeid").text = kwargs.pop('rbridgeid')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
[ "def", "maps_get_default_rules_output_rules_rbridgeid", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "maps_get_default_rules", "=", "ET", ".", "Element", "(", "\"maps_get_default_rules\"", ")", "confi...
42.384615
13.538462
def conv_to_json(obj, fields=None):
    """ return cdx as json dictionary string

    if ``fields`` is ``None``, output will include all fields in order
    stored (skipping keys starting with '_'), otherwise only the
    specified fields that exist in ``obj`` are included
    """
    if fields is None:
        selected = OrderedDict((key, obj[key]) for key in obj
                               if not key.startswith('_'))
    else:
        selected = OrderedDict((key, obj[key]) for key in fields if key in obj)
    return json_encode(selected) + '\n'
[ "def", "conv_to_json", "(", "obj", ",", "fields", "=", "None", ")", ":", "if", "fields", "is", "None", ":", "return", "json_encode", "(", "OrderedDict", "(", "(", "(", "x", ",", "obj", "[", "x", "]", ")", "for", "x", "in", "obj", "if", "not", "x"...
35.933333
24.066667
def file_rows(self, fo):
    """Return up to NUMROWS lines from open file object *fo* as a list.

    Stops early at end of file.
    """
    rows = []
    for _ in range(NUMROWS):
        line = fo.readline()
        if not line:
            break  # end of file before NUMROWS lines
        rows.append(line)
    return rows
[ "def", "file_rows", "(", "self", ",", "fo", ")", ":", "rows", "=", "[", "]", "for", "i", "in", "range", "(", "NUMROWS", ")", ":", "line", "=", "fo", ".", "readline", "(", ")", "if", "not", "line", ":", "break", "rows", "+=", "[", "line", "]", ...
21.769231
18.846154
def get_thin_interval(self):
    """Gets the thin interval to use.

    If ``max_samples_per_chain`` is set, figure out what interval keeps the
    chain under that cap; the result is a multiple of the current
    ``thin_interval`` so samples already on disk can be thinned
    consistently, and never less than 1.
    """
    if self.max_samples_per_chain is None:
        return self.thin_interval
    # factor of 2: the new interval must be at least twice as large as any
    # previously used interval
    required = 2 * self.niterations // self.max_samples_per_chain
    # round down to a multiple of the current interval
    aligned = (required // self.thin_interval) * self.thin_interval
    return max(aligned, 1)
[ "def", "get_thin_interval", "(", "self", ")", ":", "if", "self", ".", "max_samples_per_chain", "is", "not", "None", ":", "# the extra factor of 2 is to account for the fact that the thin", "# interval will need to be at least twice as large as a previously", "# used interval", "thi...
49.47619
21.380952
def set_options(self, option_type, option_dict, force_options=False):
    """set plot options
    """
    if force_options:
        # blind update, bypassing the type-specific handling below
        self.options[option_type].update(option_dict)
    elif option_type in ('yAxis', 'xAxis') and isinstance(option_dict, list):
        # a list for an axis means multiple axes: rebuild as MultiAxis
        self.options[option_type] = MultiAxis(option_type)
        for axis_options in option_dict:
            self.options[option_type].update(**axis_options)
    elif option_type == 'colors':
        # option_dict should be a list of colors
        self.options["colors"].set_colors(option_dict)
    elif option_type in ("global", "lang"):
        # these map to Highcharts.setOptions
        self.setOptions[option_type].update_dict(**option_dict)
    else:
        self.options[option_type].update_dict(**option_dict)
[ "def", "set_options", "(", "self", ",", "option_type", ",", "option_dict", ",", "force_options", "=", "False", ")", ":", "if", "force_options", ":", "self", ".", "options", "[", "option_type", "]", ".", "update", "(", "option_dict", ")", "elif", "(", "opti...
54.333333
22.533333
def list_fonts_with_info(self, pattern, max_names):
    """Return a list of fonts matching pattern, at most max_names long.

    Each list item represents one font and carries: name, min_bounds,
    max_bounds, min_char_or_byte2, max_char_or_byte2, default_char,
    draw_direction, min_byte1, max_byte1, all_chars_exist, font_ascent,
    font_descent and replies_hint -- see the description of XFontStruct
    in XGetFontProperty(3X11) for details -- plus ``properties``, a list
    of entries with a ``name`` atom and a 32-bit unsigned ``value``.
    """
    return request.ListFontsWithInfo(display=self.display,
                                     max_names=max_names,
                                     pattern=pattern)
[ "def", "list_fonts_with_info", "(", "self", ",", "pattern", ",", "max_names", ")", ":", "return", "request", ".", "ListFontsWithInfo", "(", "display", "=", "self", ".", "display", ",", "max_names", "=", "max_names", ",", "pattern", "=", "pattern", ")" ]
32.1875
18.59375
def contribute_to_class(self, cls, name):
    """
    Attach the field to *cls* and hook post_init so the thumbnail gets
    set whenever the image field is initialized.
    """
    super(SizedImageField, self).contribute_to_class(cls, name)
    signals.post_init.connect(self._set_thumbnail, sender=cls)
[ "def", "contribute_to_class", "(", "self", ",", "cls", ",", "name", ")", ":", "super", "(", "SizedImageField", ",", "self", ")", ".", "contribute_to_class", "(", "cls", ",", "name", ")", "signals", ".", "post_init", ".", "connect", "(", "self", ".", "_se...
43.833333
13.5
def trending(params): """gets trending content values """ # get params try: series = params.get("site", [DEFAULT_SERIES])[0] offset = params.get("offset", [DEFAULT_GROUP_BY])[0] limit = params.get("limit", [20])[0] except Exception as e: LOGGER.exception(e) return json.dumps({"error": e.message}), "500 Internal Error" # check the cache cache_key = "{}:{}:{}:{}:{}".format(memcached_prefix, "trending.json", series, offset, limit) try: data = MEMCACHED_CLIENT.get(cache_key) if data: return data, "200 OK" except Exception as e: LOGGER.exception(e) # update series name series = update_trending_series(series) # parse the limit try: limit = int(limit) except ValueError: LOGGER.error("limit param must be an integer") return json.dumps({"error": "limit param must be an integer"}), "400 Bad Request" # build the query query = "SELECT content_id, sum(value) as value " \ "FROM {series} " \ "WHERE time > now() - {offset} " \ "GROUP BY content_id;" args = {"series": series, "offset": offset} # send the request try: res = INFLUXDB_CLIENT.query(query.format(**args)) # capture errors and send them back along with the query (for inspection/debugging) except Exception as e: LOGGER.exception(e) return json.dumps({"error": e.message, "query": query.format(**args)}), "500 Internal Error" # build the response object response = flatten_response(res) # limit the number of content per site for site, points in response.items(): sorted_content = sorted(points, key=lambda p: p["value"], reverse=True)[:limit] response[site] = sorted_content clean_response = {} for site, values in response.items(): clean_name = site.split("-")[0] clean_response[clean_name] = values res = json.dumps(clean_response) # cache the response try: MEMCACHED_CLIENT.set(cache_key, res, time=MEMCACHED_EXPIRATION) except Exception as e: LOGGER.exception(e) return res, "200 OK"
[ "def", "trending", "(", "params", ")", ":", "# get params", "try", ":", "series", "=", "params", ".", "get", "(", "\"site\"", ",", "[", "DEFAULT_SERIES", "]", ")", "[", "0", "]", "offset", "=", "params", ".", "get", "(", "\"offset\"", ",", "[", "DEFA...
31.294118
20.941176
def read_actions(): """Yields actions for pressed keys.""" while True: key = get_key() # Handle arrows, j/k (qwerty), and n/e (colemak) if key in (const.KEY_UP, const.KEY_CTRL_N, 'k', 'e'): yield const.ACTION_PREVIOUS elif key in (const.KEY_DOWN, const.KEY_CTRL_P, 'j', 'n'): yield const.ACTION_NEXT elif key in (const.KEY_CTRL_C, 'q'): yield const.ACTION_ABORT elif key in ('\n', '\r'): yield const.ACTION_SELECT
[ "def", "read_actions", "(", ")", ":", "while", "True", ":", "key", "=", "get_key", "(", ")", "# Handle arrows, j/k (qwerty), and n/e (colemak)", "if", "key", "in", "(", "const", ".", "KEY_UP", ",", "const", ".", "KEY_CTRL_N", ",", "'k'", ",", "'e'", ")", "...
36.071429
13.5
def browse( plugins, parent = None, default = None ): """ Prompts the user to browse the wizards based on the inputed plugins \ allowing them to launch any particular wizard of choice. :param plugins | [<XWizardPlugin>, ..] parent | <QWidget> default | <XWizardPlugin> || None :return <bool> success """ dlg = XWizardBrowserDialog( parent ) dlg.setPlugins(plugins) dlg.setCurrentPlugin(default) if ( dlg.exec_() ): return True return False
[ "def", "browse", "(", "plugins", ",", "parent", "=", "None", ",", "default", "=", "None", ")", ":", "dlg", "=", "XWizardBrowserDialog", "(", "parent", ")", "dlg", ".", "setPlugins", "(", "plugins", ")", "dlg", ".", "setCurrentPlugin", "(", "default", ")"...
34.823529
13.882353
def fill_dcnm_net_info(self, tenant_id, direc, vlan_id=0, segmentation_id=0): """Fill DCNM network parameters. Function that fills the network parameters for a tenant required by DCNM. """ serv_obj = self.get_service_obj(tenant_id) fw_dict = serv_obj.get_fw_dict() fw_id = fw_dict.get('fw_id') net_dict = {'status': 'ACTIVE', 'admin_state_up': True, 'tenant_id': tenant_id, 'provider:network_type': 'local', 'vlan_id': vlan_id, 'segmentation_id': segmentation_id} if vlan_id == 0: net_dict.update({'mob_domain': False, 'mob_domain_name': None}) else: net_dict.update({'mob_domain': True}) # TODO(padkrish) NWK ID are not filled. if direc == 'in': name = fw_id[0:4] + fw_const.IN_SERVICE_NWK + ( fw_id[len(fw_id) - 4:]) net_dict.update({'name': name, 'part_name': None, 'config_profile': self.serv_host_prof, 'fwd_mode': self.serv_host_mode}) else: name = fw_id[0:4] + fw_const.OUT_SERVICE_NWK + ( fw_id[len(fw_id) - 4:]) net_dict.update({'name': name, 'part_name': fw_const.SERV_PART_NAME, 'config_profile': self.serv_ext_prof, 'fwd_mode': self.serv_ext_mode}) return net_dict
[ "def", "fill_dcnm_net_info", "(", "self", ",", "tenant_id", ",", "direc", ",", "vlan_id", "=", "0", ",", "segmentation_id", "=", "0", ")", ":", "serv_obj", "=", "self", ".", "get_service_obj", "(", "tenant_id", ")", "fw_dict", "=", "serv_obj", ".", "get_fw...
46.0625
17.90625
def from_bytes(TxIn, byte_string): ''' byte_string -> TxIn parses a TxIn from a byte-like object ''' outpoint = Outpoint.from_bytes(byte_string[:36]) script_sig_len = VarInt.from_bytes(byte_string[36:45]) script_start = 36 + len(script_sig_len) script_end = script_start + script_sig_len.number script_sig = byte_string[script_start:script_end] sequence = byte_string[script_end:script_end + 4] if script_sig == b'': stack_script = b'' redeem_script = b'' else: stack_script, redeem_script = TxIn._parse_script_sig(script_sig) return TxIn( outpoint=outpoint, stack_script=stack_script, redeem_script=redeem_script, sequence=sequence)
[ "def", "from_bytes", "(", "TxIn", ",", "byte_string", ")", ":", "outpoint", "=", "Outpoint", ".", "from_bytes", "(", "byte_string", "[", ":", "36", "]", ")", "script_sig_len", "=", "VarInt", ".", "from_bytes", "(", "byte_string", "[", "36", ":", "45", "]...
34.826087
17.086957
def refreshUserMembership(self, users): """ This operation iterates over every enterprise group configured in the portal and determines if the input user accounts belong to any of the configured enterprise groups. If there is any change in membership, the database and the indexes are updated for each user account. While portal automatically refreshes the memberships during a user login and during a periodic refresh (configured through the Update Identity Store operation), this operation allows an administrator to force a refresh. Parameters: users - comma seperated list of user names """ params = { "f" : "json", "users" : users } url = self._url + "/users/refreshMembership" return self._post(url=url, param_dict=params, proxy_port=self._proxy_port, proxy_url=self._proxy_url)
[ "def", "refreshUserMembership", "(", "self", ",", "users", ")", ":", "params", "=", "{", "\"f\"", ":", "\"json\"", ",", "\"users\"", ":", "users", "}", "url", "=", "self", ".", "_url", "+", "\"/users/refreshMembership\"", "return", "self", ".", "_post", "(...
43.26087
18.73913
def predict(self, X, with_noise=True): """ Predictions with the model. Returns posterior means and standard deviations at X. Note that this is different in GPy where the variances are given. Parameters: X (np.ndarray) - points to run the prediction for. with_noise (bool) - whether to add noise to the prediction. Default is True. """ m, v = self._predict(X, False, with_noise) # We can take the square root because v is just a diagonal matrix of variances return m, np.sqrt(v)
[ "def", "predict", "(", "self", ",", "X", ",", "with_noise", "=", "True", ")", ":", "m", ",", "v", "=", "self", ".", "_predict", "(", "X", ",", "False", ",", "with_noise", ")", "# We can take the square root because v is just a diagonal matrix of variances", "ret...
49.818182
28.727273
def implements_storage(self): """ True if combination of field access properties imply that the field implements a storage element. """ # 9.4.1, Table 12 sw = self.get_property('sw') hw = self.get_property('hw') if sw in (rdltypes.AccessType.rw, rdltypes.AccessType.rw1): # Software can read and write, implying a storage element return True if hw == rdltypes.AccessType.rw: # Hardware can read and write, implying a storage element return True if (sw in (rdltypes.AccessType.w, rdltypes.AccessType.w1)) and (hw == rdltypes.AccessType.r): # Write-only register visible to hardware is stored return True onread = self.get_property('onread') if onread is not None: # 9.6.1-c: Onread side-effects imply storage regardless of whether # or not the field is writable by sw return True if self.get_property('hwset') or self.get_property('hwclr'): # Not in spec, but these imply that a storage element exists return True return False
[ "def", "implements_storage", "(", "self", ")", ":", "# 9.4.1, Table 12", "sw", "=", "self", ".", "get_property", "(", "'sw'", ")", "hw", "=", "self", ".", "get_property", "(", "'hw'", ")", "if", "sw", "in", "(", "rdltypes", ".", "AccessType", ".", "rw", ...
35.75
22.0625
def dict_copy(func): "copy dict keyword args, to avoid modifying caller's copy" @functools.wraps(func) def wrapper(*args, **kwargs): copied_kwargs = copy.deepcopy(kwargs) return func(*args, **copied_kwargs) return wrapper
[ "def", "dict_copy", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "copied_kwargs", "=", "copy", ".", "deepcopy", "(", "kwargs", ")", "return", "func", ...
35.285714
13.285714
def gen_mu(K, delta, c): """The Robust Soliton Distribution on the degree of transmitted blocks """ S = c * log(K/delta) * sqrt(K) tau = gen_tau(S, K, delta) rho = gen_rho(K) normalizer = sum(rho) + sum(tau) return [(rho[d] + tau[d])/normalizer for d in range(K)]
[ "def", "gen_mu", "(", "K", ",", "delta", ",", "c", ")", ":", "S", "=", "c", "*", "log", "(", "K", "/", "delta", ")", "*", "sqrt", "(", "K", ")", "tau", "=", "gen_tau", "(", "S", ",", "K", ",", "delta", ")", "rho", "=", "gen_rho", "(", "K"...
28.9
13.2
def run_in_background(coroutine: "Callable[[concurrent.futures.Future[T], Coroutine[Any, Any, None]]", *, debug: bool = False, _policy_lock: threading.Lock = threading.Lock()) -> T: """ Runs ``coroutine(future)`` in a new event loop on a background thread. Blocks and returns the *future* result as soon as it is resolved. The coroutine and all remaining tasks continue running in the background until it is complete. Note: This installs a :class:`chess.engine.EventLoopPolicy` for the entire process. """ assert asyncio.iscoroutinefunction(coroutine) with _policy_lock: if not isinstance(asyncio.get_event_loop_policy(), EventLoopPolicy): asyncio.set_event_loop_policy(EventLoopPolicy()) future = concurrent.futures.Future() # type: concurrent.futures.Future[T] def background() -> None: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.set_debug(debug) try: loop.run_until_complete(coroutine(future)) future.cancel() except Exception as exc: future.set_exception(exc) return finally: try: # Finish all remaining tasks. pending = _all_tasks(loop) loop.run_until_complete(asyncio.gather(*pending, loop=loop, return_exceptions=True)) # Shutdown async generators. try: loop.run_until_complete(loop.shutdown_asyncgens()) except AttributeError: # Before Python 3.6. pass finally: loop.close() threading.Thread(target=background).start() return future.result()
[ "def", "run_in_background", "(", "coroutine", ":", "\"Callable[[concurrent.futures.Future[T], Coroutine[Any, Any, None]]\"", ",", "*", ",", "debug", ":", "bool", "=", "False", ",", "_policy_lock", ":", "threading", ".", "Lock", "=", "threading", ".", "Lock", "(", ")...
36.234043
23.765957