text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def calculate_leapdays(init_date, final_date):
    """Return the number of leap days between *init_date* and *final_date*.

    Counts Feb-29 occurrences in the half-open interval
    [init_date, final_date).  This resolves the previous TODO: the old
    version only compared years and was off by one when either endpoint
    fell after Feb 29 of a leap year.

    :param init_date: start date (``datetime.date``/``datetime.datetime``).
    :param final_date: end date; assumed to be >= ``init_date``.
    :returns: ``datetime.timedelta`` holding the leap-day count.
    """
    # Leap years y in [init_date.year, final_date.year) via the Gregorian
    # rule: divisible by 4, except centuries, except quadricentennials.
    leap_days = (final_date.year - 1) // 4 - (init_date.year - 1) // 4
    leap_days -= (final_date.year - 1) // 100 - (init_date.year - 1) // 100
    leap_days += (final_date.year - 1) // 400 - (init_date.year - 1) // 400

    def _is_leap(year):
        # Gregorian leap-year test.
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    # Internal date correction (formerly a TODO):
    # If init_date is past Feb 29 of a leap init year, that leap day was
    # counted above but lies before the interval -> drop it.
    if _is_leap(init_date.year) and (init_date.month, init_date.day) > (2, 29):
        leap_days -= 1
    # If final_date is past Feb 29 of a leap final year, that leap day lies
    # inside the interval but was not counted above -> add it.
    if _is_leap(final_date.year) and (final_date.month, final_date.day) > (2, 29):
        leap_days += 1
    return datetime.timedelta(days=leap_days)
[ "def", "calculate_leapdays", "(", "init_date", ",", "final_date", ")", ":", "leap_days", "=", "(", "final_date", ".", "year", "-", "1", ")", "//", "4", "-", "(", "init_date", ".", "year", "-", "1", ")", "//", "4", "leap_days", "-=", "(", "final_date", ...
45.6
26.4
def on_event_pre(self, e: Event) -> None:
    """Sync values set on the browser side before event listeners run."""
    super().on_event_pre(e)
    current_target = e.init.get('currentTarget', dict())
    if e.type in ('input', 'change'):
        self._set_attribute('value', current_target.get('value'))
        selected_ids = current_target.get('selectedOptions', [])
        # Rebuild the selected-options list to mirror the browser state.
        self._selected_options.clear()
        for option in self.options:
            if option.wdom_id in selected_ids:
                self._selected_options.append(option)
                option._set_attribute('selected', True)
            else:
                option._remove_attribute('selected')
[ "def", "on_event_pre", "(", "self", ",", "e", ":", "Event", ")", "->", "None", ":", "super", "(", ")", ".", "on_event_pre", "(", "e", ")", "ct_msg", "=", "e", ".", "init", ".", "get", "(", "'currentTarget'", ",", "dict", "(", ")", ")", "if", "e",...
47.142857
9.5
def timedelta2period(duration):
    """Render *duration* as a zero-padded ``MM:SS`` string.

    NOTE(review): only the sub-hour part of ``duration.seconds`` is used;
    whole hours (and days) are discarded — presumably intentional.
    """
    total_seconds = duration.seconds
    mins, secs = divmod(total_seconds % 3600, 60)
    return '{0:0>2}:{1:0>2}'.format(mins, secs)
[ "def", "timedelta2period", "(", "duration", ")", ":", "seconds", "=", "duration", ".", "seconds", "minutes", "=", "(", "seconds", "%", "3600", ")", "//", "60", "seconds", "=", "(", "seconds", "%", "60", ")", "return", "'{0:0>2}:{1:0>2}'", ".", "format", ...
37.833333
8
def main():
    """
    Continues to validate patterns until it encounters EOF within a pattern
    file or Ctrl-C is pressed by the user.
    """
    import functools

    arg_parser = argparse.ArgumentParser(description='Validate STIX Patterns.')
    arg_parser.add_argument('-f', '--file',
                            help="Specify this arg to read patterns from a file.",
                            type=argparse.FileType("r"))
    args = arg_parser.parse_args()

    pass_count = fail_count = 0

    # NOTE: a generator-based reader misbehaved here (raw_input() inside a
    # generator only yielded one line), hence a plain callable is used.
    if args.file:
        next_pattern = args.file.readline
    else:
        next_pattern = functools.partial(
            six.moves.input, "Enter a pattern to validate: ")

    try:
        while True:
            pattern = next_pattern()
            if not pattern:
                break
            tests_passed, err_strings = validate(pattern, True)
            if tests_passed:
                print("\nPASS: %s" % pattern)
                pass_count += 1
            else:
                for err in err_strings:
                    print(err, '\n')
                fail_count += 1
    except (EOFError, KeyboardInterrupt):
        pass
    finally:
        if args.file:
            args.file.close()
        print("\nPASSED:", pass_count, " patterns")
        print("FAILED:", fail_count, " patterns")
[ "def", "main", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Validate STIX Patterns.'", ")", "parser", ".", "add_argument", "(", "'-f'", ",", "'--file'", ",", "help", "=", "\"Specify this arg to read patterns from a file...
32.391304
20.043478
def switch_delete_record_for_userid(self, userid):
    """Remove userid switch record from switch table."""
    with get_network_conn() as connection:
        connection.execute("DELETE FROM switch WHERE userid=?", (userid,))
        LOG.debug(
            "Switch record for user %s is removed from switch table" % userid)
[ "def", "switch_delete_record_for_userid", "(", "self", ",", "userid", ")", ":", "with", "get_network_conn", "(", ")", "as", "conn", ":", "conn", ".", "execute", "(", "\"DELETE FROM switch WHERE userid=?\"", ",", "(", "userid", ",", ")", ")", "LOG", ".", "debug...
51.142857
9.714286
def make_transaction_frame(transactions):
    """
    Formats a transaction DataFrame.

    Parameters
    ----------
    transactions : pd.DataFrame
        Contains improperly formatted transactional data.

    Returns
    -------
    df : pd.DataFrame
        Daily transaction volume and dollar ammount.
         - See full explanation in tears.create_full_tear_sheet.
    """
    records = []
    for dt in transactions.index:
        day_txns = transactions.loc[dt]
        if len(day_txns) == 0:
            continue
        records.extend(map_transaction(txn) for txn in day_txns)
    # Order by transaction datetime before building the frame.
    df = pd.DataFrame(sorted(records, key=lambda rec: rec['dt']))
    df['txn_dollars'] = -df['amount'] * df['price']
    df.index = list(map(pd.Timestamp, df.dt.values))
    return df
[ "def", "make_transaction_frame", "(", "transactions", ")", ":", "transaction_list", "=", "[", "]", "for", "dt", "in", "transactions", ".", "index", ":", "txns", "=", "transactions", ".", "loc", "[", "dt", "]", "if", "len", "(", "txns", ")", "==", "0", ...
26.433333
18.5
def _find_listeners():
    """Find GPIB listeners."""
    for address in range(31):
        try:
            # Skip the controller's own address (gpib.ask(BOARD, 1)).
            if gpib.listener(BOARD, address) and gpib.ask(BOARD, 1) != address:
                yield address
        except gpib.GpibError as e:
            logger.debug("GPIB error in _find_listeners(): %s", repr(e))
[ "def", "_find_listeners", "(", ")", ":", "for", "i", "in", "range", "(", "31", ")", ":", "try", ":", "if", "gpib", ".", "listener", "(", "BOARD", ",", "i", ")", "and", "gpib", ".", "ask", "(", "BOARD", ",", "1", ")", "!=", "i", ":", "yield", ...
32
16
def handle_connection(stream):
    '''
    Handle a connection.

    The server operates a request/response cycle, so it performs a
    synchronous loop:

    1) Read data from network into wsproto
    2) Get next wsproto event
    3) Handle event
    4) Send data from wsproto to network

    :param stream: a socket stream
    '''
    connection = WSConnection(ConnectionType.SERVER)
    # events is a generator that yields websocket event objects. Usually you
    # would say `for event in ws.events()`, but the synchronous nature of this
    # server requires us to use next(event) instead so that we can interleave
    # the network I/O.
    event_source = connection.events()
    keep_going = True
    while keep_going:
        # 1) Read data from network
        payload = stream.recv(RECEIVE_BYTES)
        print('Received {} bytes'.format(len(payload)))
        connection.receive_data(payload)

        # 2) Get next wsproto event
        try:
            event = next(event_source)
        except StopIteration:
            print('Client connection dropped unexpectedly')
            return

        # 3) Handle event
        if isinstance(event, Request):
            # Negotiate new WebSocket connection
            print('Accepting WebSocket upgrade')
            response_bytes = connection.send(AcceptConnection())
        elif isinstance(event, CloseConnection):
            # Print log message and break out
            print('Connection closed: code={}/{} reason={}'.format(
                event.code.value, event.code.name, event.reason))
            response_bytes = connection.send(event.response())
            keep_going = False
        elif isinstance(event, TextMessage):
            # Reverse text and send it back to wsproto
            print('Received request and sending response')
            response_bytes = connection.send(Message(data=event.data[::-1]))
        elif isinstance(event, Ping):
            # wsproto handles ping events for you by placing a pong frame in
            # the outgoing buffer. You should not call pong() unless you want
            # to send an unsolicited pong frame.
            print('Received ping and sending pong')
            response_bytes = connection.send(event.response())
        else:
            print('Unknown event: {!r}'.format(event))

        # 4) Send data from wsproto to network
        print('Sending {} bytes'.format(len(response_bytes)))
        stream.send(response_bytes)
[ "def", "handle_connection", "(", "stream", ")", ":", "ws", "=", "WSConnection", "(", "ConnectionType", ".", "SERVER", ")", "# events is a generator that yields websocket event objects. Usually you", "# would say `for event in ws.events()`, but the synchronous nature of this", "# serv...
36.31746
19.079365
def append(self, sc):
    """
    Add scale 'sc', dropping any previously-added scale that
    covers the same aesthetics.
    """
    aesthetic = sc.aesthetics[0]
    covered = self.find(aesthetic)
    if any(covered):
        warn(_TPL_DUPLICATE_SCALE.format(aesthetic), PlotnineWarning)
        self.pop(covered.index(True))
    # super() does not work well with reloads
    list.append(self, sc)
[ "def", "append", "(", "self", ",", "sc", ")", ":", "ae", "=", "sc", ".", "aesthetics", "[", "0", "]", "cover_ae", "=", "self", ".", "find", "(", "ae", ")", "if", "any", "(", "cover_ae", ")", ":", "warn", "(", "_TPL_DUPLICATE_SCALE", ".", "format", ...
32.846154
9.769231
def remove_temporary_source(self):
    """Remove the source files from this requirement, if they are marked
    for deletion"""
    if self.source_dir:
        delete_marker = os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)
        if os.path.exists(delete_marker):
            logger.debug('Removing source in %s', self.source_dir)
            rmtree(self.source_dir)
            self.source_dir = None
    # Always clear any leftover temporary build directory.
    if self._temp_build_dir and os.path.exists(self._temp_build_dir):
        rmtree(self._temp_build_dir)
        self._temp_build_dir = None
[ "def", "remove_temporary_source", "(", "self", ")", ":", "if", "self", ".", "source_dir", "and", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "self", ".", "source_dir", ",", "PIP_DELETE_MARKER_FILENAME", ")", ")", ":", "log...
48.454545
11.454545
def post(self, url, access_token=None, **kwargs):
    """
    Issue a POST request to the WeChat server.

    :param url: request address
    :param access_token: access-token value; fetched automatically when a
        conf was passed at init time, otherwise it must be supplied here
    :param kwargs: extra request data
    :return: JSON data from the WeChat server response
    """
    return self.request(method="post", url=url,
                        access_token=access_token, **kwargs)
[ "def", "post", "(", "self", ",", "url", ",", "access_token", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "request", "(", "method", "=", "\"post\"", ",", "url", "=", "url", ",", "access_token", "=", "access_token", ",", "*",...
28.714286
13.857143
def pad_length(s):
    """
    Appends characters to the end of the string to increase the string length
    per IBM Globalization Design Guideline A3: UI Expansion.
    https://www-01.ibm.com/software/globalization/guidelines/a3.html

    :param s: String to pad.
    :returns: Padded string (``s`` unchanged when no expansion applies,
        e.g. for the empty string).
    """
    padding_chars = [
        u'\ufe4e',      # ﹎: CENTRELINE LOW LINE
        u'\u040d',      # Ѝ: CYRILLIC CAPITAL LETTER I WITH GRAVE
        u'\u05d0',      # א: HEBREW LETTER ALEF
        u'\u01c6',      # dž: LATIN SMALL LETTER DZ WITH CARON
        u'\u1f8f',      # ᾏ: GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI
        u'\u2167',      # Ⅷ: ROMAN NUMERAL EIGHT
        u'\u3234',      # ㈴: PARENTHESIZED IDEOGRAPH NAME
        u'\u32f9',      # ㋹: CIRCLED KATAKANA RE
        u'\ud4db',      # 퓛: HANGUL SYLLABLE PWILH
        u'\ufe8f',      # ﺏ: ARABIC LETTER BEH ISOLATED FORM
        u'\U0001D7D8',  # 𝟘: MATHEMATICAL DOUBLE-STRUCK DIGIT ZERO
        u'\U0001F6A6',  # 🚦: VERTICAL TRAFFIC LIGHT
    ]
    padding_generator = itertools.cycle(padding_chars)
    # Expansion factor keyed by source-string length band (guideline A3).
    # NOTE: builtin range replaces six.moves.range — identical behavior.
    target_lengths = {
        range(1, 11): 3,
        range(11, 21): 2,
        range(21, 31): 1.8,
        range(31, 51): 1.6,
        range(51, 71): 1.4,
    }
    if len(s) > 70:
        target_length = int(math.ceil(len(s) * 1.3))
    else:
        # BUG FIX: target_length was previously unbound when no band
        # matched (the empty string), raising NameError.  Default to no
        # expansion.
        target_length = len(s)
        for length_band, factor in target_lengths.items():
            if len(s) in length_band:
                target_length = int(math.ceil(len(s) * factor))
                break
    diff = target_length - len(s)
    pad = u"".join(next(padding_generator) for _ in range(diff))
    return s + pad
[ "def", "pad_length", "(", "s", ")", ":", "padding_chars", "=", "[", "u'\\ufe4e'", ",", "# ﹎: CENTRELINE LOW LINE", "u'\\u040d'", ",", "# Ѝ: CYRILLIC CAPITAL LETTER I WITH GRAVE", "u'\\u05d0'", ",", "# א: HEBREW LETTER ALEF", "u'\\u01c6'", ",", "# dž: LATIN SMALL LETTER DZ WITH...
38.317073
16.658537
def loaddeposit(sources, depid):
    """Load deposit.

    Usage: invenio dumps loaddeposit ~/data/deposit_dump_*.json
           invenio dumps loaddeposit -d 12345 ~/data/deposit_dump_*.json
    """
    from .tasks.deposit import load_deposit
    if depid is None:
        loadcommon(sources, load_deposit)
        return

    def matches_depid(dep):
        # Match on the deposit's numeric id.
        return int(dep["_p"]["id"]) == depid

    loadcommon(sources, load_deposit, predicate=matches_depid,
               asynchronous=False)
[ "def", "loaddeposit", "(", "sources", ",", "depid", ")", ":", "from", ".", "tasks", ".", "deposit", "import", "load_deposit", "if", "depid", "is", "not", "None", ":", "def", "pred", "(", "dep", ")", ":", "return", "int", "(", "dep", "[", "\"_p\"", "]...
33.071429
17.142857
def xpathNextFollowing(self, cur):
    """Traversal function for the "following" direction

    The following axis contains all nodes in the same document as the
    context node that are after the context node in document order,
    excluding any descendants and excluding attribute nodes and
    namespace nodes; the nodes are ordered in document order """
    cur__o = None if cur is None else cur._o
    ret = libxml2mod.xmlXPathNextFollowing(self._o, cur__o)
    if ret is None:
        raise xpathError('xmlXPathNextFollowing() failed')
    return xmlNode(_obj=ret)
[ "def", "xpathNextFollowing", "(", "self", ",", "cur", ")", ":", "if", "cur", "is", "None", ":", "cur__o", "=", "None", "else", ":", "cur__o", "=", "cur", ".", "_o", "ret", "=", "libxml2mod", ".", "xmlXPathNextFollowing", "(", "self", ".", "_o", ",", ...
49.615385
15.692308
def neighbors(self, node_id):
    """Find all the nodes where there is an edge from the specified node
    to that node.  Returns a list of node ids."""
    source = self.get_node(node_id)
    targets = []
    for edge_id in source['edges']:
        # vertices[1] is the edge's destination node id.
        targets.append(self.get_edge(edge_id)['vertices'][1])
    return targets
[ "def", "neighbors", "(", "self", ",", "node_id", ")", ":", "node", "=", "self", ".", "get_node", "(", "node_id", ")", "return", "[", "self", ".", "get_edge", "(", "edge_id", ")", "[", "'vertices'", "]", "[", "1", "]", "for", "edge_id", "in", "node", ...
55.4
11.4
def init_state_from_encoder(self, encoder_outputs, encoder_valid_length=None):
    """Initialize the decoder state from the encoder outputs.

    Parameters
    ----------
    encoder_outputs : list
    encoder_valid_length : NDArray or None

    Returns
    -------
    decoder_states : list
        The decoder states, includes:
        - mem_value : NDArray
        - mem_masks : NDArray, optional (present when a valid length is given)
    """
    mem_value = encoder_outputs
    states = [mem_value]
    seq_len = mem_value.shape[1]
    if encoder_valid_length is not None:
        dtype = encoder_valid_length.dtype
        ctx = encoder_valid_length.context
        # Mask positions >= valid length for each batch element.
        positions = mx.nd.arange(seq_len, ctx=ctx, dtype=dtype).reshape((1, -1))
        mem_masks = mx.nd.broadcast_lesser(
            positions, encoder_valid_length.reshape((-1, 1)))
        states.append(mem_masks)
    self._encoder_valid_length = encoder_valid_length
    return states
[ "def", "init_state_from_encoder", "(", "self", ",", "encoder_outputs", ",", "encoder_valid_length", "=", "None", ")", ":", "mem_value", "=", "encoder_outputs", "decoder_states", "=", "[", "mem_value", "]", "mem_length", "=", "mem_value", ".", "shape", "[", "1", ...
35.392857
14.607143
def kernel_matrix(svm_model, original_X):
    """Compute the full kernel (Gram) matrix for the model's kernel.

    :param svm_model: model carrying ``svm_kernel`` plus its
        hyperparameters (``zeta``, ``gamma``, ``Q`` for polynomial;
        ``gamma`` for gaussian).
    :param original_X: 2-D array of shape (num_samples, num_features).
    :returns: (num_samples, num_samples) kernel matrix.
    :raises ValueError: for an unrecognized ``svm_model.svm_kernel``.
    """
    kernel = svm_model.svm_kernel
    if kernel in ('polynomial_kernel', 'soft_polynomial_kernel'):
        # (zeta + gamma * <xi, xj>) ** Q, fully vectorized.
        K = (svm_model.zeta
             + svm_model.gamma * np.dot(original_X, original_X.T)) ** svm_model.Q
    elif kernel in ('gaussian_kernel', 'soft_gaussian_kernel'):
        # Vectorized exp(-gamma * ||xi - xj||^2); replaces the old (removed)
        # commented-out O(n^2) element-wise loop.
        pairwise_dists = squareform(pdist(original_X, 'euclidean'))
        K = np.exp(-svm_model.gamma * (pairwise_dists ** 2))
    else:
        # Previously an unknown kernel fell through to an unbound-name
        # NameError on return; fail explicitly instead.
        raise ValueError('unsupported svm_kernel: %r' % kernel)
    return K
[ "def", "kernel_matrix", "(", "svm_model", ",", "original_X", ")", ":", "if", "(", "svm_model", ".", "svm_kernel", "==", "'polynomial_kernel'", "or", "svm_model", ".", "svm_kernel", "==", "'soft_polynomial_kernel'", ")", ":", "K", "=", "(", "svm_model", ".", "z...
55.1
39.3
def MAC(self, days, rev=0):
    """
    Compare today's *days*-day moving average of the closing price
    against yesterday's.

    :param days: window size of the moving average
    :param rev: 0 -> return one of the strings ↑, ↓ or -;
                1 -> return 1, -1 or 0
    """
    history = self.raw_data[:]
    history.pop()  # drop today so the window ends on yesterday
    yesterday_ma = float(sum(history[-days:]) / days)
    today_ma = self.MA(days)
    return self.high_or_low(today_ma, yesterday_ma, rev)
[ "def", "MAC", "(", "self", ",", "days", ",", "rev", "=", "0", ")", ":", "yesterday", "=", "self", ".", "raw_data", "[", ":", "]", "yesterday", ".", "pop", "(", ")", "yes_MA", "=", "float", "(", "sum", "(", "yesterday", "[", "-", "days", ":", "]...
26.133333
15.266667
def group_values(self, group_name):
    """Return all distinct group values for given group."""
    position = self.groups.index(group_name)
    distinct = []
    for key in self.data_keys:
        value = key[position]
        # Preserve first-seen order while de-duplicating.
        if value not in distinct:
            distinct.append(value)
    return distinct
[ "def", "group_values", "(", "self", ",", "group_name", ")", ":", "group_index", "=", "self", ".", "groups", ".", "index", "(", "group_name", ")", "values", "=", "[", "]", "for", "key", "in", "self", ".", "data_keys", ":", "if", "key", "[", "group_index...
39.5
9.375
def authenticate(self, driver):
    """Authenticate using the Console Server protocol specific FSM."""
    events = [
        driver.username_re,              # 0
        driver.password_re,              # 1
        self.device.prompt_re,           # 2
        driver.rommon_re,                # 3
        driver.unable_to_connect_re,     # 4
        driver.authentication_error_re,  # 5
        pexpect.TIMEOUT,                 # 6
        pexpect.EOF,                     # 7
    ]
    # (pattern, from-states, to-state, action, timeout)
    transitions = [
        (driver.username_re, [0], 1,
         partial(a_send_username, self.username), 10),
        (driver.username_re, [1], 1, None, 10),
        (driver.password_re, [0, 1], 2,
         partial(a_send_password, self._acquire_password()),
         _C['first_prompt_timeout']),
        (driver.username_re, [2], -1, a_authentication_error, 0),
        (driver.password_re, [2], -1, a_authentication_error, 0),
        (driver.authentication_error_re, [1, 2], -1,
         a_authentication_error, 0),
        (self.device.prompt_re, [0, 1, 2], -1, None, 0),
        (driver.rommon_re, [0], -1, partial(a_send, "\r\n"), 0),
        (pexpect.TIMEOUT, [0], 1, partial(a_send, "\r\n"), 10),
        (pexpect.TIMEOUT, [2], -1, None, 0),
        (pexpect.TIMEOUT, [3, 7], -1,
         ConnectionTimeoutError("Connection Timeout", self.hostname), 0),
        (driver.unable_to_connect_re, [0, 1, 2], -1, a_unable_to_connect, 0),
    ]
    self.log("EXPECTED_PROMPT={}".format(pattern_to_str(self.device.prompt_re)))
    machine = FSM("CONSOLE-SERVER-AUTH", self.device, events, transitions,
                  timeout=_C['connect_timeout'], init_pattern=self.last_pattern)
    return machine.run()
[ "def", "authenticate", "(", "self", ",", "driver", ")", ":", "# 0 1 2 3", "events", "=", "[", "driver", ".", "username_re", ",", "driver", ".", "password_re", ",", "self", ".", "device", "."...
66.5
33.884615
def _set_port_security(self, v, load=False):
    """
    Setter method for port_security, mapped from YANG variable
    /interface/ethernet/switchport/port_security (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_port_security is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_port_security() directly.

    YANG Description: Enable port-security feature
    """
    # Unwrap values carrying a pyangbind user-type marker.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        new_value = YANGDynClass(
            v,
            base=port_security.port_security,
            is_container='container',
            presence=True,
            yang_name="port-security",
            rest_name="port-security",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions={u'tailf-common': {u'info': u'Enable port-security feature', u'callpoint': u'interface_portsecurity'}},
            namespace='urn:brocade.com:mgmt:brocade-interface',
            defining_module='brocade-interface',
            yang_type='container',
            is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """port_security must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=port_security.port_security, is_container='container', presence=True, yang_name="port-security", rest_name="port-security", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable port-security feature', u'callpoint': u'interface_portsecurity'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
        })
    self.__port_security = new_value
    if hasattr(self, '_set'):
        self._set()
[ "def", "_set_port_security", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "...
75.291667
36.416667
def reformat_meta(self):
    """Collect the meta data information in a more user friendly format.

    Scans the annotation for $P<n><prop> channel keys, gathers the
    per-channel values into a DataFrame stored under '_channels_',
    removes the raw per-channel keys, and records the channel names
    under '_channel_names_'.
    """
    meta = self.annotation  # shorthand; mutated in place
    # Property suffixes are discovered from channel 1's keys ($P1N, $P1S, ...);
    # key[3] in digits would mean channel 10+, which is a different channel.
    channel_properties = [key[3:] for key, value in meta.items()
                          if key[:3] == '$P1' and key[3] not in string.digits]
    # One row per channel, one column per property.
    channel_matrix = [
        [meta.get('$P{0}{1}'.format(ch, prop)) for prop in channel_properties]
        for ch in self.channel_numbers
    ]
    # Drop the now-collected raw channel keys from the annotation.
    for ch in self.channel_numbers:
        for prop in channel_properties:
            meta.pop('$P{0}{1}'.format(ch, prop), None)
    num_channels = meta['$PAR']
    column_names = ['$Pn{0}'.format(prop) for prop in channel_properties]
    df = pd.DataFrame(channel_matrix, columns=column_names,
                      index=(1 + numpy.arange(num_channels)))
    if '$PnE' in column_names:
        df['$PnE'] = df['$PnE'].apply(lambda x: x.split(','))
    df.index.name = 'Channel Number'
    meta['_channels_'] = df
    meta['_channel_names_'] = self.get_channel_names()
[ "def", "reformat_meta", "(", "self", ")", ":", "meta", "=", "self", ".", "annotation", "# For shorthand (passed by reference)", "channel_properties", "=", "[", "]", "for", "key", ",", "value", "in", "meta", ".", "items", "(", ")", ":", "if", "key", "[", ":...
38.102564
20.282051
def change_owner(ctx, owner, uuid):
    """Changes the ownership of objects"""
    objects = ctx.obj['objects']
    database = ctx.obj['db']
    # Look the new owner up either by uuid or by name.
    owner_filter = {'uuid': owner} if uuid is True else {'name': owner}
    owner = database.objectmodels['user'].find_one(owner_filter)
    if owner is None:
        log('User unknown.', lvl=error)
        return
    for item in objects:
        item.owner = owner.uuid
        item.save()
    log('Done')
[ "def", "change_owner", "(", "ctx", ",", "owner", ",", "uuid", ")", ":", "objects", "=", "ctx", ".", "obj", "[", "'objects'", "]", "database", "=", "ctx", ".", "obj", "[", "'db'", "]", "if", "uuid", "is", "True", ":", "owner_filter", "=", "{", "'uui...
22.333333
20.047619
def branch(self):
    '''
    Return the currently checked-out branch name, parsed from the
    starred line of the captured `git branch` output.
    (This could also be 'master (Detatched-Head)' - Be warned.)
    Returns None when no output was captured.
    '''
    output_lines = self._get_branch().get('stdout')
    if not output_lines:
        return None
    starred = [line for line in output_lines if '*' in line]
    return ''.join(starred).replace('*', '').strip()
[ "def", "branch", "(", "self", ")", ":", "branch", "=", "self", ".", "_get_branch", "(", ")", ".", "get", "(", "'stdout'", ")", "if", "branch", ":", "return", "''", ".", "join", "(", "[", "b", "for", "b", "in", "branch", "if", "'*'", "in", "b", ...
32.733333
20.6
def cmd_status(opts):
    """Print status of containers and networks
    """
    blockade_config = load_config(opts.config)
    blockade = get_blockade(blockade_config, opts)
    print_containers(blockade.status(), opts.json)
[ "def", "cmd_status", "(", "opts", ")", ":", "config", "=", "load_config", "(", "opts", ".", "config", ")", "b", "=", "get_blockade", "(", "config", ",", "opts", ")", "containers", "=", "b", ".", "status", "(", ")", "print_containers", "(", "containers", ...
30.714286
6.285714
def hide_routemap_holder_route_map_action_rm(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    holder = ET.SubElement(config, "hide-routemap-holder",
                           xmlns="urn:brocade.com:mgmt:brocade-ip-policy")
    route_map = ET.SubElement(holder, "route-map")
    # Keys identifying the route-map instance, then the action leaf.
    ET.SubElement(route_map, "name").text = kwargs.pop('name')
    ET.SubElement(route_map, "instance").text = kwargs.pop('instance')
    ET.SubElement(route_map, "action-rm").text = kwargs.pop('action_rm')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
[ "def", "hide_routemap_holder_route_map_action_rm", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "hide_routemap_holder", "=", "ET", ".", "SubElement", "(", "config", ",", "\"hide-routemap-holder\"", ...
48.4
17.933333
def fraction_illuminated(ephemeris, body, t):
    """Compute the illuminated fraction of a body viewed from Earth.

    The ``body`` should be an integer or string that can be looked up in
    the given ``ephemeris``, which will also be asked to provide positions
    for the Earth and Sun.  The return value is a float between zero and
    one.  This simple routine assumes that the body is a perfectly
    uniform sphere.
    """
    phase = phase_angle(ephemeris, body, t).radians
    return 0.5 * (1.0 + cos(phase))
[ "def", "fraction_illuminated", "(", "ephemeris", ",", "body", ",", "t", ")", ":", "a", "=", "phase_angle", "(", "ephemeris", ",", "body", ",", "t", ")", ".", "radians", "return", "0.5", "*", "(", "1.0", "+", "cos", "(", "a", ")", ")" ]
43.5
18.75
def job(self):
    """REST binding for the job associated with the submitted build.

    Returns:
        Job: REST binding for running job or ``None`` if connection
        information was not available or no job was submitted.
    """
    submitter = self._submitter
    if submitter and hasattr(submitter, '_job_access'):
        return submitter._job_access()
    return None
[ "def", "job", "(", "self", ")", ":", "if", "self", ".", "_submitter", "and", "hasattr", "(", "self", ".", "_submitter", ",", "'_job_access'", ")", ":", "return", "self", ".", "_submitter", ".", "_job_access", "(", ")", "return", "None" ]
41.888889
26.222222
def _get_dict_from_list(dict_key, list_of_dicts): """Retrieve a specific dict from a list of dicts. Parameters ---------- dict_key : str The (single) key of the dict to be retrieved from the list. list_of_dicts : list The list of dicts to search for the specific dict. Returns ------- dict value The value associated with the dict_key (e.g., a list of nodes or edges). """ the_dict = [cur_dict for cur_dict in list_of_dicts if cur_dict.get(dict_key)] if not the_dict: raise ValueError('Could not find a dict with key %s' % dict_key) return the_dict[0][dict_key]
[ "def", "_get_dict_from_list", "(", "dict_key", ",", "list_of_dicts", ")", ":", "the_dict", "=", "[", "cur_dict", "for", "cur_dict", "in", "list_of_dicts", "if", "cur_dict", ".", "get", "(", "dict_key", ")", "]", "if", "not", "the_dict", ":", "raise", "ValueE...
30.809524
21
def _UpdateProcessingStatus(self, pid, process_status, used_memory):
    """Updates the processing status.

    Args:
      pid (int): process identifier (PID) of the worker process.
      process_status (dict[str, object]): status values received from
          the worker process.
      used_memory (int): size of used memory in bytes.

    Raises:
      KeyError: if the process is not registered with the engine.
    """
    self._RaiseIfNotRegistered(pid)
    if not process_status:
        return
    process = self._processes_per_pid[pid]
    status_indicator = process_status.get('processing_status', None)
    self._RaiseIfNotMonitored(pid)
    display_name = process_status.get('display_name', '')
    # Pull all the per-worker counters in one sweep.
    counter_names = (
        'number_of_consumed_event_tags', 'number_of_produced_event_tags',
        'number_of_consumed_events', 'number_of_produced_events',
        'number_of_consumed_reports', 'number_of_produced_reports',
        'number_of_consumed_sources', 'number_of_produced_sources',
        'number_of_consumed_warnings', 'number_of_produced_warnings')
    counters = {name: process_status.get(name, None) for name in counter_names}
    if status_indicator != definitions.STATUS_INDICATOR_IDLE:
        last_activity_timestamp = process_status.get(
            'last_activity_timestamp', 0.0)
        if last_activity_timestamp:
            # A worker that has been silent past the grace period is
            # flagged as not responding.
            last_activity_timestamp += self._PROCESS_WORKER_TIMEOUT
            if time.time() > last_activity_timestamp:
                logger.error((
                    'Process {0:s} (PID: {1:d}) has not reported activity within '
                    'the timeout period.').format(process.name, pid))
                status_indicator = definitions.STATUS_INDICATOR_NOT_RESPONDING
    self._processing_status.UpdateWorkerStatus(
        process.name, status_indicator, pid, used_memory, display_name,
        counters['number_of_consumed_sources'],
        counters['number_of_produced_sources'],
        counters['number_of_consumed_events'],
        counters['number_of_produced_events'],
        counters['number_of_consumed_event_tags'],
        counters['number_of_produced_event_tags'],
        counters['number_of_consumed_reports'],
        counters['number_of_produced_reports'],
        counters['number_of_consumed_warnings'],
        counters['number_of_produced_warnings'])
[ "def", "_UpdateProcessingStatus", "(", "self", ",", "pid", ",", "process_status", ",", "used_memory", ")", ":", "self", ".", "_RaiseIfNotRegistered", "(", "pid", ")", "if", "not", "process_status", ":", "return", "process", "=", "self", ".", "_processes_per_pid"...
37.549296
20.253521
def set(self, id, translation, domain='messages'):
    """
    Sets a message translation.
    """
    # NOTE(review): `unicode` implies this module targets Python 2 —
    # confirm before porting.
    for value in (id, translation, domain):
        assert isinstance(value, (str, unicode))
    self.add({id: translation}, domain)
[ "def", "set", "(", "self", ",", "id", ",", "translation", ",", "domain", "=", "'messages'", ")", ":", "assert", "isinstance", "(", "id", ",", "(", "str", ",", "unicode", ")", ")", "assert", "isinstance", "(", "translation", ",", "(", "str", ",", "uni...
33.111111
9.555556
def get_methods(self, node):
    """return visible methods"""
    def _is_visible_method(member):
        # Plain (non-property) functions that pass the visibility filter.
        return (isinstance(member, astroid.FunctionDef)
                and not decorated_with_property(member)
                and self.show_attr(member.name))

    visible = [member for member in node.values() if _is_visible_method(member)]
    return sorted(visible, key=lambda n: n.name)
[ "def", "get_methods", "(", "self", ",", "node", ")", ":", "methods", "=", "[", "m", "for", "m", "in", "node", ".", "values", "(", ")", "if", "isinstance", "(", "m", ",", "astroid", ".", "FunctionDef", ")", "and", "not", "decorated_with_property", "(", ...
32.4
12.6
def get_human_key(self, key):
    """Return the human key (aka Python identifier) of a key (aka database value)."""
    _missing = object()  # sentinel so a None human key is still valid
    match = next(
        (human for human, db_value in self._identifier_map.items()
         if db_value == key),
        _missing)
    if match is _missing:
        raise KeyError(key)
    return match
[ "def", "get_human_key", "(", "self", ",", "key", ")", ":", "for", "human_key", ",", "k", "in", "self", ".", "_identifier_map", ".", "items", "(", ")", ":", "if", "k", "==", "key", ":", "return", "human_key", "raise", "KeyError", "(", "key", ")" ]
43
10.833333
def expand_composites (properties):
    """ Expand all composite properties in the set
        so that all components are explicitly expressed.

        Raises FeatureConflict when expansion would assign conflicting
        values to the same non-free feature.
    """
    if __debug__:
        from .property import Property
        assert is_iterable_typed(properties, Property)
    # Features the caller specified explicitly; their values win over values
    # that appear only as a result of composite expansion.
    explicit_features = set(p.feature for p in properties)
    result = []
    # now expand composite features
    for p in properties:
        expanded = expand_composite(p)
        for x in expanded:
            if not x in result:
                f = x.feature
                if f.free:
                    # free features may carry several values simultaneously
                    result.append (x)
                elif not x in properties:  # x is the result of expansion
                    if not f in explicit_features:  # not explicitly-specified
                        # two different composite expansions must not disagree
                        # on the value of a non-free feature
                        if any(r.feature == f for r in result):
                            raise FeatureConflict(
                                "expansions of composite features result in "
                                "conflicting values for '%s'\nvalues: '%s'\none contributing composite property was '%s'" %
                                (f.name, [r.value for r in result if r.feature == f] + [x.value], p))
                        else:
                            result.append (x)
                elif any(r.feature == f for r in result):
                    # an explicitly-given value clashes with one already kept
                    raise FeatureConflict (
                        "explicitly-specified values of non-free feature '%s' conflict\n"
                        "existing values: '%s'\nvalue from expanding '%s': '%s'" %
                        (f, [r.value for r in result if r.feature == f], p, x.value))
                else:
                    result.append (x)
    return result
[ "def", "expand_composites", "(", "properties", ")", ":", "if", "__debug__", ":", "from", ".", "property", "import", "Property", "assert", "is_iterable_typed", "(", "properties", ",", "Property", ")", "explicit_features", "=", "set", "(", "p", ".", "feature", "...
42.473684
23.105263
def load_byte(buf, pos):
    """Read one unsigned byte from *buf* at *pos*; return (value, next_pos)."""
    next_pos = pos + 1
    if len(buf) < next_pos:
        raise BadRarFile('cannot load byte')
    value = S_BYTE.unpack_from(buf, pos)[0]
    return value, next_pos
[ "def", "load_byte", "(", "buf", ",", "pos", ")", ":", "end", "=", "pos", "+", "1", "if", "end", ">", "len", "(", "buf", ")", ":", "raise", "BadRarFile", "(", "'cannot load byte'", ")", "return", "S_BYTE", ".", "unpack_from", "(", "buf", ",", "pos", ...
30
11.333333
def reverse_timezone(self, query, timeout=DEFAULT_SENTINEL):
    """
    Find the timezone for a point in `query`.

    GeoNames always returns a timezone: if the point being queried
    doesn't have an assigned Olson timezone id, a ``pytz.FixedOffset``
    timezone is used to produce the :class:`geopy.timezone.Timezone`.

    .. versionadded:: 1.18.0

    :param query: The coordinates for which you want a timezone.
    :type query: :class:`geopy.point.Point`, list or tuple of (latitude,
        longitude), or string as "%(latitude)s, %(longitude)s"

    :param int timeout: Time, in seconds, to wait for the geocoding service
        to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
        exception. Set this only if you wish to override, on this call
        only, the value set during the geocoder's initialization.

    :rtype: :class:`geopy.timezone.Timezone`
    """
    # pytz is an optional dependency; fail early with a clear error if absent
    ensure_pytz_is_installed()
    try:
        # assumes _coerce_point_to_string yields "lat,lng" — the split fails
        # with ValueError for anything that isn't a coordinate pair
        lat, lng = self._coerce_point_to_string(query).split(',')
    except ValueError:
        raise ValueError("Must be a coordinate pair or Point")
    params = {
        "lat": lat,
        "lng": lng,
        "username": self.username,
    }
    url = "?".join((self.api_timezone, urlencode(params)))
    logger.debug("%s.reverse_timezone: %s", self.__class__.__name__, url)
    return self._parse_json_timezone(
        self._call_geocoder(url, timeout=timeout)
    )
[ "def", "reverse_timezone", "(", "self", ",", "query", ",", "timeout", "=", "DEFAULT_SENTINEL", ")", ":", "ensure_pytz_is_installed", "(", ")", "try", ":", "lat", ",", "lng", "=", "self", ".", "_coerce_point_to_string", "(", "query", ")", ".", "split", "(", ...
37.375
25.625
def list_topics(self, name):
    '''
    Retrieves the topics in the service namespace.

    name: Name of the service bus namespace.
    '''
    path = self._get_list_topics_path(name)
    response = self._perform_get(path, None)
    # convert each feed entry's XML payload into a TopicDescription
    to_topic = partial(
        _MinidomXmlToObject.convert_xml_to_azure_object,
        azure_type=TopicDescription)
    return _MinidomXmlToObject.convert_response_to_feeds(response, to_topic)
[ "def", "list_topics", "(", "self", ",", "name", ")", ":", "response", "=", "self", ".", "_perform_get", "(", "self", ".", "_get_list_topics_path", "(", "name", ")", ",", "None", ")", "return", "_MinidomXmlToObject", ".", "convert_response_to_feeds", "(", "resp...
27.388889
20.722222
def retry_ex(callback, times=3, cap=120000):
    """
    Retry a callback function if an exception is raised.

    :param function callback: The function to call
    :keyword int times: Number of times to retry on initial failure
    :keyword int cap: Maximum wait time in milliseconds
    :returns: The return value of the callback
    :raises Exception: If the callback raises an exception after exhausting
        all retries
    """
    for attempt in range(times + 1):
        if attempt > 0:
            # back off before each retry; retry_wait_time returns milliseconds
            time.sleep(retry_wait_time(attempt, cap) / 1000.0)
        try:
            return callback()
        except Exception:
            # A bare ``except:`` would also trap KeyboardInterrupt/SystemExit;
            # only retry on ordinary exceptions, re-raise on the last attempt.
            if attempt == times:
                raise
[ "def", "retry_ex", "(", "callback", ",", "times", "=", "3", ",", "cap", "=", "120000", ")", ":", "for", "attempt", "in", "range", "(", "times", "+", "1", ")", ":", "if", "attempt", ">", "0", ":", "time", ".", "sleep", "(", "retry_wait_time", "(", ...
34.473684
15.210526
def load_yaml_by_relpath(cls, directories, rel_path, log_debug=False):
    """Load a yaml file with path that is relative to one of given directories.

    Args:
        directories: list of directories to search, in priority order
        rel_path: relative path of the yaml file to load
        log_debug: log all messages as debug

    Returns:
        tuple (fullpath, loaded yaml structure) or None if not found
    """
    for d in directories:
        # auto-create missing directories under the user's home
        # (e.g. ~/.config caches) so later writes don't fail
        if d.startswith(os.path.expanduser('~')) and not os.path.exists(d):
            os.makedirs(d)
        possible_path = os.path.join(d, rel_path)
        if os.path.exists(possible_path):
            loaded = cls.load_yaml_by_path(possible_path, log_debug=log_debug)
            if loaded is not None:
                # reuse the already-parsed structure instead of loading the
                # file a second time (the old second load also dropped
                # the log_debug flag)
                return (possible_path, loaded)
    return None
[ "def", "load_yaml_by_relpath", "(", "cls", ",", "directories", ",", "rel_path", ",", "log_debug", "=", "False", ")", ":", "for", "d", "in", "directories", ":", "if", "d", ".", "startswith", "(", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", ")...
43.9
20.7
def participants(self):
    """agents + computers (i.e. all non-observers)"""
    ret = []
    for p in self.players:
        try:
            # A player qualifies if it is a computer or a non-observing
            # human.  The old two-``if`` version appended non-observing
            # computers twice; a single combined test appends each
            # participant exactly once.
            if p.isComputer or not p.isObserver:
                ret.append(p)
        # could cause an exception if player isn't a PlayerPreGame
        except AttributeError:
            pass
    return ret
[ "def", "participants", "(", "self", ")", ":", "ret", "=", "[", "]", "for", "p", "in", "self", ".", "players", ":", "try", ":", "if", "p", ".", "isComputer", ":", "ret", ".", "append", "(", "p", ")", "if", "not", "p", ".", "isObserver", ":", "re...
39.777778
19.666667
def _ParseIdentifierMappingRecord(
    self, parser_mediator, table_name, esedb_record):
    """Extracts an identifier mapping from a SruDbIdMapTable record.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      table_name (str): name of the table the record is stored in.
      esedb_record (pyesedb.record): record.

    Returns:
      tuple[int, str]: numeric identifier and its string representation or
          None, None if no identifier mapping can be retrieved from the record.
    """
    record_values = self._GetRecordValues(
        parser_mediator, table_name, esedb_record)

    # Every mapping needs a numeric index to key on.
    identifier = record_values.get('IdIndex', None)
    if identifier is None:
        parser_mediator.ProduceExtractionWarning(
            'IdIndex value missing from table: SruDbIdMapTable')
        return None, None

    identifier_type = record_values.get('IdType', None)
    if identifier_type not in self._SUPPORTED_IDENTIFIER_TYPES:
        parser_mediator.ProduceExtractionWarning(
            'unsupported IdType value: {0!s} in table: SruDbIdMapTable'.format(
                identifier_type))
        return None, None

    mapped_value = record_values.get('IdBlob', None)
    if mapped_value is None:
        parser_mediator.ProduceExtractionWarning(
            'IdBlob value missing from table: SruDbIdMapTable')
        return None, None

    if identifier_type == 3:
        # Type 3 blobs hold a Windows NT security identifier (SID) in its
        # binary form; render it as the usual S-1-... string.
        try:
            fwnt_identifier = pyfwnt.security_identifier()
            fwnt_identifier.copy_from_byte_stream(mapped_value)
            mapped_value = fwnt_identifier.get_string()
        except IOError:
            parser_mediator.ProduceExtractionWarning(
                'unable to decode IdBlob value as Windows NT security identifier')
            return None, None
    else:
        # Other supported types store a UTF-16 little-endian string;
        # strip the trailing NUL terminator.
        try:
            mapped_value = mapped_value.decode('utf-16le').rstrip('\0')
        except UnicodeDecodeError:
            parser_mediator.ProduceExtractionWarning(
                'unable to decode IdBlob value as UTF-16 little-endian string')
            return None, None

    return identifier, mapped_value
[ "def", "_ParseIdentifierMappingRecord", "(", "self", ",", "parser_mediator", ",", "table_name", ",", "esedb_record", ")", ":", "record_values", "=", "self", ".", "_GetRecordValues", "(", "parser_mediator", ",", "table_name", ",", "esedb_record", ")", "identifier", "...
37.6
20.818182
def pauli_kraus_map(probabilities):
    r"""
    Generate the Kraus operators corresponding to a pauli channel.

    :params list|floats probabilities: The 4^num_qubits list of probabilities
        specifying the desired pauli channel. There should be either 4 or 16
        probabilities specified in the order I, X, Y, Z for 1 qubit or
        II, IX, IY, IZ, XI, XX, XY, etc for 2 qubits.  The probabilities must
        sum to one (within a 1e-3 tolerance).
    :return: A list of the 4^num_qubits Kraus operators (sqrt(p_i) * P_i)
        that parametrize the map.
    :rtype: list
    """
    num_probs = len(probabilities)
    if num_probs not in [4, 16]:
        raise ValueError("Currently we only support one or two qubits, "
                         "so the provided list of probabilities must have length 4 or 16.")
    if not np.allclose(sum(probabilities), 1.0, atol=1e-3):
        raise ValueError("Probabilities must sum to one.")

    identity = np.eye(2)
    sigma_x = np.array([[0, 1], [1, 0]])
    sigma_y = np.array([[0, -1j], [1j, 0]])
    sigma_z = np.array([[1, 0], [0, -1]])
    single_qubit_paulis = [identity, sigma_x, sigma_y, sigma_z]

    # 4 probabilities -> single-qubit paulis; 16 -> all two-qubit tensor
    # products, which np.kron produces in the documented II, IX, ... order.
    if num_probs == 4:
        operators = single_qubit_paulis
    else:
        operators = np.kron(single_qubit_paulis, single_qubit_paulis)

    weights = np.sqrt(probabilities)
    return [weight * op for weight, op in zip(weights, operators)]
[ "def", "pauli_kraus_map", "(", "probabilities", ")", ":", "if", "len", "(", "probabilities", ")", "not", "in", "[", "4", ",", "16", "]", ":", "raise", "ValueError", "(", "\"Currently we only support one or two qubits, \"", "\"so the provided list of probabilities must h...
42.84375
28.5
def generateKey(password, bits=32):
    """
    Generates a new encryption key based on the inputted password.

    :param      password | <str>
                bits     | <int> | 16 or 32 bits

    :return     <str>
    """
    # 32 bits -> SHA-256 digest, 16 bits -> MD5 digest
    hashers = {32: hashlib.sha256, 16: hashlib.md5}
    if bits not in hashers:
        raise StandardError('Invalid hash type')
    return hashers[bits](password).digest()
[ "def", "generateKey", "(", "password", ",", "bits", "=", "32", ")", ":", "if", "bits", "==", "32", ":", "hasher", "=", "hashlib", ".", "sha256", "elif", "bits", "==", "16", ":", "hasher", "=", "hashlib", ".", "md5", "else", ":", "raise", "StandardErr...
24.764706
16.764706
def seconds_to_hms(input_seconds):
    """Convert seconds to a human-readable (hours, minutes, 'SS') triple."""
    total_minutes, secs = divmod(input_seconds, 60)
    hrs, mins = divmod(total_minutes, 60)
    # hours/minutes are ints; seconds stay a zero-padded string for display
    return int(hrs), int(mins), str(int(secs)).zfill(2)
[ "def", "seconds_to_hms", "(", "input_seconds", ")", ":", "minutes", ",", "seconds", "=", "divmod", "(", "input_seconds", ",", "60", ")", "hours", ",", "minutes", "=", "divmod", "(", "minutes", ",", "60", ")", "hours", "=", "int", "(", "hours", ")", "mi...
29.3
13.2
def setUnacknowledgedPreKeyMessage(self, preKeyId, signedPreKeyId, baseKey):
    """
    Record the pending pre-key material for a not-yet-acknowledged session.

    :type preKeyId: int
    :type signedPreKeyId: int
    :type baseKey: ECPublicKey
    """
    pending = self.sessionStructure.pendingPreKey
    pending.signedPreKeyId = signedPreKeyId
    pending.baseKey = baseKey.serialize()
    # the one-time pre-key id is optional; only store it when supplied
    if preKeyId is not None:
        pending.preKeyId = preKeyId
[ "def", "setUnacknowledgedPreKeyMessage", "(", "self", ",", "preKeyId", ",", "signedPreKeyId", ",", "baseKey", ")", ":", "self", ".", "sessionStructure", ".", "pendingPreKey", ".", "signedPreKeyId", "=", "signedPreKeyId", "self", ".", "sessionStructure", ".", "pendin...
39.909091
18.636364
def frames_iter(socket, tty):
    """
    Return a generator of frames read from socket.

    A frame is a tuple where the first item is the stream number and the
    second item is a chunk of data.  With a tty, docker multiplexes all
    streams into stdout, so every chunk is tagged STDOUT.
    """
    if not tty:
        return frames_iter_no_tty(socket)
    return ((STDOUT, frame) for frame in frames_iter_tty(socket))
[ "def", "frames_iter", "(", "socket", ",", "tty", ")", ":", "if", "tty", ":", "return", "(", "(", "STDOUT", ",", "frame", ")", "for", "frame", "in", "frames_iter_tty", "(", "socket", ")", ")", "else", ":", "return", "frames_iter_no_tty", "(", "socket", ...
34.666667
23.5
def process_item(self, item, spider):
    """
    Store item data in DB.

    First determine if a version of the article already exists,
    if so then 'migrate' the older version to the archive table.
    Second store the new article in the current version table.
    """
    # Defaults for a brand-new article (no prior version).
    version = 1
    ancestor = 0

    # Search the CurrentVersion table for an old version of the article
    try:
        self.cursor.execute(self.compare_versions, (item['url'],))
    except (pymysql.err.OperationalError, pymysql.ProgrammingError, pymysql.InternalError,
            pymysql.IntegrityError, TypeError) as error:
        self.log.error("Something went wrong in query: %s", error)

    # Save the result of the query. Must be done before the add,
    # otherwise the result will be overwritten in the buffer
    old_version = self.cursor.fetchone()

    if old_version is not None:
        # Capture the old row by position so it can be archived later.
        old_version_list = {
            'db_id': old_version[0],
            'local_path': old_version[1],
            'modified_date': old_version[2],
            'download_date': old_version[3],
            'source_domain': old_version[4],
            'url': old_version[5],
            'html_title': old_version[6],
            'ancestor': old_version[7],
            'descendant': old_version[8],
            'version': old_version[9],
            'rss_title': old_version[10],
        }

        # Update the version number and the ancestor variable for later references
        version = (old_version[9] + 1)
        ancestor = old_version[0]

    # Add the new version of the article to the CurrentVersion table
    current_version_list = {
        'local_path': item['local_path'],
        'modified_date': item['modified_date'],
        'download_date': item['download_date'],
        'source_domain': item['source_domain'],
        'url': item['url'],
        'html_title': item['html_title'],
        'ancestor': ancestor,
        'descendant': 0,
        'version': version,
        'rss_title': item['rss_title'],
    }

    try:
        self.cursor.execute(self.insert_current, current_version_list)
        self.conn.commit()
        self.log.info("Article inserted into the database.")
    except (pymysql.err.OperationalError, pymysql.ProgrammingError, pymysql.InternalError,
            pymysql.IntegrityError, TypeError) as error:
        self.log.error("Something went wrong in commit: %s", error)

    # Move the old version from the CurrentVersion table to the
    # ArchiveVersions table.
    if old_version is not None:
        # Link the archived row to the freshly inserted row's id.
        try:
            old_version_list['descendant'] = self.cursor.lastrowid
        except (pymysql.err.OperationalError, pymysql.ProgrammingError, pymysql.InternalError,
                pymysql.IntegrityError, TypeError) as error:
            self.log.error("Something went wrong in id query: %s", error)

        # Delete the old version of the article from the CurrentVersion table
        try:
            self.cursor.execute(self.delete_from_current, old_version_list['db_id'])
            self.conn.commit()
        except (pymysql.err.OperationalError, pymysql.ProgrammingError, pymysql.InternalError,
                pymysql.IntegrityError, TypeError) as error:
            self.log.error("Something went wrong in delete: %s", error)

        # Add the old version to the ArchiveVersion table
        try:
            self.cursor.execute(self.insert_archive, old_version_list)
            self.conn.commit()
            self.log.info("Moved old version of an article to the archive.")
        except (pymysql.err.OperationalError, pymysql.ProgrammingError, pymysql.InternalError,
                pymysql.IntegrityError, TypeError) as error:
            self.log.error("Something went wrong in archive: %s", error)

    return item
[ "def", "process_item", "(", "self", ",", "item", ",", "spider", ")", ":", "# Set defaults", "version", "=", "1", "ancestor", "=", "0", "# Search the CurrentVersion table for an old version of the article", "try", ":", "self", ".", "cursor", ".", "execute", "(", "s...
45.11236
22.685393
def save(self, *args, **kwargs):
    '''Make sure that the term is valid.

    If changed, create a QualifiedDublinCoreElementHistory object and
    save it before persisting the new values.
    '''
    # Only plain Dublin Core Elements are allowed as terms.
    if not self.term in self.DCELEMENT_CODE_MAP:
        raise ValueError('Extended Dublin Core Terms such as '+self.DCTERM_CODE_MAP[self.term]+' are not allowed. Please use only Dublin Core Elements')
    # To tell if the object changed, retrieve the stored row and compare
    # field by field.
    changed = False
    if self.pk:  # existing object
        db_self = QualifiedDublinCoreElement.objects.get(pk=self.pk)
        # compare values, if changed set changed!
        if self.term != db_self.term:
            # the term itself is immutable once saved
            raise ValueError('Can not change DC element')
        if self.content != db_self.content:
            changed = True
        if self.qualifier != db_self.qualifier:
            changed = True
        if changed:
            # snapshot the previous values into the history table
            hist = QualifiedDublinCoreElementHistory()
            hist.qdce = self
            hist.object_id = db_self.object_id
            hist.content_type = db_self.content_type
            hist.term = db_self.term
            hist.qualifier = db_self.qualifier
            hist.content = db_self.content
            hist.save()
    super(QualifiedDublinCoreElement, self).save(*args, **kwargs)
    # let the related object refresh its DC XML file, if it supports that
    obj = self.content_object
    if hasattr(obj, '_save_dc_xml_file'):
        obj._save_dc_xml_file()
[ "def", "save", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "term", "in", "self", ".", "DCELEMENT_CODE_MAP", ":", "raise", "ValueError", "(", "'Extended Dublin Core Terms such as '", "+", "self", ".", "DCTERM_...
48.433333
16.966667
def _get_interleague_fl(cls, home_team_lg, away_team_lg):
    """
    get inter league flg

    :param home_team_lg: home team league
    :param away_team_lg: away team league
    :return: inter league flg(T or F or U)
    """
    leagues = (home_team_lg, away_team_lg)
    # unknown league on either side makes the flag unknown too
    if MlbamConst.UNKNOWN_SHORT in leagues:
        return MlbamConst.UNKNOWN_SHORT
    if home_team_lg != away_team_lg:
        return MlbamConst.FLG_TRUE
    return MlbamConst.FLG_FALSE
[ "def", "_get_interleague_fl", "(", "cls", ",", "home_team_lg", ",", "away_team_lg", ")", ":", "if", "(", "home_team_lg", "==", "MlbamConst", ".", "UNKNOWN_SHORT", ")", "or", "(", "away_team_lg", "==", "MlbamConst", ".", "UNKNOWN_SHORT", ")", ":", "return", "Ml...
41.75
9.75
def explained_variance_visualizer(X, y=None, ax=None, scale=True, center=True, colormap=palettes.DEFAULT_SEQUENCE, **kwargs): """Produce a plot of the explained variance produced by a dimensionality reduction algorithm using n=1 to n=n_components dimensions. This is a single plot to help identify the best trade off between number of dimensions and amount of information retained within the data. Parameters ---------- X : ndarray or DataFrame of shape n x m A matrix of n rows with m features y : ndarray or Series of length n An array or Series of target or class values ax : matplotlib Axes, default: None The aces to plot the figure on scale : bool, default: True Boolean that indicates if the values of X should be scaled. colormap : string or cmap, default: None optional string or matplotlib cmap to colorize lines Use either color to colorize the lines on a per class basis or colormap to color them on a continuous scale. kwargs : dict Keyword arguments that are passed to the base class and may influence the visualization as defined in other Visualizers. Examples -------- >>> from sklearn import datasets >>> bc = datasets.load_breast_cancer() >>> X = bc = bc.data >>> explained_variance_visualizer(X, scale=True, center=True, colormap='RdBu_r') """ # Instantiate the visualizer visualizer = ExplainedVariance(X=X) # Fit and transform the visualizer (calls draw) visualizer.fit(X, y, **kwargs) visualizer.transform(X) # Return the axes object on the visualizer return visualizer.poof()
[ "def", "explained_variance_visualizer", "(", "X", ",", "y", "=", "None", ",", "ax", "=", "None", ",", "scale", "=", "True", ",", "center", "=", "True", ",", "colormap", "=", "palettes", ".", "DEFAULT_SEQUENCE", ",", "*", "*", "kwargs", ")", ":", "# Ins...
38.204082
21.102041
def lows(self, assets, dt):
    """
    The low field's aggregation returns the smallest low seen between
    the market open and the current dt.
    If there has been no data on or before the `dt` the low is `nan`.

    Returns
    -------
    np.array with dtype=float64, in order of assets parameter.
    """
    # entries caches (last_visited_dt, running_min) per asset so repeated
    # calls within a session only read the minutes not yet seen.
    market_open, prev_dt, dt_value, entries = self._prelude(dt, 'low')

    lows = []
    session_label = self._trading_calendar.minute_to_session_label(dt)

    for asset in assets:
        # assets not alive this session have no meaningful low
        if not asset.is_alive_for_session(session_label):
            lows.append(np.NaN)
            continue

        if prev_dt is None:
            # first minute of the session: the single value is the minimum
            val = self._minute_reader.get_value(asset, dt, 'low')
            entries[asset] = (dt_value, val)
            lows.append(val)
            continue
        else:
            try:
                last_visited_dt, last_min = entries[asset]
                if last_visited_dt == dt_value:
                    # cache is current for this exact minute; reuse it
                    lows.append(last_min)
                    continue
                elif last_visited_dt == prev_dt:
                    # cache is one minute stale; fold in just the new minute
                    curr_val = self._minute_reader.get_value(
                        asset, dt, 'low')
                    val = np.nanmin([last_min, curr_val])
                    entries[asset] = (dt_value, val)
                    lows.append(val)
                    continue
                else:
                    # cache is several minutes stale; read the missing window
                    # starting one minute after the last visited minute
                    after_last = pd.Timestamp(
                        last_visited_dt + self._one_min, tz='UTC')
                    window = self._minute_reader.load_raw_arrays(
                        ['low'],
                        after_last,
                        dt,
                        [asset],
                    )[0].T
                    val = np.nanmin(np.append(window, last_min))
                    entries[asset] = (dt_value, val)
                    lows.append(val)
                    continue
            except KeyError:
                # no cache entry yet: scan the whole session so far
                window = self._minute_reader.load_raw_arrays(
                    ['low'],
                    market_open,
                    dt,
                    [asset],
                )[0].T
                val = np.nanmin(window)
                entries[asset] = (dt_value, val)
                lows.append(val)
                continue
    return np.array(lows)
[ "def", "lows", "(", "self", ",", "assets", ",", "dt", ")", ":", "market_open", ",", "prev_dt", ",", "dt_value", ",", "entries", "=", "self", ".", "_prelude", "(", "dt", ",", "'low'", ")", "lows", "=", "[", "]", "session_label", "=", "self", ".", "_...
38.873016
15.793651
def set_trace(*args, **kwargs):
    """Call pdb.set_trace, making sure it receives the unwrapped stdout.

    This is so we don't keep drawing progress bars over debugger output.
    """
    # The capture plugin replaces sys.stdout with an object that has no
    # ``stream`` attribute, in which case we fall back to None.
    out = getattr(sys.stdout, 'stream', None)
    # Python 2.5 can't put an explicit kwarg and **kwargs in the same
    # function call, so smuggle stdout in through kwargs.
    kwargs['stdout'] = out
    debugger = pdb.Pdb(*args, **kwargs)
    # Ordinarily (and in a silly fashion), pdb refuses to use raw_input() if
    # you pass it a stream on instantiation. Fix that:
    debugger.use_rawinput = True
    debugger.set_trace(sys._getframe().f_back)
[ "def", "set_trace", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# There's no stream attr if capture plugin is enabled:", "out", "=", "sys", ".", "stdout", ".", "stream", "if", "hasattr", "(", "sys", ".", "stdout", ",", "'stream'", ")", "else", "Non...
35.368421
22.894737
def _build_parser(self):
    """Build command line argument parser.

    Returns:
        :class:`argparse.ArgumentParser`: the command line argument parser.
        You probably won't need to use it directly. To parse command line
        arguments and update the :class:`ConfigurationManager` instance
        accordingly, use the :meth:`parse_args` method.
    """
    # '+' is accepted as an option prefix alongside '-'.
    main_parser = argparse.ArgumentParser(description=self.common.help,
                                          prefix_chars='-+')
    # options that apply when no subcommand is given
    self._add_options_to_parser(self._opt_bare, main_parser)
    main_parser.set_defaults(**self.common.defaults)
    if self.bare is not None:
        main_parser.set_defaults(**self.bare.defaults)

    # one sub-parser per configured subcommand; the chosen name ends up in
    # the 'loam_sub_name' attribute of the parsed namespace
    subparsers = main_parser.add_subparsers(dest='loam_sub_name')
    for cmd_name, meta in self.subcmds.items():
        kwargs = {'prefix_chars': '+-', 'help': meta.help}
        dummy_parser = subparsers.add_parser(cmd_name, **kwargs)
        self._add_options_to_parser(self._opt_cmds[cmd_name], dummy_parser)
        dummy_parser.set_defaults(**meta.defaults)

    return main_parser
[ "def", "_build_parser", "(", "self", ")", ":", "main_parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "self", ".", "common", ".", "help", ",", "prefix_chars", "=", "'-+'", ")", "self", ".", "_add_options_to_parser", "(", "self", ".",...
45.84
24.44
def generate_event_set(ucerf, background_sids, src_filter, ses_idx, seed):
    """
    Generates the event set corresponding to a particular branch.

    Returns a pair (ruptures, rupture_occ) where rupture_occ holds the
    sampled number of occurrences for each rupture.
    """
    # rupture serial numbers are unique per stochastic event set
    serial = seed + ses_idx * TWO16
    # get rates from file
    with h5py.File(ucerf.source_file, 'r') as hdf5:
        occurrences = ucerf.tom.sample_number_of_occurrences(ucerf.rate, seed)
        indices, = numpy.where(occurrences)
        logging.debug(
            'Considering "%s", %d ruptures', ucerf.source_id, len(indices))

        # get ruptures from the indices of sampled (non-zero) occurrences
        ruptures = []
        rupture_occ = []
        for iloc, n_occ in zip(indices, occurrences[indices]):
            ucerf_rup = ucerf.get_ucerf_rupture(iloc, src_filter)
            if ucerf_rup:
                ucerf_rup.serial = serial
                serial += 1
                ruptures.append(ucerf_rup)
                rupture_occ.append(n_occ)

        # sample background sources
        background_ruptures, background_n_occ = sample_background_model(
            hdf5, ucerf.idx_set["grid_key"], ucerf.tom, seed,
            background_sids, ucerf.min_mag, ucerf.npd, ucerf.hdd,
            ucerf.usd, ucerf.lsd, ucerf.msr, ucerf.aspect,
            ucerf.tectonic_region_type)
        for i, brup in enumerate(background_ruptures):
            brup.serial = serial
            serial += 1
            ruptures.append(brup)
        rupture_occ.extend(background_n_occ)

    assert len(ruptures) < TWO16, len(ruptures)  # < 2^16 ruptures per SES
    return ruptures, rupture_occ
[ "def", "generate_event_set", "(", "ucerf", ",", "background_sids", ",", "src_filter", ",", "ses_idx", ",", "seed", ")", ":", "serial", "=", "seed", "+", "ses_idx", "*", "TWO16", "# get rates from file", "with", "h5py", ".", "File", "(", "ucerf", ".", "source...
41.083333
17.75
def set_widgets(self):
    """Set widgets on the extra keywords tab."""
    self.clear()
    self.description_label.setText(
        'In this step you can set some extra keywords for the layer. This '
        'keywords can be used for creating richer reporting or map.')
    # build one (check box, input widget) pair per extra keyword declared
    # for the selected subcategory
    subcategory = self.parent.step_kw_subcategory.selected_subcategory()
    extra_keywords = subcategory.get('extra_keywords')
    for extra_keyword in extra_keywords:
        check_box, input_widget = extra_keywords_to_widgets(extra_keyword)
        self.widgets_dict[extra_keyword['key']] = [
            check_box, input_widget, extra_keyword
        ]
    # Add to layout: check box in column 0, input widget in column 1
    index = 0
    for key, widgets in list(self.widgets_dict.items()):
        self.extra_keywords_layout.addWidget(widgets[0], index, 0)
        self.extra_keywords_layout.addWidget(widgets[1], index, 1)
        index += 1
    # pre-populate widgets from keywords already stored on the layer
    self.set_existing_extra_keywords()
[ "def", "set_widgets", "(", "self", ")", ":", "self", ".", "clear", "(", ")", "self", ".", "description_label", ".", "setText", "(", "'In this step you can set some extra keywords for the layer. This '", "'keywords can be used for creating richer reporting or map.'", ")", "sub...
41.541667
21.041667
def undeploy(self, id_networkv4):
    """Remove deployment of network in equipments and set column
    'active = 0' in table redeipv4.

    :param id_networkv4: ID for NetworkIPv4
    :return: Equipments configuration output
    """
    uri = 'api/networkv4/%s/equipments/' % id_networkv4
    # issue the DELETE through the base API client
    return super(ApiNetworkIPv4, self).delete(uri)
[ "def", "undeploy", "(", "self", ",", "id_networkv4", ")", ":", "uri", "=", "'api/networkv4/%s/equipments/'", "%", "id_networkv4", "return", "super", "(", "ApiNetworkIPv4", ",", "self", ")", ".", "delete", "(", "uri", ")" ]
32.181818
19.545455
def _handle_raw_packet(self, raw_packet):
    """Parse incoming packet.

    The byte at offset 1 identifies the packet type: 0x1f carries the
    device date/time, 0x0c carries the 16 switch states; anything else
    is logged as unknown.
    """
    if raw_packet[1:2] == b'\x1f':
        # date/time packet: plain byte fields, logged for diagnostics only
        self._reset_timeout()
        year = raw_packet[2]
        month = raw_packet[3]
        day = raw_packet[4]
        hour = raw_packet[5]
        minute = raw_packet[6]
        sec = raw_packet[7]
        week = raw_packet[8]
        self.logger.debug(
            'received date: Year: %s, Month: %s, Day: %s, Hour: %s, '
            'Minute: %s, Sec: %s, Week %s',
            year, month, day, hour, minute, sec, week)
    elif raw_packet[1:2] == b'\x0c':
        # switch-state packet: one byte per switch, 0x01 = on, 0x02 = off;
        # switches are keyed by their hex string id
        states = {}
        changes = []
        for switch in range(0, 16):
            if raw_packet[2+switch:3+switch] == b'\x01':
                states[format(switch, 'x')] = True
                # record a change only if the cached state differs
                if (self.client.states.get(format(switch, 'x'), None)
                        is not True):
                    changes.append(format(switch, 'x'))
                self.client.states[format(switch, 'x')] = True
            elif raw_packet[2+switch:3+switch] == b'\x02':
                states[format(switch, 'x')] = False
                if (self.client.states.get(format(switch, 'x'), None)
                        is not False):
                    changes.append(format(switch, 'x'))
                self.client.states[format(switch, 'x')] = False

        # notify subscribers of every switch whose state changed
        for switch in changes:
            for status_cb in self.client.status_callbacks.get(switch, []):
                status_cb(states[switch])

        self.logger.debug(states)
        if self.client.in_transaction:
            # this packet answers an in-flight request: resolve the pending
            # transaction and any futures waiting on a status update
            self.client.in_transaction = False
            self.client.active_packet = False
            self.client.active_transaction.set_result(states)
            while self.client.status_waiters:
                waiter = self.client.status_waiters.popleft()
                waiter.set_result(states)
            if self.client.waiters:
                # more queued commands: send the next one
                self.send_packet()
            else:
                self._cmd_timeout.cancel()
        elif self._cmd_timeout:
            self._cmd_timeout.cancel()
    else:
        self.logger.warning('received unknown packet: %s',
                            binascii.hexlify(raw_packet))
[ "def", "_handle_raw_packet", "(", "self", ",", "raw_packet", ")", ":", "if", "raw_packet", "[", "1", ":", "2", "]", "==", "b'\\x1f'", ":", "self", ".", "_reset_timeout", "(", ")", "year", "=", "raw_packet", "[", "2", "]", "month", "=", "raw_packet", "[...
45.921569
12.705882
def _run_query(self, query):
    """
    Run one query against BigQuery and return the result.

    :param query: the query to run
    :type query: str
    :return: list of per-row response dicts (key => value)
    :rtype: ``list``
    """
    query_request = self.service.jobs()
    logger.debug('Running query: %s', query)
    start = datetime.now()
    resp = query_request.query(
        projectId=self.project_id, body={'query': query}
    ).execute()
    duration = datetime.now() - start
    logger.debug('Query response (in %s): %s', duration, resp)
    # mismatches below are logged but processing continues with whatever
    # rows the response contains
    if not resp['jobComplete']:
        logger.error('Error: query reported job not complete!')
    if int(resp['totalRows']) == 0:
        return []
    if int(resp['totalRows']) != len(resp['rows']):
        logger.error('Error: query reported %s total rows, but only '
                     'returned %d', resp['totalRows'], len(resp['rows']))
    # zip each row's positional values with the schema's field names to
    # produce one dict per row
    data = []
    fields = [f['name'] for f in resp['schema']['fields']]
    for row in resp['rows']:
        d = {}
        for idx, val in enumerate(row['f']):
            d[fields[idx]] = val['v']
        data.append(d)
    return data
[ "def", "_run_query", "(", "self", ",", "query", ")", ":", "query_request", "=", "self", ".", "service", ".", "jobs", "(", ")", "logger", ".", "debug", "(", "'Running query: %s'", ",", "query", ")", "start", "=", "datetime", ".", "now", "(", ")", "resp"...
38.46875
14.96875
def new(n, prefix=None):
    """Return a node safe to attach as a child (lib2to3 needs unique objects)."""
    if isinstance(n, Leaf):
        # leaves are cheap — hand back a fresh copy, optionally re-prefixed
        new_prefix = n.prefix if prefix is None else prefix
        return Leaf(n.type, n.value, prefix=new_prefix)
    # Hacky: assume complex nodes are just being reused once from the
    # original AST, so detaching from the old parent is sufficient.
    n.parent = None
    if prefix is not None:
        n.prefix = prefix
    return n
[ "def", "new", "(", "n", ",", "prefix", "=", "None", ")", ":", "if", "isinstance", "(", "n", ",", "Leaf", ")", ":", "return", "Leaf", "(", "n", ".", "type", ",", "n", ".", "value", ",", "prefix", "=", "n", ".", "prefix", "if", "prefix", "is", ...
31.25
24.25
def write_compounds(self, stream, compounds, properties=None):
    """Write iterable of compounds as YAML object to stream.

    Args:
        stream: File-like object.
        compounds: Iterable of compound entries.
        properties: Set of compound properties to output (or None to output
            all).
    """
    # delegate to the generic entry writer with the compound converter
    self._write_entries(
        stream, compounds, self.convert_compound_entry, properties)
[ "def", "write_compounds", "(", "self", ",", "stream", ",", "compounds", ",", "properties", "=", "None", ")", ":", "self", ".", "_write_entries", "(", "stream", ",", "compounds", ",", "self", ".", "convert_compound_entry", ",", "properties", ")" ]
39.818182
18.636364
def check_recommended_attributes(self, dataset): ''' Feature type specific check of global recommended attributes. :param netCDF4.Dataset dataset: An open netCDF dataset ''' results = [] recommended_ctx = TestCtx(BaseCheck.MEDIUM, 'Recommended global attributes') # Check time_coverage_duration and resolution for attr in ['time_coverage_duration', 'time_coverage_resolution']: attr_value = getattr(dataset, attr, '') try: parse_duration(attr_value) recommended_ctx.assert_true(True, '') # Score it True! except Exception: recommended_ctx.assert_true(False, '{} should exist and be ISO-8601 format (example: PT1M30S), currently: {}'.format(attr, attr_value)) results.append(recommended_ctx.to_result()) return results
[ "def", "check_recommended_attributes", "(", "self", ",", "dataset", ")", ":", "results", "=", "[", "]", "recommended_ctx", "=", "TestCtx", "(", "BaseCheck", ".", "MEDIUM", ",", "'Recommended global attributes'", ")", "# Check time_coverage_duration and resolution", "for...
48.111111
27.111111
def stop(self): """Stops the adb logcat service.""" if not self._adb_logcat_process: return try: utils.stop_standing_subprocess(self._adb_logcat_process) except: self._ad.log.exception('Failed to stop adb logcat.') self._adb_logcat_process = None
[ "def", "stop", "(", "self", ")", ":", "if", "not", "self", ".", "_adb_logcat_process", ":", "return", "try", ":", "utils", ".", "stop_standing_subprocess", "(", "self", ".", "_adb_logcat_process", ")", "except", ":", "self", ".", "_ad", ".", "log", ".", ...
34.888889
17
def predict(self, x): """ Predict values for a single data point or an RDD of points using the model trained. """ if isinstance(x, RDD): return x.map(lambda v: self.predict(v)) x = _convert_to_vector(x) if self.numClasses == 2: margin = self.weights.dot(x) + self._intercept if margin > 0: prob = 1 / (1 + exp(-margin)) else: exp_margin = exp(margin) prob = exp_margin / (1 + exp_margin) if self._threshold is None: return prob else: return 1 if prob > self._threshold else 0 else: best_class = 0 max_margin = 0.0 if x.size + 1 == self._dataWithBiasSize: for i in range(0, self._numClasses - 1): margin = x.dot(self._weightsMatrix[i][0:x.size]) + \ self._weightsMatrix[i][x.size] if margin > max_margin: max_margin = margin best_class = i + 1 else: for i in range(0, self._numClasses - 1): margin = x.dot(self._weightsMatrix[i]) if margin > max_margin: max_margin = margin best_class = i + 1 return best_class
[ "def", "predict", "(", "self", ",", "x", ")", ":", "if", "isinstance", "(", "x", ",", "RDD", ")", ":", "return", "x", ".", "map", "(", "lambda", "v", ":", "self", ".", "predict", "(", "v", ")", ")", "x", "=", "_convert_to_vector", "(", "x", ")"...
37.351351
12.594595
def _check_endings(self): """Check begin/end of slug, raises Error if malformed.""" if self.slug.startswith("/") and self.slug.endswith("/"): raise InvalidSlugError( _("Invalid slug. Did you mean {}, without the leading and trailing slashes?".format(self.slug.strip("/")))) elif self.slug.startswith("/"): raise InvalidSlugError( _("Invalid slug. Did you mean {}, without the leading slash?".format(self.slug.strip("/")))) elif self.slug.endswith("/"): raise InvalidSlugError( _("Invalid slug. Did you mean {}, without the trailing slash?".format(self.slug.strip("/"))))
[ "def", "_check_endings", "(", "self", ")", ":", "if", "self", ".", "slug", ".", "startswith", "(", "\"/\"", ")", "and", "self", ".", "slug", ".", "endswith", "(", "\"/\"", ")", ":", "raise", "InvalidSlugError", "(", "_", "(", "\"Invalid slug. Did you mean ...
61.454545
25.363636
def solve(self, verbose=False, allow_brute_force=True): """Solve the Sudoku. :param verbose: If the steps used for solving the Sudoku should be printed. Default is `False` :type verbose: bool :param allow_brute_force: If Dancing Links Brute Force method should be used if necessary. Default is `True` :type allow_brute_force: bool """ while not self.is_solved: # Update possibles arrays. self._update() # See if any position can be singled out. singles_found = False or self._fill_naked_singles() or self._fill_hidden_singles() # If singles_found is False, then no new uniquely defined cells were found # and this solver cannot solve the Sudoku. We either use brute force or throw an error. # Else, if singles_found is True, run another iteration to see if new singles have shown up. if not singles_found: if allow_brute_force: solution = None try: dlxs = DancingLinksSolver(copy.deepcopy(self._matrix)) solutions = dlxs.solve() solution = next(solutions) more_solutions = next(solutions) except StopIteration as e: if solution is not None: self._matrix = solution else: raise SudokuHasNoSolutionError("Dancing Links solver could not find any solution.") except Exception as e: raise SudokuHasNoSolutionError("Brute Force method failed.") else: # We end up here if the second `next(solutions)` works, # i.e. if multiple solutions exist. raise SudokuHasMultipleSolutionsError("This Sudoku has multiple solutions!") self.solution_steps.append("BRUTE FORCE - Dancing Links") break else: print(self) raise SudokuTooDifficultError("This Sudoku requires more advanced methods!") if verbose: print("Sudoku solved in {0} iterations!\n{1}".format(len(self.solution_steps), self)) for step in self.solution_steps: print(step)
[ "def", "solve", "(", "self", ",", "verbose", "=", "False", ",", "allow_brute_force", "=", "True", ")", ":", "while", "not", "self", ".", "is_solved", ":", "# Update possibles arrays.", "self", ".", "_update", "(", ")", "# See if any position can be singled out.", ...
49.734694
24.163265
def Extract_Checkpoints(self): ''' Extract the checkpoints and store in self.tracking_data ''' # Make sure page is available if self.page is None: raise Exception("The HTML data was not fetched due to some reasons") soup = BeautifulSoup(self.page,'html.parser') invalid_tracking_no = soup.find('span',{'id':'ctl00_ContentPlaceHolder1_lblsMsg','class':'ErrorMessage','style':'font-family:Calibri;font-size:9pt;font-weight:bold;','name':'lblsMsg'}) if invalid_tracking_no is not None: raise ValueError('The Tracking number is invalid') # Assign the current status of the shipment if 'Delivered' in self.page: self.status = 'C' else: # The shipment is in Transit self.status = 'T' # Checkpoints extraction begins here rows = soup.findAll('tr',{'class':'gridItem'}) + soup.findAll('tr',{'class':'gridAltItem'}) for row in rows: ''' Each row will have 4 columns: Date--Time--Status--Location Merge column one and two and format it. Append to tracking_data list ''' row_cells = row.findAll('td') date = row_cells[0].string.strip() time = row_cells[1].string.strip() date_time = ' '.join([date,time]) date_time_format = "%d %b %Y %H:%M" date_time = datetime.strptime(date_time,date_time_format) status = row_cells[2].string.strip() location = row_cells[3].string.strip() self.tracking_data.append({'status':status,'date':date_time,'location':location}) # Sort the checkpoints based on Date and Time --- this is important self.tracking_data = sorted(self.tracking_data, key=lambda k: k['date'])
[ "def", "Extract_Checkpoints", "(", "self", ")", ":", "# Make sure page is available", "if", "self", ".", "page", "is", "None", ":", "raise", "Exception", "(", "\"The HTML data was not fetched due to some reasons\"", ")", "soup", "=", "BeautifulSoup", "(", "self", ".",...
33.319149
25.148936
def makedirs(name): """helper function for python 2 and 3 to call os.makedirs() avoiding an error if the directory to be created already exists""" import os, errno try: os.makedirs(name) except OSError as ex: if ex.errno == errno.EEXIST and os.path.isdir(name): # ignore existing directory pass else: # a different error happened raise
[ "def", "makedirs", "(", "name", ")", ":", "import", "os", ",", "errno", "try", ":", "os", ".", "makedirs", "(", "name", ")", "except", "OSError", "as", "ex", ":", "if", "ex", ".", "errno", "==", "errno", ".", "EEXIST", "and", "os", ".", "path", "...
27.866667
18.533333
async def build_pool_config_request(submitter_did: str, writes: bool, force: bool) -> str: """ Builds a POOL_CONFIG request. Request to change Pool's configuration. :param submitter_did: DID of the submitter stored in secured Wallet. :param writes: Whether any write requests can be processed by the pool (if false, then pool goes to read-only state). True by default. :param force: Whether we should apply transaction (for example, move pool to read-only state) without waiting for consensus of this transaction :return: Request result as json. """ logger = logging.getLogger(__name__) logger.debug("build_pool_config_request: >>> submitter_did: %r, writes: %r, force: %r", submitter_did, writes, force) if not hasattr(build_pool_config_request, "cb"): logger.debug("build_pool_config_request: Creating callback") build_pool_config_request.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_char_p)) c_submitter_did = c_char_p(submitter_did.encode('utf-8')) c_writes = c_bool(writes) c_force = c_bool(force) request_json = await do_call('indy_build_pool_config_request', c_submitter_did, c_writes, c_force, build_pool_config_request.cb) res = request_json.decode() logger.debug("build_pool_config_request: <<< res: %r", res) return res
[ "async", "def", "build_pool_config_request", "(", "submitter_did", ":", "str", ",", "writes", ":", "bool", ",", "force", ":", "bool", ")", "->", "str", ":", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "logger", ".", "debug", "(", "\...
42.756757
23.189189
def delete_currency_by_id(cls, currency_id, **kwargs): """Delete Currency Delete an instance of Currency by its ID. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.delete_currency_by_id(currency_id, async=True) >>> result = thread.get() :param async bool :param str currency_id: ID of currency to delete. (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._delete_currency_by_id_with_http_info(currency_id, **kwargs) else: (data) = cls._delete_currency_by_id_with_http_info(currency_id, **kwargs) return data
[ "def", "delete_currency_by_id", "(", "cls", ",", "currency_id", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async'", ")", ":", "return", "cls", ".", "_delete_currency_by_i...
41.52381
20.142857
def on_status(self, status): """Print out some tweets""" self.out.write(json.dumps(status)) self.out.write(os.linesep) self.received += 1 return not self.terminate
[ "def", "on_status", "(", "self", ",", "status", ")", ":", "self", ".", "out", ".", "write", "(", "json", ".", "dumps", "(", "status", ")", ")", "self", ".", "out", ".", "write", "(", "os", ".", "linesep", ")", "self", ".", "received", "+=", "1", ...
28.285714
11.571429
def Cube(center=(0., 0., 0.), x_length=1.0, y_length=1.0, z_length=1.0, bounds=None): """Create a cube by either specifying the center and side lengths or just the bounds of the cube. If ``bounds`` are given, all other arguments are ignored. Parameters ---------- center : np.ndarray or list Center in [x, y, z]. x_length : float length of the cube in the x-direction. y_length : float length of the cube in the y-direction. z_length : float length of the cube in the z-direction. bounds : np.ndarray or list Specify the bounding box of the cube. If given, all other arguments are ignored. ``(xMin,xMax, yMin,yMax, zMin,zMax)`` """ src = vtk.vtkCubeSource() if bounds is not None: if np.array(bounds).size != 6: raise TypeError('Bounds must be given as length 6 tuple: (xMin,xMax, yMin,yMax, zMin,zMax)') src.SetBounds(bounds) else: src.SetCenter(center) src.SetXLength(x_length) src.SetYLength(y_length) src.SetZLength(z_length) src.Update() return vtki.wrap(src.GetOutput())
[ "def", "Cube", "(", "center", "=", "(", "0.", ",", "0.", ",", "0.", ")", ",", "x_length", "=", "1.0", ",", "y_length", "=", "1.0", ",", "z_length", "=", "1.0", ",", "bounds", "=", "None", ")", ":", "src", "=", "vtk", ".", "vtkCubeSource", "(", ...
31.971429
20.485714
def verify_roster_push(self, fix = False): """Check if `self` is valid roster push item. Valid item must have proper `subscription` value other and valid value for 'ask'. :Parameters: - `fix`: if `True` than replace invalid 'subscription' and 'ask' values with the defaults :Types: - `fix`: `bool` :Raise: `ValueError` if the item is invalid. """ self._verify((None, u"from", u"to", u"both", u"remove"), fix)
[ "def", "verify_roster_push", "(", "self", ",", "fix", "=", "False", ")", ":", "self", ".", "_verify", "(", "(", "None", ",", "u\"from\"", ",", "u\"to\"", ",", "u\"both\"", ",", "u\"remove\"", ")", ",", "fix", ")" ]
33.266667
21.266667
def get_apis(self): """Returns set of api names referenced in this Registry :return: set of api name strings """ out = set(x.api for x in self.types.values() if x.api) for ft in self.features.values(): out.update(ft.get_apis()) for ext in self.extensions.values(): out.update(ext.get_apis()) return out
[ "def", "get_apis", "(", "self", ")", ":", "out", "=", "set", "(", "x", ".", "api", "for", "x", "in", "self", ".", "types", ".", "values", "(", ")", "if", "x", ".", "api", ")", "for", "ft", "in", "self", ".", "features", ".", "values", "(", ")...
33.909091
10.454545
def zip_a_folder(src, dst): """Add a folder and everything inside to zip archive. Example:: |---paper |--- algorithm.pdf |--- images |--- 1.jpg zip_a_folder("paper", "paper.zip") paper.zip |---paper |--- algorithm.pdf |--- images |--- 1.jpg **中文文档** 将整个文件夹添加到压缩包, 包括根目录本身。 """ src, dst = os.path.abspath(src), os.path.abspath(dst) cwd = os.getcwd() todo = list() dirname, basename = os.path.split(src) os.chdir(dirname) for dirname, _, fnamelist in os.walk(basename): for fname in fnamelist: newname = os.path.join(dirname, fname) todo.append(newname) with ZipFile(dst, "w") as f: for newname in todo: f.write(newname) os.chdir(cwd)
[ "def", "zip_a_folder", "(", "src", ",", "dst", ")", ":", "src", ",", "dst", "=", "os", ".", "path", ".", "abspath", "(", "src", ")", ",", "os", ".", "path", ".", "abspath", "(", "dst", ")", "cwd", "=", "os", ".", "getcwd", "(", ")", "todo", "...
22
19.789474
def get_ontology(self, id=None, uri=None, match=None): """ get the saved-ontology with given ID or via other methods... """ if not id and not uri and not match: return None if type(id) == type("string"): uri = id id = None if not is_http(uri): match = uri uri = None if match: if type(match) != type("string"): return [] res = [] for x in self.all_ontologies: if match.lower() in x.uri.lower(): res += [x] return res else: for x in self.all_ontologies: if id and x.id == id: return x if uri and x.uri.lower() == uri.lower(): return x return None
[ "def", "get_ontology", "(", "self", ",", "id", "=", "None", ",", "uri", "=", "None", ",", "match", "=", "None", ")", ":", "if", "not", "id", "and", "not", "uri", "and", "not", "match", ":", "return", "None", "if", "type", "(", "id", ")", "==", ...
29.344828
14.103448
def unlike(self, photo_id): """ Remove a user’s like of a photo. Note: This action is idempotent; sending the DELETE request to a single photo multiple times has no additional effect. :param photo_id [string]: The photo’s ID. Required. :return: [Photo]: The Unsplash Photo. """ url = "/photos/%s/like" % photo_id result = self._delete(url) return PhotoModel.parse(result)
[ "def", "unlike", "(", "self", ",", "photo_id", ")", ":", "url", "=", "\"/photos/%s/like\"", "%", "photo_id", "result", "=", "self", ".", "_delete", "(", "url", ")", "return", "PhotoModel", ".", "parse", "(", "result", ")" ]
33.923077
13.769231
def access_token_handler(self, **args): """Get access token based on cookie sent with this request. This handler deals with two cases: 1) Non-browser client (indicated by no messageId set in request) where the response is a simple JSON response. 2) Browser client (indicate by messageId setin request) where the request must be made from a an iFrame and the response is sent as JSON wrapped in HTML containing a postMessage() script that conveys the access token to the viewer. """ message_id = request.args.get('messageId', default=None) origin = request.args.get('origin', default='unknown_origin') self.logger.info("access_token_handler: origin = " + origin) account = request.cookies.get(self.account_cookie_name, default='') token = self.access_token(account) # Build JSON response data_str = json.dumps(self.access_token_response(token, message_id)) ct = "application/json" # If message_id is set the wrap in HTML with postMessage JavaScript # for a browser client if (message_id is not None): data_str = """<html> <body style="margin: 0px;"> <div>postMessage ACCESS TOKEN %s</div> <script> window.parent.postMessage(%s, '%s'); </script> </body> </html> """ % (token, data_str, origin) ct = "text/html" # Send response along with cookie response = make_response(data_str, 200, {'Content-Type': ct}) if (token): self.logger.info( "access_token_handler: setting access token = " + token) # Set the cookie for the image content cookie = self.access_cookie(token) self.logger.info( "access_token_handler: setting access cookie = " + cookie) response.set_cookie(self.access_cookie_name, cookie) else: self.logger.info( "access_token_handler: auth failed, sending error") response.headers['Access-control-allow-origin'] = '*' return response
[ "def", "access_token_handler", "(", "self", ",", "*", "*", "args", ")", ":", "message_id", "=", "request", ".", "args", ".", "get", "(", "'messageId'", ",", "default", "=", "None", ")", "origin", "=", "request", ".", "args", ".", "get", "(", "'origin'"...
39.384615
20.25
def key_by(self, key_selector): """Applies a key_by operator to the stream. Attributes: key_attribute_index (int): The index of the key attributed (assuming tuple records). """ op = Operator( _generate_uuid(), OpType.KeyBy, "KeyBy", other=key_selector, num_instances=self.env.config.parallelism) return self.__register(op)
[ "def", "key_by", "(", "self", ",", "key_selector", ")", ":", "op", "=", "Operator", "(", "_generate_uuid", "(", ")", ",", "OpType", ".", "KeyBy", ",", "\"KeyBy\"", ",", "other", "=", "key_selector", ",", "num_instances", "=", "self", ".", "env", ".", "...
31.142857
14
def _get_agent_grounding(agent): """Convert an agent to the corresponding PyBEL DSL object (to be filled with variants later).""" def _get_id(_agent, key): _id = _agent.db_refs.get(key) if isinstance(_id, list): _id = _id[0] return _id hgnc_id = _get_id(agent, 'HGNC') if hgnc_id: hgnc_name = hgnc_client.get_hgnc_name(hgnc_id) if not hgnc_name: logger.warning('Agent %s with HGNC ID %s has no HGNC name.', agent, hgnc_id) return return protein('HGNC', hgnc_name) uniprot_id = _get_id(agent, 'UP') if uniprot_id: return protein('UP', uniprot_id) fplx_id = _get_id(agent, 'FPLX') if fplx_id: return protein('FPLX', fplx_id) pfam_id = _get_id(agent, 'PF') if pfam_id: return protein('PFAM', pfam_id) ip_id = _get_id(agent, 'IP') if ip_id: return protein('IP', ip_id) fa_id = _get_id(agent, 'FA') if fa_id: return protein('NXPFA', fa_id) chebi_id = _get_id(agent, 'CHEBI') if chebi_id: if chebi_id.startswith('CHEBI:'): chebi_id = chebi_id[len('CHEBI:'):] return abundance('CHEBI', chebi_id) pubchem_id = _get_id(agent, 'PUBCHEM') if pubchem_id: return abundance('PUBCHEM', pubchem_id) go_id = _get_id(agent, 'GO') if go_id: return bioprocess('GO', go_id) mesh_id = _get_id(agent, 'MESH') if mesh_id: return bioprocess('MESH', mesh_id) return
[ "def", "_get_agent_grounding", "(", "agent", ")", ":", "def", "_get_id", "(", "_agent", ",", "key", ")", ":", "_id", "=", "_agent", ".", "db_refs", ".", "get", "(", "key", ")", "if", "isinstance", "(", "_id", ",", "list", ")", ":", "_id", "=", "_id...
26.607143
17
def gen500(request, baseURI, project=None): """Return a 500 error""" return HttpResponseServerError( render_to_response('plugIt/500.html', { 'context': { 'ebuio_baseUrl': baseURI, 'ebuio_userMode': request.session.get('plugit-standalone-usermode', 'ano'), }, 'project': project }, context_instance=RequestContext(request)))
[ "def", "gen500", "(", "request", ",", "baseURI", ",", "project", "=", "None", ")", ":", "return", "HttpResponseServerError", "(", "render_to_response", "(", "'plugIt/500.html'", ",", "{", "'context'", ":", "{", "'ebuio_baseUrl'", ":", "baseURI", ",", "'ebuio_use...
40.6
13.2
def _create_row_labels(self): """ Take the original labels for rows. Rename if alternative labels are provided. Append label suffix if label_suffix is True. Returns ---------- labels : dictionary Dictionary, keys are original column name, values are final label. """ # start with the original column names labels = {} for c in self._columns: labels[c] = c # replace column names with alternative names if provided if self._alt_labels: for k in self._alt_labels.keys(): labels[k] = self._alt_labels[k] # append the label suffix if self._label_suffix: for k in labels.keys(): if k in self._nonnormal: labels[k] = "{}, {}".format(labels[k],"median [Q1,Q3]") elif k in self._categorical: labels[k] = "{}, {}".format(labels[k],"n (%)") else: labels[k] = "{}, {}".format(labels[k],"mean (SD)") return labels
[ "def", "_create_row_labels", "(", "self", ")", ":", "# start with the original column names", "labels", "=", "{", "}", "for", "c", "in", "self", ".", "_columns", ":", "labels", "[", "c", "]", "=", "c", "# replace column names with alternative names if provided", "if...
34.75
18.0625
def _build_command(self): """ Command to start the Dynamips hypervisor process. (to be passed to subprocess.Popen()) """ command = [self._path] command.extend(["-N1"]) # use instance IDs for filenames command.extend(["-l", "dynamips_i{}_log.txt".format(self._id)]) # log file # Dynamips cannot listen for hypervisor commands and for console connections on # 2 different IP addresses. # See https://github.com/GNS3/dynamips/issues/62 if self._console_host != "0.0.0.0" and self._console_host != "::": command.extend(["-H", "{}:{}".format(self._host, self._port)]) else: command.extend(["-H", str(self._port)]) return command
[ "def", "_build_command", "(", "self", ")", ":", "command", "=", "[", "self", ".", "_path", "]", "command", ".", "extend", "(", "[", "\"-N1\"", "]", ")", "# use instance IDs for filenames", "command", ".", "extend", "(", "[", "\"-l\"", ",", "\"dynamips_i{}_lo...
43.411765
20.352941
def get_query_param(self, key, default=None): """Get query parameter uniformly for GET and POST requests.""" value = self.request.query_params.get(key, None) if value is None: value = self.request.data.get(key, None) if value is None: value = default return value
[ "def", "get_query_param", "(", "self", ",", "key", ",", "default", "=", "None", ")", ":", "value", "=", "self", ".", "request", ".", "query_params", ".", "get", "(", "key", ",", "None", ")", "if", "value", "is", "None", ":", "value", "=", "self", "...
40
12
def _validate(value, optdict, name=""): """return a validated value for an option according to its type optional argument name is only used for error message formatting """ try: _type = optdict["type"] except KeyError: # FIXME return value return _call_validator(_type, optdict, name, value)
[ "def", "_validate", "(", "value", ",", "optdict", ",", "name", "=", "\"\"", ")", ":", "try", ":", "_type", "=", "optdict", "[", "\"type\"", "]", "except", "KeyError", ":", "# FIXME", "return", "value", "return", "_call_validator", "(", "_type", ",", "opt...
30
17.272727
def delete_all(self, filter=None, timeout=-1): """ Delete an SNMPv3 User based on User name specified in filter. The user will be deleted only if it has no associated destinations. Args: username: ID or URI of SNMPv3 user. filter: A general filter/query string to narrow the list of items returned. The default is no filter - all resources are returned. Returns: bool: Indicates if the resource was successfully deleted. """ return self._client.delete_all(filter=filter, timeout=timeout)
[ "def", "delete_all", "(", "self", ",", "filter", "=", "None", ",", "timeout", "=", "-", "1", ")", ":", "return", "self", ".", "_client", ".", "delete_all", "(", "filter", "=", "filter", ",", "timeout", "=", "timeout", ")" ]
44.692308
29.307692
def parse_args(args, kwargs): """Returns a kwargs dictionary by turning args into kwargs""" if 'style' in kwargs: args += (kwargs['style'],) del kwargs['style'] for arg in args: if not isinstance(arg, (bytes, unicode)): raise ValueError("args must be strings:" + repr(args)) if arg.lower() in FG_COLORS: if 'fg' in kwargs: raise ValueError("fg specified twice") kwargs['fg'] = FG_COLORS[arg] elif arg.lower().startswith('on_') and arg[3:].lower() in BG_COLORS: if 'bg' in kwargs: raise ValueError("fg specified twice") kwargs['bg'] = BG_COLORS[arg[3:]] elif arg.lower() in STYLES: kwargs[arg] = True else: raise ValueError("couldn't process arg: "+repr(arg)) for k in kwargs: if k not in ['fg', 'bg'] + list(STYLES.keys()): raise ValueError("Can't apply that transformation") if 'fg' in kwargs: if kwargs['fg'] in FG_COLORS: kwargs['fg'] = FG_COLORS[kwargs['fg']] if kwargs['fg'] not in list(FG_COLORS.values()): raise ValueError("Bad fg value: %r" % kwargs['fg']) if 'bg' in kwargs: if kwargs['bg'] in BG_COLORS: kwargs['bg'] = BG_COLORS[kwargs['bg']] if kwargs['bg'] not in list(BG_COLORS.values()): raise ValueError("Bad bg value: %r" % kwargs['bg']) return kwargs
[ "def", "parse_args", "(", "args", ",", "kwargs", ")", ":", "if", "'style'", "in", "kwargs", ":", "args", "+=", "(", "kwargs", "[", "'style'", "]", ",", ")", "del", "kwargs", "[", "'style'", "]", "for", "arg", "in", "args", ":", "if", "not", "isinst...
43.875
15.34375
def extract_bits(self, val): """Extras the 4 bits, XORS the message data, and does table lookups.""" # Step one, extract the Most significant 4 bits of the CRC register thisval = self.high >> 4 # XOR in the Message Data into the extracted bits thisval = thisval ^ val # Shift the CRC Register left 4 bits self.high = (self.high << 4) | (self.low >> 4) self.high = self.high & constants.BYTEMASK # force char self.low = self.low << 4 self.low = self.low & constants.BYTEMASK # force char # Do the table lookups and XOR the result into the CRC tables self.high = self.high ^ self.LookupHigh[thisval] self.high = self.high & constants.BYTEMASK # force char self.low = self.low ^ self.LookupLow[thisval] self.low = self.low & constants.BYTEMASK
[ "def", "extract_bits", "(", "self", ",", "val", ")", ":", "# Step one, extract the Most significant 4 bits of the CRC register\r", "thisval", "=", "self", ".", "high", ">>", "4", "# XOR in the Message Data into the extracted bits\r", "thisval", "=", "thisval", "^", "val", ...
54.4375
16.0625
def _initial_broks(self, broker_name): """Get initial_broks from the scheduler This is used by the brokers to prepare the initial status broks This do not send broks, it only makes scheduler internal processing. Then the broker must use the *_broks* API to get all the stuff :param broker_name: broker name, used to filter broks :type broker_name: str :return: None """ with self.app.conf_lock: logger.info("A new broker just connected : %s", broker_name) return self.app.sched.fill_initial_broks(broker_name)
[ "def", "_initial_broks", "(", "self", ",", "broker_name", ")", ":", "with", "self", ".", "app", ".", "conf_lock", ":", "logger", ".", "info", "(", "\"A new broker just connected : %s\"", ",", "broker_name", ")", "return", "self", ".", "app", ".", "sched", "....
39.6
22.266667
def calc_ethsw_port(self, port_num, port_def): """ Split and create the port entry for an Ethernet Switch :param port_num: port number :type port_num: str or int :param str port_def: port definition """ # Port String - access 1 SW2 1 # 0: type 1: vlan 2: destination device 3: destination port port_def = port_def.split(' ') if len(port_def) == 4: destination = {'device': port_def[2], 'port': port_def[3]} else: destination = {'device': 'NIO', 'port': port_def[2]} # port entry port = {'id': self.port_id, 'name': str(port_num), 'port_number': int(port_num), 'type': port_def[0], 'vlan': int(port_def[1])} self.node['ports'].append(port) self.calc_link(self.node['id'], self.port_id, port['name'], destination) self.port_id += 1
[ "def", "calc_ethsw_port", "(", "self", ",", "port_num", ",", "port_def", ")", ":", "# Port String - access 1 SW2 1", "# 0: type 1: vlan 2: destination device 3: destination port", "port_def", "=", "port_def", ".", "split", "(", "' '", ")", "if", "len", "(", "port_def", ...
36.925926
9.666667
def handle_api_exceptions(self, method, *url_parts, **kwargs): """Call REST API and handle exceptions Params: method: 'HEAD', 'GET', 'POST', 'PATCH' or 'DELETE' url_parts: like in rest_api_url() method api_ver: like in rest_api_url() method kwargs: other parameters passed to requests.request, but the only notable parameter is: (... json=data) that works like (... headers = {'Content-Type': 'application/json'}, data=json.dumps(data)) """ # The outer part - about error handler assert method in ('HEAD', 'GET', 'POST', 'PATCH', 'DELETE') cursor_context = kwargs.pop('cursor_context', None) errorhandler = cursor_context.errorhandler if cursor_context else self.errorhandler catched_exceptions = (SalesforceError, requests.exceptions.RequestException) if errorhandler else () try: return self.handle_api_exceptions_inter(method, *url_parts, **kwargs) except catched_exceptions: # nothing is catched usually and error handler not used exc_class, exc_value, _ = sys.exc_info() errorhandler(self, cursor_context, exc_class, exc_value) raise
[ "def", "handle_api_exceptions", "(", "self", ",", "method", ",", "*", "url_parts", ",", "*", "*", "kwargs", ")", ":", "# The outer part - about error handler", "assert", "method", "in", "(", "'HEAD'", ",", "'GET'", ",", "'POST'", ",", "'PATCH'", ",", "'DELETE'...
49.153846
22.807692
def find_all(self, cls): """Required functionality.""" final_results = [] table = self.get_class_table(cls) for db_result in table.scan(): obj = cls.from_data(db_result['value']) final_results.append(obj) return final_results
[ "def", "find_all", "(", "self", ",", "cls", ")", ":", "final_results", "=", "[", "]", "table", "=", "self", ".", "get_class_table", "(", "cls", ")", "for", "db_result", "in", "table", ".", "scan", "(", ")", ":", "obj", "=", "cls", ".", "from_data", ...
31.333333
11
def cluster_application_statistics(self, state_list=None, application_type_list=None): """ With the Application Statistics API, you can obtain a collection of triples, each of which contains the application type, the application state and the number of applications of this type and this state in ResourceManager context. This method work in Hadoop > 2.0.0 :param list state_list: states of the applications, specified as a comma-separated list. If states is not provided, the API will enumerate all application states and return the counts of them. :param list application_type_list: types of the applications, specified as a comma-separated list. If application_types is not provided, the API will count the applications of any application type. In this case, the response shows * to indicate any application type. Note that we only support at most one applicationType temporarily. Otherwise, users will expect an BadRequestException. :returns: API response object with JSON data :rtype: :py:class:`yarn_api_client.base.Response` """ path = '/ws/v1/cluster/appstatistics' # TODO: validate state argument states = ','.join(state_list) if state_list is not None else None if application_type_list is not None: application_types = ','.join(application_type_list) else: application_types = None loc_args = ( ('states', states), ('applicationTypes', application_types)) params = self.construct_parameters(loc_args) return self.request(path, **params)
[ "def", "cluster_application_statistics", "(", "self", ",", "state_list", "=", "None", ",", "application_type_list", "=", "None", ")", ":", "path", "=", "'/ws/v1/cluster/appstatistics'", "# TODO: validate state argument", "states", "=", "','", ".", "join", "(", "state_...
46.052632
22
def get(self, name, default="", parent_search=False, multikeys_search=False, __settings_temp=None, __rank_recursion=0): """ Récupération d'une configuration le paramètre ```name``` peut être soit un nom ou un chemin vers la valeur (séparateur /) ```parent_search``` est le boolean qui indique si on doit chercher la valeur dans la hiérarchie plus haute. Si la chaîne "/document/host/val" retourne None, on recherche dans "/document/val" puis dans "/val" ```multikeys_search``` indique si la recherche d'une clef non trouvabe se fait sur les parents en multi clef ie: /graphic/output/logo/enable va aussi chercher dans /graphic/logo/enable ```__settings_temp``` est le dictionnaire temporaire de transmission récursif (intégrant les sous configurations) ```__rank_recursion``` défini le rang de récusion pour chercher aussi depuis la racine du chemin en cas de récursion inverse exemple : valeur = self.settings("document/host/val", "mon_defaut") valeur = self.settings("/document/host/val", "mon_defaut") """ # configuration des settings temporaire pour traitement local if __settings_temp is None: __settings_temp = self.settings # check si le chemin commence par / auquel cas on le supprime if name.startswith("/"): name = name[1:] # check si le chemin termine par / auquel cas on le supprime if name.endswith("/"): name = name[:-1] # check s'il s'agit d'un chemin complet if "/" in name: # récupération du nom de la sous configuraiton name_master = name.split("/")[0] # récupération de l'indice si le nom obtenu contient [] indice_master = -1 indices_master = re.findall(r"\[\d+\]", name_master) if len(indices_master) > 0: try: indice_master = int(indices_master[0].replace("[", "").replace("]", "")) except: pass # suppression de l'indice dans le nom du chemin courant (ie: data[0] devient data) name_master = name_master.replace("[{}]".format(indice_master), "") # recherche si la clef est présente dans le chemin courant if name_master not in __settings_temp.keys(): return None # 
récupération de la sous configuration if indice_master < 0: # la sous configuration n'est pas une liste __settings_temp = __settings_temp[name_master] else: # la sous configuration est une liste (SI JSON !!) __settings_temp = __settings_temp[name_master][indice_master] if self.is_json else __settings_temp[name] # recursion sur le chemin en dessous name_split = name.split("/")[1:] search_path = "/".join(name_split) return_value = self.get( search_path, default, parent_search, multikeys_search, __settings_temp, __rank_recursion + 1) # pas de valeur trouvé, on cherche sur la récursion inverse if len(name_split) > 1 and return_value is None: i = len(name_split) while i >= 0: # on décrémente le curseur de recherche i -= 1 # établissement du nouveau chemin en supprimant le niveau supérieur new_search_path = "/".join(name_split[i-len(name_split):]) return_value = self.get( new_search_path, default, parent_search, multikeys_search, __settings_temp, __rank_recursion + 1) # pas de recherche multi clef if not multikeys_search: break # une valeur a été trouvée if not return_value is None: break # pas de valeur trouvé et on est à la racine du chemin if return_value is None and __rank_recursion == 0: # on change le nom du master et on cherche name = name_split[-1] return_value = self.get( name, default, parent_search, multikeys_search, self.settings, 0) # toujours pas de valeur, on garde le défaut if return_value is None: return_value = default # retour de la valeur récupérée return return_value # récupération de l'indice si le nom obtenu contient [] indice_master = -1 indices_master = re.findall(r"\[\d+\]", name) if len(indices_master) > 0: try: indice_master = int(indices_master[0].replace("[", "").replace("]", "")) except: pass # suppression de l'indice dans le nom du chemin courant (ie: data[0] devient data) name = name.replace("[{}]".format(indice_master), "") # check de la précense de la clef if type(__settings_temp) is str or name not in __settings_temp.keys(): # le hash 
n'est pas présent ! # si la recherche récursive inverse est activée et pas de valeur trouvée, # on recherche plus haut if parent_search: return None return default # récupération de la valeur if indice_master < 0: # la sous configuration n'est pas une liste value = __settings_temp[name] else: # la sous configuration est une liste (SI JSON !!) value = __settings_temp[name][indice_master] if self.is_json else __settings_temp[name] # interdiction de la valeur "None" if value is None: # si la recherche récursive inverse est activée et pas de valeur trouvée, # on recherche plus haut if parent_search: return None # valeur par défaut value = default # trim si value est un str if isinstance(value, str): value = value.strip() # retour de la valeur return value
[ "def", "get", "(", "self", ",", "name", ",", "default", "=", "\"\"", ",", "parent_search", "=", "False", ",", "multikeys_search", "=", "False", ",", "__settings_temp", "=", "None", ",", "__rank_recursion", "=", "0", ")", ":", "# configuration des settings temp...
32.62987
23.844156
def mappings_frequency(df, filepath=None): """ Plots the frequency of logical conjunction mappings Parameters ---------- df: `pandas.DataFrame`_ DataFrame with columns `frequency` and `mapping` filepath: str Absolute path to a folder where to write the plot Returns ------- plot Generated plot .. _pandas.DataFrame: http://pandas.pydata.org/pandas-docs/stable/dsintro.html#dataframe """ df = df.sort_values('frequency') df['conf'] = df.frequency.map(lambda f: 0 if f < 0.2 else 1 if f < 0.8 else 2) g = sns.factorplot(x="mapping", y="frequency", data=df, aspect=3, hue='conf', legend=False) for tick in g.ax.get_xticklabels(): tick.set_rotation(90) g.ax.set_ylim([-.05, 1.05]) g.ax.set_xlabel("Logical mapping") g.ax.set_ylabel("Frequency") if filepath: g.savefig(os.path.join(filepath, 'mappings-frequency.pdf')) return g
[ "def", "mappings_frequency", "(", "df", ",", "filepath", "=", "None", ")", ":", "df", "=", "df", ".", "sort_values", "(", "'frequency'", ")", "df", "[", "'conf'", "]", "=", "df", ".", "frequency", ".", "map", "(", "lambda", "f", ":", "0", "if", "f"...
24.131579
26.026316
def make_lando_router(config, obj, queue_name): """ Makes MessageRouter which can listen to queue_name sending messages to the VM version of lando. :param config: WorkerConfig/ServerConfig: settings for connecting to the queue :param obj: object: implements lando specific methods :param queue_name: str: name of the queue we will listen on. :return MessageRouter """ return MessageRouter(config, obj, queue_name, VM_LANDO_INCOMING_MESSAGES, processor_constructor=WorkQueueProcessor)
[ "def", "make_lando_router", "(", "config", ",", "obj", ",", "queue_name", ")", ":", "return", "MessageRouter", "(", "config", ",", "obj", ",", "queue_name", ",", "VM_LANDO_INCOMING_MESSAGES", ",", "processor_constructor", "=", "WorkQueueProcessor", ")" ]
56.7
24.7
def get_statepostal(self, obj): """State postal abbreviation if county or state else ``None``.""" if obj.division.level.name == DivisionLevel.STATE: return us.states.lookup(obj.division.code).abbr elif obj.division.level.name == DivisionLevel.COUNTY: return us.states.lookup(obj.division.parent.code).abbr return None
[ "def", "get_statepostal", "(", "self", ",", "obj", ")", ":", "if", "obj", ".", "division", ".", "level", ".", "name", "==", "DivisionLevel", ".", "STATE", ":", "return", "us", ".", "states", ".", "lookup", "(", "obj", ".", "division", ".", "code", ")...
52.428571
16.285714
def request(self, request, proxies, timeout, verify, **_): """Responsible for dispatching the request and returning the result. Network level exceptions should be raised and only ``requests.Response`` should be returned. :param request: A ``requests.PreparedRequest`` object containing all the data necessary to perform the request. :param proxies: A dictionary of proxy settings to be utilized for the request. :param timeout: Specifies the maximum time that the actual HTTP request can take. :param verify: Specifies if SSL certificates should be validated. ``**_`` should be added to the method call to ignore the extra arguments intended for the cache handler. """ settings = self.http.merge_environment_settings( request.url, proxies, False, verify, None ) return self.http.send(request, timeout=timeout, allow_redirects=False, **settings)
[ "def", "request", "(", "self", ",", "request", ",", "proxies", ",", "timeout", ",", "verify", ",", "*", "*", "_", ")", ":", "settings", "=", "self", ".", "http", ".", "merge_environment_settings", "(", "request", ".", "url", ",", "proxies", ",", "False...
43.826087
23.521739
def set_tenant(self, tenant, include_public=True): """ Main API method to current database schema, but it does not actually modify the db connection. """ self.set_schema(tenant.schema_name, include_public) self.tenant = tenant
[ "def", "set_tenant", "(", "self", ",", "tenant", ",", "include_public", "=", "True", ")", ":", "self", ".", "set_schema", "(", "tenant", ".", "schema_name", ",", "include_public", ")", "self", ".", "tenant", "=", "tenant" ]
38.285714
10
def project(self, points): """Project 3D points to image coordinates. This projects 3D points expressed in the camera coordinate system to image points. Parameters -------------------- points : (3, N) ndarray 3D points Returns -------------------- image_points : (2, N) ndarray The world points projected to the image plane """ rvec = tvec = np.zeros(3) image_points, jac = cv2.projectPoints(points.T.reshape(-1,1,3), rvec, tvec, self.camera_matrix, self.dist_coefs) return image_points.reshape(-1,2).T
[ "def", "project", "(", "self", ",", "points", ")", ":", "rvec", "=", "tvec", "=", "np", ".", "zeros", "(", "3", ")", "image_points", ",", "jac", "=", "cv2", ".", "projectPoints", "(", "points", ".", "T", ".", "reshape", "(", "-", "1", ",", "1", ...
33.777778
21.833333
def upgradeProcessor1to2(oldProcessor): """ Batch processors stopped polling at version 2, so they no longer needed the idleInterval attribute. They also gained a scheduled attribute which tracks their interaction with the scheduler. Since they stopped polling, we also set them up as a timed event here to make sure that they don't silently disappear, never to be seen again: running them with the scheduler gives them a chance to figure out what's up and set up whatever other state they need to continue to run. Since this introduces a new dependency of all batch processors on a powerup for the IScheduler, install a Scheduler or a SubScheduler if one is not already present. """ newProcessor = oldProcessor.upgradeVersion( oldProcessor.typeName, 1, 2, busyInterval=oldProcessor.busyInterval) newProcessor.scheduled = extime.Time() s = newProcessor.store sch = iaxiom.IScheduler(s, None) if sch is None: if s.parent is None: # Only site stores have no parents. sch = Scheduler(store=s) else: # Substores get subschedulers. sch = SubScheduler(store=s) installOn(sch, s) # And set it up to run. sch.schedule(newProcessor, newProcessor.scheduled) return newProcessor
[ "def", "upgradeProcessor1to2", "(", "oldProcessor", ")", ":", "newProcessor", "=", "oldProcessor", ".", "upgradeVersion", "(", "oldProcessor", ".", "typeName", ",", "1", ",", "2", ",", "busyInterval", "=", "oldProcessor", ".", "busyInterval", ")", "newProcessor", ...
39.636364
18.606061