code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def get_public_key(self): """ Parse the scriptSig and extract the public key. Raises ValueError if this is a multisig-controlled subdomain. """ res = self.get_public_key_info() if 'error' in res: raise ValueError(res['error']) if res['type'] != 'singlesig': raise ValueError(res['error']) return res['public_keys'][0]
def function[get_public_key, parameter[self]]: constant[ Parse the scriptSig and extract the public key. Raises ValueError if this is a multisig-controlled subdomain. ] variable[res] assign[=] call[name[self].get_public_key_info, parameter[]] if compare[constant[error] in name[res]] begin[:] <ast.Raise object at 0x7da18bccbaf0> if compare[call[name[res]][constant[type]] not_equal[!=] constant[singlesig]] begin[:] <ast.Raise object at 0x7da18bcca200> return[call[call[name[res]][constant[public_keys]]][constant[0]]]
keyword[def] identifier[get_public_key] ( identifier[self] ): literal[string] identifier[res] = identifier[self] . identifier[get_public_key_info] () keyword[if] literal[string] keyword[in] identifier[res] : keyword[raise] identifier[ValueError] ( identifier[res] [ literal[string] ]) keyword[if] identifier[res] [ literal[string] ]!= literal[string] : keyword[raise] identifier[ValueError] ( identifier[res] [ literal[string] ]) keyword[return] identifier[res] [ literal[string] ][ literal[int] ]
def get_public_key(self): """ Parse the scriptSig and extract the public key. Raises ValueError if this is a multisig-controlled subdomain. """ res = self.get_public_key_info() if 'error' in res: raise ValueError(res['error']) # depends on [control=['if'], data=['res']] if res['type'] != 'singlesig': raise ValueError(res['error']) # depends on [control=['if'], data=[]] return res['public_keys'][0]
def clean(self): """Clean queue items from a previous session. In case a previous session crashed and there are still some running entries in the queue ('running', 'stopping', 'killing'), we clean those and enqueue them again. """ for _, item in self.queue.items(): if item['status'] in ['paused', 'running', 'stopping', 'killing']: item['status'] = 'queued' item['start'] = '' item['end'] = ''
def function[clean, parameter[self]]: constant[Clean queue items from a previous session. In case a previous session crashed and there are still some running entries in the queue ('running', 'stopping', 'killing'), we clean those and enqueue them again. ] for taget[tuple[[<ast.Name object at 0x7da1b0e59ab0>, <ast.Name object at 0x7da1b0e5acb0>]]] in starred[call[name[self].queue.items, parameter[]]] begin[:] if compare[call[name[item]][constant[status]] in list[[<ast.Constant object at 0x7da1b0e58610>, <ast.Constant object at 0x7da1b0e59000>, <ast.Constant object at 0x7da1b0e5bd60>, <ast.Constant object at 0x7da1b0e59150>]]] begin[:] call[name[item]][constant[status]] assign[=] constant[queued] call[name[item]][constant[start]] assign[=] constant[] call[name[item]][constant[end]] assign[=] constant[]
keyword[def] identifier[clean] ( identifier[self] ): literal[string] keyword[for] identifier[_] , identifier[item] keyword[in] identifier[self] . identifier[queue] . identifier[items] (): keyword[if] identifier[item] [ literal[string] ] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[item] [ literal[string] ]= literal[string] identifier[item] [ literal[string] ]= literal[string] identifier[item] [ literal[string] ]= literal[string]
def clean(self): """Clean queue items from a previous session. In case a previous session crashed and there are still some running entries in the queue ('running', 'stopping', 'killing'), we clean those and enqueue them again. """ for (_, item) in self.queue.items(): if item['status'] in ['paused', 'running', 'stopping', 'killing']: item['status'] = 'queued' item['start'] = '' item['end'] = '' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def read(self, size): """Read raw bytes from the instrument. :param size: amount of bytes to be sent to the instrument :type size: integer :return: received bytes :return type: bytes """ raw_read = super(USBRawDevice, self).read received = bytearray() while not len(received) >= size: resp = raw_read(self.RECV_CHUNK) received.extend(resp) return bytes(received)
def function[read, parameter[self, size]]: constant[Read raw bytes from the instrument. :param size: amount of bytes to be sent to the instrument :type size: integer :return: received bytes :return type: bytes ] variable[raw_read] assign[=] call[name[super], parameter[name[USBRawDevice], name[self]]].read variable[received] assign[=] call[name[bytearray], parameter[]] while <ast.UnaryOp object at 0x7da20c6c6710> begin[:] variable[resp] assign[=] call[name[raw_read], parameter[name[self].RECV_CHUNK]] call[name[received].extend, parameter[name[resp]]] return[call[name[bytes], parameter[name[received]]]]
keyword[def] identifier[read] ( identifier[self] , identifier[size] ): literal[string] identifier[raw_read] = identifier[super] ( identifier[USBRawDevice] , identifier[self] ). identifier[read] identifier[received] = identifier[bytearray] () keyword[while] keyword[not] identifier[len] ( identifier[received] )>= identifier[size] : identifier[resp] = identifier[raw_read] ( identifier[self] . identifier[RECV_CHUNK] ) identifier[received] . identifier[extend] ( identifier[resp] ) keyword[return] identifier[bytes] ( identifier[received] )
def read(self, size): """Read raw bytes from the instrument. :param size: amount of bytes to be sent to the instrument :type size: integer :return: received bytes :return type: bytes """ raw_read = super(USBRawDevice, self).read received = bytearray() while not len(received) >= size: resp = raw_read(self.RECV_CHUNK) received.extend(resp) # depends on [control=['while'], data=[]] return bytes(received)
def sendall_stderr(self, s): """ Send data to the channel's "stderr" stream, without allowing partial results. Unlike L{send_stderr}, this method continues to send data from the given string until all data has been sent or an error occurs. Nothing is returned. @param s: data to send to the client as "stderr" output. @type s: str @raise socket.timeout: if sending stalled for longer than the timeout set by L{settimeout}. @raise socket.error: if an error occured before the entire string was sent. @since: 1.1 """ while s: if self.closed: raise socket.error('Socket is closed') sent = self.send_stderr(s) s = s[sent:] return None
def function[sendall_stderr, parameter[self, s]]: constant[ Send data to the channel's "stderr" stream, without allowing partial results. Unlike L{send_stderr}, this method continues to send data from the given string until all data has been sent or an error occurs. Nothing is returned. @param s: data to send to the client as "stderr" output. @type s: str @raise socket.timeout: if sending stalled for longer than the timeout set by L{settimeout}. @raise socket.error: if an error occured before the entire string was sent. @since: 1.1 ] while name[s] begin[:] if name[self].closed begin[:] <ast.Raise object at 0x7da1b1041840> variable[sent] assign[=] call[name[self].send_stderr, parameter[name[s]]] variable[s] assign[=] call[name[s]][<ast.Slice object at 0x7da1b0f50580>] return[constant[None]]
keyword[def] identifier[sendall_stderr] ( identifier[self] , identifier[s] ): literal[string] keyword[while] identifier[s] : keyword[if] identifier[self] . identifier[closed] : keyword[raise] identifier[socket] . identifier[error] ( literal[string] ) identifier[sent] = identifier[self] . identifier[send_stderr] ( identifier[s] ) identifier[s] = identifier[s] [ identifier[sent] :] keyword[return] keyword[None]
def sendall_stderr(self, s): """ Send data to the channel's "stderr" stream, without allowing partial results. Unlike L{send_stderr}, this method continues to send data from the given string until all data has been sent or an error occurs. Nothing is returned. @param s: data to send to the client as "stderr" output. @type s: str @raise socket.timeout: if sending stalled for longer than the timeout set by L{settimeout}. @raise socket.error: if an error occured before the entire string was sent. @since: 1.1 """ while s: if self.closed: raise socket.error('Socket is closed') # depends on [control=['if'], data=[]] sent = self.send_stderr(s) s = s[sent:] # depends on [control=['while'], data=[]] return None
def parse(json, query_path, expected_vars=NO_VARS): """ INTENDED TO TREAT JSON AS A STREAM; USING MINIMAL MEMORY WHILE IT ITERATES THROUGH THE STRUCTURE. ASSUMING THE JSON IS LARGE, AND HAS A HIGH LEVEL ARRAY STRUCTURE, IT WILL yield EACH OBJECT IN THAT ARRAY. NESTED ARRAYS ARE HANDLED BY REPEATING THE PARENT PROPERTIES FOR EACH MEMBER OF THE NESTED ARRAY. DEEPER NESTED PROPERTIES ARE TREATED AS PRIMITIVE VALUES; THE STANDARD JSON DECODER IS USED. LARGE MANY-PROPERTY OBJECTS CAN BE HANDLED BY `items()` :param json: SOME STRING-LIKE STRUCTURE THAT CAN ASSUME WE LOOK AT ONE CHARACTER AT A TIME, IN ORDER :param query_path: A DOT-SEPARATED STRING INDICATING THE PATH TO THE NESTED ARRAY OPTIONALLY, {"items":query_path} TO FURTHER ITERATE OVER PROPERTIES OF OBJECTS FOUND AT query_path :param expected_vars: REQUIRED PROPERTY NAMES, USED TO DETERMINE IF MORE-THAN-ONE PASS IS REQUIRED :return: RETURNS AN ITERATOR OVER ALL OBJECTS FROM ARRAY LOCATED AT query_path """ if hasattr(json, "read"): # ASSUME IT IS A STREAM temp = json def get_more(): return temp.read(MIN_READ_SIZE) json = List_usingStream(get_more) elif hasattr(json, "__call__"): json = List_usingStream(json) elif isinstance(json, GeneratorType): json = List_usingStream(json.next) else: Log.error("Expecting json to be a stream, or a function that will return more bytes") def _iterate_list(index, c, parent_path, path, expected_vars): c, index = skip_whitespace(index) if c == b']': yield index return while True: if not path: index = _assign_token(index, c, expected_vars) c, index = skip_whitespace(index) if c == b']': yield index _done(parent_path) return elif c == b',': yield index c, index = skip_whitespace(index) else: for index in _decode_token(index, c, parent_path, path, expected_vars): c, index = skip_whitespace(index) if c == b']': yield index _done(parent_path) return elif c == b',': yield index c, index = skip_whitespace(index) def _done(parent_path): if len(parent_path) < len(done[0]): done[0] = parent_path def _decode_object(index, c, parent_path, query_path, expected_vars): if "." in expected_vars: if len(done[0]) <= len(parent_path) and all(d == p for d, p in zip(done[0], parent_path)): Log.error("Can not pick up more variables, iterator is done") if query_path: Log.error("Can not extract objects that contain the iteration", var=join_field(query_path)) index = _assign_token(index, c, expected_vars) # c, index = skip_whitespace(index) yield index return did_yield = False while True: c, index = skip_whitespace(index) if c == b',': continue elif c == b'"': name, index = simple_token(index, c) c, index = skip_whitespace(index) if c != b':': Log.error("Expecting colon") c, index = skip_whitespace(index) child_expected = needed(name, expected_vars) child_path = parent_path + [name] if any(child_expected): if not query_path: index = _assign_token(index, c, child_expected) elif query_path[0] == name: for index in _decode_token(index, c, child_path, query_path[1:], child_expected): did_yield = True yield index else: if len(done[0]) <= len(child_path): Log.error("Can not pick up more variables, iterator over {{path}} is done", path=join_field(done[0])) index = _assign_token(index, c, child_expected) elif query_path and query_path[0] == name: for index in _decode_token(index, c, child_path, query_path[1:], child_expected): yield index else: index = jump_to_end(index, c) elif c == b"}": if not did_yield: yield index break def set_destination(expected_vars, value): for i, e in enumerate(expected_vars): if e is None: pass elif e == ".": destination[i] = value elif is_data(value): destination[i] = value[e] else: destination[i] = Null def _decode_object_items(index, c, parent_path, query_path, expected_vars): """ ITERATE THROUGH THE PROPERTIES OF AN OBJECT """ c, index = skip_whitespace(index) num_items = 0 while True: if c == b',': c, index = skip_whitespace(index) elif c == b'"': name, index = simple_token(index, c) if "name" in expected_vars: for i, e in enumerate(expected_vars): if e == "name": destination[i] = name c, index = skip_whitespace(index) if c != b':': Log.error("Expecting colon") c, index = skip_whitespace(index) child_expected = needed("value", expected_vars) index = _assign_token(index, c, child_expected) c, index = skip_whitespace(index) DEBUG and not num_items % 1000 and Log.note("{{num}} items iterated", num=num_items) yield index num_items += 1 elif c == b"}": break def _decode_token(index, c, parent_path, query_path, expected_vars): if c == b'{': if query_path and query_path[0] == "$items": if any(expected_vars): for index in _decode_object_items(index, c, parent_path, query_path[1:], expected_vars): yield index else: index = jump_to_end(index, c) yield index elif not any(expected_vars): index = jump_to_end(index, c) yield index else: for index in _decode_object(index, c, parent_path, query_path, expected_vars): yield index elif c == b'[': for index in _iterate_list(index, c, parent_path, query_path, expected_vars): yield index else: index = _assign_token(index, c, expected_vars) yield index def _assign_token(index, c, expected_vars): if not any(expected_vars): return jump_to_end(index, c) value, index = simple_token(index, c) set_destination(expected_vars, value) return index def jump_to_end(index, c): """ DO NOT PROCESS THIS JSON OBJECT, JUST RETURN WHERE IT ENDS """ if c == b'"': while True: c = json[index] index += 1 if c == b'\\': index += 1 elif c == b'"': break return index elif c not in b"[{": while True: c = json[index] index += 1 if c in b',]}': break return index - 1 # OBJECTS AND ARRAYS ARE MORE INVOLVED stack = [None] * 1024 stack[0] = CLOSE[c] i = 0 # FOR INDEXING THE STACK while True: c = json[index] index += 1 if c == b'"': while True: c = json[index] index += 1 if c == b'\\': index += 1 elif c == b'"': break elif c in b'[{': i += 1 stack[i] = CLOSE[c] elif c == stack[i]: i -= 1 if i == -1: return index # FOUND THE MATCH! RETURN elif c in b']}': Log.error("expecting {{symbol}}", symbol=stack[i]) def simple_token(index, c): if c == b'"': json.mark(index - 1) while True: c = json[index] index += 1 if c == b"\\": index += 1 elif c == b'"': break return json_decoder(json.release(index).decode("utf8")), index elif c in b"{[": json.mark(index-1) index = jump_to_end(index, c) value = wrap(json_decoder(json.release(index).decode("utf8"))) return value, index elif c == b"t" and json.slice(index, index + 3) == b"rue": return True, index + 3 elif c == b"n" and json.slice(index, index + 3) == b"ull": return None, index + 3 elif c == b"f" and json.slice(index, index + 4) == b"alse": return False, index + 4 else: json.mark(index-1) while True: c = json[index] if c in b',]}': break index += 1 text = json.release(index) try: return float(text), index except Exception: Log.error("Not a known JSON primitive: {{text|quote}}", text=text) def skip_whitespace(index): """ RETURN NEXT NON-WHITESPACE CHAR, AND ITS INDEX """ c = json[index] while c in WHITESPACE: index += 1 c = json[index] return c, index + 1 if is_data(query_path) and query_path.get("items"): path_list = split_field(query_path.get("items")) + ["$items"] # INSERT A MARKER SO THAT OBJECT IS STREAM DECODED else: path_list = split_field(query_path) destination = [None] * len(expected_vars) c, index = skip_whitespace(0) done = [path_list + [None]] for _ in _decode_token(index, c, [], path_list, expected_vars): output = Data() for i, e in enumerate(expected_vars): output[e] = destination[i] yield output
def function[parse, parameter[json, query_path, expected_vars]]: constant[ INTENDED TO TREAT JSON AS A STREAM; USING MINIMAL MEMORY WHILE IT ITERATES THROUGH THE STRUCTURE. ASSUMING THE JSON IS LARGE, AND HAS A HIGH LEVEL ARRAY STRUCTURE, IT WILL yield EACH OBJECT IN THAT ARRAY. NESTED ARRAYS ARE HANDLED BY REPEATING THE PARENT PROPERTIES FOR EACH MEMBER OF THE NESTED ARRAY. DEEPER NESTED PROPERTIES ARE TREATED AS PRIMITIVE VALUES; THE STANDARD JSON DECODER IS USED. LARGE MANY-PROPERTY OBJECTS CAN BE HANDLED BY `items()` :param json: SOME STRING-LIKE STRUCTURE THAT CAN ASSUME WE LOOK AT ONE CHARACTER AT A TIME, IN ORDER :param query_path: A DOT-SEPARATED STRING INDICATING THE PATH TO THE NESTED ARRAY OPTIONALLY, {"items":query_path} TO FURTHER ITERATE OVER PROPERTIES OF OBJECTS FOUND AT query_path :param expected_vars: REQUIRED PROPERTY NAMES, USED TO DETERMINE IF MORE-THAN-ONE PASS IS REQUIRED :return: RETURNS AN ITERATOR OVER ALL OBJECTS FROM ARRAY LOCATED AT query_path ] if call[name[hasattr], parameter[name[json], constant[read]]] begin[:] variable[temp] assign[=] name[json] def function[get_more, parameter[]]: return[call[name[temp].read, parameter[name[MIN_READ_SIZE]]]] variable[json] assign[=] call[name[List_usingStream], parameter[name[get_more]]] def function[_iterate_list, parameter[index, c, parent_path, path, expected_vars]]: <ast.Tuple object at 0x7da1b1f34a90> assign[=] call[name[skip_whitespace], parameter[name[index]]] if compare[name[c] equal[==] constant[b']']] begin[:] <ast.Yield object at 0x7da1b1f34880> return[None] while constant[True] begin[:] if <ast.UnaryOp object at 0x7da1b1f346d0> begin[:] variable[index] assign[=] call[name[_assign_token], parameter[name[index], name[c], name[expected_vars]]] <ast.Tuple object at 0x7da1b1f344f0> assign[=] call[name[skip_whitespace], parameter[name[index]]] if compare[name[c] equal[==] constant[b']']] begin[:] <ast.Yield object at 0x7da1b1f342e0> call[name[_done], parameter[name[parent_path]]] return[None] def function[_done, parameter[parent_path]]: if compare[call[name[len], parameter[name[parent_path]]] less[<] call[name[len], parameter[call[name[done]][constant[0]]]]] begin[:] call[name[done]][constant[0]] assign[=] name[parent_path] def function[_decode_object, parameter[index, c, parent_path, query_path, expected_vars]]: if compare[constant[.] in name[expected_vars]] begin[:] if <ast.BoolOp object at 0x7da1b1f0e0b0> begin[:] call[name[Log].error, parameter[constant[Can not pick up more variables, iterator is done]]] if name[query_path] begin[:] call[name[Log].error, parameter[constant[Can not extract objects that contain the iteration]]] variable[index] assign[=] call[name[_assign_token], parameter[name[index], name[c], name[expected_vars]]] <ast.Yield object at 0x7da1b1f2aa10> return[None] variable[did_yield] assign[=] constant[False] while constant[True] begin[:] <ast.Tuple object at 0x7da1b1f2a7d0> assign[=] call[name[skip_whitespace], parameter[name[index]]] if compare[name[c] equal[==] constant[b',']] begin[:] continue def function[set_destination, parameter[expected_vars, value]]: for taget[tuple[[<ast.Name object at 0x7da1b1f262c0>, <ast.Name object at 0x7da1b1f26290>]]] in starred[call[name[enumerate], parameter[name[expected_vars]]]] begin[:] if compare[name[e] is constant[None]] begin[:] pass def function[_decode_object_items, parameter[index, c, parent_path, query_path, expected_vars]]: constant[ ITERATE THROUGH THE PROPERTIES OF AN OBJECT ] <ast.Tuple object at 0x7da1b1f259c0> assign[=] call[name[skip_whitespace], parameter[name[index]]] variable[num_items] assign[=] constant[0] while constant[True] begin[:] if compare[name[c] equal[==] constant[b',']] begin[:] <ast.Tuple object at 0x7da1b1f256c0> assign[=] call[name[skip_whitespace], parameter[name[index]]] def function[_decode_token, parameter[index, c, parent_path, query_path, expected_vars]]: if compare[name[c] equal[==] constant[b'{']] begin[:] if <ast.BoolOp object at 0x7da1b1f23f70> begin[:] if call[name[any], parameter[name[expected_vars]]] begin[:] for taget[name[index]] in starred[call[name[_decode_object_items], parameter[name[index], name[c], name[parent_path], call[name[query_path]][<ast.Slice object at 0x7da1b1f23b80>], name[expected_vars]]]] begin[:] <ast.Yield object at 0x7da1b1f23ac0> def function[_assign_token, parameter[index, c, expected_vars]]: if <ast.UnaryOp object at 0x7da1b1f22b30> begin[:] return[call[name[jump_to_end], parameter[name[index], name[c]]]] <ast.Tuple object at 0x7da1b1f22920> assign[=] call[name[simple_token], parameter[name[index], name[c]]] call[name[set_destination], parameter[name[expected_vars], name[value]]] return[name[index]] def function[jump_to_end, parameter[index, c]]: constant[ DO NOT PROCESS THIS JSON OBJECT, JUST RETURN WHERE IT ENDS ] if compare[name[c] equal[==] constant[b'"']] begin[:] while constant[True] begin[:] variable[c] assign[=] call[name[json]][name[index]] <ast.AugAssign object at 0x7da1b1f22290> if compare[name[c] equal[==] constant[b'\\']] begin[:] <ast.AugAssign object at 0x7da1b1f22140> return[name[index]] variable[stack] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b1f21990>]] * constant[1024]] call[name[stack]][constant[0]] assign[=] call[name[CLOSE]][name[c]] variable[i] assign[=] constant[0] while constant[True] begin[:] variable[c] assign[=] call[name[json]][name[index]] <ast.AugAssign object at 0x7da1b20fc310> if compare[name[c] equal[==] constant[b'"']] begin[:] while constant[True] begin[:] variable[c] assign[=] call[name[json]][name[index]] <ast.AugAssign object at 0x7da1b20fcfa0> if compare[name[c] equal[==] constant[b'\\']] begin[:] <ast.AugAssign object at 0x7da1b20fcaf0> def function[simple_token, parameter[index, c]]: if compare[name[c] equal[==] constant[b'"']] begin[:] call[name[json].mark, parameter[binary_operation[name[index] - constant[1]]]] while constant[True] begin[:] variable[c] assign[=] call[name[json]][name[index]] <ast.AugAssign object at 0x7da1b20576a0> if compare[name[c] equal[==] constant[b'\\']] begin[:] <ast.AugAssign object at 0x7da1b2057f40> return[tuple[[<ast.Call object at 0x7da1b2057190>, <ast.Name object at 0x7da1b20552d0>]]] def function[skip_whitespace, parameter[index]]: constant[ RETURN NEXT NON-WHITESPACE CHAR, AND ITS INDEX ] variable[c] assign[=] call[name[json]][name[index]] while compare[name[c] in name[WHITESPACE]] begin[:] <ast.AugAssign object at 0x7da2054a49a0> variable[c] assign[=] call[name[json]][name[index]] return[tuple[[<ast.Name object at 0x7da2054a6410>, <ast.BinOp object at 0x7da2054a7100>]]] if <ast.BoolOp object at 0x7da1b1f0c3a0> begin[:] variable[path_list] assign[=] binary_operation[call[name[split_field], parameter[call[name[query_path].get, parameter[constant[items]]]]] + list[[<ast.Constant object at 0x7da1b1f0c490>]]] variable[destination] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b1f0dbd0>]] * call[name[len], parameter[name[expected_vars]]]] <ast.Tuple object at 0x7da1b1f0dba0> assign[=] call[name[skip_whitespace], parameter[constant[0]]] variable[done] assign[=] list[[<ast.BinOp object at 0x7da1b1f0c730>]] for taget[name[_]] in starred[call[name[_decode_token], parameter[name[index], name[c], list[[]], name[path_list], name[expected_vars]]]] begin[:] variable[output] assign[=] call[name[Data], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b1f0c640>, <ast.Name object at 0x7da1b1f0dff0>]]] in starred[call[name[enumerate], parameter[name[expected_vars]]]] begin[:] call[name[output]][name[e]] assign[=] call[name[destination]][name[i]] <ast.Yield object at 0x7da1b1f0c130>
keyword[def] identifier[parse] ( identifier[json] , identifier[query_path] , identifier[expected_vars] = identifier[NO_VARS] ): literal[string] keyword[if] identifier[hasattr] ( identifier[json] , literal[string] ): identifier[temp] = identifier[json] keyword[def] identifier[get_more] (): keyword[return] identifier[temp] . identifier[read] ( identifier[MIN_READ_SIZE] ) identifier[json] = identifier[List_usingStream] ( identifier[get_more] ) keyword[elif] identifier[hasattr] ( identifier[json] , literal[string] ): identifier[json] = identifier[List_usingStream] ( identifier[json] ) keyword[elif] identifier[isinstance] ( identifier[json] , identifier[GeneratorType] ): identifier[json] = identifier[List_usingStream] ( identifier[json] . identifier[next] ) keyword[else] : identifier[Log] . identifier[error] ( literal[string] ) keyword[def] identifier[_iterate_list] ( identifier[index] , identifier[c] , identifier[parent_path] , identifier[path] , identifier[expected_vars] ): identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) keyword[if] identifier[c] == literal[string] : keyword[yield] identifier[index] keyword[return] keyword[while] keyword[True] : keyword[if] keyword[not] identifier[path] : identifier[index] = identifier[_assign_token] ( identifier[index] , identifier[c] , identifier[expected_vars] ) identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) keyword[if] identifier[c] == literal[string] : keyword[yield] identifier[index] identifier[_done] ( identifier[parent_path] ) keyword[return] keyword[elif] identifier[c] == literal[string] : keyword[yield] identifier[index] identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) keyword[else] : keyword[for] identifier[index] keyword[in] identifier[_decode_token] ( identifier[index] , identifier[c] , identifier[parent_path] , identifier[path] , identifier[expected_vars] ): identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) keyword[if] identifier[c] == literal[string] : keyword[yield] identifier[index] identifier[_done] ( identifier[parent_path] ) keyword[return] keyword[elif] identifier[c] == literal[string] : keyword[yield] identifier[index] identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) keyword[def] identifier[_done] ( identifier[parent_path] ): keyword[if] identifier[len] ( identifier[parent_path] )< identifier[len] ( identifier[done] [ literal[int] ]): identifier[done] [ literal[int] ]= identifier[parent_path] keyword[def] identifier[_decode_object] ( identifier[index] , identifier[c] , identifier[parent_path] , identifier[query_path] , identifier[expected_vars] ): keyword[if] literal[string] keyword[in] identifier[expected_vars] : keyword[if] identifier[len] ( identifier[done] [ literal[int] ])<= identifier[len] ( identifier[parent_path] ) keyword[and] identifier[all] ( identifier[d] == identifier[p] keyword[for] identifier[d] , identifier[p] keyword[in] identifier[zip] ( identifier[done] [ literal[int] ], identifier[parent_path] )): identifier[Log] . identifier[error] ( literal[string] ) keyword[if] identifier[query_path] : identifier[Log] . identifier[error] ( literal[string] , identifier[var] = identifier[join_field] ( identifier[query_path] )) identifier[index] = identifier[_assign_token] ( identifier[index] , identifier[c] , identifier[expected_vars] ) keyword[yield] identifier[index] keyword[return] identifier[did_yield] = keyword[False] keyword[while] keyword[True] : identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) keyword[if] identifier[c] == literal[string] : keyword[continue] keyword[elif] identifier[c] == literal[string] : identifier[name] , identifier[index] = identifier[simple_token] ( identifier[index] , identifier[c] ) identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) keyword[if] identifier[c] != literal[string] : identifier[Log] . identifier[error] ( literal[string] ) identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) identifier[child_expected] = identifier[needed] ( identifier[name] , identifier[expected_vars] ) identifier[child_path] = identifier[parent_path] +[ identifier[name] ] keyword[if] identifier[any] ( identifier[child_expected] ): keyword[if] keyword[not] identifier[query_path] : identifier[index] = identifier[_assign_token] ( identifier[index] , identifier[c] , identifier[child_expected] ) keyword[elif] identifier[query_path] [ literal[int] ]== identifier[name] : keyword[for] identifier[index] keyword[in] identifier[_decode_token] ( identifier[index] , identifier[c] , identifier[child_path] , identifier[query_path] [ literal[int] :], identifier[child_expected] ): identifier[did_yield] = keyword[True] keyword[yield] identifier[index] keyword[else] : keyword[if] identifier[len] ( identifier[done] [ literal[int] ])<= identifier[len] ( identifier[child_path] ): identifier[Log] . identifier[error] ( literal[string] , identifier[path] = identifier[join_field] ( identifier[done] [ literal[int] ])) identifier[index] = identifier[_assign_token] ( identifier[index] , identifier[c] , identifier[child_expected] ) keyword[elif] identifier[query_path] keyword[and] identifier[query_path] [ literal[int] ]== identifier[name] : keyword[for] identifier[index] keyword[in] identifier[_decode_token] ( identifier[index] , identifier[c] , identifier[child_path] , identifier[query_path] [ literal[int] :], identifier[child_expected] ): keyword[yield] identifier[index] keyword[else] : identifier[index] = identifier[jump_to_end] ( identifier[index] , identifier[c] ) keyword[elif] identifier[c] == literal[string] : keyword[if] keyword[not] identifier[did_yield] : keyword[yield] identifier[index] keyword[break] keyword[def] identifier[set_destination] ( identifier[expected_vars] , identifier[value] ): keyword[for] identifier[i] , identifier[e] keyword[in] identifier[enumerate] ( identifier[expected_vars] ): keyword[if] identifier[e] keyword[is] keyword[None] : keyword[pass] keyword[elif] identifier[e] == literal[string] : identifier[destination] [ identifier[i] ]= identifier[value] keyword[elif] identifier[is_data] ( identifier[value] ): identifier[destination] [ identifier[i] ]= identifier[value] [ identifier[e] ] keyword[else] : identifier[destination] [ identifier[i] ]= identifier[Null] keyword[def] identifier[_decode_object_items] ( identifier[index] , identifier[c] , identifier[parent_path] , identifier[query_path] , identifier[expected_vars] ): literal[string] identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) identifier[num_items] = literal[int] keyword[while] keyword[True] : keyword[if] identifier[c] == literal[string] : identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) keyword[elif] identifier[c] == literal[string] : identifier[name] , identifier[index] = identifier[simple_token] ( identifier[index] , identifier[c] ) keyword[if] literal[string] keyword[in] identifier[expected_vars] : keyword[for] identifier[i] , identifier[e] keyword[in] identifier[enumerate] ( identifier[expected_vars] ): keyword[if] identifier[e] == literal[string] : identifier[destination] [ identifier[i] ]= identifier[name] identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) keyword[if] identifier[c] != literal[string] : identifier[Log] . identifier[error] ( literal[string] ) identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) identifier[child_expected] = identifier[needed] ( literal[string] , identifier[expected_vars] ) identifier[index] = identifier[_assign_token] ( identifier[index] , identifier[c] , identifier[child_expected] ) identifier[c] , identifier[index] = identifier[skip_whitespace] ( identifier[index] ) identifier[DEBUG] keyword[and] keyword[not] identifier[num_items] % literal[int] keyword[and] identifier[Log] . identifier[note] ( literal[string] , identifier[num] = identifier[num_items] ) keyword[yield] identifier[index] identifier[num_items] += literal[int] keyword[elif] identifier[c] == literal[string] : keyword[break] keyword[def] identifier[_decode_token] ( identifier[index] , identifier[c] , identifier[parent_path] , identifier[query_path] , identifier[expected_vars] ): keyword[if] identifier[c] == literal[string] : keyword[if] identifier[query_path] keyword[and] identifier[query_path] [ literal[int] ]== literal[string] : keyword[if] identifier[any] ( identifier[expected_vars] ): keyword[for] identifier[index] keyword[in] identifier[_decode_object_items] ( identifier[index] , identifier[c] , identifier[parent_path] , identifier[query_path] [ literal[int] :], identifier[expected_vars] ): keyword[yield] identifier[index] keyword[else] : identifier[index] = identifier[jump_to_end] ( identifier[index] , identifier[c] ) keyword[yield] identifier[index] keyword[elif] keyword[not] identifier[any] ( identifier[expected_vars] ): identifier[index] = identifier[jump_to_end] ( identifier[index] , identifier[c] ) keyword[yield] identifier[index] keyword[else] : keyword[for] identifier[index] keyword[in] identifier[_decode_object] ( identifier[index] , identifier[c] , identifier[parent_path] , identifier[query_path] , identifier[expected_vars] ): keyword[yield] identifier[index] keyword[elif] identifier[c] == literal[string] : keyword[for] identifier[index] keyword[in] identifier[_iterate_list] ( identifier[index] , identifier[c] , identifier[parent_path] , identifier[query_path] , identifier[expected_vars] ): keyword[yield] identifier[index] keyword[else] : identifier[index] = identifier[_assign_token] ( identifier[index] , identifier[c] , identifier[expected_vars] ) keyword[yield] identifier[index] keyword[def] identifier[_assign_token] ( identifier[index] , identifier[c] , identifier[expected_vars] ): keyword[if] keyword[not] identifier[any] ( identifier[expected_vars] ): keyword[return] identifier[jump_to_end] ( identifier[index] , identifier[c] ) identifier[value] , identifier[index] = identifier[simple_token] ( identifier[index] , identifier[c] ) identifier[set_destination] ( identifier[expected_vars] , identifier[value] ) keyword[return] identifier[index] keyword[def] identifier[jump_to_end] ( identifier[index] , identifier[c] ): literal[string] keyword[if] identifier[c] == literal[string] : keyword[while] keyword[True] : identifier[c] = identifier[json] [ identifier[index] ] identifier[index] += literal[int] keyword[if] identifier[c] == literal[string] : identifier[index] += literal[int] keyword[elif] identifier[c] == literal[string] : keyword[break] keyword[return] identifier[index] keyword[elif] identifier[c] keyword[not] keyword[in] literal[string] : keyword[while] keyword[True] : identifier[c] = identifier[json] [ identifier[index] ] identifier[index] += literal[int] keyword[if] identifier[c] keyword[in] literal[string] : keyword[break] keyword[return] identifier[index] - literal[int] identifier[stack] =[ keyword[None] ]* literal[int] identifier[stack] [ literal[int] ]= identifier[CLOSE] [ identifier[c] ] identifier[i] = literal[int] keyword[while] keyword[True] : identifier[c] = identifier[json] [ identifier[index] ] identifier[index] += literal[int] keyword[if] identifier[c] == literal[string] : keyword[while] keyword[True] : identifier[c] = identifier[json] [ identifier[index] ] identifier[index] += literal[int] keyword[if] identifier[c] == literal[string] : identifier[index] += literal[int] keyword[elif] identifier[c] == literal[string] : keyword[break] keyword[elif] identifier[c] keyword[in] literal[string] : identifier[i] += literal[int] identifier[stack] [ identifier[i] ]= identifier[CLOSE] [ identifier[c] ] keyword[elif] identifier[c] == identifier[stack] [ identifier[i] ]: identifier[i] -= literal[int] keyword[if] identifier[i] ==- literal[int] : keyword[return] identifier[index] keyword[elif] identifier[c] keyword[in] literal[string] : identifier[Log] . identifier[error] ( literal[string] , identifier[symbol] = identifier[stack] [ identifier[i] ]) keyword[def] identifier[simple_token] ( identifier[index] , identifier[c] ): keyword[if] identifier[c] == literal[string] : identifier[json] . identifier[mark] ( identifier[index] - literal[int] ) keyword[while] keyword[True] : identifier[c] = identifier[json] [ identifier[index] ] identifier[index] += literal[int] keyword[if] identifier[c] == literal[string] : identifier[index] += literal[int] keyword[elif] identifier[c] == literal[string] : keyword[break] keyword[return] identifier[json_decoder] ( identifier[json] . identifier[release] ( identifier[index] ). identifier[decode] ( literal[string] )), identifier[index] keyword[elif] identifier[c] keyword[in] literal[string] : identifier[json] . identifier[mark] ( identifier[index] - literal[int] ) identifier[index] = identifier[jump_to_end] ( identifier[index] , identifier[c] ) identifier[value] = identifier[wrap] ( identifier[json_decoder] ( identifier[json] . identifier[release] ( identifier[index] ). identifier[decode] ( literal[string] ))) keyword[return] identifier[value] , identifier[index] keyword[elif] identifier[c] == literal[string] keyword[and] identifier[json] . identifier[slice] ( identifier[index] , identifier[index] + literal[int] )== literal[string] : keyword[return] keyword[True] , identifier[index] + literal[int] keyword[elif] identifier[c] == literal[string] keyword[and] identifier[json] . identifier[slice] ( identifier[index] , identifier[index] + literal[int] )== literal[string] : keyword[return] keyword[None] , identifier[index] + literal[int] keyword[elif] identifier[c] == literal[string] keyword[and] identifier[json] . identifier[slice] ( identifier[index] , identifier[index] + literal[int] )== literal[string] : keyword[return] keyword[False] , identifier[index] + literal[int] keyword[else] : identifier[json] . identifier[mark] ( identifier[index] - literal[int] ) keyword[while] keyword[True] : identifier[c] = identifier[json] [ identifier[index] ] keyword[if] identifier[c] keyword[in] literal[string] : keyword[break] identifier[index] += literal[int] identifier[text] = identifier[json] . identifier[release] ( identifier[index] ) keyword[try] : keyword[return] identifier[float] ( identifier[text] ), identifier[index] keyword[except] identifier[Exception] : identifier[Log] . identifier[error] ( literal[string] , identifier[text] = identifier[text] ) keyword[def] identifier[skip_whitespace] ( identifier[index] ): literal[string] identifier[c] = identifier[json] [ identifier[index] ] keyword[while] identifier[c] keyword[in] identifier[WHITESPACE] : identifier[index] += literal[int] identifier[c] = identifier[json] [ identifier[index] ] keyword[return] identifier[c] , identifier[index] + literal[int] keyword[if] identifier[is_data] ( identifier[query_path] ) keyword[and] identifier[query_path] . identifier[get] ( literal[string] ): identifier[path_list] = identifier[split_field] ( identifier[query_path] . identifier[get] ( literal[string] ))+[ literal[string] ] keyword[else] : identifier[path_list] = identifier[split_field] ( identifier[query_path] ) identifier[destination] =[ keyword[None] ]* identifier[len] ( identifier[expected_vars] ) identifier[c] , identifier[index] = identifier[skip_whitespace] ( literal[int] ) identifier[done] =[ identifier[path_list] +[ keyword[None] ]] keyword[for] identifier[_] keyword[in] identifier[_decode_token] ( identifier[index] , identifier[c] ,[], identifier[path_list] , identifier[expected_vars] ): identifier[output] = identifier[Data] () keyword[for] identifier[i] , identifier[e] keyword[in] identifier[enumerate] ( identifier[expected_vars] ): identifier[output] [ identifier[e] ]= identifier[destination] [ identifier[i] ] keyword[yield] identifier[output]
def parse(json, query_path, expected_vars=NO_VARS): """ INTENDED TO TREAT JSON AS A STREAM; USING MINIMAL MEMORY WHILE IT ITERATES THROUGH THE STRUCTURE. ASSUMING THE JSON IS LARGE, AND HAS A HIGH LEVEL ARRAY STRUCTURE, IT WILL yield EACH OBJECT IN THAT ARRAY. NESTED ARRAYS ARE HANDLED BY REPEATING THE PARENT PROPERTIES FOR EACH MEMBER OF THE NESTED ARRAY. DEEPER NESTED PROPERTIES ARE TREATED AS PRIMITIVE VALUES; THE STANDARD JSON DECODER IS USED. LARGE MANY-PROPERTY OBJECTS CAN BE HANDLED BY `items()` :param json: SOME STRING-LIKE STRUCTURE THAT CAN ASSUME WE LOOK AT ONE CHARACTER AT A TIME, IN ORDER :param query_path: A DOT-SEPARATED STRING INDICATING THE PATH TO THE NESTED ARRAY OPTIONALLY, {"items":query_path} TO FURTHER ITERATE OVER PROPERTIES OF OBJECTS FOUND AT query_path :param expected_vars: REQUIRED PROPERTY NAMES, USED TO DETERMINE IF MORE-THAN-ONE PASS IS REQUIRED :return: RETURNS AN ITERATOR OVER ALL OBJECTS FROM ARRAY LOCATED AT query_path """ if hasattr(json, 'read'): # ASSUME IT IS A STREAM temp = json def get_more(): return temp.read(MIN_READ_SIZE) json = List_usingStream(get_more) # depends on [control=['if'], data=[]] elif hasattr(json, '__call__'): json = List_usingStream(json) # depends on [control=['if'], data=[]] elif isinstance(json, GeneratorType): json = List_usingStream(json.next) # depends on [control=['if'], data=[]] else: Log.error('Expecting json to be a stream, or a function that will return more bytes') def _iterate_list(index, c, parent_path, path, expected_vars): (c, index) = skip_whitespace(index) if c == b']': yield index return # depends on [control=['if'], data=[]] while True: if not path: index = _assign_token(index, c, expected_vars) (c, index) = skip_whitespace(index) if c == b']': yield index _done(parent_path) return # depends on [control=['if'], data=[]] elif c == b',': yield index (c, index) = skip_whitespace(index) # depends on [control=['if'], data=['c']] # depends on [control=['if'], data=[]] else: for index in _decode_token(index, c, parent_path, path, expected_vars): (c, index) = skip_whitespace(index) if c == b']': yield index _done(parent_path) return # depends on [control=['if'], data=[]] elif c == b',': yield index (c, index) = skip_whitespace(index) # depends on [control=['if'], data=['c']] # depends on [control=['for'], data=['index']] # depends on [control=['while'], data=[]] def _done(parent_path): if len(parent_path) < len(done[0]): done[0] = parent_path # depends on [control=['if'], data=[]] def _decode_object(index, c, parent_path, query_path, expected_vars): if '.' in expected_vars: if len(done[0]) <= len(parent_path) and all((d == p for (d, p) in zip(done[0], parent_path))): Log.error('Can not pick up more variables, iterator is done') # depends on [control=['if'], data=[]] if query_path: Log.error('Can not extract objects that contain the iteration', var=join_field(query_path)) # depends on [control=['if'], data=[]] index = _assign_token(index, c, expected_vars) # c, index = skip_whitespace(index) yield index return # depends on [control=['if'], data=['expected_vars']] did_yield = False while True: (c, index) = skip_whitespace(index) if c == b',': continue # depends on [control=['if'], data=[]] elif c == b'"': (name, index) = simple_token(index, c) (c, index) = skip_whitespace(index) if c != b':': Log.error('Expecting colon') # depends on [control=['if'], data=[]] (c, index) = skip_whitespace(index) child_expected = needed(name, expected_vars) child_path = parent_path + [name] if any(child_expected): if not query_path: index = _assign_token(index, c, child_expected) # depends on [control=['if'], data=[]] elif query_path[0] == name: for index in _decode_token(index, c, child_path, query_path[1:], child_expected): did_yield = True yield index # depends on [control=['for'], data=['index']] # depends on [control=['if'], data=[]] else: if len(done[0]) <= len(child_path): Log.error('Can not pick up more variables, iterator over {{path}} is done', path=join_field(done[0])) # depends on [control=['if'], data=[]] index = _assign_token(index, c, child_expected) # depends on [control=['if'], data=[]] elif query_path and query_path[0] == name: for index in _decode_token(index, c, child_path, query_path[1:], child_expected): yield index # depends on [control=['for'], data=['index']] # depends on [control=['if'], data=[]] else: index = jump_to_end(index, c) # depends on [control=['if'], data=['c']] elif c == b'}': if not did_yield: yield index # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] def set_destination(expected_vars, value): for (i, e) in enumerate(expected_vars): if e is None: pass # depends on [control=['if'], data=[]] elif e == '.': destination[i] = value # depends on [control=['if'], data=[]] elif is_data(value): destination[i] = value[e] # depends on [control=['if'], data=[]] else: destination[i] = Null # depends on [control=['for'], data=[]] def _decode_object_items(index, c, parent_path, query_path, expected_vars): """ ITERATE THROUGH THE PROPERTIES OF AN OBJECT """ (c, index) = skip_whitespace(index) num_items = 0 while True: if c == b',': (c, index) = skip_whitespace(index) # depends on [control=['if'], data=['c']] elif c == b'"': (name, index) = simple_token(index, c) if 'name' in expected_vars: for (i, e) in enumerate(expected_vars): if e == 'name': destination[i] = name # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['expected_vars']] (c, index) = skip_whitespace(index) if c != b':': Log.error('Expecting colon') # depends on [control=['if'], data=[]] (c, index) = skip_whitespace(index) child_expected = needed('value', expected_vars) index = _assign_token(index, c, child_expected) (c, index) = skip_whitespace(index) DEBUG and (not num_items % 1000) and Log.note('{{num}} items iterated', num=num_items) yield index num_items += 1 # depends on [control=['if'], data=['c']] elif c == b'}': break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] def _decode_token(index, c, parent_path, query_path, expected_vars): if c == b'{': if query_path and query_path[0] == '$items': if any(expected_vars): for index in _decode_object_items(index, c, parent_path, query_path[1:], expected_vars): yield index # depends on [control=['for'], data=['index']] # depends on [control=['if'], data=[]] else: index = jump_to_end(index, c) yield index # depends on [control=['if'], data=[]] elif not any(expected_vars): index = jump_to_end(index, c) yield index # depends on [control=['if'], data=[]] else: for index in _decode_object(index, c, parent_path, query_path, expected_vars): yield index # depends on [control=['for'], data=['index']] # depends on [control=['if'], data=['c']] elif c == b'[': for index in _iterate_list(index, c, parent_path, query_path, expected_vars): yield index # depends on [control=['for'], data=['index']] # depends on [control=['if'], data=['c']] else: index = _assign_token(index, c, expected_vars) yield index def _assign_token(index, c, expected_vars): if not any(expected_vars): return jump_to_end(index, c) # depends on [control=['if'], data=[]] (value, index) = simple_token(index, c) set_destination(expected_vars, value) return index def jump_to_end(index, c): """ DO NOT PROCESS THIS JSON OBJECT, JUST RETURN WHERE IT ENDS """ if c == b'"': while True: c = json[index] index += 1 if c == b'\\': index += 1 # depends on [control=['if'], data=[]] elif c == b'"': break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return index # depends on [control=['if'], data=['c']] elif c not in b'[{': while True: c = json[index] index += 1 if c in b',]}': break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return index - 1 # depends on [control=['if'], data=['c']] # OBJECTS AND ARRAYS ARE MORE INVOLVED stack = [None] * 1024 stack[0] = CLOSE[c] i = 0 # FOR INDEXING THE STACK while True: c = json[index] index += 1 if c == b'"': while True: c = json[index] index += 1 if c == b'\\': index += 1 # depends on [control=['if'], data=[]] elif c == b'"': break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=['c']] elif c in b'[{': i += 1 stack[i] = CLOSE[c] # depends on [control=['if'], data=['c']] elif c == stack[i]: i -= 1 if i == -1: return index # FOUND THE MATCH! RETURN # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif c in b']}': Log.error('expecting {{symbol}}', symbol=stack[i]) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] def simple_token(index, c): if c == b'"': json.mark(index - 1) while True: c = json[index] index += 1 if c == b'\\': index += 1 # depends on [control=['if'], data=[]] elif c == b'"': break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return (json_decoder(json.release(index).decode('utf8')), index) # depends on [control=['if'], data=['c']] elif c in b'{[': json.mark(index - 1) index = jump_to_end(index, c) value = wrap(json_decoder(json.release(index).decode('utf8'))) return (value, index) # depends on [control=['if'], data=['c']] elif c == b't' and json.slice(index, index + 3) == b'rue': return (True, index + 3) # depends on [control=['if'], data=[]] elif c == b'n' and json.slice(index, index + 3) == b'ull': return (None, index + 3) # depends on [control=['if'], data=[]] elif c == b'f' and json.slice(index, index + 4) == b'alse': return (False, index + 4) # depends on [control=['if'], data=[]] else: json.mark(index - 1) while True: c = json[index] if c in b',]}': break # depends on [control=['if'], data=[]] index += 1 # depends on [control=['while'], data=[]] text = json.release(index) try: return (float(text), index) # depends on [control=['try'], data=[]] except Exception: Log.error('Not a known JSON primitive: {{text|quote}}', text=text) # depends on [control=['except'], data=[]] def skip_whitespace(index): """ RETURN NEXT NON-WHITESPACE CHAR, AND ITS INDEX """ c = json[index] while c in WHITESPACE: index += 1 c = json[index] # depends on [control=['while'], data=['c']] return (c, index + 1) if is_data(query_path) and query_path.get('items'): path_list = split_field(query_path.get('items')) + ['$items'] # INSERT A MARKER SO THAT OBJECT IS STREAM DECODED # depends on [control=['if'], data=[]] else: path_list = split_field(query_path) destination = [None] * len(expected_vars) (c, index) = skip_whitespace(0) done = [path_list + [None]] for _ in _decode_token(index, c, [], path_list, expected_vars): output = Data() for (i, e) in enumerate(expected_vars): output[e] = destination[i] # depends on [control=['for'], data=[]] yield output # depends on [control=['for'], data=[]]
def response(self, url): """Grab an API response.""" resp = requests.get(url).content return self.parseresponse(resp)
def function[response, parameter[self, url]]: constant[Grab an API response.] variable[resp] assign[=] call[name[requests].get, parameter[name[url]]].content return[call[name[self].parseresponse, parameter[name[resp]]]]
keyword[def] identifier[response] ( identifier[self] , identifier[url] ): literal[string] identifier[resp] = identifier[requests] . identifier[get] ( identifier[url] ). identifier[content] keyword[return] identifier[self] . identifier[parseresponse] ( identifier[resp] )
def response(self, url): """Grab an API response.""" resp = requests.get(url).content return self.parseresponse(resp)
def slowlog_get(self, length=None): """Returns the Redis slow queries log.""" if length is not None: if not isinstance(length, int): raise TypeError("length must be int or None") return self.execute(b'SLOWLOG', b'GET', length) else: return self.execute(b'SLOWLOG', b'GET')
def function[slowlog_get, parameter[self, length]]: constant[Returns the Redis slow queries log.] if compare[name[length] is_not constant[None]] begin[:] if <ast.UnaryOp object at 0x7da2043441f0> begin[:] <ast.Raise object at 0x7da2054a74f0> return[call[name[self].execute, parameter[constant[b'SLOWLOG'], constant[b'GET'], name[length]]]]
keyword[def] identifier[slowlog_get] ( identifier[self] , identifier[length] = keyword[None] ): literal[string] keyword[if] identifier[length] keyword[is] keyword[not] keyword[None] : keyword[if] keyword[not] identifier[isinstance] ( identifier[length] , identifier[int] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[return] identifier[self] . identifier[execute] ( literal[string] , literal[string] , identifier[length] ) keyword[else] : keyword[return] identifier[self] . identifier[execute] ( literal[string] , literal[string] )
def slowlog_get(self, length=None): """Returns the Redis slow queries log.""" if length is not None: if not isinstance(length, int): raise TypeError('length must be int or None') # depends on [control=['if'], data=[]] return self.execute(b'SLOWLOG', b'GET', length) # depends on [control=['if'], data=['length']] else: return self.execute(b'SLOWLOG', b'GET')
def render(self, request, **kwargs): """ Renders this view. Adds cancel_url to the context. If the request get parameters contains 'popup' then the `render_type` is set to 'popup'. """ if request.GET.get('popup'): self.render_type = 'popup' kwargs['popup'] = 1 kwargs['cancel_url'] = self.get_cancel_url() if not self.object: kwargs['single_title'] = True return super(FormView, self).render(request, **kwargs)
def function[render, parameter[self, request]]: constant[ Renders this view. Adds cancel_url to the context. If the request get parameters contains 'popup' then the `render_type` is set to 'popup'. ] if call[name[request].GET.get, parameter[constant[popup]]] begin[:] name[self].render_type assign[=] constant[popup] call[name[kwargs]][constant[popup]] assign[=] constant[1] call[name[kwargs]][constant[cancel_url]] assign[=] call[name[self].get_cancel_url, parameter[]] if <ast.UnaryOp object at 0x7da1b0b6e290> begin[:] call[name[kwargs]][constant[single_title]] assign[=] constant[True] return[call[call[name[super], parameter[name[FormView], name[self]]].render, parameter[name[request]]]]
keyword[def] identifier[render] ( identifier[self] , identifier[request] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[request] . identifier[GET] . identifier[get] ( literal[string] ): identifier[self] . identifier[render_type] = literal[string] identifier[kwargs] [ literal[string] ]= literal[int] identifier[kwargs] [ literal[string] ]= identifier[self] . identifier[get_cancel_url] () keyword[if] keyword[not] identifier[self] . identifier[object] : identifier[kwargs] [ literal[string] ]= keyword[True] keyword[return] identifier[super] ( identifier[FormView] , identifier[self] ). identifier[render] ( identifier[request] ,** identifier[kwargs] )
def render(self, request, **kwargs): """ Renders this view. Adds cancel_url to the context. If the request get parameters contains 'popup' then the `render_type` is set to 'popup'. """ if request.GET.get('popup'): self.render_type = 'popup' kwargs['popup'] = 1 # depends on [control=['if'], data=[]] kwargs['cancel_url'] = self.get_cancel_url() if not self.object: kwargs['single_title'] = True # depends on [control=['if'], data=[]] return super(FormView, self).render(request, **kwargs)
def IsValidForDescriptor(self, message_descriptor): """Checks whether the FieldMask is valid for Message Descriptor.""" for path in self.paths: if not _IsValidPath(message_descriptor, path): return False return True
def function[IsValidForDescriptor, parameter[self, message_descriptor]]: constant[Checks whether the FieldMask is valid for Message Descriptor.] for taget[name[path]] in starred[name[self].paths] begin[:] if <ast.UnaryOp object at 0x7da1b1f74d90> begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[IsValidForDescriptor] ( identifier[self] , identifier[message_descriptor] ): literal[string] keyword[for] identifier[path] keyword[in] identifier[self] . identifier[paths] : keyword[if] keyword[not] identifier[_IsValidPath] ( identifier[message_descriptor] , identifier[path] ): keyword[return] keyword[False] keyword[return] keyword[True]
def IsValidForDescriptor(self, message_descriptor): """Checks whether the FieldMask is valid for Message Descriptor.""" for path in self.paths: if not _IsValidPath(message_descriptor, path): return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']] return True
def estimate(data, fit_offset="mean", fit_profile="tilt", border_px=0, from_mask=None, ret_mask=False): """Estimate the background value of an image Parameters ---------- data: np.ndarray Data from which to compute the background value fit_profile: str The type of background profile to fit: - "offset": offset only - "poly2o": 2D 2nd order polynomial with mixed terms - "tilt": 2D linear tilt with offset (default) fit_offset: str The method for computing the profile offset - "fit": offset as fitting parameter - "gauss": center of a gaussian fit - "mean": simple average - "mode": mode (see `qpimage.bg_estimate.mode`) border_px: float Assume that a frame of `border_px` pixels around the image is background. from_mask: boolean np.ndarray or None Use a boolean array to define the background area. The boolean mask must have the same shape as the input data. `True` elements are used for background estimation. ret_mask: bool Return the boolean mask used to compute the background. Notes ----- If both `border_px` and `from_mask` are given, the intersection of the two is used, i.e. the positions where both, the frame mask and `from_mask`, are `True`. """ if fit_profile not in VALID_FIT_PROFILES: msg = "`fit_profile` must be one of {}, got '{}'".format( VALID_FIT_PROFILES, fit_profile) raise ValueError(msg) if fit_offset not in VALID_FIT_OFFSETS: msg = "`fit_offset` must be one of {}, got '{}'".format( VALID_FIT_OFFSETS, fit_offset) raise ValueError(msg) # initial mask image if from_mask is not None: assert isinstance(from_mask, np.ndarray) mask = from_mask.copy() else: mask = np.ones_like(data, dtype=bool) # multiply with border mask image (intersection) if border_px > 0: border_px = int(np.round(border_px)) mask_px = np.zeros_like(mask) mask_px[:border_px, :] = True mask_px[-border_px:, :] = True mask_px[:, :border_px] = True mask_px[:, -border_px:] = True # intersection np.logical_and(mask, mask_px, out=mask) # compute background image if fit_profile == "tilt": bgimg = profile_tilt(data, mask) elif fit_profile == "poly2o": bgimg = profile_poly2o(data, mask) else: bgimg = np.zeros_like(data, dtype=float) # add offsets if fit_offset == "fit": if fit_profile == "offset": msg = "`fit_offset=='fit'` only valid when `fit_profile!='offset`" raise ValueError(msg) # nothing else to do here, using offset from fit elif fit_offset == "gauss": bgimg += offset_gaussian((data - bgimg)[mask]) elif fit_offset == "mean": bgimg += np.mean((data - bgimg)[mask]) elif fit_offset == "mode": bgimg += offset_mode((data - bgimg)[mask]) if ret_mask: ret = (bgimg, mask) else: ret = bgimg return ret
def function[estimate, parameter[data, fit_offset, fit_profile, border_px, from_mask, ret_mask]]: constant[Estimate the background value of an image Parameters ---------- data: np.ndarray Data from which to compute the background value fit_profile: str The type of background profile to fit: - "offset": offset only - "poly2o": 2D 2nd order polynomial with mixed terms - "tilt": 2D linear tilt with offset (default) fit_offset: str The method for computing the profile offset - "fit": offset as fitting parameter - "gauss": center of a gaussian fit - "mean": simple average - "mode": mode (see `qpimage.bg_estimate.mode`) border_px: float Assume that a frame of `border_px` pixels around the image is background. from_mask: boolean np.ndarray or None Use a boolean array to define the background area. The boolean mask must have the same shape as the input data. `True` elements are used for background estimation. ret_mask: bool Return the boolean mask used to compute the background. Notes ----- If both `border_px` and `from_mask` are given, the intersection of the two is used, i.e. the positions where both, the frame mask and `from_mask`, are `True`. ] if compare[name[fit_profile] <ast.NotIn object at 0x7da2590d7190> name[VALID_FIT_PROFILES]] begin[:] variable[msg] assign[=] call[constant[`fit_profile` must be one of {}, got '{}'].format, parameter[name[VALID_FIT_PROFILES], name[fit_profile]]] <ast.Raise object at 0x7da1b109ba30> if compare[name[fit_offset] <ast.NotIn object at 0x7da2590d7190> name[VALID_FIT_OFFSETS]] begin[:] variable[msg] assign[=] call[constant[`fit_offset` must be one of {}, got '{}'].format, parameter[name[VALID_FIT_OFFSETS], name[fit_offset]]] <ast.Raise object at 0x7da1b109b760> if compare[name[from_mask] is_not constant[None]] begin[:] assert[call[name[isinstance], parameter[name[from_mask], name[np].ndarray]]] variable[mask] assign[=] call[name[from_mask].copy, parameter[]] if compare[name[border_px] greater[>] constant[0]] begin[:] variable[border_px] assign[=] call[name[int], parameter[call[name[np].round, parameter[name[border_px]]]]] variable[mask_px] assign[=] call[name[np].zeros_like, parameter[name[mask]]] call[name[mask_px]][tuple[[<ast.Slice object at 0x7da1b109ae30>, <ast.Slice object at 0x7da1b109add0>]]] assign[=] constant[True] call[name[mask_px]][tuple[[<ast.Slice object at 0x7da1b109acb0>, <ast.Slice object at 0x7da1b109ac20>]]] assign[=] constant[True] call[name[mask_px]][tuple[[<ast.Slice object at 0x7da1b109ab00>, <ast.Slice object at 0x7da1b109aad0>]]] assign[=] constant[True] call[name[mask_px]][tuple[[<ast.Slice object at 0x7da1b109a980>, <ast.Slice object at 0x7da1b109a950>]]] assign[=] constant[True] call[name[np].logical_and, parameter[name[mask], name[mask_px]]] if compare[name[fit_profile] equal[==] constant[tilt]] begin[:] variable[bgimg] assign[=] call[name[profile_tilt], parameter[name[data], name[mask]]] if compare[name[fit_offset] equal[==] constant[fit]] begin[:] if compare[name[fit_profile] equal[==] constant[offset]] begin[:] variable[msg] assign[=] constant[`fit_offset=='fit'` only valid when `fit_profile!='offset`] <ast.Raise object at 0x7da1b11745e0> if name[ret_mask] begin[:] variable[ret] assign[=] tuple[[<ast.Name object at 0x7da1b1175de0>, <ast.Name object at 0x7da1b11769b0>]] return[name[ret]]
keyword[def] identifier[estimate] ( identifier[data] , identifier[fit_offset] = literal[string] , identifier[fit_profile] = literal[string] , identifier[border_px] = literal[int] , identifier[from_mask] = keyword[None] , identifier[ret_mask] = keyword[False] ): literal[string] keyword[if] identifier[fit_profile] keyword[not] keyword[in] identifier[VALID_FIT_PROFILES] : identifier[msg] = literal[string] . identifier[format] ( identifier[VALID_FIT_PROFILES] , identifier[fit_profile] ) keyword[raise] identifier[ValueError] ( identifier[msg] ) keyword[if] identifier[fit_offset] keyword[not] keyword[in] identifier[VALID_FIT_OFFSETS] : identifier[msg] = literal[string] . identifier[format] ( identifier[VALID_FIT_OFFSETS] , identifier[fit_offset] ) keyword[raise] identifier[ValueError] ( identifier[msg] ) keyword[if] identifier[from_mask] keyword[is] keyword[not] keyword[None] : keyword[assert] identifier[isinstance] ( identifier[from_mask] , identifier[np] . identifier[ndarray] ) identifier[mask] = identifier[from_mask] . identifier[copy] () keyword[else] : identifier[mask] = identifier[np] . identifier[ones_like] ( identifier[data] , identifier[dtype] = identifier[bool] ) keyword[if] identifier[border_px] > literal[int] : identifier[border_px] = identifier[int] ( identifier[np] . identifier[round] ( identifier[border_px] )) identifier[mask_px] = identifier[np] . identifier[zeros_like] ( identifier[mask] ) identifier[mask_px] [: identifier[border_px] ,:]= keyword[True] identifier[mask_px] [- identifier[border_px] :,:]= keyword[True] identifier[mask_px] [:,: identifier[border_px] ]= keyword[True] identifier[mask_px] [:,- identifier[border_px] :]= keyword[True] identifier[np] . identifier[logical_and] ( identifier[mask] , identifier[mask_px] , identifier[out] = identifier[mask] ) keyword[if] identifier[fit_profile] == literal[string] : identifier[bgimg] = identifier[profile_tilt] ( identifier[data] , identifier[mask] ) keyword[elif] identifier[fit_profile] == literal[string] : identifier[bgimg] = identifier[profile_poly2o] ( identifier[data] , identifier[mask] ) keyword[else] : identifier[bgimg] = identifier[np] . identifier[zeros_like] ( identifier[data] , identifier[dtype] = identifier[float] ) keyword[if] identifier[fit_offset] == literal[string] : keyword[if] identifier[fit_profile] == literal[string] : identifier[msg] = literal[string] keyword[raise] identifier[ValueError] ( identifier[msg] ) keyword[elif] identifier[fit_offset] == literal[string] : identifier[bgimg] += identifier[offset_gaussian] (( identifier[data] - identifier[bgimg] )[ identifier[mask] ]) keyword[elif] identifier[fit_offset] == literal[string] : identifier[bgimg] += identifier[np] . identifier[mean] (( identifier[data] - identifier[bgimg] )[ identifier[mask] ]) keyword[elif] identifier[fit_offset] == literal[string] : identifier[bgimg] += identifier[offset_mode] (( identifier[data] - identifier[bgimg] )[ identifier[mask] ]) keyword[if] identifier[ret_mask] : identifier[ret] =( identifier[bgimg] , identifier[mask] ) keyword[else] : identifier[ret] = identifier[bgimg] keyword[return] identifier[ret]
def estimate(data, fit_offset='mean', fit_profile='tilt', border_px=0, from_mask=None, ret_mask=False): """Estimate the background value of an image Parameters ---------- data: np.ndarray Data from which to compute the background value fit_profile: str The type of background profile to fit: - "offset": offset only - "poly2o": 2D 2nd order polynomial with mixed terms - "tilt": 2D linear tilt with offset (default) fit_offset: str The method for computing the profile offset - "fit": offset as fitting parameter - "gauss": center of a gaussian fit - "mean": simple average - "mode": mode (see `qpimage.bg_estimate.mode`) border_px: float Assume that a frame of `border_px` pixels around the image is background. from_mask: boolean np.ndarray or None Use a boolean array to define the background area. The boolean mask must have the same shape as the input data. `True` elements are used for background estimation. ret_mask: bool Return the boolean mask used to compute the background. Notes ----- If both `border_px` and `from_mask` are given, the intersection of the two is used, i.e. the positions where both, the frame mask and `from_mask`, are `True`. """ if fit_profile not in VALID_FIT_PROFILES: msg = "`fit_profile` must be one of {}, got '{}'".format(VALID_FIT_PROFILES, fit_profile) raise ValueError(msg) # depends on [control=['if'], data=['fit_profile', 'VALID_FIT_PROFILES']] if fit_offset not in VALID_FIT_OFFSETS: msg = "`fit_offset` must be one of {}, got '{}'".format(VALID_FIT_OFFSETS, fit_offset) raise ValueError(msg) # depends on [control=['if'], data=['fit_offset', 'VALID_FIT_OFFSETS']] # initial mask image if from_mask is not None: assert isinstance(from_mask, np.ndarray) mask = from_mask.copy() # depends on [control=['if'], data=['from_mask']] else: mask = np.ones_like(data, dtype=bool) # multiply with border mask image (intersection) if border_px > 0: border_px = int(np.round(border_px)) mask_px = np.zeros_like(mask) mask_px[:border_px, :] = True mask_px[-border_px:, :] = True mask_px[:, :border_px] = True mask_px[:, -border_px:] = True # intersection np.logical_and(mask, mask_px, out=mask) # depends on [control=['if'], data=['border_px']] # compute background image if fit_profile == 'tilt': bgimg = profile_tilt(data, mask) # depends on [control=['if'], data=[]] elif fit_profile == 'poly2o': bgimg = profile_poly2o(data, mask) # depends on [control=['if'], data=[]] else: bgimg = np.zeros_like(data, dtype=float) # add offsets if fit_offset == 'fit': if fit_profile == 'offset': msg = "`fit_offset=='fit'` only valid when `fit_profile!='offset`" raise ValueError(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # nothing else to do here, using offset from fit elif fit_offset == 'gauss': bgimg += offset_gaussian((data - bgimg)[mask]) # depends on [control=['if'], data=[]] elif fit_offset == 'mean': bgimg += np.mean((data - bgimg)[mask]) # depends on [control=['if'], data=[]] elif fit_offset == 'mode': bgimg += offset_mode((data - bgimg)[mask]) # depends on [control=['if'], data=[]] if ret_mask: ret = (bgimg, mask) # depends on [control=['if'], data=[]] else: ret = bgimg return ret
def set(self, mode, disable): """ create logger object, enable or disable logging """ global logger try: if logger: if disable: logger.disabled = True else: if mode in ('STREAM', 'FILE'): logger = logd.getLogger(mode, __version__) except Exception as e: logger.exception( '%s: Problem incurred during logging setup' % inspect.stack()[0][3] ) return False return True
def function[set, parameter[self, mode, disable]]: constant[ create logger object, enable or disable logging ] <ast.Global object at 0x7da18fe93250> <ast.Try object at 0x7da18fe91ff0> return[constant[True]]
keyword[def] identifier[set] ( identifier[self] , identifier[mode] , identifier[disable] ): literal[string] keyword[global] identifier[logger] keyword[try] : keyword[if] identifier[logger] : keyword[if] identifier[disable] : identifier[logger] . identifier[disabled] = keyword[True] keyword[else] : keyword[if] identifier[mode] keyword[in] ( literal[string] , literal[string] ): identifier[logger] = identifier[logd] . identifier[getLogger] ( identifier[mode] , identifier[__version__] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[logger] . identifier[exception] ( literal[string] % identifier[inspect] . identifier[stack] ()[ literal[int] ][ literal[int] ] ) keyword[return] keyword[False] keyword[return] keyword[True]
def set(self, mode, disable): """ create logger object, enable or disable logging """ global logger try: if logger: if disable: logger.disabled = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif mode in ('STREAM', 'FILE'): logger = logd.getLogger(mode, __version__) # depends on [control=['if'], data=['mode']] # depends on [control=['try'], data=[]] except Exception as e: logger.exception('%s: Problem incurred during logging setup' % inspect.stack()[0][3]) return False # depends on [control=['except'], data=[]] return True
def log_combinations(n, counts, name="log_combinations"): """Multinomial coefficient. Given `n` and `counts`, where `counts` has last dimension `k`, we compute the multinomial coefficient as: ```n! / sum_i n_i!``` where `i` runs over all `k` classes. Args: n: Floating-point `Tensor` broadcastable with `counts`. This represents `n` outcomes. counts: Floating-point `Tensor` broadcastable with `n`. This represents counts in `k` classes, where `k` is the last dimension of the tensor. name: A name for this operation (optional). Returns: `Tensor` representing the multinomial coefficient between `n` and `counts`. """ # First a bit about the number of ways counts could have come in: # E.g. if counts = [1, 2], then this is 3 choose 2. # In general, this is (sum counts)! / sum(counts!) # The sum should be along the last dimension of counts. This is the # "distribution" dimension. Here n a priori represents the sum of counts. with tf.name_scope(name): n = tf.convert_to_tensor(value=n, name="n") counts = tf.convert_to_tensor(value=counts, name="counts") total_permutations = tf.math.lgamma(n + 1) counts_factorial = tf.math.lgamma(counts + 1) redundant_permutations = tf.reduce_sum( input_tensor=counts_factorial, axis=[-1]) return total_permutations - redundant_permutations
def function[log_combinations, parameter[n, counts, name]]: constant[Multinomial coefficient. Given `n` and `counts`, where `counts` has last dimension `k`, we compute the multinomial coefficient as: ```n! / sum_i n_i!``` where `i` runs over all `k` classes. Args: n: Floating-point `Tensor` broadcastable with `counts`. This represents `n` outcomes. counts: Floating-point `Tensor` broadcastable with `n`. This represents counts in `k` classes, where `k` is the last dimension of the tensor. name: A name for this operation (optional). Returns: `Tensor` representing the multinomial coefficient between `n` and `counts`. ] with call[name[tf].name_scope, parameter[name[name]]] begin[:] variable[n] assign[=] call[name[tf].convert_to_tensor, parameter[]] variable[counts] assign[=] call[name[tf].convert_to_tensor, parameter[]] variable[total_permutations] assign[=] call[name[tf].math.lgamma, parameter[binary_operation[name[n] + constant[1]]]] variable[counts_factorial] assign[=] call[name[tf].math.lgamma, parameter[binary_operation[name[counts] + constant[1]]]] variable[redundant_permutations] assign[=] call[name[tf].reduce_sum, parameter[]] return[binary_operation[name[total_permutations] - name[redundant_permutations]]]
keyword[def] identifier[log_combinations] ( identifier[n] , identifier[counts] , identifier[name] = literal[string] ): literal[string] keyword[with] identifier[tf] . identifier[name_scope] ( identifier[name] ): identifier[n] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[n] , identifier[name] = literal[string] ) identifier[counts] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[counts] , identifier[name] = literal[string] ) identifier[total_permutations] = identifier[tf] . identifier[math] . identifier[lgamma] ( identifier[n] + literal[int] ) identifier[counts_factorial] = identifier[tf] . identifier[math] . identifier[lgamma] ( identifier[counts] + literal[int] ) identifier[redundant_permutations] = identifier[tf] . identifier[reduce_sum] ( identifier[input_tensor] = identifier[counts_factorial] , identifier[axis] =[- literal[int] ]) keyword[return] identifier[total_permutations] - identifier[redundant_permutations]
def log_combinations(n, counts, name='log_combinations'): """Multinomial coefficient. Given `n` and `counts`, where `counts` has last dimension `k`, we compute the multinomial coefficient as: ```n! / sum_i n_i!``` where `i` runs over all `k` classes. Args: n: Floating-point `Tensor` broadcastable with `counts`. This represents `n` outcomes. counts: Floating-point `Tensor` broadcastable with `n`. This represents counts in `k` classes, where `k` is the last dimension of the tensor. name: A name for this operation (optional). Returns: `Tensor` representing the multinomial coefficient between `n` and `counts`. """ # First a bit about the number of ways counts could have come in: # E.g. if counts = [1, 2], then this is 3 choose 2. # In general, this is (sum counts)! / sum(counts!) # The sum should be along the last dimension of counts. This is the # "distribution" dimension. Here n a priori represents the sum of counts. with tf.name_scope(name): n = tf.convert_to_tensor(value=n, name='n') counts = tf.convert_to_tensor(value=counts, name='counts') total_permutations = tf.math.lgamma(n + 1) counts_factorial = tf.math.lgamma(counts + 1) redundant_permutations = tf.reduce_sum(input_tensor=counts_factorial, axis=[-1]) return total_permutations - redundant_permutations # depends on [control=['with'], data=[]]
def clear(self): """Remove all items.""" self._fwdm.clear() self._invm.clear() self._sntl.nxt = self._sntl.prv = self._sntl
def function[clear, parameter[self]]: constant[Remove all items.] call[name[self]._fwdm.clear, parameter[]] call[name[self]._invm.clear, parameter[]] name[self]._sntl.nxt assign[=] name[self]._sntl
keyword[def] identifier[clear] ( identifier[self] ): literal[string] identifier[self] . identifier[_fwdm] . identifier[clear] () identifier[self] . identifier[_invm] . identifier[clear] () identifier[self] . identifier[_sntl] . identifier[nxt] = identifier[self] . identifier[_sntl] . identifier[prv] = identifier[self] . identifier[_sntl]
def clear(self): """Remove all items.""" self._fwdm.clear() self._invm.clear() self._sntl.nxt = self._sntl.prv = self._sntl
def getResponseText(endpoint, query, requestedMimeType): ''' endpoint - URL of sparql endpoint query - SPARQL query to be executed requestedMimeType Type of content requested. can be: 'text/csv; q=1.0, */*; q=0.1' 'application/json' etc. Returns result + mimetype ''' retFormat = _mimeTypeToSparqlFormat(requestedMimeType) client = SPARQLWrapper(endpoint) client.setQuery(query) client.setReturnFormat(retFormat) client.setCredentials(static.DEFAULT_ENDPOINT_USER, static.DEFAULT_ENDPOINT_PASSWORD) result = client.queryAndConvert() if retFormat==JSON: result = jsonify(result) return result, MIME_FORMAT[retFormat]
def function[getResponseText, parameter[endpoint, query, requestedMimeType]]: constant[ endpoint - URL of sparql endpoint query - SPARQL query to be executed requestedMimeType Type of content requested. can be: 'text/csv; q=1.0, */*; q=0.1' 'application/json' etc. Returns result + mimetype ] variable[retFormat] assign[=] call[name[_mimeTypeToSparqlFormat], parameter[name[requestedMimeType]]] variable[client] assign[=] call[name[SPARQLWrapper], parameter[name[endpoint]]] call[name[client].setQuery, parameter[name[query]]] call[name[client].setReturnFormat, parameter[name[retFormat]]] call[name[client].setCredentials, parameter[name[static].DEFAULT_ENDPOINT_USER, name[static].DEFAULT_ENDPOINT_PASSWORD]] variable[result] assign[=] call[name[client].queryAndConvert, parameter[]] if compare[name[retFormat] equal[==] name[JSON]] begin[:] variable[result] assign[=] call[name[jsonify], parameter[name[result]]] return[tuple[[<ast.Name object at 0x7da20c76dff0>, <ast.Subscript object at 0x7da20c76f670>]]]
keyword[def] identifier[getResponseText] ( identifier[endpoint] , identifier[query] , identifier[requestedMimeType] ): literal[string] identifier[retFormat] = identifier[_mimeTypeToSparqlFormat] ( identifier[requestedMimeType] ) identifier[client] = identifier[SPARQLWrapper] ( identifier[endpoint] ) identifier[client] . identifier[setQuery] ( identifier[query] ) identifier[client] . identifier[setReturnFormat] ( identifier[retFormat] ) identifier[client] . identifier[setCredentials] ( identifier[static] . identifier[DEFAULT_ENDPOINT_USER] , identifier[static] . identifier[DEFAULT_ENDPOINT_PASSWORD] ) identifier[result] = identifier[client] . identifier[queryAndConvert] () keyword[if] identifier[retFormat] == identifier[JSON] : identifier[result] = identifier[jsonify] ( identifier[result] ) keyword[return] identifier[result] , identifier[MIME_FORMAT] [ identifier[retFormat] ]
def getResponseText(endpoint, query, requestedMimeType): """ endpoint - URL of sparql endpoint query - SPARQL query to be executed requestedMimeType Type of content requested. can be: 'text/csv; q=1.0, */*; q=0.1' 'application/json' etc. Returns result + mimetype """ retFormat = _mimeTypeToSparqlFormat(requestedMimeType) client = SPARQLWrapper(endpoint) client.setQuery(query) client.setReturnFormat(retFormat) client.setCredentials(static.DEFAULT_ENDPOINT_USER, static.DEFAULT_ENDPOINT_PASSWORD) result = client.queryAndConvert() if retFormat == JSON: result = jsonify(result) # depends on [control=['if'], data=[]] return (result, MIME_FORMAT[retFormat])
def get_requirement_warn(self, line): """Gets name of test case that was not successfully imported.""" res = self.REQ_WARN_SEARCH.search(line) try: return LogItem(res.group(1), None, None) except (AttributeError, IndexError): return None
def function[get_requirement_warn, parameter[self, line]]: constant[Gets name of test case that was not successfully imported.] variable[res] assign[=] call[name[self].REQ_WARN_SEARCH.search, parameter[name[line]]] <ast.Try object at 0x7da18eb55f00>
keyword[def] identifier[get_requirement_warn] ( identifier[self] , identifier[line] ): literal[string] identifier[res] = identifier[self] . identifier[REQ_WARN_SEARCH] . identifier[search] ( identifier[line] ) keyword[try] : keyword[return] identifier[LogItem] ( identifier[res] . identifier[group] ( literal[int] ), keyword[None] , keyword[None] ) keyword[except] ( identifier[AttributeError] , identifier[IndexError] ): keyword[return] keyword[None]
def get_requirement_warn(self, line): """Gets name of test case that was not successfully imported.""" res = self.REQ_WARN_SEARCH.search(line) try: return LogItem(res.group(1), None, None) # depends on [control=['try'], data=[]] except (AttributeError, IndexError): return None # depends on [control=['except'], data=[]]
def set_group_mask(self, group_mask=ALL_GROUPS): """ Set the group mask that the Crazyflie belongs to :param group_mask: mask for which groups this CF belongs to """ self._send_packet(struct.pack('<BB', self.COMMAND_SET_GROUP_MASK, group_mask))
def function[set_group_mask, parameter[self, group_mask]]: constant[ Set the group mask that the Crazyflie belongs to :param group_mask: mask for which groups this CF belongs to ] call[name[self]._send_packet, parameter[call[name[struct].pack, parameter[constant[<BB], name[self].COMMAND_SET_GROUP_MASK, name[group_mask]]]]]
keyword[def] identifier[set_group_mask] ( identifier[self] , identifier[group_mask] = identifier[ALL_GROUPS] ): literal[string] identifier[self] . identifier[_send_packet] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[self] . identifier[COMMAND_SET_GROUP_MASK] , identifier[group_mask] ))
def set_group_mask(self, group_mask=ALL_GROUPS): """ Set the group mask that the Crazyflie belongs to :param group_mask: mask for which groups this CF belongs to """ self._send_packet(struct.pack('<BB', self.COMMAND_SET_GROUP_MASK, group_mask))
def can_be_internationally_dialled(numobj): """Returns True if the number can only be dialled from outside the region, or unknown. If the number can only be dialled from within the region as well, returns False. Does not check the number is a valid number. Note that, at the moment, this method does not handle short numbers (which are currently all presumed to not be diallable from outside their country). Arguments: numobj -- the phone number objectfor which we want to know whether it is diallable from outside the region. """ metadata = PhoneMetadata.metadata_for_region(region_code_for_number(numobj), None) if metadata is None: # Note numbers belonging to non-geographical entities (e.g. +800 # numbers) are always internationally diallable, and will be caught # here. return True nsn = national_significant_number(numobj) return not _is_number_matching_desc(nsn, metadata.no_international_dialling)
def function[can_be_internationally_dialled, parameter[numobj]]: constant[Returns True if the number can only be dialled from outside the region, or unknown. If the number can only be dialled from within the region as well, returns False. Does not check the number is a valid number. Note that, at the moment, this method does not handle short numbers (which are currently all presumed to not be diallable from outside their country). Arguments: numobj -- the phone number objectfor which we want to know whether it is diallable from outside the region. ] variable[metadata] assign[=] call[name[PhoneMetadata].metadata_for_region, parameter[call[name[region_code_for_number], parameter[name[numobj]]], constant[None]]] if compare[name[metadata] is constant[None]] begin[:] return[constant[True]] variable[nsn] assign[=] call[name[national_significant_number], parameter[name[numobj]]] return[<ast.UnaryOp object at 0x7da1b19d9600>]
keyword[def] identifier[can_be_internationally_dialled] ( identifier[numobj] ): literal[string] identifier[metadata] = identifier[PhoneMetadata] . identifier[metadata_for_region] ( identifier[region_code_for_number] ( identifier[numobj] ), keyword[None] ) keyword[if] identifier[metadata] keyword[is] keyword[None] : keyword[return] keyword[True] identifier[nsn] = identifier[national_significant_number] ( identifier[numobj] ) keyword[return] keyword[not] identifier[_is_number_matching_desc] ( identifier[nsn] , identifier[metadata] . identifier[no_international_dialling] )
def can_be_internationally_dialled(numobj): """Returns True if the number can only be dialled from outside the region, or unknown. If the number can only be dialled from within the region as well, returns False. Does not check the number is a valid number. Note that, at the moment, this method does not handle short numbers (which are currently all presumed to not be diallable from outside their country). Arguments: numobj -- the phone number objectfor which we want to know whether it is diallable from outside the region. """ metadata = PhoneMetadata.metadata_for_region(region_code_for_number(numobj), None) if metadata is None: # Note numbers belonging to non-geographical entities (e.g. +800 # numbers) are always internationally diallable, and will be caught # here. return True # depends on [control=['if'], data=[]] nsn = national_significant_number(numobj) return not _is_number_matching_desc(nsn, metadata.no_international_dialling)
def set_next_boot(self, boot_device): """Sets the machine to boot to boot_device on its next reboot Will default back to normal boot list on the reboot that follows. """ payload = amt.wsman.change_boot_order_request(self.uri, boot_device) self.post(payload) payload = amt.wsman.enable_boot_config_request(self.uri) self.post(payload)
def function[set_next_boot, parameter[self, boot_device]]: constant[Sets the machine to boot to boot_device on its next reboot Will default back to normal boot list on the reboot that follows. ] variable[payload] assign[=] call[name[amt].wsman.change_boot_order_request, parameter[name[self].uri, name[boot_device]]] call[name[self].post, parameter[name[payload]]] variable[payload] assign[=] call[name[amt].wsman.enable_boot_config_request, parameter[name[self].uri]] call[name[self].post, parameter[name[payload]]]
keyword[def] identifier[set_next_boot] ( identifier[self] , identifier[boot_device] ): literal[string] identifier[payload] = identifier[amt] . identifier[wsman] . identifier[change_boot_order_request] ( identifier[self] . identifier[uri] , identifier[boot_device] ) identifier[self] . identifier[post] ( identifier[payload] ) identifier[payload] = identifier[amt] . identifier[wsman] . identifier[enable_boot_config_request] ( identifier[self] . identifier[uri] ) identifier[self] . identifier[post] ( identifier[payload] )
def set_next_boot(self, boot_device): """Sets the machine to boot to boot_device on its next reboot Will default back to normal boot list on the reboot that follows. """ payload = amt.wsman.change_boot_order_request(self.uri, boot_device) self.post(payload) payload = amt.wsman.enable_boot_config_request(self.uri) self.post(payload)
def tunnel_settings_system_tunnel_suppress_debounce(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") tunnel_settings = ET.SubElement(config, "tunnel-settings", xmlns="urn:brocade.com:mgmt:brocade-tunnels") system = ET.SubElement(tunnel_settings, "system") tunnel = ET.SubElement(system, "tunnel") suppress_debounce = ET.SubElement(tunnel, "suppress-debounce") callback = kwargs.pop('callback', self._callback) return callback(config)
def function[tunnel_settings_system_tunnel_suppress_debounce, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[tunnel_settings] assign[=] call[name[ET].SubElement, parameter[name[config], constant[tunnel-settings]]] variable[system] assign[=] call[name[ET].SubElement, parameter[name[tunnel_settings], constant[system]]] variable[tunnel] assign[=] call[name[ET].SubElement, parameter[name[system], constant[tunnel]]] variable[suppress_debounce] assign[=] call[name[ET].SubElement, parameter[name[tunnel], constant[suppress-debounce]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[tunnel_settings_system_tunnel_suppress_debounce] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[tunnel_settings] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[system] = identifier[ET] . identifier[SubElement] ( identifier[tunnel_settings] , literal[string] ) identifier[tunnel] = identifier[ET] . identifier[SubElement] ( identifier[system] , literal[string] ) identifier[suppress_debounce] = identifier[ET] . identifier[SubElement] ( identifier[tunnel] , literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def tunnel_settings_system_tunnel_suppress_debounce(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') tunnel_settings = ET.SubElement(config, 'tunnel-settings', xmlns='urn:brocade.com:mgmt:brocade-tunnels') system = ET.SubElement(tunnel_settings, 'system') tunnel = ET.SubElement(system, 'tunnel') suppress_debounce = ET.SubElement(tunnel, 'suppress-debounce') callback = kwargs.pop('callback', self._callback) return callback(config)
def ok_schema_id(token: str) -> bool: """ Whether input token looks like a valid schema identifier; i.e., <issuer-did>:2:<name>:<version>. :param token: candidate string :return: whether input token looks like a valid schema identifier """ return bool(re.match('[{}]{{21,22}}:2:.+:[0-9.]+$'.format(B58), token or ''))
def function[ok_schema_id, parameter[token]]: constant[ Whether input token looks like a valid schema identifier; i.e., <issuer-did>:2:<name>:<version>. :param token: candidate string :return: whether input token looks like a valid schema identifier ] return[call[name[bool], parameter[call[name[re].match, parameter[call[constant[[{}]{{21,22}}:2:.+:[0-9.]+$].format, parameter[name[B58]]], <ast.BoolOp object at 0x7da18dc9b6a0>]]]]]
keyword[def] identifier[ok_schema_id] ( identifier[token] : identifier[str] )-> identifier[bool] : literal[string] keyword[return] identifier[bool] ( identifier[re] . identifier[match] ( literal[string] . identifier[format] ( identifier[B58] ), identifier[token] keyword[or] literal[string] ))
def ok_schema_id(token: str) -> bool: """ Whether input token looks like a valid schema identifier; i.e., <issuer-did>:2:<name>:<version>. :param token: candidate string :return: whether input token looks like a valid schema identifier """ return bool(re.match('[{}]{{21,22}}:2:.+:[0-9.]+$'.format(B58), token or ''))
def resolutions(self): """Get a list of resolution Resources from the server. :rtype: List[Resolution] """ r_json = self._get_json('resolution') resolutions = [Resolution( self._options, self._session, raw_res_json) for raw_res_json in r_json] return resolutions
def function[resolutions, parameter[self]]: constant[Get a list of resolution Resources from the server. :rtype: List[Resolution] ] variable[r_json] assign[=] call[name[self]._get_json, parameter[constant[resolution]]] variable[resolutions] assign[=] <ast.ListComp object at 0x7da1b2185cc0> return[name[resolutions]]
keyword[def] identifier[resolutions] ( identifier[self] ): literal[string] identifier[r_json] = identifier[self] . identifier[_get_json] ( literal[string] ) identifier[resolutions] =[ identifier[Resolution] ( identifier[self] . identifier[_options] , identifier[self] . identifier[_session] , identifier[raw_res_json] ) keyword[for] identifier[raw_res_json] keyword[in] identifier[r_json] ] keyword[return] identifier[resolutions]
def resolutions(self): """Get a list of resolution Resources from the server. :rtype: List[Resolution] """ r_json = self._get_json('resolution') resolutions = [Resolution(self._options, self._session, raw_res_json) for raw_res_json in r_json] return resolutions
def attributes(self): """ Returns a dict of all this structures attributes and values, skipping any attributes that start with an underscore (assumed they should be ignored). """ return dict([(name, getattr(self, name)) for (name, _) in self.format.values() if name is not None and not name.startswith('_')])
def function[attributes, parameter[self]]: constant[ Returns a dict of all this structures attributes and values, skipping any attributes that start with an underscore (assumed they should be ignored). ] return[call[name[dict], parameter[<ast.ListComp object at 0x7da1b1eacc40>]]]
keyword[def] identifier[attributes] ( identifier[self] ): literal[string] keyword[return] identifier[dict] ([( identifier[name] , identifier[getattr] ( identifier[self] , identifier[name] )) keyword[for] ( identifier[name] , identifier[_] ) keyword[in] identifier[self] . identifier[format] . identifier[values] () keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[name] . identifier[startswith] ( literal[string] )])
def attributes(self): """ Returns a dict of all this structures attributes and values, skipping any attributes that start with an underscore (assumed they should be ignored). """ return dict([(name, getattr(self, name)) for (name, _) in self.format.values() if name is not None and (not name.startswith('_'))])
def HandleGetBlocksMessageReceived(self, payload): """ Process a GetBlocksPayload payload. Args: payload (neo.Network.Payloads.GetBlocksPayload): """ if not self.leader.ServiceEnabled: return inventory = IOHelper.AsSerializableWithType(payload, 'neo.Network.Payloads.GetBlocksPayload.GetBlocksPayload') if not inventory: return blockchain = BC.Default() hash = inventory.HashStart[0] if not blockchain.GetHeader(hash): return hashes = [] hcount = 0 while hash != inventory.HashStop and hcount < 500: hash = blockchain.GetNextBlockHash(hash) if hash is None: break hashes.append(hash) hcount += 1 if hcount > 0: self.SendSerializedMessage(Message('inv', InvPayload(type=InventoryType.Block, hashes=hashes)))
def function[HandleGetBlocksMessageReceived, parameter[self, payload]]: constant[ Process a GetBlocksPayload payload. Args: payload (neo.Network.Payloads.GetBlocksPayload): ] if <ast.UnaryOp object at 0x7da18bcca9b0> begin[:] return[None] variable[inventory] assign[=] call[name[IOHelper].AsSerializableWithType, parameter[name[payload], constant[neo.Network.Payloads.GetBlocksPayload.GetBlocksPayload]]] if <ast.UnaryOp object at 0x7da1b1df8640> begin[:] return[None] variable[blockchain] assign[=] call[name[BC].Default, parameter[]] variable[hash] assign[=] call[name[inventory].HashStart][constant[0]] if <ast.UnaryOp object at 0x7da18bcc8610> begin[:] return[None] variable[hashes] assign[=] list[[]] variable[hcount] assign[=] constant[0] while <ast.BoolOp object at 0x7da1b1df9b10> begin[:] variable[hash] assign[=] call[name[blockchain].GetNextBlockHash, parameter[name[hash]]] if compare[name[hash] is constant[None]] begin[:] break call[name[hashes].append, parameter[name[hash]]] <ast.AugAssign object at 0x7da1b1dfa050> if compare[name[hcount] greater[>] constant[0]] begin[:] call[name[self].SendSerializedMessage, parameter[call[name[Message], parameter[constant[inv], call[name[InvPayload], parameter[]]]]]]
keyword[def] identifier[HandleGetBlocksMessageReceived] ( identifier[self] , identifier[payload] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[leader] . identifier[ServiceEnabled] : keyword[return] identifier[inventory] = identifier[IOHelper] . identifier[AsSerializableWithType] ( identifier[payload] , literal[string] ) keyword[if] keyword[not] identifier[inventory] : keyword[return] identifier[blockchain] = identifier[BC] . identifier[Default] () identifier[hash] = identifier[inventory] . identifier[HashStart] [ literal[int] ] keyword[if] keyword[not] identifier[blockchain] . identifier[GetHeader] ( identifier[hash] ): keyword[return] identifier[hashes] =[] identifier[hcount] = literal[int] keyword[while] identifier[hash] != identifier[inventory] . identifier[HashStop] keyword[and] identifier[hcount] < literal[int] : identifier[hash] = identifier[blockchain] . identifier[GetNextBlockHash] ( identifier[hash] ) keyword[if] identifier[hash] keyword[is] keyword[None] : keyword[break] identifier[hashes] . identifier[append] ( identifier[hash] ) identifier[hcount] += literal[int] keyword[if] identifier[hcount] > literal[int] : identifier[self] . identifier[SendSerializedMessage] ( identifier[Message] ( literal[string] , identifier[InvPayload] ( identifier[type] = identifier[InventoryType] . identifier[Block] , identifier[hashes] = identifier[hashes] )))
def HandleGetBlocksMessageReceived(self, payload): """ Process a GetBlocksPayload payload. Args: payload (neo.Network.Payloads.GetBlocksPayload): """ if not self.leader.ServiceEnabled: return # depends on [control=['if'], data=[]] inventory = IOHelper.AsSerializableWithType(payload, 'neo.Network.Payloads.GetBlocksPayload.GetBlocksPayload') if not inventory: return # depends on [control=['if'], data=[]] blockchain = BC.Default() hash = inventory.HashStart[0] if not blockchain.GetHeader(hash): return # depends on [control=['if'], data=[]] hashes = [] hcount = 0 while hash != inventory.HashStop and hcount < 500: hash = blockchain.GetNextBlockHash(hash) if hash is None: break # depends on [control=['if'], data=[]] hashes.append(hash) hcount += 1 # depends on [control=['while'], data=[]] if hcount > 0: self.SendSerializedMessage(Message('inv', InvPayload(type=InventoryType.Block, hashes=hashes))) # depends on [control=['if'], data=[]]
def event_handlers(self): """ The list of handlers registered for this node. If the node is not a `Flow` and does not have its own list of `handlers` the handlers registered at the level of the flow are returned. This trick allows one to registered different handlers at the level of the Task for testing purposes. By default, we have a common list of handlers for all the nodes in the flow. This choice facilitates the automatic installation of the handlers when we use callbacks to generate new Works and Tasks! """ if self.is_flow: return self._event_handlers try: return self._event_handlers except AttributeError: return self.flow._event_handlers
def function[event_handlers, parameter[self]]: constant[ The list of handlers registered for this node. If the node is not a `Flow` and does not have its own list of `handlers` the handlers registered at the level of the flow are returned. This trick allows one to registered different handlers at the level of the Task for testing purposes. By default, we have a common list of handlers for all the nodes in the flow. This choice facilitates the automatic installation of the handlers when we use callbacks to generate new Works and Tasks! ] if name[self].is_flow begin[:] return[name[self]._event_handlers] <ast.Try object at 0x7da204344910>
keyword[def] identifier[event_handlers] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[is_flow] : keyword[return] identifier[self] . identifier[_event_handlers] keyword[try] : keyword[return] identifier[self] . identifier[_event_handlers] keyword[except] identifier[AttributeError] : keyword[return] identifier[self] . identifier[flow] . identifier[_event_handlers]
def event_handlers(self): """ The list of handlers registered for this node. If the node is not a `Flow` and does not have its own list of `handlers` the handlers registered at the level of the flow are returned. This trick allows one to registered different handlers at the level of the Task for testing purposes. By default, we have a common list of handlers for all the nodes in the flow. This choice facilitates the automatic installation of the handlers when we use callbacks to generate new Works and Tasks! """ if self.is_flow: return self._event_handlers # depends on [control=['if'], data=[]] try: return self._event_handlers # depends on [control=['try'], data=[]] except AttributeError: return self.flow._event_handlers # depends on [control=['except'], data=[]]
def visit_excepthandler(self, node): """Visit an except handler block and check for exception unpacking.""" def _is_used_in_except_block(node): scope = node.scope() current = node while ( current and current != scope and not isinstance(current, astroid.ExceptHandler) ): current = current.parent return isinstance(current, astroid.ExceptHandler) and current.type != node if isinstance(node.name, (astroid.Tuple, astroid.List)): self.add_message("unpacking-in-except", node=node) return if not node.name: return # Find any names scope = node.parent.scope() scope_names = scope.nodes_of_class(astroid.Name, skip_klass=astroid.FunctionDef) scope_names = list(scope_names) potential_leaked_names = [ scope_name for scope_name in scope_names if scope_name.name == node.name.name and scope_name.lineno > node.lineno and not _is_used_in_except_block(scope_name) ] reassignments_for_same_name = { assign_name.lineno for assign_name in scope.nodes_of_class( astroid.AssignName, skip_klass=astroid.FunctionDef ) if assign_name.name == node.name.name } for leaked_name in potential_leaked_names: if any( node.lineno < elem < leaked_name.lineno for elem in reassignments_for_same_name ): continue self.add_message("exception-escape", node=leaked_name)
def function[visit_excepthandler, parameter[self, node]]: constant[Visit an except handler block and check for exception unpacking.] def function[_is_used_in_except_block, parameter[node]]: variable[scope] assign[=] call[name[node].scope, parameter[]] variable[current] assign[=] name[node] while <ast.BoolOp object at 0x7da1b0245960> begin[:] variable[current] assign[=] name[current].parent return[<ast.BoolOp object at 0x7da1b0244ac0>] if call[name[isinstance], parameter[name[node].name, tuple[[<ast.Attribute object at 0x7da1b02461d0>, <ast.Attribute object at 0x7da1b0246c80>]]]] begin[:] call[name[self].add_message, parameter[constant[unpacking-in-except]]] return[None] if <ast.UnaryOp object at 0x7da1b0245ea0> begin[:] return[None] variable[scope] assign[=] call[name[node].parent.scope, parameter[]] variable[scope_names] assign[=] call[name[scope].nodes_of_class, parameter[name[astroid].Name]] variable[scope_names] assign[=] call[name[list], parameter[name[scope_names]]] variable[potential_leaked_names] assign[=] <ast.ListComp object at 0x7da1b0247ca0> variable[reassignments_for_same_name] assign[=] <ast.SetComp object at 0x7da1b0245060> for taget[name[leaked_name]] in starred[name[potential_leaked_names]] begin[:] if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b0245e10>]] begin[:] continue call[name[self].add_message, parameter[constant[exception-escape]]]
keyword[def] identifier[visit_excepthandler] ( identifier[self] , identifier[node] ): literal[string] keyword[def] identifier[_is_used_in_except_block] ( identifier[node] ): identifier[scope] = identifier[node] . identifier[scope] () identifier[current] = identifier[node] keyword[while] ( identifier[current] keyword[and] identifier[current] != identifier[scope] keyword[and] keyword[not] identifier[isinstance] ( identifier[current] , identifier[astroid] . identifier[ExceptHandler] ) ): identifier[current] = identifier[current] . identifier[parent] keyword[return] identifier[isinstance] ( identifier[current] , identifier[astroid] . identifier[ExceptHandler] ) keyword[and] identifier[current] . identifier[type] != identifier[node] keyword[if] identifier[isinstance] ( identifier[node] . identifier[name] ,( identifier[astroid] . identifier[Tuple] , identifier[astroid] . identifier[List] )): identifier[self] . identifier[add_message] ( literal[string] , identifier[node] = identifier[node] ) keyword[return] keyword[if] keyword[not] identifier[node] . identifier[name] : keyword[return] identifier[scope] = identifier[node] . identifier[parent] . identifier[scope] () identifier[scope_names] = identifier[scope] . identifier[nodes_of_class] ( identifier[astroid] . identifier[Name] , identifier[skip_klass] = identifier[astroid] . identifier[FunctionDef] ) identifier[scope_names] = identifier[list] ( identifier[scope_names] ) identifier[potential_leaked_names] =[ identifier[scope_name] keyword[for] identifier[scope_name] keyword[in] identifier[scope_names] keyword[if] identifier[scope_name] . identifier[name] == identifier[node] . identifier[name] . identifier[name] keyword[and] identifier[scope_name] . identifier[lineno] > identifier[node] . identifier[lineno] keyword[and] keyword[not] identifier[_is_used_in_except_block] ( identifier[scope_name] ) ] identifier[reassignments_for_same_name] ={ identifier[assign_name] . identifier[lineno] keyword[for] identifier[assign_name] keyword[in] identifier[scope] . identifier[nodes_of_class] ( identifier[astroid] . identifier[AssignName] , identifier[skip_klass] = identifier[astroid] . identifier[FunctionDef] ) keyword[if] identifier[assign_name] . identifier[name] == identifier[node] . identifier[name] . identifier[name] } keyword[for] identifier[leaked_name] keyword[in] identifier[potential_leaked_names] : keyword[if] identifier[any] ( identifier[node] . identifier[lineno] < identifier[elem] < identifier[leaked_name] . identifier[lineno] keyword[for] identifier[elem] keyword[in] identifier[reassignments_for_same_name] ): keyword[continue] identifier[self] . identifier[add_message] ( literal[string] , identifier[node] = identifier[leaked_name] )
def visit_excepthandler(self, node): """Visit an except handler block and check for exception unpacking.""" def _is_used_in_except_block(node): scope = node.scope() current = node while current and current != scope and (not isinstance(current, astroid.ExceptHandler)): current = current.parent # depends on [control=['while'], data=[]] return isinstance(current, astroid.ExceptHandler) and current.type != node if isinstance(node.name, (astroid.Tuple, astroid.List)): self.add_message('unpacking-in-except', node=node) return # depends on [control=['if'], data=[]] if not node.name: return # depends on [control=['if'], data=[]] # Find any names scope = node.parent.scope() scope_names = scope.nodes_of_class(astroid.Name, skip_klass=astroid.FunctionDef) scope_names = list(scope_names) potential_leaked_names = [scope_name for scope_name in scope_names if scope_name.name == node.name.name and scope_name.lineno > node.lineno and (not _is_used_in_except_block(scope_name))] reassignments_for_same_name = {assign_name.lineno for assign_name in scope.nodes_of_class(astroid.AssignName, skip_klass=astroid.FunctionDef) if assign_name.name == node.name.name} for leaked_name in potential_leaked_names: if any((node.lineno < elem < leaked_name.lineno for elem in reassignments_for_same_name)): continue # depends on [control=['if'], data=[]] self.add_message('exception-escape', node=leaked_name) # depends on [control=['for'], data=['leaked_name']]
def to_primitive(self, context=None): """ .. versionadded:: 1.3.0 """ primitive = super(VerboseMixin, self).to_primitive(context) to_text = lambda x: six.text_type(x) if x is not None else x primitive.update({ 'verbose_name': to_text(self.verbose_name), 'help_text': to_text(self.help_text), }) return primitive
def function[to_primitive, parameter[self, context]]: constant[ .. versionadded:: 1.3.0 ] variable[primitive] assign[=] call[call[name[super], parameter[name[VerboseMixin], name[self]]].to_primitive, parameter[name[context]]] variable[to_text] assign[=] <ast.Lambda object at 0x7da1b1451d20> call[name[primitive].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1347a90>, <ast.Constant object at 0x7da1b1347400>], [<ast.Call object at 0x7da1b1347eb0>, <ast.Call object at 0x7da1b1347d00>]]]] return[name[primitive]]
keyword[def] identifier[to_primitive] ( identifier[self] , identifier[context] = keyword[None] ): literal[string] identifier[primitive] = identifier[super] ( identifier[VerboseMixin] , identifier[self] ). identifier[to_primitive] ( identifier[context] ) identifier[to_text] = keyword[lambda] identifier[x] : identifier[six] . identifier[text_type] ( identifier[x] ) keyword[if] identifier[x] keyword[is] keyword[not] keyword[None] keyword[else] identifier[x] identifier[primitive] . identifier[update] ({ literal[string] : identifier[to_text] ( identifier[self] . identifier[verbose_name] ), literal[string] : identifier[to_text] ( identifier[self] . identifier[help_text] ), }) keyword[return] identifier[primitive]
def to_primitive(self, context=None): """ .. versionadded:: 1.3.0 """ primitive = super(VerboseMixin, self).to_primitive(context) to_text = lambda x: six.text_type(x) if x is not None else x primitive.update({'verbose_name': to_text(self.verbose_name), 'help_text': to_text(self.help_text)}) return primitive
def getCatalogFile(catalog_dir, mc_source_id): """ Inputs: catalog_dir = string corresponding to directory containing the stellar catalog infiles mc_source_id = integer corresponding the target MC_SOURCE_ID value Outputs: catalog_infile = string corresponding to filename of stellar catalog containing mc_source_id """ catalog_infiles = sorted(glob.glob(catalog_dir + '/*catalog*.fits')) mc_source_id_array = [] catalog_infile_index_array = [] for ii, catalog_infile in enumerate(catalog_infiles): mc_source_id_min = int(os.path.basename(catalog_infile).split('.')[0].split('mc_source_id_')[-1].split('-')[0]) mc_source_id_max = int(os.path.basename(catalog_infile).split('.')[0].split('mc_source_id_')[-1].split('-')[1]) assert (mc_source_id_max > mc_source_id_min) & (mc_source_id_min >= 1), 'Found invalue MC_SOURCE_ID values in filenames' mc_source_id_array.append(np.arange(mc_source_id_min, mc_source_id_max + 1)) catalog_infile_index_array.append(np.tile(ii, 1 + (mc_source_id_max - mc_source_id_min))) mc_source_id_array = np.concatenate(mc_source_id_array) catalog_infile_index_array = np.concatenate(catalog_infile_index_array) assert len(mc_source_id_array) == len(np.unique(mc_source_id_array)), 'Found non-unique MC_SOURCE_ID values in filenames' assert np.in1d(mc_source_id, mc_source_id_array), 'Requested MC_SOURCE_ID value not among files' mc_source_id_index = np.nonzero(mc_source_id == mc_source_id_array)[0] return catalog_infiles[catalog_infile_index_array[mc_source_id_index]]
def function[getCatalogFile, parameter[catalog_dir, mc_source_id]]: constant[ Inputs: catalog_dir = string corresponding to directory containing the stellar catalog infiles mc_source_id = integer corresponding the target MC_SOURCE_ID value Outputs: catalog_infile = string corresponding to filename of stellar catalog containing mc_source_id ] variable[catalog_infiles] assign[=] call[name[sorted], parameter[call[name[glob].glob, parameter[binary_operation[name[catalog_dir] + constant[/*catalog*.fits]]]]]] variable[mc_source_id_array] assign[=] list[[]] variable[catalog_infile_index_array] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20c76efe0>, <ast.Name object at 0x7da20c76e3b0>]]] in starred[call[name[enumerate], parameter[name[catalog_infiles]]]] begin[:] variable[mc_source_id_min] assign[=] call[name[int], parameter[call[call[call[call[call[call[call[name[os].path.basename, parameter[name[catalog_infile]]].split, parameter[constant[.]]]][constant[0]].split, parameter[constant[mc_source_id_]]]][<ast.UnaryOp object at 0x7da20c6e6140>].split, parameter[constant[-]]]][constant[0]]]] variable[mc_source_id_max] assign[=] call[name[int], parameter[call[call[call[call[call[call[call[name[os].path.basename, parameter[name[catalog_infile]]].split, parameter[constant[.]]]][constant[0]].split, parameter[constant[mc_source_id_]]]][<ast.UnaryOp object at 0x7da1b25d3580>].split, parameter[constant[-]]]][constant[1]]]] assert[binary_operation[compare[name[mc_source_id_max] greater[>] name[mc_source_id_min]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[mc_source_id_min] greater_or_equal[>=] constant[1]]]] call[name[mc_source_id_array].append, parameter[call[name[np].arange, parameter[name[mc_source_id_min], binary_operation[name[mc_source_id_max] + constant[1]]]]]] call[name[catalog_infile_index_array].append, parameter[call[name[np].tile, parameter[name[ii], binary_operation[constant[1] + binary_operation[name[mc_source_id_max] - name[mc_source_id_min]]]]]]] variable[mc_source_id_array] assign[=] call[name[np].concatenate, parameter[name[mc_source_id_array]]] variable[catalog_infile_index_array] assign[=] call[name[np].concatenate, parameter[name[catalog_infile_index_array]]] assert[compare[call[name[len], parameter[name[mc_source_id_array]]] equal[==] call[name[len], parameter[call[name[np].unique, parameter[name[mc_source_id_array]]]]]]] assert[call[name[np].in1d, parameter[name[mc_source_id], name[mc_source_id_array]]]] variable[mc_source_id_index] assign[=] call[call[name[np].nonzero, parameter[compare[name[mc_source_id] equal[==] name[mc_source_id_array]]]]][constant[0]] return[call[name[catalog_infiles]][call[name[catalog_infile_index_array]][name[mc_source_id_index]]]]
keyword[def] identifier[getCatalogFile] ( identifier[catalog_dir] , identifier[mc_source_id] ): literal[string] identifier[catalog_infiles] = identifier[sorted] ( identifier[glob] . identifier[glob] ( identifier[catalog_dir] + literal[string] )) identifier[mc_source_id_array] =[] identifier[catalog_infile_index_array] =[] keyword[for] identifier[ii] , identifier[catalog_infile] keyword[in] identifier[enumerate] ( identifier[catalog_infiles] ): identifier[mc_source_id_min] = identifier[int] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[catalog_infile] ). identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[- literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]) identifier[mc_source_id_max] = identifier[int] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[catalog_infile] ). identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[- literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]) keyword[assert] ( identifier[mc_source_id_max] > identifier[mc_source_id_min] )&( identifier[mc_source_id_min] >= literal[int] ), literal[string] identifier[mc_source_id_array] . identifier[append] ( identifier[np] . identifier[arange] ( identifier[mc_source_id_min] , identifier[mc_source_id_max] + literal[int] )) identifier[catalog_infile_index_array] . identifier[append] ( identifier[np] . identifier[tile] ( identifier[ii] , literal[int] +( identifier[mc_source_id_max] - identifier[mc_source_id_min] ))) identifier[mc_source_id_array] = identifier[np] . identifier[concatenate] ( identifier[mc_source_id_array] ) identifier[catalog_infile_index_array] = identifier[np] . identifier[concatenate] ( identifier[catalog_infile_index_array] ) keyword[assert] identifier[len] ( identifier[mc_source_id_array] )== identifier[len] ( identifier[np] . identifier[unique] ( identifier[mc_source_id_array] )), literal[string] keyword[assert] identifier[np] . identifier[in1d] ( identifier[mc_source_id] , identifier[mc_source_id_array] ), literal[string] identifier[mc_source_id_index] = identifier[np] . identifier[nonzero] ( identifier[mc_source_id] == identifier[mc_source_id_array] )[ literal[int] ] keyword[return] identifier[catalog_infiles] [ identifier[catalog_infile_index_array] [ identifier[mc_source_id_index] ]]
def getCatalogFile(catalog_dir, mc_source_id): """ Inputs: catalog_dir = string corresponding to directory containing the stellar catalog infiles mc_source_id = integer corresponding the target MC_SOURCE_ID value Outputs: catalog_infile = string corresponding to filename of stellar catalog containing mc_source_id """ catalog_infiles = sorted(glob.glob(catalog_dir + '/*catalog*.fits')) mc_source_id_array = [] catalog_infile_index_array = [] for (ii, catalog_infile) in enumerate(catalog_infiles): mc_source_id_min = int(os.path.basename(catalog_infile).split('.')[0].split('mc_source_id_')[-1].split('-')[0]) mc_source_id_max = int(os.path.basename(catalog_infile).split('.')[0].split('mc_source_id_')[-1].split('-')[1]) assert (mc_source_id_max > mc_source_id_min) & (mc_source_id_min >= 1), 'Found invalue MC_SOURCE_ID values in filenames' mc_source_id_array.append(np.arange(mc_source_id_min, mc_source_id_max + 1)) catalog_infile_index_array.append(np.tile(ii, 1 + (mc_source_id_max - mc_source_id_min))) # depends on [control=['for'], data=[]] mc_source_id_array = np.concatenate(mc_source_id_array) catalog_infile_index_array = np.concatenate(catalog_infile_index_array) assert len(mc_source_id_array) == len(np.unique(mc_source_id_array)), 'Found non-unique MC_SOURCE_ID values in filenames' assert np.in1d(mc_source_id, mc_source_id_array), 'Requested MC_SOURCE_ID value not among files' mc_source_id_index = np.nonzero(mc_source_id == mc_source_id_array)[0] return catalog_infiles[catalog_infile_index_array[mc_source_id_index]]
def checkForSpiceError(f): """ Internal function to check :param f: :raise stypes.SpiceyError: """ if failed(): errorparts = { "tkvsn": tkvrsn("TOOLKIT").replace("CSPICE_", ""), "short": getmsg("SHORT", 26), "explain": getmsg("EXPLAIN", 100).strip(), "long": getmsg("LONG", 321).strip(), "traceback": qcktrc(200)} msg = stypes.errorformat.format(**errorparts) reset() raise stypes.SpiceyError(msg)
def function[checkForSpiceError, parameter[f]]: constant[ Internal function to check :param f: :raise stypes.SpiceyError: ] if call[name[failed], parameter[]] begin[:] variable[errorparts] assign[=] dictionary[[<ast.Constant object at 0x7da18f09e470>, <ast.Constant object at 0x7da18f09c220>, <ast.Constant object at 0x7da18f09ff70>, <ast.Constant object at 0x7da18f09c190>, <ast.Constant object at 0x7da18f09c6d0>], [<ast.Call object at 0x7da18f09e140>, <ast.Call object at 0x7da18f09cac0>, <ast.Call object at 0x7da18f09f130>, <ast.Call object at 0x7da204567dc0>, <ast.Call object at 0x7da204564340>]] variable[msg] assign[=] call[name[stypes].errorformat.format, parameter[]] call[name[reset], parameter[]] <ast.Raise object at 0x7da204566fb0>
keyword[def] identifier[checkForSpiceError] ( identifier[f] ): literal[string] keyword[if] identifier[failed] (): identifier[errorparts] ={ literal[string] : identifier[tkvrsn] ( literal[string] ). identifier[replace] ( literal[string] , literal[string] ), literal[string] : identifier[getmsg] ( literal[string] , literal[int] ), literal[string] : identifier[getmsg] ( literal[string] , literal[int] ). identifier[strip] (), literal[string] : identifier[getmsg] ( literal[string] , literal[int] ). identifier[strip] (), literal[string] : identifier[qcktrc] ( literal[int] )} identifier[msg] = identifier[stypes] . identifier[errorformat] . identifier[format] (** identifier[errorparts] ) identifier[reset] () keyword[raise] identifier[stypes] . identifier[SpiceyError] ( identifier[msg] )
def checkForSpiceError(f): """ Internal function to check :param f: :raise stypes.SpiceyError: """ if failed(): errorparts = {'tkvsn': tkvrsn('TOOLKIT').replace('CSPICE_', ''), 'short': getmsg('SHORT', 26), 'explain': getmsg('EXPLAIN', 100).strip(), 'long': getmsg('LONG', 321).strip(), 'traceback': qcktrc(200)} msg = stypes.errorformat.format(**errorparts) reset() raise stypes.SpiceyError(msg) # depends on [control=['if'], data=[]]
def tar_and_s3_upload(self, path, key, bucket): """ Tar the local file or directory and upload to s3 :param path: local file or directory :type path: str :param key: s3 key :type key: str :param bucket: s3 bucket :type bucket: str :return: None """ with tempfile.TemporaryFile() as temp_file: if os.path.isdir(path): files = [os.path.join(path, name) for name in os.listdir(path)] else: files = [path] with tarfile.open(mode='w:gz', fileobj=temp_file) as tar_file: for f in files: tar_file.add(f, arcname=os.path.basename(f)) temp_file.seek(0) self.s3_hook.load_file_obj(temp_file, key, bucket, replace=True)
def function[tar_and_s3_upload, parameter[self, path, key, bucket]]: constant[ Tar the local file or directory and upload to s3 :param path: local file or directory :type path: str :param key: s3 key :type key: str :param bucket: s3 bucket :type bucket: str :return: None ] with call[name[tempfile].TemporaryFile, parameter[]] begin[:] if call[name[os].path.isdir, parameter[name[path]]] begin[:] variable[files] assign[=] <ast.ListComp object at 0x7da1b052bf40> with call[name[tarfile].open, parameter[]] begin[:] for taget[name[f]] in starred[name[files]] begin[:] call[name[tar_file].add, parameter[name[f]]] call[name[temp_file].seek, parameter[constant[0]]] call[name[self].s3_hook.load_file_obj, parameter[name[temp_file], name[key], name[bucket]]]
keyword[def] identifier[tar_and_s3_upload] ( identifier[self] , identifier[path] , identifier[key] , identifier[bucket] ): literal[string] keyword[with] identifier[tempfile] . identifier[TemporaryFile] () keyword[as] identifier[temp_file] : keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ): identifier[files] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[name] ) keyword[for] identifier[name] keyword[in] identifier[os] . identifier[listdir] ( identifier[path] )] keyword[else] : identifier[files] =[ identifier[path] ] keyword[with] identifier[tarfile] . identifier[open] ( identifier[mode] = literal[string] , identifier[fileobj] = identifier[temp_file] ) keyword[as] identifier[tar_file] : keyword[for] identifier[f] keyword[in] identifier[files] : identifier[tar_file] . identifier[add] ( identifier[f] , identifier[arcname] = identifier[os] . identifier[path] . identifier[basename] ( identifier[f] )) identifier[temp_file] . identifier[seek] ( literal[int] ) identifier[self] . identifier[s3_hook] . identifier[load_file_obj] ( identifier[temp_file] , identifier[key] , identifier[bucket] , identifier[replace] = keyword[True] )
def tar_and_s3_upload(self, path, key, bucket): """ Tar the local file or directory and upload to s3 :param path: local file or directory :type path: str :param key: s3 key :type key: str :param bucket: s3 bucket :type bucket: str :return: None """ with tempfile.TemporaryFile() as temp_file: if os.path.isdir(path): files = [os.path.join(path, name) for name in os.listdir(path)] # depends on [control=['if'], data=[]] else: files = [path] with tarfile.open(mode='w:gz', fileobj=temp_file) as tar_file: for f in files: tar_file.add(f, arcname=os.path.basename(f)) # depends on [control=['for'], data=['f']] # depends on [control=['with'], data=['tar_file']] temp_file.seek(0) self.s3_hook.load_file_obj(temp_file, key, bucket, replace=True) # depends on [control=['with'], data=['temp_file']]
def get(self, key, default=_NoValue): """Returns the value of Spark runtime configuration property for the given key, assuming it is set. """ self._checkType(key, "key") if default is _NoValue: return self._jconf.get(key) else: if default is not None: self._checkType(default, "default") return self._jconf.get(key, default)
def function[get, parameter[self, key, default]]: constant[Returns the value of Spark runtime configuration property for the given key, assuming it is set. ] call[name[self]._checkType, parameter[name[key], constant[key]]] if compare[name[default] is name[_NoValue]] begin[:] return[call[name[self]._jconf.get, parameter[name[key]]]]
keyword[def] identifier[get] ( identifier[self] , identifier[key] , identifier[default] = identifier[_NoValue] ): literal[string] identifier[self] . identifier[_checkType] ( identifier[key] , literal[string] ) keyword[if] identifier[default] keyword[is] identifier[_NoValue] : keyword[return] identifier[self] . identifier[_jconf] . identifier[get] ( identifier[key] ) keyword[else] : keyword[if] identifier[default] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_checkType] ( identifier[default] , literal[string] ) keyword[return] identifier[self] . identifier[_jconf] . identifier[get] ( identifier[key] , identifier[default] )
def get(self, key, default=_NoValue): """Returns the value of Spark runtime configuration property for the given key, assuming it is set. """ self._checkType(key, 'key') if default is _NoValue: return self._jconf.get(key) # depends on [control=['if'], data=[]] else: if default is not None: self._checkType(default, 'default') # depends on [control=['if'], data=['default']] return self._jconf.get(key, default)
def _remove_from_ptr_size(self, ptr): # type: (path_table_record.PathTableRecord) -> int ''' An internal method to remove a PTR from a VD, removing space from the VD if necessary. Parameters: ptr - The PTR to remove from the VD. Returns: The number of bytes to remove from the VDs (this may be zero). ''' num_bytes_to_remove = 0 for pvd in self.pvds: # The remove_from_ptr_size() returns True if the PVD no longer # needs the extra extents in the PTR that stored this directory. # We always remove 4 additional extents for that. if pvd.remove_from_ptr_size(path_table_record.PathTableRecord.record_length(ptr.len_di)): num_bytes_to_remove += 4 * self.pvd.logical_block_size() return num_bytes_to_remove
def function[_remove_from_ptr_size, parameter[self, ptr]]: constant[ An internal method to remove a PTR from a VD, removing space from the VD if necessary. Parameters: ptr - The PTR to remove from the VD. Returns: The number of bytes to remove from the VDs (this may be zero). ] variable[num_bytes_to_remove] assign[=] constant[0] for taget[name[pvd]] in starred[name[self].pvds] begin[:] if call[name[pvd].remove_from_ptr_size, parameter[call[name[path_table_record].PathTableRecord.record_length, parameter[name[ptr].len_di]]]] begin[:] <ast.AugAssign object at 0x7da1b0d0ee00> return[name[num_bytes_to_remove]]
keyword[def] identifier[_remove_from_ptr_size] ( identifier[self] , identifier[ptr] ): literal[string] identifier[num_bytes_to_remove] = literal[int] keyword[for] identifier[pvd] keyword[in] identifier[self] . identifier[pvds] : keyword[if] identifier[pvd] . identifier[remove_from_ptr_size] ( identifier[path_table_record] . identifier[PathTableRecord] . identifier[record_length] ( identifier[ptr] . identifier[len_di] )): identifier[num_bytes_to_remove] += literal[int] * identifier[self] . identifier[pvd] . identifier[logical_block_size] () keyword[return] identifier[num_bytes_to_remove]
def _remove_from_ptr_size(self, ptr): # type: (path_table_record.PathTableRecord) -> int '\n An internal method to remove a PTR from a VD, removing space from the VD if\n necessary.\n\n Parameters:\n ptr - The PTR to remove from the VD.\n Returns:\n The number of bytes to remove from the VDs (this may be zero).\n ' num_bytes_to_remove = 0 for pvd in self.pvds: # The remove_from_ptr_size() returns True if the PVD no longer # needs the extra extents in the PTR that stored this directory. # We always remove 4 additional extents for that. if pvd.remove_from_ptr_size(path_table_record.PathTableRecord.record_length(ptr.len_di)): num_bytes_to_remove += 4 * self.pvd.logical_block_size() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pvd']] return num_bytes_to_remove
def wait_for_payment_balance( raiden: 'RaidenService', payment_network_id: PaymentNetworkID, token_address: TokenAddress, partner_address: Address, target_address: Address, target_balance: TokenAmount, retry_timeout: float, ) -> None: """Wait until a given channel's balance exceeds the target balance. Note: This does not time out, use gevent.Timeout. """ def get_balance(end_state): if end_state.balance_proof: return end_state.balance_proof.transferred_amount else: return 0 if target_address == raiden.address: balance = lambda channel_state: get_balance(channel_state.partner_state) elif target_address == partner_address: balance = lambda channel_state: get_balance(channel_state.our_state) else: raise ValueError('target_address must be one of the channel participants') channel_state = views.get_channelstate_for( views.state_from_raiden(raiden), payment_network_id, token_address, partner_address, ) while balance(channel_state) < target_balance: log.critical('wait', b=balance(channel_state), t=target_balance) gevent.sleep(retry_timeout) channel_state = views.get_channelstate_for( views.state_from_raiden(raiden), payment_network_id, token_address, partner_address, )
def function[wait_for_payment_balance, parameter[raiden, payment_network_id, token_address, partner_address, target_address, target_balance, retry_timeout]]: constant[Wait until a given channel's balance exceeds the target balance. Note: This does not time out, use gevent.Timeout. ] def function[get_balance, parameter[end_state]]: if name[end_state].balance_proof begin[:] return[name[end_state].balance_proof.transferred_amount] if compare[name[target_address] equal[==] name[raiden].address] begin[:] variable[balance] assign[=] <ast.Lambda object at 0x7da1b1708f40> variable[channel_state] assign[=] call[name[views].get_channelstate_for, parameter[call[name[views].state_from_raiden, parameter[name[raiden]]], name[payment_network_id], name[token_address], name[partner_address]]] while compare[call[name[balance], parameter[name[channel_state]]] less[<] name[target_balance]] begin[:] call[name[log].critical, parameter[constant[wait]]] call[name[gevent].sleep, parameter[name[retry_timeout]]] variable[channel_state] assign[=] call[name[views].get_channelstate_for, parameter[call[name[views].state_from_raiden, parameter[name[raiden]]], name[payment_network_id], name[token_address], name[partner_address]]]
keyword[def] identifier[wait_for_payment_balance] ( identifier[raiden] : literal[string] , identifier[payment_network_id] : identifier[PaymentNetworkID] , identifier[token_address] : identifier[TokenAddress] , identifier[partner_address] : identifier[Address] , identifier[target_address] : identifier[Address] , identifier[target_balance] : identifier[TokenAmount] , identifier[retry_timeout] : identifier[float] , )-> keyword[None] : literal[string] keyword[def] identifier[get_balance] ( identifier[end_state] ): keyword[if] identifier[end_state] . identifier[balance_proof] : keyword[return] identifier[end_state] . identifier[balance_proof] . identifier[transferred_amount] keyword[else] : keyword[return] literal[int] keyword[if] identifier[target_address] == identifier[raiden] . identifier[address] : identifier[balance] = keyword[lambda] identifier[channel_state] : identifier[get_balance] ( identifier[channel_state] . identifier[partner_state] ) keyword[elif] identifier[target_address] == identifier[partner_address] : identifier[balance] = keyword[lambda] identifier[channel_state] : identifier[get_balance] ( identifier[channel_state] . identifier[our_state] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[channel_state] = identifier[views] . identifier[get_channelstate_for] ( identifier[views] . identifier[state_from_raiden] ( identifier[raiden] ), identifier[payment_network_id] , identifier[token_address] , identifier[partner_address] , ) keyword[while] identifier[balance] ( identifier[channel_state] )< identifier[target_balance] : identifier[log] . identifier[critical] ( literal[string] , identifier[b] = identifier[balance] ( identifier[channel_state] ), identifier[t] = identifier[target_balance] ) identifier[gevent] . identifier[sleep] ( identifier[retry_timeout] ) identifier[channel_state] = identifier[views] . identifier[get_channelstate_for] ( identifier[views] . identifier[state_from_raiden] ( identifier[raiden] ), identifier[payment_network_id] , identifier[token_address] , identifier[partner_address] , )
def wait_for_payment_balance(raiden: 'RaidenService', payment_network_id: PaymentNetworkID, token_address: TokenAddress, partner_address: Address, target_address: Address, target_balance: TokenAmount, retry_timeout: float) -> None: """Wait until a given channel's balance exceeds the target balance. Note: This does not time out, use gevent.Timeout. """ def get_balance(end_state): if end_state.balance_proof: return end_state.balance_proof.transferred_amount # depends on [control=['if'], data=[]] else: return 0 if target_address == raiden.address: balance = lambda channel_state: get_balance(channel_state.partner_state) # depends on [control=['if'], data=[]] elif target_address == partner_address: balance = lambda channel_state: get_balance(channel_state.our_state) # depends on [control=['if'], data=[]] else: raise ValueError('target_address must be one of the channel participants') channel_state = views.get_channelstate_for(views.state_from_raiden(raiden), payment_network_id, token_address, partner_address) while balance(channel_state) < target_balance: log.critical('wait', b=balance(channel_state), t=target_balance) gevent.sleep(retry_timeout) channel_state = views.get_channelstate_for(views.state_from_raiden(raiden), payment_network_id, token_address, partner_address) # depends on [control=['while'], data=['target_balance']]
def set_password(self, password, user='', shutit_pexpect_child=None, note=None): """Sets the password for the current user or passed-in user. As a side effect, installs the "password" package. @param user: username to set the password for. Defaults to '' (i.e. current user) @param password: password to set for the user @param shutit_pexpect_child: See send() @param note: See send() """ shutit_global.shutit_global_object.yield_to_draw() shutit_pexpect_child = shutit_pexpect_child or self.get_current_shutit_pexpect_session().pexpect_child shutit_pexpect_session = self.get_shutit_pexpect_session_from_child(shutit_pexpect_child) return shutit_pexpect_session.set_password(password,user=user,note=note)
def function[set_password, parameter[self, password, user, shutit_pexpect_child, note]]: constant[Sets the password for the current user or passed-in user. As a side effect, installs the "password" package. @param user: username to set the password for. Defaults to '' (i.e. current user) @param password: password to set for the user @param shutit_pexpect_child: See send() @param note: See send() ] call[name[shutit_global].shutit_global_object.yield_to_draw, parameter[]] variable[shutit_pexpect_child] assign[=] <ast.BoolOp object at 0x7da18f00fb80> variable[shutit_pexpect_session] assign[=] call[name[self].get_shutit_pexpect_session_from_child, parameter[name[shutit_pexpect_child]]] return[call[name[shutit_pexpect_session].set_password, parameter[name[password]]]]
keyword[def] identifier[set_password] ( identifier[self] , identifier[password] , identifier[user] = literal[string] , identifier[shutit_pexpect_child] = keyword[None] , identifier[note] = keyword[None] ): literal[string] identifier[shutit_global] . identifier[shutit_global_object] . identifier[yield_to_draw] () identifier[shutit_pexpect_child] = identifier[shutit_pexpect_child] keyword[or] identifier[self] . identifier[get_current_shutit_pexpect_session] (). identifier[pexpect_child] identifier[shutit_pexpect_session] = identifier[self] . identifier[get_shutit_pexpect_session_from_child] ( identifier[shutit_pexpect_child] ) keyword[return] identifier[shutit_pexpect_session] . identifier[set_password] ( identifier[password] , identifier[user] = identifier[user] , identifier[note] = identifier[note] )
def set_password(self, password, user='', shutit_pexpect_child=None, note=None): """Sets the password for the current user or passed-in user. As a side effect, installs the "password" package. @param user: username to set the password for. Defaults to '' (i.e. current user) @param password: password to set for the user @param shutit_pexpect_child: See send() @param note: See send() """ shutit_global.shutit_global_object.yield_to_draw() shutit_pexpect_child = shutit_pexpect_child or self.get_current_shutit_pexpect_session().pexpect_child shutit_pexpect_session = self.get_shutit_pexpect_session_from_child(shutit_pexpect_child) return shutit_pexpect_session.set_password(password, user=user, note=note)
def apply_patches(document, patches): """Serially apply all patches to a document.""" for i, patch in enumerate(patches): try: result = apply_patch(document, patch) if patch.op == "test" and result is False: raise JSONPatchError("Test patch {0} failed. Cancelling entire set.".format(i + 1)) except Exception as ex: raise JSONPatchError("An error occurred with patch {0}: {1}".format(i + 1, ex)) from ex
def function[apply_patches, parameter[document, patches]]: constant[Serially apply all patches to a document.] for taget[tuple[[<ast.Name object at 0x7da1b1340460>, <ast.Name object at 0x7da1b1342530>]]] in starred[call[name[enumerate], parameter[name[patches]]]] begin[:] <ast.Try object at 0x7da1b13b9630>
keyword[def] identifier[apply_patches] ( identifier[document] , identifier[patches] ): literal[string] keyword[for] identifier[i] , identifier[patch] keyword[in] identifier[enumerate] ( identifier[patches] ): keyword[try] : identifier[result] = identifier[apply_patch] ( identifier[document] , identifier[patch] ) keyword[if] identifier[patch] . identifier[op] == literal[string] keyword[and] identifier[result] keyword[is] keyword[False] : keyword[raise] identifier[JSONPatchError] ( literal[string] . identifier[format] ( identifier[i] + literal[int] )) keyword[except] identifier[Exception] keyword[as] identifier[ex] : keyword[raise] identifier[JSONPatchError] ( literal[string] . identifier[format] ( identifier[i] + literal[int] , identifier[ex] )) keyword[from] identifier[ex]
def apply_patches(document, patches): """Serially apply all patches to a document.""" for (i, patch) in enumerate(patches): try: result = apply_patch(document, patch) if patch.op == 'test' and result is False: raise JSONPatchError('Test patch {0} failed. Cancelling entire set.'.format(i + 1)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception as ex: raise JSONPatchError('An error occurred with patch {0}: {1}'.format(i + 1, ex)) from ex # depends on [control=['except'], data=['ex']] # depends on [control=['for'], data=[]]
def searchFeaturesInDb( self, startIndex=0, maxResults=None, referenceName=None, start=None, end=None, parentId=None, featureTypes=None, name=None, geneSymbol=None): """ Perform a full features query in database. :param startIndex: int representing first record to return :param maxResults: int representing number of records to return :param referenceName: string representing reference name, ex 'chr1' :param start: int position on reference to start search :param end: int position on reference to end search >= start :param parentId: string restrict search by id of parent node. :param name: match features by name :param geneSymbol: match features by gene symbol :return an array of dictionaries, representing the returned data. """ # TODO: Refactor out common bits of this and the above count query. sql, sql_args = self.featuresQuery( startIndex=startIndex, maxResults=maxResults, referenceName=referenceName, start=start, end=end, parentId=parentId, featureTypes=featureTypes, name=name, geneSymbol=geneSymbol) sql += sqlite_backend.limitsSql(startIndex, maxResults) query = self._dbconn.execute(sql, sql_args) return sqlite_backend.sqliteRowsToDicts(query.fetchall())
def function[searchFeaturesInDb, parameter[self, startIndex, maxResults, referenceName, start, end, parentId, featureTypes, name, geneSymbol]]: constant[ Perform a full features query in database. :param startIndex: int representing first record to return :param maxResults: int representing number of records to return :param referenceName: string representing reference name, ex 'chr1' :param start: int position on reference to start search :param end: int position on reference to end search >= start :param parentId: string restrict search by id of parent node. :param name: match features by name :param geneSymbol: match features by gene symbol :return an array of dictionaries, representing the returned data. ] <ast.Tuple object at 0x7da18bcc89a0> assign[=] call[name[self].featuresQuery, parameter[]] <ast.AugAssign object at 0x7da18bcca230> variable[query] assign[=] call[name[self]._dbconn.execute, parameter[name[sql], name[sql_args]]] return[call[name[sqlite_backend].sqliteRowsToDicts, parameter[call[name[query].fetchall, parameter[]]]]]
keyword[def] identifier[searchFeaturesInDb] ( identifier[self] , identifier[startIndex] = literal[int] , identifier[maxResults] = keyword[None] , identifier[referenceName] = keyword[None] , identifier[start] = keyword[None] , identifier[end] = keyword[None] , identifier[parentId] = keyword[None] , identifier[featureTypes] = keyword[None] , identifier[name] = keyword[None] , identifier[geneSymbol] = keyword[None] ): literal[string] identifier[sql] , identifier[sql_args] = identifier[self] . identifier[featuresQuery] ( identifier[startIndex] = identifier[startIndex] , identifier[maxResults] = identifier[maxResults] , identifier[referenceName] = identifier[referenceName] , identifier[start] = identifier[start] , identifier[end] = identifier[end] , identifier[parentId] = identifier[parentId] , identifier[featureTypes] = identifier[featureTypes] , identifier[name] = identifier[name] , identifier[geneSymbol] = identifier[geneSymbol] ) identifier[sql] += identifier[sqlite_backend] . identifier[limitsSql] ( identifier[startIndex] , identifier[maxResults] ) identifier[query] = identifier[self] . identifier[_dbconn] . identifier[execute] ( identifier[sql] , identifier[sql_args] ) keyword[return] identifier[sqlite_backend] . identifier[sqliteRowsToDicts] ( identifier[query] . identifier[fetchall] ())
def searchFeaturesInDb(self, startIndex=0, maxResults=None, referenceName=None, start=None, end=None, parentId=None, featureTypes=None, name=None, geneSymbol=None): """ Perform a full features query in database. :param startIndex: int representing first record to return :param maxResults: int representing number of records to return :param referenceName: string representing reference name, ex 'chr1' :param start: int position on reference to start search :param end: int position on reference to end search >= start :param parentId: string restrict search by id of parent node. :param name: match features by name :param geneSymbol: match features by gene symbol :return an array of dictionaries, representing the returned data. """ # TODO: Refactor out common bits of this and the above count query. (sql, sql_args) = self.featuresQuery(startIndex=startIndex, maxResults=maxResults, referenceName=referenceName, start=start, end=end, parentId=parentId, featureTypes=featureTypes, name=name, geneSymbol=geneSymbol) sql += sqlite_backend.limitsSql(startIndex, maxResults) query = self._dbconn.execute(sql, sql_args) return sqlite_backend.sqliteRowsToDicts(query.fetchall())
def apply_T4(word): '''An agglutination diphthong that ends in /u, y/ optionally contains a syllable boundary when -C# or -CCV follow, e.g., [lau.ka.us], [va.ka.ut.taa].''' WORD = word.split('.') PARTS = [[] for part in range(len(WORD))] for i, v in enumerate(WORD): # i % 2 != 0 prevents this rule from applying to first, third, etc. # syllables, which receive stress (WSP) if is_consonant(v[-1]) and i % 2 != 0: if i + 1 == len(WORD) or is_consonant(WORD[i + 1][0]): vv = u_y_final_diphthongs(v) if vv: I = vv.start(1) + 1 PARTS[i].append(v[:I] + '.' + v[I:]) # include original form (non-application of rule) PARTS[i].append(v) WORDS = [w for w in product(*PARTS)] for WORD in WORDS: WORD = '.'.join(WORD) RULE = ' T4' if word != WORD else '' yield WORD, RULE
def function[apply_T4, parameter[word]]: constant[An agglutination diphthong that ends in /u, y/ optionally contains a syllable boundary when -C# or -CCV follow, e.g., [lau.ka.us], [va.ka.ut.taa].] variable[WORD] assign[=] call[name[word].split, parameter[constant[.]]] variable[PARTS] assign[=] <ast.ListComp object at 0x7da1b11d3b20> for taget[tuple[[<ast.Name object at 0x7da1b11a6260>, <ast.Name object at 0x7da1b11a4760>]]] in starred[call[name[enumerate], parameter[name[WORD]]]] begin[:] if <ast.BoolOp object at 0x7da1b11a59c0> begin[:] if <ast.BoolOp object at 0x7da1b11a5450> begin[:] variable[vv] assign[=] call[name[u_y_final_diphthongs], parameter[name[v]]] if name[vv] begin[:] variable[I] assign[=] binary_operation[call[name[vv].start, parameter[constant[1]]] + constant[1]] call[call[name[PARTS]][name[i]].append, parameter[binary_operation[binary_operation[call[name[v]][<ast.Slice object at 0x7da1b11a6c20>] + constant[.]] + call[name[v]][<ast.Slice object at 0x7da1b11a5600>]]]] call[call[name[PARTS]][name[i]].append, parameter[name[v]]] variable[WORDS] assign[=] <ast.ListComp object at 0x7da1b11a42e0> for taget[name[WORD]] in starred[name[WORDS]] begin[:] variable[WORD] assign[=] call[constant[.].join, parameter[name[WORD]]] variable[RULE] assign[=] <ast.IfExp object at 0x7da1b11a7e80> <ast.Yield object at 0x7da1b11a71f0>
keyword[def] identifier[apply_T4] ( identifier[word] ): literal[string] identifier[WORD] = identifier[word] . identifier[split] ( literal[string] ) identifier[PARTS] =[[] keyword[for] identifier[part] keyword[in] identifier[range] ( identifier[len] ( identifier[WORD] ))] keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[WORD] ): keyword[if] identifier[is_consonant] ( identifier[v] [- literal[int] ]) keyword[and] identifier[i] % literal[int] != literal[int] : keyword[if] identifier[i] + literal[int] == identifier[len] ( identifier[WORD] ) keyword[or] identifier[is_consonant] ( identifier[WORD] [ identifier[i] + literal[int] ][ literal[int] ]): identifier[vv] = identifier[u_y_final_diphthongs] ( identifier[v] ) keyword[if] identifier[vv] : identifier[I] = identifier[vv] . identifier[start] ( literal[int] )+ literal[int] identifier[PARTS] [ identifier[i] ]. identifier[append] ( identifier[v] [: identifier[I] ]+ literal[string] + identifier[v] [ identifier[I] :]) identifier[PARTS] [ identifier[i] ]. identifier[append] ( identifier[v] ) identifier[WORDS] =[ identifier[w] keyword[for] identifier[w] keyword[in] identifier[product] (* identifier[PARTS] )] keyword[for] identifier[WORD] keyword[in] identifier[WORDS] : identifier[WORD] = literal[string] . identifier[join] ( identifier[WORD] ) identifier[RULE] = literal[string] keyword[if] identifier[word] != identifier[WORD] keyword[else] literal[string] keyword[yield] identifier[WORD] , identifier[RULE]
def apply_T4(word): """An agglutination diphthong that ends in /u, y/ optionally contains a syllable boundary when -C# or -CCV follow, e.g., [lau.ka.us], [va.ka.ut.taa].""" WORD = word.split('.') PARTS = [[] for part in range(len(WORD))] for (i, v) in enumerate(WORD): # i % 2 != 0 prevents this rule from applying to first, third, etc. # syllables, which receive stress (WSP) if is_consonant(v[-1]) and i % 2 != 0: if i + 1 == len(WORD) or is_consonant(WORD[i + 1][0]): vv = u_y_final_diphthongs(v) if vv: I = vv.start(1) + 1 PARTS[i].append(v[:I] + '.' + v[I:]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # include original form (non-application of rule) PARTS[i].append(v) # depends on [control=['for'], data=[]] WORDS = [w for w in product(*PARTS)] for WORD in WORDS: WORD = '.'.join(WORD) RULE = ' T4' if word != WORD else '' yield (WORD, RULE) # depends on [control=['for'], data=['WORD']]
def write_STELLA_model(self,name): """ Write an initial model in a format that may easily be read by the radiation hydrodynamics code STELLA. Parameters ---------- name : string an identifier for the model. There are two output files from this method, which will be <name>.hyd and <name>.abn, which contain the profiles for the hydro and abundance variables, respectively. """ # Hydro variables: zn = np.array(self.get('zone'),np.int64) Mr = self.get('mass')[::-1] dM = 10. ** self.get('logdq')[::-1] * self.header_attr['star_mass'] R = self.get('radius')[::-1] * ast.rsun_cm dR = np.insert( np.diff(R), 0, R[0] ) Rho = 10. ** self.get('logRho')[::-1] PRE = 10. ** self.get('logP')[::-1] T = 10. ** self.get('logT')[::-1] V = self.get('velocity')[::-1] # Abundances: def make_list(element,lowA,highA): l = [] for i in range(lowA,highA+1): l.append(element+str(i)) return l abun_avail = list(self.cols.keys()) def elemental_abund(ilist,abun_avail): X = np.zeros(len(self.get('mass'))) for a in ilist: if a in abun_avail: X += self.get(a)[::-1] return X iH = ['h1','h2','prot'] XH = elemental_abund(iH, abun_avail) XHe = elemental_abund(make_list('he',1,5), abun_avail) XC = elemental_abund(make_list('c',11,15), abun_avail) XN = elemental_abund(make_list('n',12,16), abun_avail) XO = elemental_abund(make_list('o',13,20), abun_avail) XNe = elemental_abund(make_list('ne',17,25), abun_avail) XNa = elemental_abund(make_list('na',20,25), abun_avail) XMg = elemental_abund(make_list('mg',21,28), abun_avail) XAl = elemental_abund(make_list('al',21,30), abun_avail) XSi = elemental_abund(make_list('si',25,34), abun_avail) XS = elemental_abund(make_list('s',28,38), abun_avail) XAr = elemental_abund(make_list('ar',32,46), abun_avail) XCa = elemental_abund(make_list('ca',36,53), abun_avail) XFe = elemental_abund(make_list('fe',50,65), abun_avail) XCo = elemental_abund(make_list('co',52,66), abun_avail) XNi = elemental_abund(make_list('ni',54,71), abun_avail) XNi56 = self.get('ni56') # Write the output files: file_hyd = name+'.hyd' file_abn = name+'.abn' f = open(file_hyd,'w') # write header: f.write(' 0.000E+00\n') f.write('# No.') f.write('Mr'.rjust(28)+ 'dM'.rjust(28)+ 'R'.rjust(28)+ 'dR'.rjust(28)+ 'Rho'.rjust(28)+ 'PRE'.rjust(28)+ 'T'.rjust(28)+ 'V'.rjust(28)+ '\n') # write data: for i in range(len(zn)): f.write( str(zn[i]).rjust(5) + '%.16E'.rjust(11) %Mr[i] + '%.16E'.rjust(11) %dM[i] + '%.16E'.rjust(11) %R[i] + '%.16E'.rjust(11) %dR[i] + '%.16E'.rjust(11) %Rho[i] + '%.16E'.rjust(11) %PRE[i] + '%.16E'.rjust(11) %T[i] + '%.16E'.rjust(11) %V[i] + '\n') f.close() f = open(file_abn,'w') # write header: f.write('# No.') f.write('Mr'.rjust(28)+ 'H'.rjust(28)+ 'He'.rjust(28)+ 'C'.rjust(28)+ 'N'.rjust(28)+ 'O'.rjust(28)+ 'Ne'.rjust(28)+ 'Na'.rjust(28)+ 'Mg'.rjust(28)+ 'Al'.rjust(28)+ 'Si'.rjust(28)+ 'S'.rjust(28)+ 'Ar'.rjust(28)+ 'Ca'.rjust(28)+ 'Fe'.rjust(28)+ 'Co'.rjust(28)+ 'Ni'.rjust(28)+ 'X(56Ni)'.rjust(28)+ '\n') # write data: for i in range(len(zn)): f.write( str(zn[i]).rjust(5) + '%.16E'.rjust(11) %Mr[i] + '%.16E'.rjust(11) %XH[i] + '%.16E'.rjust(11) %XHe[i] + '%.16E'.rjust(11) %XC[i] + '%.16E'.rjust(11) %XN[i] + '%.16E'.rjust(11) %XO[i] + '%.16E'.rjust(11) %XNe[i] + '%.16E'.rjust(11) %XNa[i] + '%.16E'.rjust(11) %XMg[i] + '%.16E'.rjust(11) %XAl[i] + '%.16E'.rjust(11) %XSi[i] + '%.16E'.rjust(11) %XS[i] + '%.16E'.rjust(11) %XAr[i] + '%.16E'.rjust(11) %XCa[i] + '%.16E'.rjust(11) %XFe[i] + '%.16E'.rjust(11) %XCo[i] + '%.16E'.rjust(11) %XNi[i] + '%.16E'.rjust(11) %XNi56[i] + '\n')
def function[write_STELLA_model, parameter[self, name]]: constant[ Write an initial model in a format that may easily be read by the radiation hydrodynamics code STELLA. Parameters ---------- name : string an identifier for the model. There are two output files from this method, which will be <name>.hyd and <name>.abn, which contain the profiles for the hydro and abundance variables, respectively. ] variable[zn] assign[=] call[name[np].array, parameter[call[name[self].get, parameter[constant[zone]]], name[np].int64]] variable[Mr] assign[=] call[call[name[self].get, parameter[constant[mass]]]][<ast.Slice object at 0x7da1b1963af0>] variable[dM] assign[=] binary_operation[binary_operation[constant[10.0] ** call[call[name[self].get, parameter[constant[logdq]]]][<ast.Slice object at 0x7da1b1963880>]] * call[name[self].header_attr][constant[star_mass]]] variable[R] assign[=] binary_operation[call[call[name[self].get, parameter[constant[radius]]]][<ast.Slice object at 0x7da1b19635b0>] * name[ast].rsun_cm] variable[dR] assign[=] call[name[np].insert, parameter[call[name[np].diff, parameter[name[R]]], constant[0], call[name[R]][constant[0]]]] variable[Rho] assign[=] binary_operation[constant[10.0] ** call[call[name[self].get, parameter[constant[logRho]]]][<ast.Slice object at 0x7da1b19630a0>]] variable[PRE] assign[=] binary_operation[constant[10.0] ** call[call[name[self].get, parameter[constant[logP]]]][<ast.Slice object at 0x7da1b1960b20>]] variable[T] assign[=] binary_operation[constant[10.0] ** call[call[name[self].get, parameter[constant[logT]]]][<ast.Slice object at 0x7da1b19608e0>]] variable[V] assign[=] call[call[name[self].get, parameter[constant[velocity]]]][<ast.Slice object at 0x7da1b1960700>] def function[make_list, parameter[element, lowA, highA]]: variable[l] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[name[lowA], binary_operation[name[highA] + constant[1]]]]] begin[:] call[name[l].append, parameter[binary_operation[name[element] + call[name[str], parameter[name[i]]]]]] return[name[l]] variable[abun_avail] assign[=] call[name[list], parameter[call[name[self].cols.keys, parameter[]]]] def function[elemental_abund, parameter[ilist, abun_avail]]: variable[X] assign[=] call[name[np].zeros, parameter[call[name[len], parameter[call[name[self].get, parameter[constant[mass]]]]]]] for taget[name[a]] in starred[name[ilist]] begin[:] if compare[name[a] in name[abun_avail]] begin[:] <ast.AugAssign object at 0x7da1b193bb20> return[name[X]] variable[iH] assign[=] list[[<ast.Constant object at 0x7da1b193b820>, <ast.Constant object at 0x7da1b193b7f0>, <ast.Constant object at 0x7da1b193b7c0>]] variable[XH] assign[=] call[name[elemental_abund], parameter[name[iH], name[abun_avail]]] variable[XHe] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[he], constant[1], constant[5]]], name[abun_avail]]] variable[XC] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[c], constant[11], constant[15]]], name[abun_avail]]] variable[XN] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[n], constant[12], constant[16]]], name[abun_avail]]] variable[XO] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[o], constant[13], constant[20]]], name[abun_avail]]] variable[XNe] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[ne], constant[17], constant[25]]], name[abun_avail]]] variable[XNa] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[na], constant[20], constant[25]]], name[abun_avail]]] variable[XMg] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[mg], constant[21], constant[28]]], name[abun_avail]]] variable[XAl] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[al], constant[21], constant[30]]], name[abun_avail]]] variable[XSi] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[si], constant[25], constant[34]]], name[abun_avail]]] variable[XS] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[s], constant[28], constant[38]]], name[abun_avail]]] variable[XAr] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[ar], constant[32], constant[46]]], name[abun_avail]]] variable[XCa] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[ca], constant[36], constant[53]]], name[abun_avail]]] variable[XFe] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[fe], constant[50], constant[65]]], name[abun_avail]]] variable[XCo] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[co], constant[52], constant[66]]], name[abun_avail]]] variable[XNi] assign[=] call[name[elemental_abund], parameter[call[name[make_list], parameter[constant[ni], constant[54], constant[71]]], name[abun_avail]]] variable[XNi56] assign[=] call[name[self].get, parameter[constant[ni56]]] variable[file_hyd] assign[=] binary_operation[name[name] + constant[.hyd]] variable[file_abn] assign[=] binary_operation[name[name] + constant[.abn]] variable[f] assign[=] call[name[open], parameter[name[file_hyd], constant[w]]] call[name[f].write, parameter[constant[ 0.000E+00 ]]] call[name[f].write, parameter[constant[# No.]]] call[name[f].write, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[constant[Mr].rjust, parameter[constant[28]]] + call[constant[dM].rjust, parameter[constant[28]]]] + call[constant[R].rjust, parameter[constant[28]]]] + call[constant[dR].rjust, parameter[constant[28]]]] + call[constant[Rho].rjust, parameter[constant[28]]]] + call[constant[PRE].rjust, parameter[constant[28]]]] + call[constant[T].rjust, parameter[constant[28]]]] + call[constant[V].rjust, parameter[constant[28]]]] + constant[ ]]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[zn]]]]]] begin[:] call[name[f].write, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[call[name[str], parameter[call[name[zn]][name[i]]]].rjust, parameter[constant[5]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[Mr]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[dM]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[R]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[dR]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[Rho]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[PRE]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[T]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[V]][name[i]]]] + constant[ ]]]] call[name[f].close, parameter[]] variable[f] assign[=] call[name[open], parameter[name[file_abn], constant[w]]] call[name[f].write, parameter[constant[# No.]]] call[name[f].write, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[constant[Mr].rjust, parameter[constant[28]]] + call[constant[H].rjust, parameter[constant[28]]]] + call[constant[He].rjust, parameter[constant[28]]]] + call[constant[C].rjust, parameter[constant[28]]]] + call[constant[N].rjust, parameter[constant[28]]]] + call[constant[O].rjust, parameter[constant[28]]]] + call[constant[Ne].rjust, parameter[constant[28]]]] + call[constant[Na].rjust, parameter[constant[28]]]] + call[constant[Mg].rjust, parameter[constant[28]]]] + call[constant[Al].rjust, parameter[constant[28]]]] + call[constant[Si].rjust, parameter[constant[28]]]] + call[constant[S].rjust, parameter[constant[28]]]] + call[constant[Ar].rjust, parameter[constant[28]]]] + call[constant[Ca].rjust, parameter[constant[28]]]] + call[constant[Fe].rjust, parameter[constant[28]]]] + call[constant[Co].rjust, parameter[constant[28]]]] + call[constant[Ni].rjust, parameter[constant[28]]]] + call[constant[X(56Ni)].rjust, parameter[constant[28]]]] + constant[ ]]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[zn]]]]]] begin[:] call[name[f].write, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[call[name[str], parameter[call[name[zn]][name[i]]]].rjust, parameter[constant[5]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[Mr]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XH]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XHe]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XC]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XN]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XO]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XNe]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XNa]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XMg]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XAl]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XSi]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XS]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XAr]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XCa]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XFe]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XCo]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XNi]][name[i]]]] + binary_operation[call[constant[%.16E].rjust, parameter[constant[11]]] <ast.Mod object at 0x7da2590d6920> call[name[XNi56]][name[i]]]] + constant[ ]]]]
keyword[def] identifier[write_STELLA_model] ( identifier[self] , identifier[name] ): literal[string] identifier[zn] = identifier[np] . identifier[array] ( identifier[self] . identifier[get] ( literal[string] ), identifier[np] . identifier[int64] ) identifier[Mr] = identifier[self] . identifier[get] ( literal[string] )[::- literal[int] ] identifier[dM] = literal[int] ** identifier[self] . identifier[get] ( literal[string] )[::- literal[int] ]* identifier[self] . identifier[header_attr] [ literal[string] ] identifier[R] = identifier[self] . identifier[get] ( literal[string] )[::- literal[int] ]* identifier[ast] . identifier[rsun_cm] identifier[dR] = identifier[np] . identifier[insert] ( identifier[np] . identifier[diff] ( identifier[R] ), literal[int] , identifier[R] [ literal[int] ]) identifier[Rho] = literal[int] ** identifier[self] . identifier[get] ( literal[string] )[::- literal[int] ] identifier[PRE] = literal[int] ** identifier[self] . identifier[get] ( literal[string] )[::- literal[int] ] identifier[T] = literal[int] ** identifier[self] . identifier[get] ( literal[string] )[::- literal[int] ] identifier[V] = identifier[self] . identifier[get] ( literal[string] )[::- literal[int] ] keyword[def] identifier[make_list] ( identifier[element] , identifier[lowA] , identifier[highA] ): identifier[l] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[lowA] , identifier[highA] + literal[int] ): identifier[l] . identifier[append] ( identifier[element] + identifier[str] ( identifier[i] )) keyword[return] identifier[l] identifier[abun_avail] = identifier[list] ( identifier[self] . identifier[cols] . identifier[keys] ()) keyword[def] identifier[elemental_abund] ( identifier[ilist] , identifier[abun_avail] ): identifier[X] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[self] . identifier[get] ( literal[string] ))) keyword[for] identifier[a] keyword[in] identifier[ilist] : keyword[if] identifier[a] keyword[in] identifier[abun_avail] : identifier[X] += identifier[self] . identifier[get] ( identifier[a] )[::- literal[int] ] keyword[return] identifier[X] identifier[iH] =[ literal[string] , literal[string] , literal[string] ] identifier[XH] = identifier[elemental_abund] ( identifier[iH] , identifier[abun_avail] ) identifier[XHe] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XC] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XN] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XO] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XNe] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XNa] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XMg] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XAl] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XSi] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XS] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XAr] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XCa] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XFe] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XCo] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XNi] = identifier[elemental_abund] ( identifier[make_list] ( literal[string] , literal[int] , literal[int] ), identifier[abun_avail] ) identifier[XNi56] = identifier[self] . identifier[get] ( literal[string] ) identifier[file_hyd] = identifier[name] + literal[string] identifier[file_abn] = identifier[name] + literal[string] identifier[f] = identifier[open] ( identifier[file_hyd] , literal[string] ) identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[zn] )): identifier[f] . identifier[write] ( identifier[str] ( identifier[zn] [ identifier[i] ]). identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )% identifier[Mr] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[dM] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[R] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[dR] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[Rho] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[PRE] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[T] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[V] [ identifier[i] ]+ literal[string] ) identifier[f] . identifier[close] () identifier[f] = identifier[open] ( identifier[file_abn] , literal[string] ) identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )+ literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[zn] )): identifier[f] . identifier[write] ( identifier[str] ( identifier[zn] [ identifier[i] ]). identifier[rjust] ( literal[int] )+ literal[string] . identifier[rjust] ( literal[int] )% identifier[Mr] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XH] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XHe] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XC] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XN] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XO] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XNe] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XNa] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XMg] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XAl] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XSi] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XS] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XAr] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XCa] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XFe] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XCo] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XNi] [ identifier[i] ]+ literal[string] . identifier[rjust] ( literal[int] )% identifier[XNi56] [ identifier[i] ]+ literal[string] )
def write_STELLA_model(self, name): """ Write an initial model in a format that may easily be read by the radiation hydrodynamics code STELLA. Parameters ---------- name : string an identifier for the model. There are two output files from this method, which will be <name>.hyd and <name>.abn, which contain the profiles for the hydro and abundance variables, respectively. """ # Hydro variables: zn = np.array(self.get('zone'), np.int64) Mr = self.get('mass')[::-1] dM = 10.0 ** self.get('logdq')[::-1] * self.header_attr['star_mass'] R = self.get('radius')[::-1] * ast.rsun_cm dR = np.insert(np.diff(R), 0, R[0]) Rho = 10.0 ** self.get('logRho')[::-1] PRE = 10.0 ** self.get('logP')[::-1] T = 10.0 ** self.get('logT')[::-1] V = self.get('velocity')[::-1] # Abundances: def make_list(element, lowA, highA): l = [] for i in range(lowA, highA + 1): l.append(element + str(i)) # depends on [control=['for'], data=['i']] return l abun_avail = list(self.cols.keys()) def elemental_abund(ilist, abun_avail): X = np.zeros(len(self.get('mass'))) for a in ilist: if a in abun_avail: X += self.get(a)[::-1] # depends on [control=['if'], data=['a']] # depends on [control=['for'], data=['a']] return X iH = ['h1', 'h2', 'prot'] XH = elemental_abund(iH, abun_avail) XHe = elemental_abund(make_list('he', 1, 5), abun_avail) XC = elemental_abund(make_list('c', 11, 15), abun_avail) XN = elemental_abund(make_list('n', 12, 16), abun_avail) XO = elemental_abund(make_list('o', 13, 20), abun_avail) XNe = elemental_abund(make_list('ne', 17, 25), abun_avail) XNa = elemental_abund(make_list('na', 20, 25), abun_avail) XMg = elemental_abund(make_list('mg', 21, 28), abun_avail) XAl = elemental_abund(make_list('al', 21, 30), abun_avail) XSi = elemental_abund(make_list('si', 25, 34), abun_avail) XS = elemental_abund(make_list('s', 28, 38), abun_avail) XAr = elemental_abund(make_list('ar', 32, 46), abun_avail) XCa = elemental_abund(make_list('ca', 36, 53), abun_avail) XFe = elemental_abund(make_list('fe', 50, 65), abun_avail) XCo = elemental_abund(make_list('co', 52, 66), abun_avail) XNi = elemental_abund(make_list('ni', 54, 71), abun_avail) XNi56 = self.get('ni56') # Write the output files: file_hyd = name + '.hyd' file_abn = name + '.abn' f = open(file_hyd, 'w') # write header: f.write(' 0.000E+00\n') f.write('# No.') f.write('Mr'.rjust(28) + 'dM'.rjust(28) + 'R'.rjust(28) + 'dR'.rjust(28) + 'Rho'.rjust(28) + 'PRE'.rjust(28) + 'T'.rjust(28) + 'V'.rjust(28) + '\n') # write data: for i in range(len(zn)): f.write(str(zn[i]).rjust(5) + '%.16E'.rjust(11) % Mr[i] + '%.16E'.rjust(11) % dM[i] + '%.16E'.rjust(11) % R[i] + '%.16E'.rjust(11) % dR[i] + '%.16E'.rjust(11) % Rho[i] + '%.16E'.rjust(11) % PRE[i] + '%.16E'.rjust(11) % T[i] + '%.16E'.rjust(11) % V[i] + '\n') # depends on [control=['for'], data=['i']] f.close() f = open(file_abn, 'w') # write header: f.write('# No.') f.write('Mr'.rjust(28) + 'H'.rjust(28) + 'He'.rjust(28) + 'C'.rjust(28) + 'N'.rjust(28) + 'O'.rjust(28) + 'Ne'.rjust(28) + 'Na'.rjust(28) + 'Mg'.rjust(28) + 'Al'.rjust(28) + 'Si'.rjust(28) + 'S'.rjust(28) + 'Ar'.rjust(28) + 'Ca'.rjust(28) + 'Fe'.rjust(28) + 'Co'.rjust(28) + 'Ni'.rjust(28) + 'X(56Ni)'.rjust(28) + '\n') # write data: for i in range(len(zn)): f.write(str(zn[i]).rjust(5) + '%.16E'.rjust(11) % Mr[i] + '%.16E'.rjust(11) % XH[i] + '%.16E'.rjust(11) % XHe[i] + '%.16E'.rjust(11) % XC[i] + '%.16E'.rjust(11) % XN[i] + '%.16E'.rjust(11) % XO[i] + '%.16E'.rjust(11) % XNe[i] + '%.16E'.rjust(11) % XNa[i] + '%.16E'.rjust(11) % XMg[i] + '%.16E'.rjust(11) % XAl[i] + '%.16E'.rjust(11) % XSi[i] + '%.16E'.rjust(11) % XS[i] + '%.16E'.rjust(11) % XAr[i] + '%.16E'.rjust(11) % XCa[i] + '%.16E'.rjust(11) % XFe[i] + '%.16E'.rjust(11) % XCo[i] + '%.16E'.rjust(11) % XNi[i] + '%.16E'.rjust(11) % XNi56[i] + '\n') # depends on [control=['for'], data=['i']]
def count_star(self) -> int: """ Implements the ``COUNT(*)`` specialization. """ count_query = (self.statement.with_only_columns([func.count()]) .order_by(None)) return self.session.execute(count_query).scalar()
def function[count_star, parameter[self]]: constant[ Implements the ``COUNT(*)`` specialization. ] variable[count_query] assign[=] call[call[name[self].statement.with_only_columns, parameter[list[[<ast.Call object at 0x7da1b184add0>]]]].order_by, parameter[constant[None]]] return[call[call[name[self].session.execute, parameter[name[count_query]]].scalar, parameter[]]]
keyword[def] identifier[count_star] ( identifier[self] )-> identifier[int] : literal[string] identifier[count_query] =( identifier[self] . identifier[statement] . identifier[with_only_columns] ([ identifier[func] . identifier[count] ()]) . identifier[order_by] ( keyword[None] )) keyword[return] identifier[self] . identifier[session] . identifier[execute] ( identifier[count_query] ). identifier[scalar] ()
def count_star(self) -> int: """ Implements the ``COUNT(*)`` specialization. """ count_query = self.statement.with_only_columns([func.count()]).order_by(None) return self.session.execute(count_query).scalar()
async def error(self, status=500, allowredirect = True, close = True, showerror = None, headers = []): """ Show default error response """ if showerror is None: showerror = self.showerrorinfo if self._sendHeaders: if showerror: typ, exc, tb = sys.exc_info() if exc: await self.write('<span style="white-space:pre-wrap">\n', buffering = False) await self.writelines((self.nl2br(self.escape(v)) for v in traceback.format_exception(typ, exc, tb)), buffering = False) await self.write('</span>\n', close, False) elif allowredirect and status in self.protocol.errorrewrite: await self.rewrite(self.protocol.errorrewrite[status], b'GET') elif allowredirect and status in self.protocol.errorredirect: await self.redirect(self.protocol.errorredirect[status]) else: self.start_response(status, headers) typ, exc, tb = sys.exc_info() if showerror and exc: await self.write('<span style="white-space:pre-wrap">\n', buffering = False) await self.writelines((self.nl2br(self.escape(v)) for v in traceback.format_exception(typ, exc, tb)), buffering = False) await self.write('</span>\n', close, False) else: await self.write(b'<h1>' + _createstatus(status) + b'</h1>', close, False)
<ast.AsyncFunctionDef object at 0x7da20c7c9420>
keyword[async] keyword[def] identifier[error] ( identifier[self] , identifier[status] = literal[int] , identifier[allowredirect] = keyword[True] , identifier[close] = keyword[True] , identifier[showerror] = keyword[None] , identifier[headers] =[]): literal[string] keyword[if] identifier[showerror] keyword[is] keyword[None] : identifier[showerror] = identifier[self] . identifier[showerrorinfo] keyword[if] identifier[self] . identifier[_sendHeaders] : keyword[if] identifier[showerror] : identifier[typ] , identifier[exc] , identifier[tb] = identifier[sys] . identifier[exc_info] () keyword[if] identifier[exc] : keyword[await] identifier[self] . identifier[write] ( literal[string] , identifier[buffering] = keyword[False] ) keyword[await] identifier[self] . identifier[writelines] (( identifier[self] . identifier[nl2br] ( identifier[self] . identifier[escape] ( identifier[v] )) keyword[for] identifier[v] keyword[in] identifier[traceback] . identifier[format_exception] ( identifier[typ] , identifier[exc] , identifier[tb] )), identifier[buffering] = keyword[False] ) keyword[await] identifier[self] . identifier[write] ( literal[string] , identifier[close] , keyword[False] ) keyword[elif] identifier[allowredirect] keyword[and] identifier[status] keyword[in] identifier[self] . identifier[protocol] . identifier[errorrewrite] : keyword[await] identifier[self] . identifier[rewrite] ( identifier[self] . identifier[protocol] . identifier[errorrewrite] [ identifier[status] ], literal[string] ) keyword[elif] identifier[allowredirect] keyword[and] identifier[status] keyword[in] identifier[self] . identifier[protocol] . identifier[errorredirect] : keyword[await] identifier[self] . identifier[redirect] ( identifier[self] . identifier[protocol] . identifier[errorredirect] [ identifier[status] ]) keyword[else] : identifier[self] . identifier[start_response] ( identifier[status] , identifier[headers] ) identifier[typ] , identifier[exc] , identifier[tb] = identifier[sys] . identifier[exc_info] () keyword[if] identifier[showerror] keyword[and] identifier[exc] : keyword[await] identifier[self] . identifier[write] ( literal[string] , identifier[buffering] = keyword[False] ) keyword[await] identifier[self] . identifier[writelines] (( identifier[self] . identifier[nl2br] ( identifier[self] . identifier[escape] ( identifier[v] )) keyword[for] identifier[v] keyword[in] identifier[traceback] . identifier[format_exception] ( identifier[typ] , identifier[exc] , identifier[tb] )), identifier[buffering] = keyword[False] ) keyword[await] identifier[self] . identifier[write] ( literal[string] , identifier[close] , keyword[False] ) keyword[else] : keyword[await] identifier[self] . identifier[write] ( literal[string] + identifier[_createstatus] ( identifier[status] )+ literal[string] , identifier[close] , keyword[False] )
async def error(self, status=500, allowredirect=True, close=True, showerror=None, headers=[]): """ Show default error response """ if showerror is None: showerror = self.showerrorinfo # depends on [control=['if'], data=['showerror']] if self._sendHeaders: if showerror: (typ, exc, tb) = sys.exc_info() if exc: await self.write('<span style="white-space:pre-wrap">\n', buffering=False) await self.writelines((self.nl2br(self.escape(v)) for v in traceback.format_exception(typ, exc, tb)), buffering=False) await self.write('</span>\n', close, False) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif allowredirect and status in self.protocol.errorrewrite: await self.rewrite(self.protocol.errorrewrite[status], b'GET') # depends on [control=['if'], data=[]] elif allowredirect and status in self.protocol.errorredirect: await self.redirect(self.protocol.errorredirect[status]) # depends on [control=['if'], data=[]] else: self.start_response(status, headers) (typ, exc, tb) = sys.exc_info() if showerror and exc: await self.write('<span style="white-space:pre-wrap">\n', buffering=False) await self.writelines((self.nl2br(self.escape(v)) for v in traceback.format_exception(typ, exc, tb)), buffering=False) await self.write('</span>\n', close, False) # depends on [control=['if'], data=[]] else: await self.write(b'<h1>' + _createstatus(status) + b'</h1>', close, False)
async def _seed2did(self) -> str: """ Derive DID, as per indy-sdk, from seed. :return: DID """ rv = None dids_with_meta = json.loads(await did.list_my_dids_with_meta(self.handle)) # list if dids_with_meta: for did_with_meta in dids_with_meta: # dict if 'metadata' in did_with_meta: try: meta = json.loads(did_with_meta['metadata']) if isinstance(meta, dict) and meta.get('seed', None) == self._seed: rv = did_with_meta.get('did') except json.decoder.JSONDecodeError: continue # it's not one of ours, carry on if not rv: # seed not in metadata, generate did again on temp wallet temp_wallet = await Wallet( self._seed, '{}.seed2did'.format(self.name), None, {'auto-remove': True}).create() rv = temp_wallet.did await temp_wallet.remove() return rv
<ast.AsyncFunctionDef object at 0x7da18fe93df0>
keyword[async] keyword[def] identifier[_seed2did] ( identifier[self] )-> identifier[str] : literal[string] identifier[rv] = keyword[None] identifier[dids_with_meta] = identifier[json] . identifier[loads] ( keyword[await] identifier[did] . identifier[list_my_dids_with_meta] ( identifier[self] . identifier[handle] )) keyword[if] identifier[dids_with_meta] : keyword[for] identifier[did_with_meta] keyword[in] identifier[dids_with_meta] : keyword[if] literal[string] keyword[in] identifier[did_with_meta] : keyword[try] : identifier[meta] = identifier[json] . identifier[loads] ( identifier[did_with_meta] [ literal[string] ]) keyword[if] identifier[isinstance] ( identifier[meta] , identifier[dict] ) keyword[and] identifier[meta] . identifier[get] ( literal[string] , keyword[None] )== identifier[self] . identifier[_seed] : identifier[rv] = identifier[did_with_meta] . identifier[get] ( literal[string] ) keyword[except] identifier[json] . identifier[decoder] . identifier[JSONDecodeError] : keyword[continue] keyword[if] keyword[not] identifier[rv] : identifier[temp_wallet] = keyword[await] identifier[Wallet] ( identifier[self] . identifier[_seed] , literal[string] . identifier[format] ( identifier[self] . identifier[name] ), keyword[None] , { literal[string] : keyword[True] }). identifier[create] () identifier[rv] = identifier[temp_wallet] . identifier[did] keyword[await] identifier[temp_wallet] . identifier[remove] () keyword[return] identifier[rv]
async def _seed2did(self) -> str: """ Derive DID, as per indy-sdk, from seed. :return: DID """ rv = None dids_with_meta = json.loads(await did.list_my_dids_with_meta(self.handle)) # list if dids_with_meta: for did_with_meta in dids_with_meta: # dict if 'metadata' in did_with_meta: try: meta = json.loads(did_with_meta['metadata']) if isinstance(meta, dict) and meta.get('seed', None) == self._seed: rv = did_with_meta.get('did') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except json.decoder.JSONDecodeError: continue # it's not one of ours, carry on # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['did_with_meta']] # depends on [control=['for'], data=['did_with_meta']] # depends on [control=['if'], data=[]] if not rv: # seed not in metadata, generate did again on temp wallet temp_wallet = await Wallet(self._seed, '{}.seed2did'.format(self.name), None, {'auto-remove': True}).create() rv = temp_wallet.did await temp_wallet.remove() # depends on [control=['if'], data=[]] return rv
def var_cmd(): """Print a list of available variables. See :mod:`stagpy.phyvars` where the lists of variables organized by command are defined. """ print_all = not any(val for _, val in conf.var.opt_vals_()) if print_all or conf.var.field: print('field:') _layout(phyvars.FIELD, phyvars.FIELD_EXTRA) print() if print_all or conf.var.sfield: print('surface field:') _layout(phyvars.SFIELD, {}) print() if print_all or conf.var.rprof: print('rprof:') _layout(phyvars.RPROF, phyvars.RPROF_EXTRA) print() if print_all or conf.var.time: print('time:') _layout(phyvars.TIME, phyvars.TIME_EXTRA) print() if print_all or conf.var.plates: print('plates:') _layout(phyvars.PLATES, {})
def function[var_cmd, parameter[]]: constant[Print a list of available variables. See :mod:`stagpy.phyvars` where the lists of variables organized by command are defined. ] variable[print_all] assign[=] <ast.UnaryOp object at 0x7da1b19b1060> if <ast.BoolOp object at 0x7da1b19b22f0> begin[:] call[name[print], parameter[constant[field:]]] call[name[_layout], parameter[name[phyvars].FIELD, name[phyvars].FIELD_EXTRA]] call[name[print], parameter[]] if <ast.BoolOp object at 0x7da1b19430a0> begin[:] call[name[print], parameter[constant[surface field:]]] call[name[_layout], parameter[name[phyvars].SFIELD, dictionary[[], []]]] call[name[print], parameter[]] if <ast.BoolOp object at 0x7da1b19435e0> begin[:] call[name[print], parameter[constant[rprof:]]] call[name[_layout], parameter[name[phyvars].RPROF, name[phyvars].RPROF_EXTRA]] call[name[print], parameter[]] if <ast.BoolOp object at 0x7da1b1942770> begin[:] call[name[print], parameter[constant[time:]]] call[name[_layout], parameter[name[phyvars].TIME, name[phyvars].TIME_EXTRA]] call[name[print], parameter[]] if <ast.BoolOp object at 0x7da1b1943280> begin[:] call[name[print], parameter[constant[plates:]]] call[name[_layout], parameter[name[phyvars].PLATES, dictionary[[], []]]]
keyword[def] identifier[var_cmd] (): literal[string] identifier[print_all] = keyword[not] identifier[any] ( identifier[val] keyword[for] identifier[_] , identifier[val] keyword[in] identifier[conf] . identifier[var] . identifier[opt_vals_] ()) keyword[if] identifier[print_all] keyword[or] identifier[conf] . identifier[var] . identifier[field] : identifier[print] ( literal[string] ) identifier[_layout] ( identifier[phyvars] . identifier[FIELD] , identifier[phyvars] . identifier[FIELD_EXTRA] ) identifier[print] () keyword[if] identifier[print_all] keyword[or] identifier[conf] . identifier[var] . identifier[sfield] : identifier[print] ( literal[string] ) identifier[_layout] ( identifier[phyvars] . identifier[SFIELD] ,{}) identifier[print] () keyword[if] identifier[print_all] keyword[or] identifier[conf] . identifier[var] . identifier[rprof] : identifier[print] ( literal[string] ) identifier[_layout] ( identifier[phyvars] . identifier[RPROF] , identifier[phyvars] . identifier[RPROF_EXTRA] ) identifier[print] () keyword[if] identifier[print_all] keyword[or] identifier[conf] . identifier[var] . identifier[time] : identifier[print] ( literal[string] ) identifier[_layout] ( identifier[phyvars] . identifier[TIME] , identifier[phyvars] . identifier[TIME_EXTRA] ) identifier[print] () keyword[if] identifier[print_all] keyword[or] identifier[conf] . identifier[var] . identifier[plates] : identifier[print] ( literal[string] ) identifier[_layout] ( identifier[phyvars] . identifier[PLATES] ,{})
def var_cmd(): """Print a list of available variables. See :mod:`stagpy.phyvars` where the lists of variables organized by command are defined. """ print_all = not any((val for (_, val) in conf.var.opt_vals_())) if print_all or conf.var.field: print('field:') _layout(phyvars.FIELD, phyvars.FIELD_EXTRA) print() # depends on [control=['if'], data=[]] if print_all or conf.var.sfield: print('surface field:') _layout(phyvars.SFIELD, {}) print() # depends on [control=['if'], data=[]] if print_all or conf.var.rprof: print('rprof:') _layout(phyvars.RPROF, phyvars.RPROF_EXTRA) print() # depends on [control=['if'], data=[]] if print_all or conf.var.time: print('time:') _layout(phyvars.TIME, phyvars.TIME_EXTRA) print() # depends on [control=['if'], data=[]] if print_all or conf.var.plates: print('plates:') _layout(phyvars.PLATES, {}) # depends on [control=['if'], data=[]]
def unblock_all(self): """ Unblock all emitters in this group. """ self.unblock() for em in self._emitters.values(): em.unblock()
def function[unblock_all, parameter[self]]: constant[ Unblock all emitters in this group. ] call[name[self].unblock, parameter[]] for taget[name[em]] in starred[call[name[self]._emitters.values, parameter[]]] begin[:] call[name[em].unblock, parameter[]]
keyword[def] identifier[unblock_all] ( identifier[self] ): literal[string] identifier[self] . identifier[unblock] () keyword[for] identifier[em] keyword[in] identifier[self] . identifier[_emitters] . identifier[values] (): identifier[em] . identifier[unblock] ()
def unblock_all(self): """ Unblock all emitters in this group. """ self.unblock() for em in self._emitters.values(): em.unblock() # depends on [control=['for'], data=['em']]
def mechanism(self): """tuple[int]: The nodes of the mechanism in the partition.""" return tuple(sorted( chain.from_iterable(part.mechanism for part in self)))
def function[mechanism, parameter[self]]: constant[tuple[int]: The nodes of the mechanism in the partition.] return[call[name[tuple], parameter[call[name[sorted], parameter[call[name[chain].from_iterable, parameter[<ast.GeneratorExp object at 0x7da20c76e380>]]]]]]]
keyword[def] identifier[mechanism] ( identifier[self] ): literal[string] keyword[return] identifier[tuple] ( identifier[sorted] ( identifier[chain] . identifier[from_iterable] ( identifier[part] . identifier[mechanism] keyword[for] identifier[part] keyword[in] identifier[self] )))
def mechanism(self): """tuple[int]: The nodes of the mechanism in the partition.""" return tuple(sorted(chain.from_iterable((part.mechanism for part in self))))
def barf(msg, exit=None, f=sys.stderr): '''Exit with a log message (usually a fatal error)''' exit = const('FSQ_FAIL_TMP') if exit is None else exit shout(msg, f) sys.exit(exit)
def function[barf, parameter[msg, exit, f]]: constant[Exit with a log message (usually a fatal error)] variable[exit] assign[=] <ast.IfExp object at 0x7da204621ea0> call[name[shout], parameter[name[msg], name[f]]] call[name[sys].exit, parameter[name[exit]]]
keyword[def] identifier[barf] ( identifier[msg] , identifier[exit] = keyword[None] , identifier[f] = identifier[sys] . identifier[stderr] ): literal[string] identifier[exit] = identifier[const] ( literal[string] ) keyword[if] identifier[exit] keyword[is] keyword[None] keyword[else] identifier[exit] identifier[shout] ( identifier[msg] , identifier[f] ) identifier[sys] . identifier[exit] ( identifier[exit] )
def barf(msg, exit=None, f=sys.stderr): """Exit with a log message (usually a fatal error)""" exit = const('FSQ_FAIL_TMP') if exit is None else exit shout(msg, f) sys.exit(exit)
def get(cls, domain, name): """ Get the requested site entry @param domain: Domain name @type domain: Domain @param name: Site name @type name: str @rtype: Domain """ Site = cls return Session.query(Site).filter(Site.domain == domain).filter(collate(Site.name, 'NOCASE') == name).first()
def function[get, parameter[cls, domain, name]]: constant[ Get the requested site entry @param domain: Domain name @type domain: Domain @param name: Site name @type name: str @rtype: Domain ] variable[Site] assign[=] name[cls] return[call[call[call[call[name[Session].query, parameter[name[Site]]].filter, parameter[compare[name[Site].domain equal[==] name[domain]]]].filter, parameter[compare[call[name[collate], parameter[name[Site].name, constant[NOCASE]]] equal[==] name[name]]]].first, parameter[]]]
keyword[def] identifier[get] ( identifier[cls] , identifier[domain] , identifier[name] ): literal[string] identifier[Site] = identifier[cls] keyword[return] identifier[Session] . identifier[query] ( identifier[Site] ). identifier[filter] ( identifier[Site] . identifier[domain] == identifier[domain] ). identifier[filter] ( identifier[collate] ( identifier[Site] . identifier[name] , literal[string] )== identifier[name] ). identifier[first] ()
def get(cls, domain, name): """ Get the requested site entry @param domain: Domain name @type domain: Domain @param name: Site name @type name: str @rtype: Domain """ Site = cls return Session.query(Site).filter(Site.domain == domain).filter(collate(Site.name, 'NOCASE') == name).first()
def fetch(self): """ Fetch a TaskActionsInstance :returns: Fetched TaskActionsInstance :rtype: twilio.rest.autopilot.v1.assistant.task.task_actions.TaskActionsInstance """ params = values.of({}) payload = self._version.fetch( 'GET', self._uri, params=params, ) return TaskActionsInstance( self._version, payload, assistant_sid=self._solution['assistant_sid'], task_sid=self._solution['task_sid'], )
def function[fetch, parameter[self]]: constant[ Fetch a TaskActionsInstance :returns: Fetched TaskActionsInstance :rtype: twilio.rest.autopilot.v1.assistant.task.task_actions.TaskActionsInstance ] variable[params] assign[=] call[name[values].of, parameter[dictionary[[], []]]] variable[payload] assign[=] call[name[self]._version.fetch, parameter[constant[GET], name[self]._uri]] return[call[name[TaskActionsInstance], parameter[name[self]._version, name[payload]]]]
keyword[def] identifier[fetch] ( identifier[self] ): literal[string] identifier[params] = identifier[values] . identifier[of] ({}) identifier[payload] = identifier[self] . identifier[_version] . identifier[fetch] ( literal[string] , identifier[self] . identifier[_uri] , identifier[params] = identifier[params] , ) keyword[return] identifier[TaskActionsInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[assistant_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[task_sid] = identifier[self] . identifier[_solution] [ literal[string] ], )
def fetch(self): """ Fetch a TaskActionsInstance :returns: Fetched TaskActionsInstance :rtype: twilio.rest.autopilot.v1.assistant.task.task_actions.TaskActionsInstance """ params = values.of({}) payload = self._version.fetch('GET', self._uri, params=params) return TaskActionsInstance(self._version, payload, assistant_sid=self._solution['assistant_sid'], task_sid=self._solution['task_sid'])
def set_widgets(self): """Set widgets on the Hazard Layer From Browser tab.""" self.tvBrowserHazard_selection_changed() # Set icon hazard = self.parent.step_fc_functions1.selected_value( layer_purpose_hazard['key']) icon_path = get_image_path(hazard) self.lblIconIFCWHazardFromBrowser.setPixmap(QPixmap(icon_path))
def function[set_widgets, parameter[self]]: constant[Set widgets on the Hazard Layer From Browser tab.] call[name[self].tvBrowserHazard_selection_changed, parameter[]] variable[hazard] assign[=] call[name[self].parent.step_fc_functions1.selected_value, parameter[call[name[layer_purpose_hazard]][constant[key]]]] variable[icon_path] assign[=] call[name[get_image_path], parameter[name[hazard]]] call[name[self].lblIconIFCWHazardFromBrowser.setPixmap, parameter[call[name[QPixmap], parameter[name[icon_path]]]]]
keyword[def] identifier[set_widgets] ( identifier[self] ): literal[string] identifier[self] . identifier[tvBrowserHazard_selection_changed] () identifier[hazard] = identifier[self] . identifier[parent] . identifier[step_fc_functions1] . identifier[selected_value] ( identifier[layer_purpose_hazard] [ literal[string] ]) identifier[icon_path] = identifier[get_image_path] ( identifier[hazard] ) identifier[self] . identifier[lblIconIFCWHazardFromBrowser] . identifier[setPixmap] ( identifier[QPixmap] ( identifier[icon_path] ))
def set_widgets(self): """Set widgets on the Hazard Layer From Browser tab.""" self.tvBrowserHazard_selection_changed() # Set icon hazard = self.parent.step_fc_functions1.selected_value(layer_purpose_hazard['key']) icon_path = get_image_path(hazard) self.lblIconIFCWHazardFromBrowser.setPixmap(QPixmap(icon_path))
def resolve(self): """ Process the signature and find definition for type. """ # collect types for resolution t2resolv = [] if hasattr(self._sig, 'tret'): t2resolv.append(self._sig.tret) if hasattr(self._sig, 'tparams') and self._sig.tparams is not None: for p in self._sig.tparams: t2resolv.append(p) if self._translate_to is not None: t2resolv.append(self._translate_to.target) if self._variadic_types is not None: for t in self._variadic_types: t2resolv.append(t) for t in t2resolv: for c in t.components: if c not in self.resolution or self.resolution[c] is None: # try to find what is c parent = self.get_parent() if parent is not None: sc = parent.get_by_symbol_name(c) if len(sc) == 1: sc = list(sc.values())[0] # unwrap EvalCtx around Type if isinstance(sc, EvalCtx): sc = sc._sig rtyp = weakref.ref(sc) self.resolution[c] = rtyp continue # unresolved self.resolution[c] = None
def function[resolve, parameter[self]]: constant[ Process the signature and find definition for type. ] variable[t2resolv] assign[=] list[[]] if call[name[hasattr], parameter[name[self]._sig, constant[tret]]] begin[:] call[name[t2resolv].append, parameter[name[self]._sig.tret]] if <ast.BoolOp object at 0x7da1b0137340> begin[:] for taget[name[p]] in starred[name[self]._sig.tparams] begin[:] call[name[t2resolv].append, parameter[name[p]]] if compare[name[self]._translate_to is_not constant[None]] begin[:] call[name[t2resolv].append, parameter[name[self]._translate_to.target]] if compare[name[self]._variadic_types is_not constant[None]] begin[:] for taget[name[t]] in starred[name[self]._variadic_types] begin[:] call[name[t2resolv].append, parameter[name[t]]] for taget[name[t]] in starred[name[t2resolv]] begin[:] for taget[name[c]] in starred[name[t].components] begin[:] if <ast.BoolOp object at 0x7da1b0135060> begin[:] variable[parent] assign[=] call[name[self].get_parent, parameter[]] if compare[name[parent] is_not constant[None]] begin[:] variable[sc] assign[=] call[name[parent].get_by_symbol_name, parameter[name[c]]] if compare[call[name[len], parameter[name[sc]]] equal[==] constant[1]] begin[:] variable[sc] assign[=] call[call[name[list], parameter[call[name[sc].values, parameter[]]]]][constant[0]] if call[name[isinstance], parameter[name[sc], name[EvalCtx]]] begin[:] variable[sc] assign[=] name[sc]._sig variable[rtyp] assign[=] call[name[weakref].ref, parameter[name[sc]]] call[name[self].resolution][name[c]] assign[=] name[rtyp] continue call[name[self].resolution][name[c]] assign[=] constant[None]
keyword[def] identifier[resolve] ( identifier[self] ): literal[string] identifier[t2resolv] =[] keyword[if] identifier[hasattr] ( identifier[self] . identifier[_sig] , literal[string] ): identifier[t2resolv] . identifier[append] ( identifier[self] . identifier[_sig] . identifier[tret] ) keyword[if] identifier[hasattr] ( identifier[self] . identifier[_sig] , literal[string] ) keyword[and] identifier[self] . identifier[_sig] . identifier[tparams] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[p] keyword[in] identifier[self] . identifier[_sig] . identifier[tparams] : identifier[t2resolv] . identifier[append] ( identifier[p] ) keyword[if] identifier[self] . identifier[_translate_to] keyword[is] keyword[not] keyword[None] : identifier[t2resolv] . identifier[append] ( identifier[self] . identifier[_translate_to] . identifier[target] ) keyword[if] identifier[self] . identifier[_variadic_types] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[t] keyword[in] identifier[self] . identifier[_variadic_types] : identifier[t2resolv] . identifier[append] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[t2resolv] : keyword[for] identifier[c] keyword[in] identifier[t] . identifier[components] : keyword[if] identifier[c] keyword[not] keyword[in] identifier[self] . identifier[resolution] keyword[or] identifier[self] . identifier[resolution] [ identifier[c] ] keyword[is] keyword[None] : identifier[parent] = identifier[self] . identifier[get_parent] () keyword[if] identifier[parent] keyword[is] keyword[not] keyword[None] : identifier[sc] = identifier[parent] . identifier[get_by_symbol_name] ( identifier[c] ) keyword[if] identifier[len] ( identifier[sc] )== literal[int] : identifier[sc] = identifier[list] ( identifier[sc] . identifier[values] ())[ literal[int] ] keyword[if] identifier[isinstance] ( identifier[sc] , identifier[EvalCtx] ): identifier[sc] = identifier[sc] . identifier[_sig] identifier[rtyp] = identifier[weakref] . identifier[ref] ( identifier[sc] ) identifier[self] . identifier[resolution] [ identifier[c] ]= identifier[rtyp] keyword[continue] identifier[self] . identifier[resolution] [ identifier[c] ]= keyword[None]
def resolve(self): """ Process the signature and find definition for type. """ # collect types for resolution t2resolv = [] if hasattr(self._sig, 'tret'): t2resolv.append(self._sig.tret) # depends on [control=['if'], data=[]] if hasattr(self._sig, 'tparams') and self._sig.tparams is not None: for p in self._sig.tparams: t2resolv.append(p) # depends on [control=['for'], data=['p']] # depends on [control=['if'], data=[]] if self._translate_to is not None: t2resolv.append(self._translate_to.target) # depends on [control=['if'], data=[]] if self._variadic_types is not None: for t in self._variadic_types: t2resolv.append(t) # depends on [control=['for'], data=['t']] # depends on [control=['if'], data=[]] for t in t2resolv: for c in t.components: if c not in self.resolution or self.resolution[c] is None: # try to find what is c parent = self.get_parent() if parent is not None: sc = parent.get_by_symbol_name(c) if len(sc) == 1: sc = list(sc.values())[0] # unwrap EvalCtx around Type if isinstance(sc, EvalCtx): sc = sc._sig # depends on [control=['if'], data=[]] rtyp = weakref.ref(sc) self.resolution[c] = rtyp continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['parent']] # unresolved self.resolution[c] = None # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] # depends on [control=['for'], data=['t']]
def source(self, value): """ Set the source of the message. :type value: tuple :param value: (ip, port) :raise AttributeError: if value is not a ip and a port. """ if not isinstance(value, tuple) or len(value) != 2: raise AttributeError self._source = value
def function[source, parameter[self, value]]: constant[ Set the source of the message. :type value: tuple :param value: (ip, port) :raise AttributeError: if value is not a ip and a port. ] if <ast.BoolOp object at 0x7da20e9b3340> begin[:] <ast.Raise object at 0x7da20e9b3d00> name[self]._source assign[=] name[value]
keyword[def] identifier[source] ( identifier[self] , identifier[value] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[tuple] ) keyword[or] identifier[len] ( identifier[value] )!= literal[int] : keyword[raise] identifier[AttributeError] identifier[self] . identifier[_source] = identifier[value]
def source(self, value): """ Set the source of the message. :type value: tuple :param value: (ip, port) :raise AttributeError: if value is not a ip and a port. """ if not isinstance(value, tuple) or len(value) != 2: raise AttributeError # depends on [control=['if'], data=[]] self._source = value
def _render_str(self, string): """Returned a unicodified version of the string""" if isinstance(string, StrLabel): string = string._render(string.expr) string = str(string) if len(string) == 0: return '' name, supers, subs = split_super_sub(string) return render_unicode_sub_super( name, subs, supers, sub_first=True, translate_symbols=True, unicode_sub_super=self._settings['unicode_sub_super'])
def function[_render_str, parameter[self, string]]: constant[Returned a unicodified version of the string] if call[name[isinstance], parameter[name[string], name[StrLabel]]] begin[:] variable[string] assign[=] call[name[string]._render, parameter[name[string].expr]] variable[string] assign[=] call[name[str], parameter[name[string]]] if compare[call[name[len], parameter[name[string]]] equal[==] constant[0]] begin[:] return[constant[]] <ast.Tuple object at 0x7da18ede5930> assign[=] call[name[split_super_sub], parameter[name[string]]] return[call[name[render_unicode_sub_super], parameter[name[name], name[subs], name[supers]]]]
keyword[def] identifier[_render_str] ( identifier[self] , identifier[string] ): literal[string] keyword[if] identifier[isinstance] ( identifier[string] , identifier[StrLabel] ): identifier[string] = identifier[string] . identifier[_render] ( identifier[string] . identifier[expr] ) identifier[string] = identifier[str] ( identifier[string] ) keyword[if] identifier[len] ( identifier[string] )== literal[int] : keyword[return] literal[string] identifier[name] , identifier[supers] , identifier[subs] = identifier[split_super_sub] ( identifier[string] ) keyword[return] identifier[render_unicode_sub_super] ( identifier[name] , identifier[subs] , identifier[supers] , identifier[sub_first] = keyword[True] , identifier[translate_symbols] = keyword[True] , identifier[unicode_sub_super] = identifier[self] . identifier[_settings] [ literal[string] ])
def _render_str(self, string): """Returned a unicodified version of the string""" if isinstance(string, StrLabel): string = string._render(string.expr) # depends on [control=['if'], data=[]] string = str(string) if len(string) == 0: return '' # depends on [control=['if'], data=[]] (name, supers, subs) = split_super_sub(string) return render_unicode_sub_super(name, subs, supers, sub_first=True, translate_symbols=True, unicode_sub_super=self._settings['unicode_sub_super'])
def _add_orbfit_eph_to_database( self, orbfitMatches, expsoureObjects): """* add orbfit eph to database* **Key Arguments:** - ``orbfitMatches`` -- all of the ephemerides generated by Orbfit that match against the exact ATLAS exposure footprint - ``expsoureObjects`` -- the dictionary of original exposure objects **Return:** - None """ self.log.info('starting the ``_add_orbfit_eph_to_database`` method') dbSettings = self.settings["database settings"]["atlasMovers"] insert_list_of_dictionaries_into_database_tables( dbConn=self.atlasMoversDBConn, log=self.log, dictList=orbfitMatches, dbTableName="orbfit_positions", uniqueKeyList=["expname", "object_name"], dateModified=True, batchSize=10000, replace=True, dbSettings=dbSettings ) exposures = expsoureObjects.keys() exposures = '","'.join(exposures) sqlQuery = """update atlas_exposures set orbfit_positions = 1 where expname in ("%(exposures)s")""" % locals( ) writequery( log=self.log, sqlQuery=sqlQuery, dbConn=self.atlasMoversDBConn, ) sqlQuery = """update atlas_exposures set orbfit_positions = 2, dophot_match = 2 WHERE orbfit_positions = 1 AND (dophot_match = 0) and local_data = 1 and expname not in (select distinct expname from orbfit_positions);""" % locals( ) writequery( log=self.log, sqlQuery=sqlQuery, dbConn=self.atlasMoversDBConn, ) self.log.info('completed the ``_add_orbfit_eph_to_database`` method') return None
def function[_add_orbfit_eph_to_database, parameter[self, orbfitMatches, expsoureObjects]]: constant[* add orbfit eph to database* **Key Arguments:** - ``orbfitMatches`` -- all of the ephemerides generated by Orbfit that match against the exact ATLAS exposure footprint - ``expsoureObjects`` -- the dictionary of original exposure objects **Return:** - None ] call[name[self].log.info, parameter[constant[starting the ``_add_orbfit_eph_to_database`` method]]] variable[dbSettings] assign[=] call[call[name[self].settings][constant[database settings]]][constant[atlasMovers]] call[name[insert_list_of_dictionaries_into_database_tables], parameter[]] variable[exposures] assign[=] call[name[expsoureObjects].keys, parameter[]] variable[exposures] assign[=] call[constant[","].join, parameter[name[exposures]]] variable[sqlQuery] assign[=] binary_operation[constant[update atlas_exposures set orbfit_positions = 1 where expname in ("%(exposures)s")] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]] call[name[writequery], parameter[]] variable[sqlQuery] assign[=] binary_operation[constant[update atlas_exposures set orbfit_positions = 2, dophot_match = 2 WHERE orbfit_positions = 1 AND (dophot_match = 0) and local_data = 1 and expname not in (select distinct expname from orbfit_positions);] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]] call[name[writequery], parameter[]] call[name[self].log.info, parameter[constant[completed the ``_add_orbfit_eph_to_database`` method]]] return[constant[None]]
keyword[def] identifier[_add_orbfit_eph_to_database] ( identifier[self] , identifier[orbfitMatches] , identifier[expsoureObjects] ): literal[string] identifier[self] . identifier[log] . identifier[info] ( literal[string] ) identifier[dbSettings] = identifier[self] . identifier[settings] [ literal[string] ][ literal[string] ] identifier[insert_list_of_dictionaries_into_database_tables] ( identifier[dbConn] = identifier[self] . identifier[atlasMoversDBConn] , identifier[log] = identifier[self] . identifier[log] , identifier[dictList] = identifier[orbfitMatches] , identifier[dbTableName] = literal[string] , identifier[uniqueKeyList] =[ literal[string] , literal[string] ], identifier[dateModified] = keyword[True] , identifier[batchSize] = literal[int] , identifier[replace] = keyword[True] , identifier[dbSettings] = identifier[dbSettings] ) identifier[exposures] = identifier[expsoureObjects] . identifier[keys] () identifier[exposures] = literal[string] . identifier[join] ( identifier[exposures] ) identifier[sqlQuery] = literal[string] % identifier[locals] ( ) identifier[writequery] ( identifier[log] = identifier[self] . identifier[log] , identifier[sqlQuery] = identifier[sqlQuery] , identifier[dbConn] = identifier[self] . identifier[atlasMoversDBConn] , ) identifier[sqlQuery] = literal[string] % identifier[locals] ( ) identifier[writequery] ( identifier[log] = identifier[self] . identifier[log] , identifier[sqlQuery] = identifier[sqlQuery] , identifier[dbConn] = identifier[self] . identifier[atlasMoversDBConn] , ) identifier[self] . identifier[log] . identifier[info] ( literal[string] ) keyword[return] keyword[None]
def _add_orbfit_eph_to_database(self, orbfitMatches, expsoureObjects): """* add orbfit eph to database* **Key Arguments:** - ``orbfitMatches`` -- all of the ephemerides generated by Orbfit that match against the exact ATLAS exposure footprint - ``expsoureObjects`` -- the dictionary of original exposure objects **Return:** - None """ self.log.info('starting the ``_add_orbfit_eph_to_database`` method') dbSettings = self.settings['database settings']['atlasMovers'] insert_list_of_dictionaries_into_database_tables(dbConn=self.atlasMoversDBConn, log=self.log, dictList=orbfitMatches, dbTableName='orbfit_positions', uniqueKeyList=['expname', 'object_name'], dateModified=True, batchSize=10000, replace=True, dbSettings=dbSettings) exposures = expsoureObjects.keys() exposures = '","'.join(exposures) sqlQuery = 'update atlas_exposures set orbfit_positions = 1 where expname in ("%(exposures)s")' % locals() writequery(log=self.log, sqlQuery=sqlQuery, dbConn=self.atlasMoversDBConn) sqlQuery = 'update\n atlas_exposures\n set orbfit_positions = 2, dophot_match = 2\n WHERE\n orbfit_positions = 1\n AND (dophot_match = 0) and local_data = 1 and expname not in (select distinct expname from orbfit_positions);' % locals() writequery(log=self.log, sqlQuery=sqlQuery, dbConn=self.atlasMoversDBConn) self.log.info('completed the ``_add_orbfit_eph_to_database`` method') return None
def _deploy_iapp(self, iapp_name, actions, deploying_device): '''Deploy iapp to add trusted device :param iapp_name: str -- name of iapp :param actions: dict -- actions definition of iapp sections :param deploying_device: ManagementRoot object -- device where the iapp will be created ''' tmpl = deploying_device.tm.sys.application.templates.template serv = deploying_device.tm.sys.application.services.service tmpl.create(name=iapp_name, partition=self.partition, actions=actions) pollster(deploying_device.tm.sys.application.templates.template.load)( name=iapp_name, partition=self.partition ) serv.create( name=iapp_name, partition=self.partition, template='/%s/%s' % (self.partition, iapp_name) )
def function[_deploy_iapp, parameter[self, iapp_name, actions, deploying_device]]: constant[Deploy iapp to add trusted device :param iapp_name: str -- name of iapp :param actions: dict -- actions definition of iapp sections :param deploying_device: ManagementRoot object -- device where the iapp will be created ] variable[tmpl] assign[=] name[deploying_device].tm.sys.application.templates.template variable[serv] assign[=] name[deploying_device].tm.sys.application.services.service call[name[tmpl].create, parameter[]] call[call[name[pollster], parameter[name[deploying_device].tm.sys.application.templates.template.load]], parameter[]] call[name[serv].create, parameter[]]
keyword[def] identifier[_deploy_iapp] ( identifier[self] , identifier[iapp_name] , identifier[actions] , identifier[deploying_device] ): literal[string] identifier[tmpl] = identifier[deploying_device] . identifier[tm] . identifier[sys] . identifier[application] . identifier[templates] . identifier[template] identifier[serv] = identifier[deploying_device] . identifier[tm] . identifier[sys] . identifier[application] . identifier[services] . identifier[service] identifier[tmpl] . identifier[create] ( identifier[name] = identifier[iapp_name] , identifier[partition] = identifier[self] . identifier[partition] , identifier[actions] = identifier[actions] ) identifier[pollster] ( identifier[deploying_device] . identifier[tm] . identifier[sys] . identifier[application] . identifier[templates] . identifier[template] . identifier[load] )( identifier[name] = identifier[iapp_name] , identifier[partition] = identifier[self] . identifier[partition] ) identifier[serv] . identifier[create] ( identifier[name] = identifier[iapp_name] , identifier[partition] = identifier[self] . identifier[partition] , identifier[template] = literal[string] %( identifier[self] . identifier[partition] , identifier[iapp_name] ) )
def _deploy_iapp(self, iapp_name, actions, deploying_device): """Deploy iapp to add trusted device :param iapp_name: str -- name of iapp :param actions: dict -- actions definition of iapp sections :param deploying_device: ManagementRoot object -- device where the iapp will be created """ tmpl = deploying_device.tm.sys.application.templates.template serv = deploying_device.tm.sys.application.services.service tmpl.create(name=iapp_name, partition=self.partition, actions=actions) pollster(deploying_device.tm.sys.application.templates.template.load)(name=iapp_name, partition=self.partition) serv.create(name=iapp_name, partition=self.partition, template='/%s/%s' % (self.partition, iapp_name))
def SingleModeCombine(pupils,modeDiameter=None): """ Return the instantaneous coherent fluxes and photometric fluxes for a multiway single-mode fibre combiner """ if modeDiameter is None: modeDiameter=0.9*pupils.shape[-1] amplitudes=FibreCouple(pupils,modeDiameter) cc=np.conj(amplitudes) fluxes=(amplitudes*cc).real coherentFluxes=[amplitudes[i]*cc[j] for i in range(1,len(amplitudes)) for j in range(i)] return fluxes,coherentFluxes
def function[SingleModeCombine, parameter[pupils, modeDiameter]]: constant[ Return the instantaneous coherent fluxes and photometric fluxes for a multiway single-mode fibre combiner ] if compare[name[modeDiameter] is constant[None]] begin[:] variable[modeDiameter] assign[=] binary_operation[constant[0.9] * call[name[pupils].shape][<ast.UnaryOp object at 0x7da18f813430>]] variable[amplitudes] assign[=] call[name[FibreCouple], parameter[name[pupils], name[modeDiameter]]] variable[cc] assign[=] call[name[np].conj, parameter[name[amplitudes]]] variable[fluxes] assign[=] binary_operation[name[amplitudes] * name[cc]].real variable[coherentFluxes] assign[=] <ast.ListComp object at 0x7da18f810bb0> return[tuple[[<ast.Name object at 0x7da18f811360>, <ast.Name object at 0x7da18f8117e0>]]]
keyword[def] identifier[SingleModeCombine] ( identifier[pupils] , identifier[modeDiameter] = keyword[None] ): literal[string] keyword[if] identifier[modeDiameter] keyword[is] keyword[None] : identifier[modeDiameter] = literal[int] * identifier[pupils] . identifier[shape] [- literal[int] ] identifier[amplitudes] = identifier[FibreCouple] ( identifier[pupils] , identifier[modeDiameter] ) identifier[cc] = identifier[np] . identifier[conj] ( identifier[amplitudes] ) identifier[fluxes] =( identifier[amplitudes] * identifier[cc] ). identifier[real] identifier[coherentFluxes] =[ identifier[amplitudes] [ identifier[i] ]* identifier[cc] [ identifier[j] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[amplitudes] )) keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] )] keyword[return] identifier[fluxes] , identifier[coherentFluxes]
def SingleModeCombine(pupils, modeDiameter=None): """ Return the instantaneous coherent fluxes and photometric fluxes for a multiway single-mode fibre combiner """ if modeDiameter is None: modeDiameter = 0.9 * pupils.shape[-1] # depends on [control=['if'], data=['modeDiameter']] amplitudes = FibreCouple(pupils, modeDiameter) cc = np.conj(amplitudes) fluxes = (amplitudes * cc).real coherentFluxes = [amplitudes[i] * cc[j] for i in range(1, len(amplitudes)) for j in range(i)] return (fluxes, coherentFluxes)
def _find_glob_matches(in_files, metadata): """Group files that match by globs for merging, rather than by explicit pairs. """ reg_files = copy.deepcopy(in_files) glob_files = [] for glob_search in [x for x in metadata.keys() if "*" in x]: cur = [] for fname in in_files: if fnmatch.fnmatch(fname, "*/%s" % glob_search): cur.append(fname) reg_files.remove(fname) assert cur, "Did not find file matches for %s" % glob_search glob_files.append(cur) return reg_files, glob_files
def function[_find_glob_matches, parameter[in_files, metadata]]: constant[Group files that match by globs for merging, rather than by explicit pairs. ] variable[reg_files] assign[=] call[name[copy].deepcopy, parameter[name[in_files]]] variable[glob_files] assign[=] list[[]] for taget[name[glob_search]] in starred[<ast.ListComp object at 0x7da1b1987dc0>] begin[:] variable[cur] assign[=] list[[]] for taget[name[fname]] in starred[name[in_files]] begin[:] if call[name[fnmatch].fnmatch, parameter[name[fname], binary_operation[constant[*/%s] <ast.Mod object at 0x7da2590d6920> name[glob_search]]]] begin[:] call[name[cur].append, parameter[name[fname]]] call[name[reg_files].remove, parameter[name[fname]]] assert[name[cur]] call[name[glob_files].append, parameter[name[cur]]] return[tuple[[<ast.Name object at 0x7da20c6e4bb0>, <ast.Name object at 0x7da20c6e76d0>]]]
keyword[def] identifier[_find_glob_matches] ( identifier[in_files] , identifier[metadata] ): literal[string] identifier[reg_files] = identifier[copy] . identifier[deepcopy] ( identifier[in_files] ) identifier[glob_files] =[] keyword[for] identifier[glob_search] keyword[in] [ identifier[x] keyword[for] identifier[x] keyword[in] identifier[metadata] . identifier[keys] () keyword[if] literal[string] keyword[in] identifier[x] ]: identifier[cur] =[] keyword[for] identifier[fname] keyword[in] identifier[in_files] : keyword[if] identifier[fnmatch] . identifier[fnmatch] ( identifier[fname] , literal[string] % identifier[glob_search] ): identifier[cur] . identifier[append] ( identifier[fname] ) identifier[reg_files] . identifier[remove] ( identifier[fname] ) keyword[assert] identifier[cur] , literal[string] % identifier[glob_search] identifier[glob_files] . identifier[append] ( identifier[cur] ) keyword[return] identifier[reg_files] , identifier[glob_files]
def _find_glob_matches(in_files, metadata): """Group files that match by globs for merging, rather than by explicit pairs. """ reg_files = copy.deepcopy(in_files) glob_files = [] for glob_search in [x for x in metadata.keys() if '*' in x]: cur = [] for fname in in_files: if fnmatch.fnmatch(fname, '*/%s' % glob_search): cur.append(fname) reg_files.remove(fname) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fname']] assert cur, 'Did not find file matches for %s' % glob_search glob_files.append(cur) # depends on [control=['for'], data=['glob_search']] return (reg_files, glob_files)
def parameters_dict(self): """ Get the tool parameters as a simple dictionary :return: The tool parameters """ d = {} for k, v in self.__dict__.items(): if not k.startswith("_"): d[k] = v return d
def function[parameters_dict, parameter[self]]: constant[ Get the tool parameters as a simple dictionary :return: The tool parameters ] variable[d] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b24b6830>, <ast.Name object at 0x7da1b24b44f0>]]] in starred[call[name[self].__dict__.items, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da1b24b73d0> begin[:] call[name[d]][name[k]] assign[=] name[v] return[name[d]]
keyword[def] identifier[parameters_dict] ( identifier[self] ): literal[string] identifier[d] ={} keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[__dict__] . identifier[items] (): keyword[if] keyword[not] identifier[k] . identifier[startswith] ( literal[string] ): identifier[d] [ identifier[k] ]= identifier[v] keyword[return] identifier[d]
def parameters_dict(self): """ Get the tool parameters as a simple dictionary :return: The tool parameters """ d = {} for (k, v) in self.__dict__.items(): if not k.startswith('_'): d[k] = v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return d
def init_read_line(self): """init_read_line() initializes fields relevant to input matching""" format_list = self._format_list self._re_cvt = self.match_input_fmt(format_list) regexp0_str = "".join([subs[0] for subs in self._re_cvt]) self._regexp_str = regexp0_str self._re = re.compile(regexp0_str) self._match_exps = [ subs[1] for subs in self._re_cvt if subs[1] is not None ] self._divisors = [subs[2] for subs in self._re_cvt if subs[2] is not None] self._in_cvt_fns = [ subs[3] for subs in self._re_cvt if subs[3] is not None ] self._read_line_init = True
def function[init_read_line, parameter[self]]: constant[init_read_line() initializes fields relevant to input matching] variable[format_list] assign[=] name[self]._format_list name[self]._re_cvt assign[=] call[name[self].match_input_fmt, parameter[name[format_list]]] variable[regexp0_str] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da1b04fd1e0>]] name[self]._regexp_str assign[=] name[regexp0_str] name[self]._re assign[=] call[name[re].compile, parameter[name[regexp0_str]]] name[self]._match_exps assign[=] <ast.ListComp object at 0x7da1b04fce80> name[self]._divisors assign[=] <ast.ListComp object at 0x7da1b04fe5f0> name[self]._in_cvt_fns assign[=] <ast.ListComp object at 0x7da1b04fc430> name[self]._read_line_init assign[=] constant[True]
keyword[def] identifier[init_read_line] ( identifier[self] ): literal[string] identifier[format_list] = identifier[self] . identifier[_format_list] identifier[self] . identifier[_re_cvt] = identifier[self] . identifier[match_input_fmt] ( identifier[format_list] ) identifier[regexp0_str] = literal[string] . identifier[join] ([ identifier[subs] [ literal[int] ] keyword[for] identifier[subs] keyword[in] identifier[self] . identifier[_re_cvt] ]) identifier[self] . identifier[_regexp_str] = identifier[regexp0_str] identifier[self] . identifier[_re] = identifier[re] . identifier[compile] ( identifier[regexp0_str] ) identifier[self] . identifier[_match_exps] =[ identifier[subs] [ literal[int] ] keyword[for] identifier[subs] keyword[in] identifier[self] . identifier[_re_cvt] keyword[if] identifier[subs] [ literal[int] ] keyword[is] keyword[not] keyword[None] ] identifier[self] . identifier[_divisors] =[ identifier[subs] [ literal[int] ] keyword[for] identifier[subs] keyword[in] identifier[self] . identifier[_re_cvt] keyword[if] identifier[subs] [ literal[int] ] keyword[is] keyword[not] keyword[None] ] identifier[self] . identifier[_in_cvt_fns] =[ identifier[subs] [ literal[int] ] keyword[for] identifier[subs] keyword[in] identifier[self] . identifier[_re_cvt] keyword[if] identifier[subs] [ literal[int] ] keyword[is] keyword[not] keyword[None] ] identifier[self] . identifier[_read_line_init] = keyword[True]
def init_read_line(self): """init_read_line() initializes fields relevant to input matching""" format_list = self._format_list self._re_cvt = self.match_input_fmt(format_list) regexp0_str = ''.join([subs[0] for subs in self._re_cvt]) self._regexp_str = regexp0_str self._re = re.compile(regexp0_str) self._match_exps = [subs[1] for subs in self._re_cvt if subs[1] is not None] self._divisors = [subs[2] for subs in self._re_cvt if subs[2] is not None] self._in_cvt_fns = [subs[3] for subs in self._re_cvt if subs[3] is not None] self._read_line_init = True
def navdatapush(self): """ Pushes the current :referenceframe: out to clients. :return: """ try: self.fireEvent(referenceframe({ 'data': self.referenceframe, 'ages': self.referenceages }), "navdata") self.intervalcount += 1 if self.intervalcount == self.passiveinterval and len( self.referenceframe) > 0: self.fireEvent(broadcast('users', { 'component': 'hfos.navdata.sensors', 'action': 'update', 'data': { 'data': self.referenceframe, 'ages': self.referenceages } }), "hfosweb") self.intervalcount = 0 # self.log("Reference frame successfully pushed.", # lvl=verbose) except Exception as e: self.log("Could not push referenceframe: ", e, type(e), lvl=critical)
def function[navdatapush, parameter[self]]: constant[ Pushes the current :referenceframe: out to clients. :return: ] <ast.Try object at 0x7da1b0fe8f10>
keyword[def] identifier[navdatapush] ( identifier[self] ): literal[string] keyword[try] : identifier[self] . identifier[fireEvent] ( identifier[referenceframe] ({ literal[string] : identifier[self] . identifier[referenceframe] , literal[string] : identifier[self] . identifier[referenceages] }), literal[string] ) identifier[self] . identifier[intervalcount] += literal[int] keyword[if] identifier[self] . identifier[intervalcount] == identifier[self] . identifier[passiveinterval] keyword[and] identifier[len] ( identifier[self] . identifier[referenceframe] )> literal[int] : identifier[self] . identifier[fireEvent] ( identifier[broadcast] ( literal[string] ,{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] :{ literal[string] : identifier[self] . identifier[referenceframe] , literal[string] : identifier[self] . identifier[referenceages] } }), literal[string] ) identifier[self] . identifier[intervalcount] = literal[int] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[log] ( literal[string] , identifier[e] , identifier[type] ( identifier[e] ), identifier[lvl] = identifier[critical] )
def navdatapush(self): """ Pushes the current :referenceframe: out to clients. :return: """ try: self.fireEvent(referenceframe({'data': self.referenceframe, 'ages': self.referenceages}), 'navdata') self.intervalcount += 1 if self.intervalcount == self.passiveinterval and len(self.referenceframe) > 0: self.fireEvent(broadcast('users', {'component': 'hfos.navdata.sensors', 'action': 'update', 'data': {'data': self.referenceframe, 'ages': self.referenceages}}), 'hfosweb') self.intervalcount = 0 # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] # self.log("Reference frame successfully pushed.", # lvl=verbose) except Exception as e: self.log('Could not push referenceframe: ', e, type(e), lvl=critical) # depends on [control=['except'], data=['e']]
def rlp_encode(item): r""" Recursive Length Prefix Encoding :param item: the object to encode, either a string, bytes, bytearray, int, long, or sequence https://github.com/ethereum/wiki/wiki/RLP >>> rlp_encode('dog') b'\x83dog' >>> rlp_encode([ 'cat', 'dog' ]) b'\xc8\x83cat\x83dog' >>> rlp_encode('') b'\x80' >>> rlp_encode([]) b'\xc0' >>> rlp_encode(0) b'\x80' >>> rlp_encode('\x00') b'\x00' >>> rlp_encode(15) b'\x0f' >>> rlp_encode(1024) b'\x82\x04\x00' >>> rlp_encode([ [], [[]], [ [], [[]] ] ]) b'\xc7\xc0\xc1\xc0\xc3\xc0\xc1\xc0' """ if item is None or item == 0: ret = b'\x80' elif isinstance(item, str): ret = rlp_encode(item.encode('utf8')) elif isinstance(item, (bytearray, bytes)): if len(item) == 1 and item[0] < 0x80: # For a single byte whose value is in the [0x00, 0x7f] range, that byte is its own RLP encoding. ret = item else: ret = encode_length(len(item), 0x80) + item elif isinstance(item, collections.abc.Sequence): output = b''.join(map(rlp_encode, item)) ret = encode_length(len(output), 0xC0) + output elif isinstance(item, int): ret = rlp_encode(int_to_bytes(item)) else: raise Exception("Cannot encode object of type %s" % type(item).__name__) return ret
def function[rlp_encode, parameter[item]]: constant[ Recursive Length Prefix Encoding :param item: the object to encode, either a string, bytes, bytearray, int, long, or sequence https://github.com/ethereum/wiki/wiki/RLP >>> rlp_encode('dog') b'\x83dog' >>> rlp_encode([ 'cat', 'dog' ]) b'\xc8\x83cat\x83dog' >>> rlp_encode('') b'\x80' >>> rlp_encode([]) b'\xc0' >>> rlp_encode(0) b'\x80' >>> rlp_encode('\x00') b'\x00' >>> rlp_encode(15) b'\x0f' >>> rlp_encode(1024) b'\x82\x04\x00' >>> rlp_encode([ [], [[]], [ [], [[]] ] ]) b'\xc7\xc0\xc1\xc0\xc3\xc0\xc1\xc0' ] if <ast.BoolOp object at 0x7da2054a47f0> begin[:] variable[ret] assign[=] constant[b'\x80'] return[name[ret]]
keyword[def] identifier[rlp_encode] ( identifier[item] ): literal[string] keyword[if] identifier[item] keyword[is] keyword[None] keyword[or] identifier[item] == literal[int] : identifier[ret] = literal[string] keyword[elif] identifier[isinstance] ( identifier[item] , identifier[str] ): identifier[ret] = identifier[rlp_encode] ( identifier[item] . identifier[encode] ( literal[string] )) keyword[elif] identifier[isinstance] ( identifier[item] ,( identifier[bytearray] , identifier[bytes] )): keyword[if] identifier[len] ( identifier[item] )== literal[int] keyword[and] identifier[item] [ literal[int] ]< literal[int] : identifier[ret] = identifier[item] keyword[else] : identifier[ret] = identifier[encode_length] ( identifier[len] ( identifier[item] ), literal[int] )+ identifier[item] keyword[elif] identifier[isinstance] ( identifier[item] , identifier[collections] . identifier[abc] . identifier[Sequence] ): identifier[output] = literal[string] . identifier[join] ( identifier[map] ( identifier[rlp_encode] , identifier[item] )) identifier[ret] = identifier[encode_length] ( identifier[len] ( identifier[output] ), literal[int] )+ identifier[output] keyword[elif] identifier[isinstance] ( identifier[item] , identifier[int] ): identifier[ret] = identifier[rlp_encode] ( identifier[int_to_bytes] ( identifier[item] )) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] % identifier[type] ( identifier[item] ). identifier[__name__] ) keyword[return] identifier[ret]
def rlp_encode(item): """ Recursive Length Prefix Encoding :param item: the object to encode, either a string, bytes, bytearray, int, long, or sequence https://github.com/ethereum/wiki/wiki/RLP >>> rlp_encode('dog') b'\\x83dog' >>> rlp_encode([ 'cat', 'dog' ]) b'\\xc8\\x83cat\\x83dog' >>> rlp_encode('') b'\\x80' >>> rlp_encode([]) b'\\xc0' >>> rlp_encode(0) b'\\x80' >>> rlp_encode('\\x00') b'\\x00' >>> rlp_encode(15) b'\\x0f' >>> rlp_encode(1024) b'\\x82\\x04\\x00' >>> rlp_encode([ [], [[]], [ [], [[]] ] ]) b'\\xc7\\xc0\\xc1\\xc0\\xc3\\xc0\\xc1\\xc0' """ if item is None or item == 0: ret = b'\x80' # depends on [control=['if'], data=[]] elif isinstance(item, str): ret = rlp_encode(item.encode('utf8')) # depends on [control=['if'], data=[]] elif isinstance(item, (bytearray, bytes)): if len(item) == 1 and item[0] < 128: # For a single byte whose value is in the [0x00, 0x7f] range, that byte is its own RLP encoding. ret = item # depends on [control=['if'], data=[]] else: ret = encode_length(len(item), 128) + item # depends on [control=['if'], data=[]] elif isinstance(item, collections.abc.Sequence): output = b''.join(map(rlp_encode, item)) ret = encode_length(len(output), 192) + output # depends on [control=['if'], data=[]] elif isinstance(item, int): ret = rlp_encode(int_to_bytes(item)) # depends on [control=['if'], data=[]] else: raise Exception('Cannot encode object of type %s' % type(item).__name__) return ret
def insertChild(self, childItem, position=None): """ Inserts a child item to the current item. Overridden from BaseTreeItem. """ childItem = super(BoolCti, self).insertChild(childItem, position=None) enableChildren = self.enabled and self.data != self.childrenDisabledValue #logger.debug("BoolCti.insertChild: {} enableChildren={}".format(childItem, enableChildren)) childItem.enableBranch(enableChildren) childItem.enabled = enableChildren return childItem
def function[insertChild, parameter[self, childItem, position]]: constant[ Inserts a child item to the current item. Overridden from BaseTreeItem. ] variable[childItem] assign[=] call[call[name[super], parameter[name[BoolCti], name[self]]].insertChild, parameter[name[childItem]]] variable[enableChildren] assign[=] <ast.BoolOp object at 0x7da1b04f95d0> call[name[childItem].enableBranch, parameter[name[enableChildren]]] name[childItem].enabled assign[=] name[enableChildren] return[name[childItem]]
keyword[def] identifier[insertChild] ( identifier[self] , identifier[childItem] , identifier[position] = keyword[None] ): literal[string] identifier[childItem] = identifier[super] ( identifier[BoolCti] , identifier[self] ). identifier[insertChild] ( identifier[childItem] , identifier[position] = keyword[None] ) identifier[enableChildren] = identifier[self] . identifier[enabled] keyword[and] identifier[self] . identifier[data] != identifier[self] . identifier[childrenDisabledValue] identifier[childItem] . identifier[enableBranch] ( identifier[enableChildren] ) identifier[childItem] . identifier[enabled] = identifier[enableChildren] keyword[return] identifier[childItem]
def insertChild(self, childItem, position=None): """ Inserts a child item to the current item. Overridden from BaseTreeItem. """ childItem = super(BoolCti, self).insertChild(childItem, position=None) enableChildren = self.enabled and self.data != self.childrenDisabledValue #logger.debug("BoolCti.insertChild: {} enableChildren={}".format(childItem, enableChildren)) childItem.enableBranch(enableChildren) childItem.enabled = enableChildren return childItem
def auth_token_required(fn): """Decorator that protects endpoints using token authentication. The token should be added to the request by the client by using a query string variable with a name equal to the configuration value of `SECURITY_TOKEN_AUTHENTICATION_KEY` or in a request header named that of the configuration value of `SECURITY_TOKEN_AUTHENTICATION_HEADER` """ @wraps(fn) def decorated(*args, **kwargs): if _check_token(): return fn(*args, **kwargs) if _security._unauthorized_callback: return _security._unauthorized_callback() else: return _get_unauthorized_response() return decorated
def function[auth_token_required, parameter[fn]]: constant[Decorator that protects endpoints using token authentication. The token should be added to the request by the client by using a query string variable with a name equal to the configuration value of `SECURITY_TOKEN_AUTHENTICATION_KEY` or in a request header named that of the configuration value of `SECURITY_TOKEN_AUTHENTICATION_HEADER` ] def function[decorated, parameter[]]: if call[name[_check_token], parameter[]] begin[:] return[call[name[fn], parameter[<ast.Starred object at 0x7da204567970>]]] if name[_security]._unauthorized_callback begin[:] return[call[name[_security]._unauthorized_callback, parameter[]]] return[name[decorated]]
keyword[def] identifier[auth_token_required] ( identifier[fn] ): literal[string] @ identifier[wraps] ( identifier[fn] ) keyword[def] identifier[decorated] (* identifier[args] ,** identifier[kwargs] ): keyword[if] identifier[_check_token] (): keyword[return] identifier[fn] (* identifier[args] ,** identifier[kwargs] ) keyword[if] identifier[_security] . identifier[_unauthorized_callback] : keyword[return] identifier[_security] . identifier[_unauthorized_callback] () keyword[else] : keyword[return] identifier[_get_unauthorized_response] () keyword[return] identifier[decorated]
def auth_token_required(fn): """Decorator that protects endpoints using token authentication. The token should be added to the request by the client by using a query string variable with a name equal to the configuration value of `SECURITY_TOKEN_AUTHENTICATION_KEY` or in a request header named that of the configuration value of `SECURITY_TOKEN_AUTHENTICATION_HEADER` """ @wraps(fn) def decorated(*args, **kwargs): if _check_token(): return fn(*args, **kwargs) # depends on [control=['if'], data=[]] if _security._unauthorized_callback: return _security._unauthorized_callback() # depends on [control=['if'], data=[]] else: return _get_unauthorized_response() return decorated
def patch_file_open(): # pragma: no cover """A Monkey patch to log opening and closing of files, which is useful for debugging file descriptor exhaustion.""" openfiles = set() oldfile = builtins.file class newfile(oldfile): def __init__(self, *args, **kwargs): self.x = args[0] all_fds = count_open_fds() print('### {} OPENING {} ( {} total )###'.format( len(openfiles), str(self.x), all_fds)) oldfile.__init__(self, *args, **kwargs) openfiles.add(self) def close(self): print('### {} CLOSING {} ###'.format(len(openfiles), str(self.x))) oldfile.close(self) openfiles.remove(self) def newopen(*args, **kwargs): return newfile(*args, **kwargs) builtins.file = newfile builtins.open = newopen
def function[patch_file_open, parameter[]]: constant[A Monkey patch to log opening and closing of files, which is useful for debugging file descriptor exhaustion.] variable[openfiles] assign[=] call[name[set], parameter[]] variable[oldfile] assign[=] name[builtins].file class class[newfile, parameter[]] begin[:] def function[__init__, parameter[self]]: name[self].x assign[=] call[name[args]][constant[0]] variable[all_fds] assign[=] call[name[count_open_fds], parameter[]] call[name[print], parameter[call[constant[### {} OPENING {} ( {} total )###].format, parameter[call[name[len], parameter[name[openfiles]]], call[name[str], parameter[name[self].x]], name[all_fds]]]]] call[name[oldfile].__init__, parameter[name[self], <ast.Starred object at 0x7da20c7950f0>]] call[name[openfiles].add, parameter[name[self]]] def function[close, parameter[self]]: call[name[print], parameter[call[constant[### {} CLOSING {} ###].format, parameter[call[name[len], parameter[name[openfiles]]], call[name[str], parameter[name[self].x]]]]]] call[name[oldfile].close, parameter[name[self]]] call[name[openfiles].remove, parameter[name[self]]] def function[newopen, parameter[]]: return[call[name[newfile], parameter[<ast.Starred object at 0x7da18dc05930>]]] name[builtins].file assign[=] name[newfile] name[builtins].open assign[=] name[newopen]
keyword[def] identifier[patch_file_open] (): literal[string] identifier[openfiles] = identifier[set] () identifier[oldfile] = identifier[builtins] . identifier[file] keyword[class] identifier[newfile] ( identifier[oldfile] ): keyword[def] identifier[__init__] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): identifier[self] . identifier[x] = identifier[args] [ literal[int] ] identifier[all_fds] = identifier[count_open_fds] () identifier[print] ( literal[string] . identifier[format] ( identifier[len] ( identifier[openfiles] ), identifier[str] ( identifier[self] . identifier[x] ), identifier[all_fds] )) identifier[oldfile] . identifier[__init__] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ) identifier[openfiles] . identifier[add] ( identifier[self] ) keyword[def] identifier[close] ( identifier[self] ): identifier[print] ( literal[string] . identifier[format] ( identifier[len] ( identifier[openfiles] ), identifier[str] ( identifier[self] . identifier[x] ))) identifier[oldfile] . identifier[close] ( identifier[self] ) identifier[openfiles] . identifier[remove] ( identifier[self] ) keyword[def] identifier[newopen] (* identifier[args] ,** identifier[kwargs] ): keyword[return] identifier[newfile] (* identifier[args] ,** identifier[kwargs] ) identifier[builtins] . identifier[file] = identifier[newfile] identifier[builtins] . identifier[open] = identifier[newopen]
def patch_file_open(): # pragma: no cover 'A Monkey patch to log opening and closing of files, which is useful for\n debugging file descriptor exhaustion.' openfiles = set() oldfile = builtins.file class newfile(oldfile): def __init__(self, *args, **kwargs): self.x = args[0] all_fds = count_open_fds() print('### {} OPENING {} ( {} total )###'.format(len(openfiles), str(self.x), all_fds)) oldfile.__init__(self, *args, **kwargs) openfiles.add(self) def close(self): print('### {} CLOSING {} ###'.format(len(openfiles), str(self.x))) oldfile.close(self) openfiles.remove(self) def newopen(*args, **kwargs): return newfile(*args, **kwargs) builtins.file = newfile builtins.open = newopen
def ravel(self, name=None): """ Convert 2D histogram into 1D histogram with the y-axis repeated along the x-axis, similar to NumPy's ravel(). """ nbinsx = self.nbins(0) nbinsy = self.nbins(1) left_edge = self.xedgesl(1) right_edge = self.xedgesh(nbinsx) out = Hist(nbinsx * nbinsy, left_edge, nbinsy * (right_edge - left_edge) + left_edge, type=self.TYPE, name=name, title=self.title, **self.decorators) for i, bin in enumerate(self.bins(overflow=False)): out.SetBinContent(i + 1, bin.value) out.SetBinError(i + 1, bin.error) return out
def function[ravel, parameter[self, name]]: constant[ Convert 2D histogram into 1D histogram with the y-axis repeated along the x-axis, similar to NumPy's ravel(). ] variable[nbinsx] assign[=] call[name[self].nbins, parameter[constant[0]]] variable[nbinsy] assign[=] call[name[self].nbins, parameter[constant[1]]] variable[left_edge] assign[=] call[name[self].xedgesl, parameter[constant[1]]] variable[right_edge] assign[=] call[name[self].xedgesh, parameter[name[nbinsx]]] variable[out] assign[=] call[name[Hist], parameter[binary_operation[name[nbinsx] * name[nbinsy]], name[left_edge], binary_operation[binary_operation[name[nbinsy] * binary_operation[name[right_edge] - name[left_edge]]] + name[left_edge]]]] for taget[tuple[[<ast.Name object at 0x7da1b11dba30>, <ast.Name object at 0x7da1b11d90c0>]]] in starred[call[name[enumerate], parameter[call[name[self].bins, parameter[]]]]] begin[:] call[name[out].SetBinContent, parameter[binary_operation[name[i] + constant[1]], name[bin].value]] call[name[out].SetBinError, parameter[binary_operation[name[i] + constant[1]], name[bin].error]] return[name[out]]
keyword[def] identifier[ravel] ( identifier[self] , identifier[name] = keyword[None] ): literal[string] identifier[nbinsx] = identifier[self] . identifier[nbins] ( literal[int] ) identifier[nbinsy] = identifier[self] . identifier[nbins] ( literal[int] ) identifier[left_edge] = identifier[self] . identifier[xedgesl] ( literal[int] ) identifier[right_edge] = identifier[self] . identifier[xedgesh] ( identifier[nbinsx] ) identifier[out] = identifier[Hist] ( identifier[nbinsx] * identifier[nbinsy] , identifier[left_edge] , identifier[nbinsy] *( identifier[right_edge] - identifier[left_edge] )+ identifier[left_edge] , identifier[type] = identifier[self] . identifier[TYPE] , identifier[name] = identifier[name] , identifier[title] = identifier[self] . identifier[title] , ** identifier[self] . identifier[decorators] ) keyword[for] identifier[i] , identifier[bin] keyword[in] identifier[enumerate] ( identifier[self] . identifier[bins] ( identifier[overflow] = keyword[False] )): identifier[out] . identifier[SetBinContent] ( identifier[i] + literal[int] , identifier[bin] . identifier[value] ) identifier[out] . identifier[SetBinError] ( identifier[i] + literal[int] , identifier[bin] . identifier[error] ) keyword[return] identifier[out]
def ravel(self, name=None): """ Convert 2D histogram into 1D histogram with the y-axis repeated along the x-axis, similar to NumPy's ravel(). """ nbinsx = self.nbins(0) nbinsy = self.nbins(1) left_edge = self.xedgesl(1) right_edge = self.xedgesh(nbinsx) out = Hist(nbinsx * nbinsy, left_edge, nbinsy * (right_edge - left_edge) + left_edge, type=self.TYPE, name=name, title=self.title, **self.decorators) for (i, bin) in enumerate(self.bins(overflow=False)): out.SetBinContent(i + 1, bin.value) out.SetBinError(i + 1, bin.error) # depends on [control=['for'], data=[]] return out
def order_duplicate_volume(self, origin_volume_id, origin_snapshot_id=None, duplicate_size=None, duplicate_iops=None, duplicate_tier_level=None, duplicate_snapshot_size=None, hourly_billing_flag=False): """Places an order for a duplicate block volume. :param origin_volume_id: The ID of the origin volume to be duplicated :param origin_snapshot_id: Origin snapshot ID to use for duplication :param duplicate_size: Size/capacity for the duplicate volume :param duplicate_iops: The IOPS per GB for the duplicate volume :param duplicate_tier_level: Tier level for the duplicate volume :param duplicate_snapshot_size: Snapshot space size for the duplicate :param hourly_billing_flag: Billing type, monthly (False) or hourly (True), default to monthly. :return: Returns a SoftLayer_Container_Product_Order_Receipt """ block_mask = 'id,billingItem[location,hourlyFlag],snapshotCapacityGb,'\ 'storageType[keyName],capacityGb,originalVolumeSize,'\ 'provisionedIops,storageTierLevel,osType[keyName],'\ 'staasVersion,hasEncryptionAtRest' origin_volume = self.get_block_volume_details(origin_volume_id, mask=block_mask) if isinstance(utils.lookup(origin_volume, 'osType', 'keyName'), str): os_type = origin_volume['osType']['keyName'] else: raise exceptions.SoftLayerError( "Cannot find origin volume's os-type") order = storage_utils.prepare_duplicate_order_object( self, origin_volume, duplicate_iops, duplicate_tier_level, duplicate_size, duplicate_snapshot_size, 'block', hourly_billing_flag ) order['osFormatType'] = {'keyName': os_type} if origin_snapshot_id is not None: order['duplicateOriginSnapshotId'] = origin_snapshot_id return self.client.call('Product_Order', 'placeOrder', order)
def function[order_duplicate_volume, parameter[self, origin_volume_id, origin_snapshot_id, duplicate_size, duplicate_iops, duplicate_tier_level, duplicate_snapshot_size, hourly_billing_flag]]: constant[Places an order for a duplicate block volume. :param origin_volume_id: The ID of the origin volume to be duplicated :param origin_snapshot_id: Origin snapshot ID to use for duplication :param duplicate_size: Size/capacity for the duplicate volume :param duplicate_iops: The IOPS per GB for the duplicate volume :param duplicate_tier_level: Tier level for the duplicate volume :param duplicate_snapshot_size: Snapshot space size for the duplicate :param hourly_billing_flag: Billing type, monthly (False) or hourly (True), default to monthly. :return: Returns a SoftLayer_Container_Product_Order_Receipt ] variable[block_mask] assign[=] constant[id,billingItem[location,hourlyFlag],snapshotCapacityGb,storageType[keyName],capacityGb,originalVolumeSize,provisionedIops,storageTierLevel,osType[keyName],staasVersion,hasEncryptionAtRest] variable[origin_volume] assign[=] call[name[self].get_block_volume_details, parameter[name[origin_volume_id]]] if call[name[isinstance], parameter[call[name[utils].lookup, parameter[name[origin_volume], constant[osType], constant[keyName]]], name[str]]] begin[:] variable[os_type] assign[=] call[call[name[origin_volume]][constant[osType]]][constant[keyName]] variable[order] assign[=] call[name[storage_utils].prepare_duplicate_order_object, parameter[name[self], name[origin_volume], name[duplicate_iops], name[duplicate_tier_level], name[duplicate_size], name[duplicate_snapshot_size], constant[block], name[hourly_billing_flag]]] call[name[order]][constant[osFormatType]] assign[=] dictionary[[<ast.Constant object at 0x7da20c7c9d20>], [<ast.Name object at 0x7da20c7c8310>]] if compare[name[origin_snapshot_id] is_not constant[None]] begin[:] call[name[order]][constant[duplicateOriginSnapshotId]] assign[=] name[origin_snapshot_id] return[call[name[self].client.call, parameter[constant[Product_Order], constant[placeOrder], name[order]]]]
keyword[def] identifier[order_duplicate_volume] ( identifier[self] , identifier[origin_volume_id] , identifier[origin_snapshot_id] = keyword[None] , identifier[duplicate_size] = keyword[None] , identifier[duplicate_iops] = keyword[None] , identifier[duplicate_tier_level] = keyword[None] , identifier[duplicate_snapshot_size] = keyword[None] , identifier[hourly_billing_flag] = keyword[False] ): literal[string] identifier[block_mask] = literal[string] literal[string] literal[string] literal[string] identifier[origin_volume] = identifier[self] . identifier[get_block_volume_details] ( identifier[origin_volume_id] , identifier[mask] = identifier[block_mask] ) keyword[if] identifier[isinstance] ( identifier[utils] . identifier[lookup] ( identifier[origin_volume] , literal[string] , literal[string] ), identifier[str] ): identifier[os_type] = identifier[origin_volume] [ literal[string] ][ literal[string] ] keyword[else] : keyword[raise] identifier[exceptions] . identifier[SoftLayerError] ( literal[string] ) identifier[order] = identifier[storage_utils] . identifier[prepare_duplicate_order_object] ( identifier[self] , identifier[origin_volume] , identifier[duplicate_iops] , identifier[duplicate_tier_level] , identifier[duplicate_size] , identifier[duplicate_snapshot_size] , literal[string] , identifier[hourly_billing_flag] ) identifier[order] [ literal[string] ]={ literal[string] : identifier[os_type] } keyword[if] identifier[origin_snapshot_id] keyword[is] keyword[not] keyword[None] : identifier[order] [ literal[string] ]= identifier[origin_snapshot_id] keyword[return] identifier[self] . identifier[client] . identifier[call] ( literal[string] , literal[string] , identifier[order] )
def order_duplicate_volume(self, origin_volume_id, origin_snapshot_id=None, duplicate_size=None, duplicate_iops=None, duplicate_tier_level=None, duplicate_snapshot_size=None, hourly_billing_flag=False): """Places an order for a duplicate block volume. :param origin_volume_id: The ID of the origin volume to be duplicated :param origin_snapshot_id: Origin snapshot ID to use for duplication :param duplicate_size: Size/capacity for the duplicate volume :param duplicate_iops: The IOPS per GB for the duplicate volume :param duplicate_tier_level: Tier level for the duplicate volume :param duplicate_snapshot_size: Snapshot space size for the duplicate :param hourly_billing_flag: Billing type, monthly (False) or hourly (True), default to monthly. :return: Returns a SoftLayer_Container_Product_Order_Receipt """ block_mask = 'id,billingItem[location,hourlyFlag],snapshotCapacityGb,storageType[keyName],capacityGb,originalVolumeSize,provisionedIops,storageTierLevel,osType[keyName],staasVersion,hasEncryptionAtRest' origin_volume = self.get_block_volume_details(origin_volume_id, mask=block_mask) if isinstance(utils.lookup(origin_volume, 'osType', 'keyName'), str): os_type = origin_volume['osType']['keyName'] # depends on [control=['if'], data=[]] else: raise exceptions.SoftLayerError("Cannot find origin volume's os-type") order = storage_utils.prepare_duplicate_order_object(self, origin_volume, duplicate_iops, duplicate_tier_level, duplicate_size, duplicate_snapshot_size, 'block', hourly_billing_flag) order['osFormatType'] = {'keyName': os_type} if origin_snapshot_id is not None: order['duplicateOriginSnapshotId'] = origin_snapshot_id # depends on [control=['if'], data=['origin_snapshot_id']] return self.client.call('Product_Order', 'placeOrder', order)
def a_connection_timeout(ctx): """Check the prompt and update the drivers.""" prompt = ctx.ctrl.after ctx.msg = "Received the jump host prompt: '{}'".format(prompt) ctx.device.connected = False ctx.finished = True raise ConnectionTimeoutError("Unable to connect to the device.", ctx.ctrl.hostname)
def function[a_connection_timeout, parameter[ctx]]: constant[Check the prompt and update the drivers.] variable[prompt] assign[=] name[ctx].ctrl.after name[ctx].msg assign[=] call[constant[Received the jump host prompt: '{}'].format, parameter[name[prompt]]] name[ctx].device.connected assign[=] constant[False] name[ctx].finished assign[=] constant[True] <ast.Raise object at 0x7da18eb56c80>
keyword[def] identifier[a_connection_timeout] ( identifier[ctx] ): literal[string] identifier[prompt] = identifier[ctx] . identifier[ctrl] . identifier[after] identifier[ctx] . identifier[msg] = literal[string] . identifier[format] ( identifier[prompt] ) identifier[ctx] . identifier[device] . identifier[connected] = keyword[False] identifier[ctx] . identifier[finished] = keyword[True] keyword[raise] identifier[ConnectionTimeoutError] ( literal[string] , identifier[ctx] . identifier[ctrl] . identifier[hostname] )
def a_connection_timeout(ctx): """Check the prompt and update the drivers.""" prompt = ctx.ctrl.after ctx.msg = "Received the jump host prompt: '{}'".format(prompt) ctx.device.connected = False ctx.finished = True raise ConnectionTimeoutError('Unable to connect to the device.', ctx.ctrl.hostname)
def _create_ssh_keys(self): """ Generate a pair of ssh keys for this prefix Returns: None Raises: RuntimeError: if it fails to create the keys """ ret, _, _ = utils.run_command( [ 'ssh-keygen', '-t', 'rsa', '-m', 'PEM', '-N', '', '-f', self.paths.ssh_id_rsa(), ] ) if ret != 0: raise RuntimeError( 'Failed to crate ssh keys at %s', self.paths.ssh_id_rsa(), )
def function[_create_ssh_keys, parameter[self]]: constant[ Generate a pair of ssh keys for this prefix Returns: None Raises: RuntimeError: if it fails to create the keys ] <ast.Tuple object at 0x7da204346530> assign[=] call[name[utils].run_command, parameter[list[[<ast.Constant object at 0x7da2043470a0>, <ast.Constant object at 0x7da2043466e0>, <ast.Constant object at 0x7da2043456c0>, <ast.Constant object at 0x7da204344a90>, <ast.Constant object at 0x7da2043442b0>, <ast.Constant object at 0x7da204345900>, <ast.Constant object at 0x7da204344a00>, <ast.Constant object at 0x7da204345840>, <ast.Call object at 0x7da2043440a0>]]]] if compare[name[ret] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da204347220>
keyword[def] identifier[_create_ssh_keys] ( identifier[self] ): literal[string] identifier[ret] , identifier[_] , identifier[_] = identifier[utils] . identifier[run_command] ( [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , identifier[self] . identifier[paths] . identifier[ssh_id_rsa] (), ] ) keyword[if] identifier[ret] != literal[int] : keyword[raise] identifier[RuntimeError] ( literal[string] , identifier[self] . identifier[paths] . identifier[ssh_id_rsa] (), )
def _create_ssh_keys(self): """ Generate a pair of ssh keys for this prefix Returns: None Raises: RuntimeError: if it fails to create the keys """ (ret, _, _) = utils.run_command(['ssh-keygen', '-t', 'rsa', '-m', 'PEM', '-N', '', '-f', self.paths.ssh_id_rsa()]) if ret != 0: raise RuntimeError('Failed to crate ssh keys at %s', self.paths.ssh_id_rsa()) # depends on [control=['if'], data=[]]
def _generate_random_leaf_count(height): """Return a random leaf count for building binary trees. :param height: Height of the binary tree. :type height: int :return: Random leaf count. :rtype: int """ max_leaf_count = 2 ** height half_leaf_count = max_leaf_count // 2 # A very naive way of mimicking normal distribution roll_1 = random.randint(0, half_leaf_count) roll_2 = random.randint(0, max_leaf_count - half_leaf_count) return roll_1 + roll_2 or half_leaf_count
def function[_generate_random_leaf_count, parameter[height]]: constant[Return a random leaf count for building binary trees. :param height: Height of the binary tree. :type height: int :return: Random leaf count. :rtype: int ] variable[max_leaf_count] assign[=] binary_operation[constant[2] ** name[height]] variable[half_leaf_count] assign[=] binary_operation[name[max_leaf_count] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] variable[roll_1] assign[=] call[name[random].randint, parameter[constant[0], name[half_leaf_count]]] variable[roll_2] assign[=] call[name[random].randint, parameter[constant[0], binary_operation[name[max_leaf_count] - name[half_leaf_count]]]] return[<ast.BoolOp object at 0x7da18f723e20>]
keyword[def] identifier[_generate_random_leaf_count] ( identifier[height] ): literal[string] identifier[max_leaf_count] = literal[int] ** identifier[height] identifier[half_leaf_count] = identifier[max_leaf_count] // literal[int] identifier[roll_1] = identifier[random] . identifier[randint] ( literal[int] , identifier[half_leaf_count] ) identifier[roll_2] = identifier[random] . identifier[randint] ( literal[int] , identifier[max_leaf_count] - identifier[half_leaf_count] ) keyword[return] identifier[roll_1] + identifier[roll_2] keyword[or] identifier[half_leaf_count]
def _generate_random_leaf_count(height): """Return a random leaf count for building binary trees. :param height: Height of the binary tree. :type height: int :return: Random leaf count. :rtype: int """ max_leaf_count = 2 ** height half_leaf_count = max_leaf_count // 2 # A very naive way of mimicking normal distribution roll_1 = random.randint(0, half_leaf_count) roll_2 = random.randint(0, max_leaf_count - half_leaf_count) return roll_1 + roll_2 or half_leaf_count
def use(app=None, gl=None): """ Set the usage options for vispy Specify what app backend and GL backend to use. Parameters ---------- app : str The app backend to use (case insensitive). Standard backends: * 'PyQt4': use Qt widget toolkit via PyQt4. * 'PyQt5': use Qt widget toolkit via PyQt5. * 'PySide': use Qt widget toolkit via PySide. * 'PyGlet': use Pyglet backend. * 'Glfw': use Glfw backend (successor of Glut). Widely available on Linux. * 'SDL2': use SDL v2 backend. * 'osmesa': Use OSMesa backend Additional backends: * 'ipynb_vnc': render in the IPython notebook via a VNC approach (experimental) gl : str The gl backend to use (case insensitive). Options are: * 'gl2': use Vispy's desktop OpenGL API. * 'pyopengl2': use PyOpenGL's desktop OpenGL API. Mostly for testing. * 'es2': (TO COME) use real OpenGL ES 2.0 on Windows via Angle. Availability of ES 2.0 is larger for Windows, since it relies on DirectX. * 'gl+': use the full OpenGL functionality available on your system (via PyOpenGL). Notes ----- If the app option is given, ``vispy.app.use_app()`` is called. If the gl option is given, ``vispy.gloo.use_gl()`` is called. If an app backend name is provided, and that backend could not be loaded, an error is raised. If no backend name is provided, Vispy will first check if the GUI toolkit corresponding to each backend is already imported, and try that backend first. If this is unsuccessful, it will try the 'default_backend' provided in the vispy config. If still not succesful, it will try each backend in a predetermined order. See Also -------- vispy.app.use_app vispy.gloo.gl.use_gl """ if app is None and gl is None: raise TypeError('Must specify at least one of "app" or "gl".') # Example for future. This wont work (yet). if app == 'ipynb_webgl': app = 'headless' gl = 'webgl' if app == 'osmesa': from ..util.osmesa_gl import fix_osmesa_gl_lib fix_osmesa_gl_lib() if gl is not None: raise ValueError("Do not specify gl when using osmesa") # Apply now if gl: from .. import gloo, config config['gl_backend'] = gl gloo.gl.use_gl(gl) if app: from ..app import use_app use_app(app)
def function[use, parameter[app, gl]]: constant[ Set the usage options for vispy Specify what app backend and GL backend to use. Parameters ---------- app : str The app backend to use (case insensitive). Standard backends: * 'PyQt4': use Qt widget toolkit via PyQt4. * 'PyQt5': use Qt widget toolkit via PyQt5. * 'PySide': use Qt widget toolkit via PySide. * 'PyGlet': use Pyglet backend. * 'Glfw': use Glfw backend (successor of Glut). Widely available on Linux. * 'SDL2': use SDL v2 backend. * 'osmesa': Use OSMesa backend Additional backends: * 'ipynb_vnc': render in the IPython notebook via a VNC approach (experimental) gl : str The gl backend to use (case insensitive). Options are: * 'gl2': use Vispy's desktop OpenGL API. * 'pyopengl2': use PyOpenGL's desktop OpenGL API. Mostly for testing. * 'es2': (TO COME) use real OpenGL ES 2.0 on Windows via Angle. Availability of ES 2.0 is larger for Windows, since it relies on DirectX. * 'gl+': use the full OpenGL functionality available on your system (via PyOpenGL). Notes ----- If the app option is given, ``vispy.app.use_app()`` is called. If the gl option is given, ``vispy.gloo.use_gl()`` is called. If an app backend name is provided, and that backend could not be loaded, an error is raised. If no backend name is provided, Vispy will first check if the GUI toolkit corresponding to each backend is already imported, and try that backend first. If this is unsuccessful, it will try the 'default_backend' provided in the vispy config. If still not succesful, it will try each backend in a predetermined order. See Also -------- vispy.app.use_app vispy.gloo.gl.use_gl ] if <ast.BoolOp object at 0x7da18c4ce620> begin[:] <ast.Raise object at 0x7da18c4ce710> if compare[name[app] equal[==] constant[ipynb_webgl]] begin[:] variable[app] assign[=] constant[headless] variable[gl] assign[=] constant[webgl] if compare[name[app] equal[==] constant[osmesa]] begin[:] from relative_module[util.osmesa_gl] import module[fix_osmesa_gl_lib] call[name[fix_osmesa_gl_lib], parameter[]] if compare[name[gl] is_not constant[None]] begin[:] <ast.Raise object at 0x7da18c4cfe20> if name[gl] begin[:] from relative_module[None] import module[gloo], module[config] call[name[config]][constant[gl_backend]] assign[=] name[gl] call[name[gloo].gl.use_gl, parameter[name[gl]]] if name[app] begin[:] from relative_module[app] import module[use_app] call[name[use_app], parameter[name[app]]]
keyword[def] identifier[use] ( identifier[app] = keyword[None] , identifier[gl] = keyword[None] ): literal[string] keyword[if] identifier[app] keyword[is] keyword[None] keyword[and] identifier[gl] keyword[is] keyword[None] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] identifier[app] == literal[string] : identifier[app] = literal[string] identifier[gl] = literal[string] keyword[if] identifier[app] == literal[string] : keyword[from] .. identifier[util] . identifier[osmesa_gl] keyword[import] identifier[fix_osmesa_gl_lib] identifier[fix_osmesa_gl_lib] () keyword[if] identifier[gl] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[gl] : keyword[from] .. keyword[import] identifier[gloo] , identifier[config] identifier[config] [ literal[string] ]= identifier[gl] identifier[gloo] . identifier[gl] . identifier[use_gl] ( identifier[gl] ) keyword[if] identifier[app] : keyword[from] .. identifier[app] keyword[import] identifier[use_app] identifier[use_app] ( identifier[app] )
def use(app=None, gl=None): """ Set the usage options for vispy Specify what app backend and GL backend to use. Parameters ---------- app : str The app backend to use (case insensitive). Standard backends: * 'PyQt4': use Qt widget toolkit via PyQt4. * 'PyQt5': use Qt widget toolkit via PyQt5. * 'PySide': use Qt widget toolkit via PySide. * 'PyGlet': use Pyglet backend. * 'Glfw': use Glfw backend (successor of Glut). Widely available on Linux. * 'SDL2': use SDL v2 backend. * 'osmesa': Use OSMesa backend Additional backends: * 'ipynb_vnc': render in the IPython notebook via a VNC approach (experimental) gl : str The gl backend to use (case insensitive). Options are: * 'gl2': use Vispy's desktop OpenGL API. * 'pyopengl2': use PyOpenGL's desktop OpenGL API. Mostly for testing. * 'es2': (TO COME) use real OpenGL ES 2.0 on Windows via Angle. Availability of ES 2.0 is larger for Windows, since it relies on DirectX. * 'gl+': use the full OpenGL functionality available on your system (via PyOpenGL). Notes ----- If the app option is given, ``vispy.app.use_app()`` is called. If the gl option is given, ``vispy.gloo.use_gl()`` is called. If an app backend name is provided, and that backend could not be loaded, an error is raised. If no backend name is provided, Vispy will first check if the GUI toolkit corresponding to each backend is already imported, and try that backend first. If this is unsuccessful, it will try the 'default_backend' provided in the vispy config. If still not succesful, it will try each backend in a predetermined order. See Also -------- vispy.app.use_app vispy.gloo.gl.use_gl """ if app is None and gl is None: raise TypeError('Must specify at least one of "app" or "gl".') # depends on [control=['if'], data=[]] # Example for future. This wont work (yet). if app == 'ipynb_webgl': app = 'headless' gl = 'webgl' # depends on [control=['if'], data=['app']] if app == 'osmesa': from ..util.osmesa_gl import fix_osmesa_gl_lib fix_osmesa_gl_lib() if gl is not None: raise ValueError('Do not specify gl when using osmesa') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Apply now if gl: from .. import gloo, config config['gl_backend'] = gl gloo.gl.use_gl(gl) # depends on [control=['if'], data=[]] if app: from ..app import use_app use_app(app) # depends on [control=['if'], data=[]]
def pull_all_rtl(configuration): """ Pulls all translations - reviewed or not - for RTL languages """ print("Pulling all translated RTL languages from transifex...") for lang in configuration.rtl_langs: print('rm -rf conf/locale/' + lang) execute('rm -rf conf/locale/' + lang) execute('tx pull -l ' + lang) clean_translated_locales(configuration, langs=configuration.rtl_langs)
def function[pull_all_rtl, parameter[configuration]]: constant[ Pulls all translations - reviewed or not - for RTL languages ] call[name[print], parameter[constant[Pulling all translated RTL languages from transifex...]]] for taget[name[lang]] in starred[name[configuration].rtl_langs] begin[:] call[name[print], parameter[binary_operation[constant[rm -rf conf/locale/] + name[lang]]]] call[name[execute], parameter[binary_operation[constant[rm -rf conf/locale/] + name[lang]]]] call[name[execute], parameter[binary_operation[constant[tx pull -l ] + name[lang]]]] call[name[clean_translated_locales], parameter[name[configuration]]]
keyword[def] identifier[pull_all_rtl] ( identifier[configuration] ): literal[string] identifier[print] ( literal[string] ) keyword[for] identifier[lang] keyword[in] identifier[configuration] . identifier[rtl_langs] : identifier[print] ( literal[string] + identifier[lang] ) identifier[execute] ( literal[string] + identifier[lang] ) identifier[execute] ( literal[string] + identifier[lang] ) identifier[clean_translated_locales] ( identifier[configuration] , identifier[langs] = identifier[configuration] . identifier[rtl_langs] )
def pull_all_rtl(configuration): """ Pulls all translations - reviewed or not - for RTL languages """ print('Pulling all translated RTL languages from transifex...') for lang in configuration.rtl_langs: print('rm -rf conf/locale/' + lang) execute('rm -rf conf/locale/' + lang) execute('tx pull -l ' + lang) # depends on [control=['for'], data=['lang']] clean_translated_locales(configuration, langs=configuration.rtl_langs)
def overall_serovar_call(serovar_prediction, antigen_predictor): """ Predict serovar from cgMLST cluster membership analysis and antigen BLAST results. SerovarPrediction object is assigned H1, H2 and Serogroup from the antigen BLAST results. Antigen BLAST results will predict a particular serovar or list of serovars, however, the cgMLST membership may be able to help narrow down the list of potential serovars. Notes: If the cgMLST predicted serovar is within the list of antigen BLAST predicted serovars, then the serovar is assigned the cgMLST predicted serovar. If all antigens are found, but an antigen serovar is not found then the serovar is assigned a pseudo-antigenic formula (Serogroup:H1:H2), otherwise the serovar is assigned the cgMLST prediction. If the antigen predicted serovar does not match the cgMLST predicted serovar, - the serovar is the cgMLST serovar if the cgMLST cluster level is <= 0.1 (10% or less) - otherwise, the serovar is antigen predicted serovar(s) Args: serovar_prediction (src.serovar_prediction.SerovarPrediction): Serovar prediction results (antigen+cgMLST[+Mash]) antigen_predictor (src.serovar_prediction.SerovarPredictor): Antigen search results Returns: src.serovar_prediction.SerovarPrediction: Serovar prediction results with overall prediction from antigen + cgMLST """ assert isinstance(serovar_prediction, SerovarPrediction) assert isinstance(antigen_predictor, SerovarPredictor) h1 = antigen_predictor.h1 h2 = antigen_predictor.h2 sg = antigen_predictor.serogroup spp = serovar_prediction.cgmlst_subspecies if spp is None: if 'mash_match' in serovar_prediction.__dict__: spp = serovar_prediction.__dict__['mash_subspecies'] serovar_prediction.serovar_antigen = antigen_predictor.serovar cgmlst_serovar = serovar_prediction.serovar_cgmlst cgmlst_distance = float(serovar_prediction.cgmlst_distance) null_result = '-:-:-' try: spp_roman = spp_name_to_roman[spp] except: spp_roman = None is_antigen_null = lambda x: (x is None or x == '' or x == '-') if antigen_predictor.serovar is None: if is_antigen_null(sg) and is_antigen_null(h1) and is_antigen_null(h2): if spp_roman is not None: serovar_prediction.serovar = '{} {}:{}:{}'.format(spp_roman, sg, h1, h2) else: serovar_prediction.serovar = '{}:{}:{}'.format(spp_roman, sg, h1, h2) elif cgmlst_serovar is not None and cgmlst_distance <= CGMLST_DISTANCE_THRESHOLD: serovar_prediction.serovar = cgmlst_serovar else: serovar_prediction.serovar = null_result if 'mash_match' in serovar_prediction.__dict__: spd = serovar_prediction.__dict__ mash_dist = float(spd['mash_distance']) if mash_dist <= MASH_DISTANCE_THRESHOLD: serovar_prediction.serovar = spd['mash_serovar'] else: serovars_from_antigen = antigen_predictor.serovar.split('|') if not isinstance(serovars_from_antigen, list): serovars_from_antigen = [serovars_from_antigen] if cgmlst_serovar is not None: if cgmlst_serovar in serovars_from_antigen: serovar_prediction.serovar = cgmlst_serovar else: if float(cgmlst_distance) <= CGMLST_DISTANCE_THRESHOLD: serovar_prediction.serovar = cgmlst_serovar elif 'mash_match' in serovar_prediction.__dict__: spd = serovar_prediction.__dict__ mash_serovar = spd['mash_serovar'] mash_dist = float(spd['mash_distance']) if mash_serovar in serovars_from_antigen: serovar_prediction.serovar = mash_serovar else: if mash_dist <= MASH_DISTANCE_THRESHOLD: serovar_prediction.serovar = mash_serovar if serovar_prediction.serovar is None: serovar_prediction.serovar = serovar_prediction.serovar_antigen if serovar_prediction.h1 is None: serovar_prediction.h1 = '-' if serovar_prediction.h2 is None: serovar_prediction.h2 = '-' if serovar_prediction.serogroup is None: serovar_prediction.serogroup = '-' if serovar_prediction.serovar_antigen is None: if spp_roman is not None: serovar_prediction.serovar_antigen = '{} -:-:-'.format(spp_roman) else: serovar_prediction.serovar_antigen = '-:-:-' if serovar_prediction.serovar is None: serovar_prediction.serovar = serovar_prediction.serovar_antigen return serovar_prediction
def function[overall_serovar_call, parameter[serovar_prediction, antigen_predictor]]: constant[ Predict serovar from cgMLST cluster membership analysis and antigen BLAST results. SerovarPrediction object is assigned H1, H2 and Serogroup from the antigen BLAST results. Antigen BLAST results will predict a particular serovar or list of serovars, however, the cgMLST membership may be able to help narrow down the list of potential serovars. Notes: If the cgMLST predicted serovar is within the list of antigen BLAST predicted serovars, then the serovar is assigned the cgMLST predicted serovar. If all antigens are found, but an antigen serovar is not found then the serovar is assigned a pseudo-antigenic formula (Serogroup:H1:H2), otherwise the serovar is assigned the cgMLST prediction. If the antigen predicted serovar does not match the cgMLST predicted serovar, - the serovar is the cgMLST serovar if the cgMLST cluster level is <= 0.1 (10% or less) - otherwise, the serovar is antigen predicted serovar(s) Args: serovar_prediction (src.serovar_prediction.SerovarPrediction): Serovar prediction results (antigen+cgMLST[+Mash]) antigen_predictor (src.serovar_prediction.SerovarPredictor): Antigen search results Returns: src.serovar_prediction.SerovarPrediction: Serovar prediction results with overall prediction from antigen + cgMLST ] assert[call[name[isinstance], parameter[name[serovar_prediction], name[SerovarPrediction]]]] assert[call[name[isinstance], parameter[name[antigen_predictor], name[SerovarPredictor]]]] variable[h1] assign[=] name[antigen_predictor].h1 variable[h2] assign[=] name[antigen_predictor].h2 variable[sg] assign[=] name[antigen_predictor].serogroup variable[spp] assign[=] name[serovar_prediction].cgmlst_subspecies if compare[name[spp] is constant[None]] begin[:] if compare[constant[mash_match] in name[serovar_prediction].__dict__] begin[:] variable[spp] assign[=] call[name[serovar_prediction].__dict__][constant[mash_subspecies]] name[serovar_prediction].serovar_antigen assign[=] name[antigen_predictor].serovar variable[cgmlst_serovar] assign[=] name[serovar_prediction].serovar_cgmlst variable[cgmlst_distance] assign[=] call[name[float], parameter[name[serovar_prediction].cgmlst_distance]] variable[null_result] assign[=] constant[-:-:-] <ast.Try object at 0x7da1b1a7f250> variable[is_antigen_null] assign[=] <ast.Lambda object at 0x7da1b1a7ef80> if compare[name[antigen_predictor].serovar is constant[None]] begin[:] if <ast.BoolOp object at 0x7da1b1a7dea0> begin[:] if compare[name[spp_roman] is_not constant[None]] begin[:] name[serovar_prediction].serovar assign[=] call[constant[{} {}:{}:{}].format, parameter[name[spp_roman], name[sg], name[h1], name[h2]]] if compare[name[serovar_prediction].h1 is constant[None]] begin[:] name[serovar_prediction].h1 assign[=] constant[-] if compare[name[serovar_prediction].h2 is constant[None]] begin[:] name[serovar_prediction].h2 assign[=] constant[-] if compare[name[serovar_prediction].serogroup is constant[None]] begin[:] name[serovar_prediction].serogroup assign[=] constant[-] if compare[name[serovar_prediction].serovar_antigen is constant[None]] begin[:] if compare[name[spp_roman] is_not constant[None]] begin[:] name[serovar_prediction].serovar_antigen assign[=] call[constant[{} -:-:-].format, parameter[name[spp_roman]]] if compare[name[serovar_prediction].serovar is constant[None]] begin[:] name[serovar_prediction].serovar assign[=] name[serovar_prediction].serovar_antigen return[name[serovar_prediction]]
keyword[def] identifier[overall_serovar_call] ( identifier[serovar_prediction] , identifier[antigen_predictor] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[serovar_prediction] , identifier[SerovarPrediction] ) keyword[assert] identifier[isinstance] ( identifier[antigen_predictor] , identifier[SerovarPredictor] ) identifier[h1] = identifier[antigen_predictor] . identifier[h1] identifier[h2] = identifier[antigen_predictor] . identifier[h2] identifier[sg] = identifier[antigen_predictor] . identifier[serogroup] identifier[spp] = identifier[serovar_prediction] . identifier[cgmlst_subspecies] keyword[if] identifier[spp] keyword[is] keyword[None] : keyword[if] literal[string] keyword[in] identifier[serovar_prediction] . identifier[__dict__] : identifier[spp] = identifier[serovar_prediction] . identifier[__dict__] [ literal[string] ] identifier[serovar_prediction] . identifier[serovar_antigen] = identifier[antigen_predictor] . identifier[serovar] identifier[cgmlst_serovar] = identifier[serovar_prediction] . identifier[serovar_cgmlst] identifier[cgmlst_distance] = identifier[float] ( identifier[serovar_prediction] . identifier[cgmlst_distance] ) identifier[null_result] = literal[string] keyword[try] : identifier[spp_roman] = identifier[spp_name_to_roman] [ identifier[spp] ] keyword[except] : identifier[spp_roman] = keyword[None] identifier[is_antigen_null] = keyword[lambda] identifier[x] :( identifier[x] keyword[is] keyword[None] keyword[or] identifier[x] == literal[string] keyword[or] identifier[x] == literal[string] ) keyword[if] identifier[antigen_predictor] . identifier[serovar] keyword[is] keyword[None] : keyword[if] identifier[is_antigen_null] ( identifier[sg] ) keyword[and] identifier[is_antigen_null] ( identifier[h1] ) keyword[and] identifier[is_antigen_null] ( identifier[h2] ): keyword[if] identifier[spp_roman] keyword[is] keyword[not] keyword[None] : identifier[serovar_prediction] . identifier[serovar] = literal[string] . identifier[format] ( identifier[spp_roman] , identifier[sg] , identifier[h1] , identifier[h2] ) keyword[else] : identifier[serovar_prediction] . identifier[serovar] = literal[string] . identifier[format] ( identifier[spp_roman] , identifier[sg] , identifier[h1] , identifier[h2] ) keyword[elif] identifier[cgmlst_serovar] keyword[is] keyword[not] keyword[None] keyword[and] identifier[cgmlst_distance] <= identifier[CGMLST_DISTANCE_THRESHOLD] : identifier[serovar_prediction] . identifier[serovar] = identifier[cgmlst_serovar] keyword[else] : identifier[serovar_prediction] . identifier[serovar] = identifier[null_result] keyword[if] literal[string] keyword[in] identifier[serovar_prediction] . identifier[__dict__] : identifier[spd] = identifier[serovar_prediction] . identifier[__dict__] identifier[mash_dist] = identifier[float] ( identifier[spd] [ literal[string] ]) keyword[if] identifier[mash_dist] <= identifier[MASH_DISTANCE_THRESHOLD] : identifier[serovar_prediction] . identifier[serovar] = identifier[spd] [ literal[string] ] keyword[else] : identifier[serovars_from_antigen] = identifier[antigen_predictor] . identifier[serovar] . identifier[split] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[serovars_from_antigen] , identifier[list] ): identifier[serovars_from_antigen] =[ identifier[serovars_from_antigen] ] keyword[if] identifier[cgmlst_serovar] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[cgmlst_serovar] keyword[in] identifier[serovars_from_antigen] : identifier[serovar_prediction] . identifier[serovar] = identifier[cgmlst_serovar] keyword[else] : keyword[if] identifier[float] ( identifier[cgmlst_distance] )<= identifier[CGMLST_DISTANCE_THRESHOLD] : identifier[serovar_prediction] . identifier[serovar] = identifier[cgmlst_serovar] keyword[elif] literal[string] keyword[in] identifier[serovar_prediction] . identifier[__dict__] : identifier[spd] = identifier[serovar_prediction] . identifier[__dict__] identifier[mash_serovar] = identifier[spd] [ literal[string] ] identifier[mash_dist] = identifier[float] ( identifier[spd] [ literal[string] ]) keyword[if] identifier[mash_serovar] keyword[in] identifier[serovars_from_antigen] : identifier[serovar_prediction] . identifier[serovar] = identifier[mash_serovar] keyword[else] : keyword[if] identifier[mash_dist] <= identifier[MASH_DISTANCE_THRESHOLD] : identifier[serovar_prediction] . identifier[serovar] = identifier[mash_serovar] keyword[if] identifier[serovar_prediction] . identifier[serovar] keyword[is] keyword[None] : identifier[serovar_prediction] . identifier[serovar] = identifier[serovar_prediction] . identifier[serovar_antigen] keyword[if] identifier[serovar_prediction] . identifier[h1] keyword[is] keyword[None] : identifier[serovar_prediction] . identifier[h1] = literal[string] keyword[if] identifier[serovar_prediction] . identifier[h2] keyword[is] keyword[None] : identifier[serovar_prediction] . identifier[h2] = literal[string] keyword[if] identifier[serovar_prediction] . identifier[serogroup] keyword[is] keyword[None] : identifier[serovar_prediction] . identifier[serogroup] = literal[string] keyword[if] identifier[serovar_prediction] . identifier[serovar_antigen] keyword[is] keyword[None] : keyword[if] identifier[spp_roman] keyword[is] keyword[not] keyword[None] : identifier[serovar_prediction] . identifier[serovar_antigen] = literal[string] . identifier[format] ( identifier[spp_roman] ) keyword[else] : identifier[serovar_prediction] . identifier[serovar_antigen] = literal[string] keyword[if] identifier[serovar_prediction] . identifier[serovar] keyword[is] keyword[None] : identifier[serovar_prediction] . identifier[serovar] = identifier[serovar_prediction] . identifier[serovar_antigen] keyword[return] identifier[serovar_prediction]
def overall_serovar_call(serovar_prediction, antigen_predictor): """ Predict serovar from cgMLST cluster membership analysis and antigen BLAST results. SerovarPrediction object is assigned H1, H2 and Serogroup from the antigen BLAST results. Antigen BLAST results will predict a particular serovar or list of serovars, however, the cgMLST membership may be able to help narrow down the list of potential serovars. Notes: If the cgMLST predicted serovar is within the list of antigen BLAST predicted serovars, then the serovar is assigned the cgMLST predicted serovar. If all antigens are found, but an antigen serovar is not found then the serovar is assigned a pseudo-antigenic formula (Serogroup:H1:H2), otherwise the serovar is assigned the cgMLST prediction. If the antigen predicted serovar does not match the cgMLST predicted serovar, - the serovar is the cgMLST serovar if the cgMLST cluster level is <= 0.1 (10% or less) - otherwise, the serovar is antigen predicted serovar(s) Args: serovar_prediction (src.serovar_prediction.SerovarPrediction): Serovar prediction results (antigen+cgMLST[+Mash]) antigen_predictor (src.serovar_prediction.SerovarPredictor): Antigen search results Returns: src.serovar_prediction.SerovarPrediction: Serovar prediction results with overall prediction from antigen + cgMLST """ assert isinstance(serovar_prediction, SerovarPrediction) assert isinstance(antigen_predictor, SerovarPredictor) h1 = antigen_predictor.h1 h2 = antigen_predictor.h2 sg = antigen_predictor.serogroup spp = serovar_prediction.cgmlst_subspecies if spp is None: if 'mash_match' in serovar_prediction.__dict__: spp = serovar_prediction.__dict__['mash_subspecies'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['spp']] serovar_prediction.serovar_antigen = antigen_predictor.serovar cgmlst_serovar = serovar_prediction.serovar_cgmlst cgmlst_distance = float(serovar_prediction.cgmlst_distance) null_result = '-:-:-' try: spp_roman = spp_name_to_roman[spp] # depends on [control=['try'], data=[]] except: spp_roman = None # depends on [control=['except'], data=[]] is_antigen_null = lambda x: x is None or x == '' or x == '-' if antigen_predictor.serovar is None: if is_antigen_null(sg) and is_antigen_null(h1) and is_antigen_null(h2): if spp_roman is not None: serovar_prediction.serovar = '{} {}:{}:{}'.format(spp_roman, sg, h1, h2) # depends on [control=['if'], data=['spp_roman']] else: serovar_prediction.serovar = '{}:{}:{}'.format(spp_roman, sg, h1, h2) # depends on [control=['if'], data=[]] elif cgmlst_serovar is not None and cgmlst_distance <= CGMLST_DISTANCE_THRESHOLD: serovar_prediction.serovar = cgmlst_serovar # depends on [control=['if'], data=[]] else: serovar_prediction.serovar = null_result if 'mash_match' in serovar_prediction.__dict__: spd = serovar_prediction.__dict__ mash_dist = float(spd['mash_distance']) if mash_dist <= MASH_DISTANCE_THRESHOLD: serovar_prediction.serovar = spd['mash_serovar'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: serovars_from_antigen = antigen_predictor.serovar.split('|') if not isinstance(serovars_from_antigen, list): serovars_from_antigen = [serovars_from_antigen] # depends on [control=['if'], data=[]] if cgmlst_serovar is not None: if cgmlst_serovar in serovars_from_antigen: serovar_prediction.serovar = cgmlst_serovar # depends on [control=['if'], data=['cgmlst_serovar']] elif float(cgmlst_distance) <= CGMLST_DISTANCE_THRESHOLD: serovar_prediction.serovar = cgmlst_serovar # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['cgmlst_serovar']] elif 'mash_match' in serovar_prediction.__dict__: spd = serovar_prediction.__dict__ mash_serovar = spd['mash_serovar'] mash_dist = float(spd['mash_distance']) if mash_serovar in serovars_from_antigen: serovar_prediction.serovar = mash_serovar # depends on [control=['if'], data=['mash_serovar']] elif mash_dist <= MASH_DISTANCE_THRESHOLD: serovar_prediction.serovar = mash_serovar # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if serovar_prediction.serovar is None: serovar_prediction.serovar = serovar_prediction.serovar_antigen # depends on [control=['if'], data=[]] if serovar_prediction.h1 is None: serovar_prediction.h1 = '-' # depends on [control=['if'], data=[]] if serovar_prediction.h2 is None: serovar_prediction.h2 = '-' # depends on [control=['if'], data=[]] if serovar_prediction.serogroup is None: serovar_prediction.serogroup = '-' # depends on [control=['if'], data=[]] if serovar_prediction.serovar_antigen is None: if spp_roman is not None: serovar_prediction.serovar_antigen = '{} -:-:-'.format(spp_roman) # depends on [control=['if'], data=['spp_roman']] else: serovar_prediction.serovar_antigen = '-:-:-' # depends on [control=['if'], data=[]] if serovar_prediction.serovar is None: serovar_prediction.serovar = serovar_prediction.serovar_antigen # depends on [control=['if'], data=[]] return serovar_prediction
def find_all(self, string, callback): """ Wrapper on iter method, callback gets an iterator result """ for index, output in self.iter(string): callback(index, output)
def function[find_all, parameter[self, string, callback]]: constant[ Wrapper on iter method, callback gets an iterator result ] for taget[tuple[[<ast.Name object at 0x7da18ede6290>, <ast.Name object at 0x7da18ede44c0>]]] in starred[call[name[self].iter, parameter[name[string]]]] begin[:] call[name[callback], parameter[name[index], name[output]]]
keyword[def] identifier[find_all] ( identifier[self] , identifier[string] , identifier[callback] ): literal[string] keyword[for] identifier[index] , identifier[output] keyword[in] identifier[self] . identifier[iter] ( identifier[string] ): identifier[callback] ( identifier[index] , identifier[output] )
def find_all(self, string, callback): """ Wrapper on iter method, callback gets an iterator result """ for (index, output) in self.iter(string): callback(index, output) # depends on [control=['for'], data=[]]
def immediateAspects(self, ID, aspList): """ Returns the last separation and next application considering a list of possible aspects. """ asps = self.aspectsByCat(ID, aspList) applications = asps[const.APPLICATIVE] separations = asps[const.SEPARATIVE] exact = asps[const.EXACT] # Get applications and separations sorted by orb applications = applications + [val for val in exact if val['orb'] >= 0] applications = sorted(applications, key=lambda var: var['orb']) separations = sorted(separations, key=lambda var: var['orb']) return ( separations[0] if separations else None, applications[0] if applications else None )
def function[immediateAspects, parameter[self, ID, aspList]]: constant[ Returns the last separation and next application considering a list of possible aspects. ] variable[asps] assign[=] call[name[self].aspectsByCat, parameter[name[ID], name[aspList]]] variable[applications] assign[=] call[name[asps]][name[const].APPLICATIVE] variable[separations] assign[=] call[name[asps]][name[const].SEPARATIVE] variable[exact] assign[=] call[name[asps]][name[const].EXACT] variable[applications] assign[=] binary_operation[name[applications] + <ast.ListComp object at 0x7da1b11a3460>] variable[applications] assign[=] call[name[sorted], parameter[name[applications]]] variable[separations] assign[=] call[name[sorted], parameter[name[separations]]] return[tuple[[<ast.IfExp object at 0x7da1b0f71cc0>, <ast.IfExp object at 0x7da1b0f71c30>]]]
keyword[def] identifier[immediateAspects] ( identifier[self] , identifier[ID] , identifier[aspList] ): literal[string] identifier[asps] = identifier[self] . identifier[aspectsByCat] ( identifier[ID] , identifier[aspList] ) identifier[applications] = identifier[asps] [ identifier[const] . identifier[APPLICATIVE] ] identifier[separations] = identifier[asps] [ identifier[const] . identifier[SEPARATIVE] ] identifier[exact] = identifier[asps] [ identifier[const] . identifier[EXACT] ] identifier[applications] = identifier[applications] +[ identifier[val] keyword[for] identifier[val] keyword[in] identifier[exact] keyword[if] identifier[val] [ literal[string] ]>= literal[int] ] identifier[applications] = identifier[sorted] ( identifier[applications] , identifier[key] = keyword[lambda] identifier[var] : identifier[var] [ literal[string] ]) identifier[separations] = identifier[sorted] ( identifier[separations] , identifier[key] = keyword[lambda] identifier[var] : identifier[var] [ literal[string] ]) keyword[return] ( identifier[separations] [ literal[int] ] keyword[if] identifier[separations] keyword[else] keyword[None] , identifier[applications] [ literal[int] ] keyword[if] identifier[applications] keyword[else] keyword[None] )
def immediateAspects(self, ID, aspList): """ Returns the last separation and next application considering a list of possible aspects. """ asps = self.aspectsByCat(ID, aspList) applications = asps[const.APPLICATIVE] separations = asps[const.SEPARATIVE] exact = asps[const.EXACT] # Get applications and separations sorted by orb applications = applications + [val for val in exact if val['orb'] >= 0] applications = sorted(applications, key=lambda var: var['orb']) separations = sorted(separations, key=lambda var: var['orb']) return (separations[0] if separations else None, applications[0] if applications else None)
def get_mesh_name(mesh_id, offline=False): """Get the MESH label for the given MESH ID. Uses the mappings table in `indra/resources`; if the MESH ID is not listed there, falls back on the NLM REST API. Parameters ---------- mesh_id : str MESH Identifier, e.g. 'D003094'. offline : bool Whether to allow queries to the NLM REST API if the given MESH ID is not contained in INDRA's internal MESH mappings file. Default is False (allows REST API queries). Returns ------- str Label for the MESH ID, or None if the query failed or no label was found. """ indra_mesh_mapping = mesh_id_to_name.get(mesh_id) if offline or indra_mesh_mapping is not None: return indra_mesh_mapping # Look up the MESH mapping from NLM if we don't have it locally return get_mesh_name_from_web(mesh_id)
def function[get_mesh_name, parameter[mesh_id, offline]]: constant[Get the MESH label for the given MESH ID. Uses the mappings table in `indra/resources`; if the MESH ID is not listed there, falls back on the NLM REST API. Parameters ---------- mesh_id : str MESH Identifier, e.g. 'D003094'. offline : bool Whether to allow queries to the NLM REST API if the given MESH ID is not contained in INDRA's internal MESH mappings file. Default is False (allows REST API queries). Returns ------- str Label for the MESH ID, or None if the query failed or no label was found. ] variable[indra_mesh_mapping] assign[=] call[name[mesh_id_to_name].get, parameter[name[mesh_id]]] if <ast.BoolOp object at 0x7da18dc9bbb0> begin[:] return[name[indra_mesh_mapping]] return[call[name[get_mesh_name_from_web], parameter[name[mesh_id]]]]
keyword[def] identifier[get_mesh_name] ( identifier[mesh_id] , identifier[offline] = keyword[False] ): literal[string] identifier[indra_mesh_mapping] = identifier[mesh_id_to_name] . identifier[get] ( identifier[mesh_id] ) keyword[if] identifier[offline] keyword[or] identifier[indra_mesh_mapping] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[indra_mesh_mapping] keyword[return] identifier[get_mesh_name_from_web] ( identifier[mesh_id] )
def get_mesh_name(mesh_id, offline=False): """Get the MESH label for the given MESH ID. Uses the mappings table in `indra/resources`; if the MESH ID is not listed there, falls back on the NLM REST API. Parameters ---------- mesh_id : str MESH Identifier, e.g. 'D003094'. offline : bool Whether to allow queries to the NLM REST API if the given MESH ID is not contained in INDRA's internal MESH mappings file. Default is False (allows REST API queries). Returns ------- str Label for the MESH ID, or None if the query failed or no label was found. """ indra_mesh_mapping = mesh_id_to_name.get(mesh_id) if offline or indra_mesh_mapping is not None: return indra_mesh_mapping # depends on [control=['if'], data=[]] # Look up the MESH mapping from NLM if we don't have it locally return get_mesh_name_from_web(mesh_id)
def get_cache_key(brain_or_object): """Generate a cache key for a common brain or object :param brain_or_object: A single catalog brain or content object :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain :returns: Cache Key :rtype: str """ key = [ get_portal_type(brain_or_object), get_id(brain_or_object), get_uid(brain_or_object), # handle different domains gracefully get_url(brain_or_object), # Return the microsecond since the epoch in GMT get_modification_date(brain_or_object).micros(), ] return "-".join(map(lambda x: str(x), key))
def function[get_cache_key, parameter[brain_or_object]]: constant[Generate a cache key for a common brain or object :param brain_or_object: A single catalog brain or content object :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain :returns: Cache Key :rtype: str ] variable[key] assign[=] list[[<ast.Call object at 0x7da2047ea770>, <ast.Call object at 0x7da2047ea1d0>, <ast.Call object at 0x7da2047ea620>, <ast.Call object at 0x7da2047eb4f0>, <ast.Call object at 0x7da2047ebac0>]] return[call[constant[-].join, parameter[call[name[map], parameter[<ast.Lambda object at 0x7da204963f40>, name[key]]]]]]
keyword[def] identifier[get_cache_key] ( identifier[brain_or_object] ): literal[string] identifier[key] =[ identifier[get_portal_type] ( identifier[brain_or_object] ), identifier[get_id] ( identifier[brain_or_object] ), identifier[get_uid] ( identifier[brain_or_object] ), identifier[get_url] ( identifier[brain_or_object] ), identifier[get_modification_date] ( identifier[brain_or_object] ). identifier[micros] (), ] keyword[return] literal[string] . identifier[join] ( identifier[map] ( keyword[lambda] identifier[x] : identifier[str] ( identifier[x] ), identifier[key] ))
def get_cache_key(brain_or_object): """Generate a cache key for a common brain or object :param brain_or_object: A single catalog brain or content object :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain :returns: Cache Key :rtype: str """ # handle different domains gracefully # Return the microsecond since the epoch in GMT key = [get_portal_type(brain_or_object), get_id(brain_or_object), get_uid(brain_or_object), get_url(brain_or_object), get_modification_date(brain_or_object).micros()] return '-'.join(map(lambda x: str(x), key))
def _ParsePage(self, parser_mediator, file_offset, page_data): """Parses a page. Args: parser_mediator (ParserMediator): parser mediator. file_offset (int): offset of the data relative from the start of the file-like object. page_data (bytes): page data. Raises: ParseError: when the page cannot be parsed. """ page_header_map = self._GetDataTypeMap('binarycookies_page_header') try: page_header = self._ReadStructureFromByteStream( page_data, file_offset, page_header_map) except (ValueError, errors.ParseError) as exception: raise errors.ParseError(( 'Unable to map page header data at offset: 0x{0:08x} with error: ' '{1!s}').format(file_offset, exception)) for record_offset in page_header.offsets: if parser_mediator.abort: break self._ParseRecord(parser_mediator, page_data, record_offset)
def function[_ParsePage, parameter[self, parser_mediator, file_offset, page_data]]: constant[Parses a page. Args: parser_mediator (ParserMediator): parser mediator. file_offset (int): offset of the data relative from the start of the file-like object. page_data (bytes): page data. Raises: ParseError: when the page cannot be parsed. ] variable[page_header_map] assign[=] call[name[self]._GetDataTypeMap, parameter[constant[binarycookies_page_header]]] <ast.Try object at 0x7da18ede43d0> for taget[name[record_offset]] in starred[name[page_header].offsets] begin[:] if name[parser_mediator].abort begin[:] break call[name[self]._ParseRecord, parameter[name[parser_mediator], name[page_data], name[record_offset]]]
keyword[def] identifier[_ParsePage] ( identifier[self] , identifier[parser_mediator] , identifier[file_offset] , identifier[page_data] ): literal[string] identifier[page_header_map] = identifier[self] . identifier[_GetDataTypeMap] ( literal[string] ) keyword[try] : identifier[page_header] = identifier[self] . identifier[_ReadStructureFromByteStream] ( identifier[page_data] , identifier[file_offset] , identifier[page_header_map] ) keyword[except] ( identifier[ValueError] , identifier[errors] . identifier[ParseError] ) keyword[as] identifier[exception] : keyword[raise] identifier[errors] . identifier[ParseError] (( literal[string] literal[string] ). identifier[format] ( identifier[file_offset] , identifier[exception] )) keyword[for] identifier[record_offset] keyword[in] identifier[page_header] . identifier[offsets] : keyword[if] identifier[parser_mediator] . identifier[abort] : keyword[break] identifier[self] . identifier[_ParseRecord] ( identifier[parser_mediator] , identifier[page_data] , identifier[record_offset] )
def _ParsePage(self, parser_mediator, file_offset, page_data): """Parses a page. Args: parser_mediator (ParserMediator): parser mediator. file_offset (int): offset of the data relative from the start of the file-like object. page_data (bytes): page data. Raises: ParseError: when the page cannot be parsed. """ page_header_map = self._GetDataTypeMap('binarycookies_page_header') try: page_header = self._ReadStructureFromByteStream(page_data, file_offset, page_header_map) # depends on [control=['try'], data=[]] except (ValueError, errors.ParseError) as exception: raise errors.ParseError('Unable to map page header data at offset: 0x{0:08x} with error: {1!s}'.format(file_offset, exception)) # depends on [control=['except'], data=['exception']] for record_offset in page_header.offsets: if parser_mediator.abort: break # depends on [control=['if'], data=[]] self._ParseRecord(parser_mediator, page_data, record_offset) # depends on [control=['for'], data=['record_offset']]
def CompareStores(self): """Compares the contents of two stores. Returns: bool: True if the content of the stores is identical. """ storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile( self._storage_file_path) if not storage_reader: logger.error( 'Format of storage file: {0:s} not supported'.format( self._storage_file_path)) return False compare_storage_reader = ( storage_factory.StorageFactory.CreateStorageReaderForFile( self._compare_storage_file_path)) if not compare_storage_reader: logger.error( 'Format of storage file: {0:s} not supported'.format( self._compare_storage_file_path)) return False try: result = self._CompareStores(storage_reader, compare_storage_reader) finally: compare_storage_reader.Close() storage_reader.Close() if result: self._output_writer.Write('Storage files are identical.\n') else: self._output_writer.Write('Storage files are different.\n') return result
def function[CompareStores, parameter[self]]: constant[Compares the contents of two stores. Returns: bool: True if the content of the stores is identical. ] variable[storage_reader] assign[=] call[name[storage_factory].StorageFactory.CreateStorageReaderForFile, parameter[name[self]._storage_file_path]] if <ast.UnaryOp object at 0x7da20c6a8ee0> begin[:] call[name[logger].error, parameter[call[constant[Format of storage file: {0:s} not supported].format, parameter[name[self]._storage_file_path]]]] return[constant[False]] variable[compare_storage_reader] assign[=] call[name[storage_factory].StorageFactory.CreateStorageReaderForFile, parameter[name[self]._compare_storage_file_path]] if <ast.UnaryOp object at 0x7da20c6a9600> begin[:] call[name[logger].error, parameter[call[constant[Format of storage file: {0:s} not supported].format, parameter[name[self]._compare_storage_file_path]]]] return[constant[False]] <ast.Try object at 0x7da20c6a8fd0> if name[result] begin[:] call[name[self]._output_writer.Write, parameter[constant[Storage files are identical. ]]] return[name[result]]
keyword[def] identifier[CompareStores] ( identifier[self] ): literal[string] identifier[storage_reader] = identifier[storage_factory] . identifier[StorageFactory] . identifier[CreateStorageReaderForFile] ( identifier[self] . identifier[_storage_file_path] ) keyword[if] keyword[not] identifier[storage_reader] : identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[self] . identifier[_storage_file_path] )) keyword[return] keyword[False] identifier[compare_storage_reader] =( identifier[storage_factory] . identifier[StorageFactory] . identifier[CreateStorageReaderForFile] ( identifier[self] . identifier[_compare_storage_file_path] )) keyword[if] keyword[not] identifier[compare_storage_reader] : identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[self] . identifier[_compare_storage_file_path] )) keyword[return] keyword[False] keyword[try] : identifier[result] = identifier[self] . identifier[_CompareStores] ( identifier[storage_reader] , identifier[compare_storage_reader] ) keyword[finally] : identifier[compare_storage_reader] . identifier[Close] () identifier[storage_reader] . identifier[Close] () keyword[if] identifier[result] : identifier[self] . identifier[_output_writer] . identifier[Write] ( literal[string] ) keyword[else] : identifier[self] . identifier[_output_writer] . identifier[Write] ( literal[string] ) keyword[return] identifier[result]
def CompareStores(self): """Compares the contents of two stores. Returns: bool: True if the content of the stores is identical. """ storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile(self._storage_file_path) if not storage_reader: logger.error('Format of storage file: {0:s} not supported'.format(self._storage_file_path)) return False # depends on [control=['if'], data=[]] compare_storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile(self._compare_storage_file_path) if not compare_storage_reader: logger.error('Format of storage file: {0:s} not supported'.format(self._compare_storage_file_path)) return False # depends on [control=['if'], data=[]] try: result = self._CompareStores(storage_reader, compare_storage_reader) # depends on [control=['try'], data=[]] finally: compare_storage_reader.Close() storage_reader.Close() if result: self._output_writer.Write('Storage files are identical.\n') # depends on [control=['if'], data=[]] else: self._output_writer.Write('Storage files are different.\n') return result
def round_geom(geom, precision=None): """Round coordinates of a geometric object to given precision.""" if geom['type'] == 'Point': x, y = geom['coordinates'] xp, yp = [x], [y] if precision is not None: xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] new_coords = tuple(zip(xp, yp))[0] if geom['type'] in ['LineString', 'MultiPoint']: xp, yp = zip(*geom['coordinates']) if precision is not None: xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] new_coords = tuple(zip(xp, yp)) elif geom['type'] in ['Polygon', 'MultiLineString']: new_coords = [] for piece in geom['coordinates']: xp, yp = zip(*piece) if precision is not None: xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] new_coords.append(tuple(zip(xp, yp))) elif geom['type'] == 'MultiPolygon': parts = geom['coordinates'] new_coords = [] for part in parts: inner_coords = [] for ring in part: xp, yp = zip(*ring) if precision is not None: xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] inner_coords.append(tuple(zip(xp, yp))) new_coords.append(inner_coords) return {'type': geom['type'], 'coordinates': new_coords}
def function[round_geom, parameter[geom, precision]]: constant[Round coordinates of a geometric object to given precision.] if compare[call[name[geom]][constant[type]] equal[==] constant[Point]] begin[:] <ast.Tuple object at 0x7da1b0f73c70> assign[=] call[name[geom]][constant[coordinates]] <ast.Tuple object at 0x7da1b0f73b20> assign[=] tuple[[<ast.List object at 0x7da1b0f73a60>, <ast.List object at 0x7da1b0f3a770>]] if compare[name[precision] is_not constant[None]] begin[:] variable[xp] assign[=] <ast.ListComp object at 0x7da1b0f396c0> variable[yp] assign[=] <ast.ListComp object at 0x7da1b0f40fa0> variable[new_coords] assign[=] call[call[name[tuple], parameter[call[name[zip], parameter[name[xp], name[yp]]]]]][constant[0]] if compare[call[name[geom]][constant[type]] in list[[<ast.Constant object at 0x7da1b0f738e0>, <ast.Constant object at 0x7da1b0f738b0>]]] begin[:] <ast.Tuple object at 0x7da1b0f73850> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b0f73760>]] if compare[name[precision] is_not constant[None]] begin[:] variable[xp] assign[=] <ast.ListComp object at 0x7da1b0f73580> variable[yp] assign[=] <ast.ListComp object at 0x7da1b0f73370> variable[new_coords] assign[=] call[name[tuple], parameter[call[name[zip], parameter[name[xp], name[yp]]]]] return[dictionary[[<ast.Constant object at 0x7da1b0f71690>, <ast.Constant object at 0x7da1b0f71660>], [<ast.Subscript object at 0x7da1b0f71630>, <ast.Name object at 0x7da1b0f715a0>]]]
keyword[def] identifier[round_geom] ( identifier[geom] , identifier[precision] = keyword[None] ): literal[string] keyword[if] identifier[geom] [ literal[string] ]== literal[string] : identifier[x] , identifier[y] = identifier[geom] [ literal[string] ] identifier[xp] , identifier[yp] =[ identifier[x] ],[ identifier[y] ] keyword[if] identifier[precision] keyword[is] keyword[not] keyword[None] : identifier[xp] =[ identifier[round] ( identifier[v] , identifier[precision] ) keyword[for] identifier[v] keyword[in] identifier[xp] ] identifier[yp] =[ identifier[round] ( identifier[v] , identifier[precision] ) keyword[for] identifier[v] keyword[in] identifier[yp] ] identifier[new_coords] = identifier[tuple] ( identifier[zip] ( identifier[xp] , identifier[yp] ))[ literal[int] ] keyword[if] identifier[geom] [ literal[string] ] keyword[in] [ literal[string] , literal[string] ]: identifier[xp] , identifier[yp] = identifier[zip] (* identifier[geom] [ literal[string] ]) keyword[if] identifier[precision] keyword[is] keyword[not] keyword[None] : identifier[xp] =[ identifier[round] ( identifier[v] , identifier[precision] ) keyword[for] identifier[v] keyword[in] identifier[xp] ] identifier[yp] =[ identifier[round] ( identifier[v] , identifier[precision] ) keyword[for] identifier[v] keyword[in] identifier[yp] ] identifier[new_coords] = identifier[tuple] ( identifier[zip] ( identifier[xp] , identifier[yp] )) keyword[elif] identifier[geom] [ literal[string] ] keyword[in] [ literal[string] , literal[string] ]: identifier[new_coords] =[] keyword[for] identifier[piece] keyword[in] identifier[geom] [ literal[string] ]: identifier[xp] , identifier[yp] = identifier[zip] (* identifier[piece] ) keyword[if] identifier[precision] keyword[is] keyword[not] keyword[None] : identifier[xp] =[ identifier[round] ( identifier[v] , identifier[precision] ) keyword[for] identifier[v] keyword[in] identifier[xp] ] identifier[yp] =[ identifier[round] ( identifier[v] , identifier[precision] ) keyword[for] identifier[v] keyword[in] identifier[yp] ] identifier[new_coords] . identifier[append] ( identifier[tuple] ( identifier[zip] ( identifier[xp] , identifier[yp] ))) keyword[elif] identifier[geom] [ literal[string] ]== literal[string] : identifier[parts] = identifier[geom] [ literal[string] ] identifier[new_coords] =[] keyword[for] identifier[part] keyword[in] identifier[parts] : identifier[inner_coords] =[] keyword[for] identifier[ring] keyword[in] identifier[part] : identifier[xp] , identifier[yp] = identifier[zip] (* identifier[ring] ) keyword[if] identifier[precision] keyword[is] keyword[not] keyword[None] : identifier[xp] =[ identifier[round] ( identifier[v] , identifier[precision] ) keyword[for] identifier[v] keyword[in] identifier[xp] ] identifier[yp] =[ identifier[round] ( identifier[v] , identifier[precision] ) keyword[for] identifier[v] keyword[in] identifier[yp] ] identifier[inner_coords] . identifier[append] ( identifier[tuple] ( identifier[zip] ( identifier[xp] , identifier[yp] ))) identifier[new_coords] . identifier[append] ( identifier[inner_coords] ) keyword[return] { literal[string] : identifier[geom] [ literal[string] ], literal[string] : identifier[new_coords] }
def round_geom(geom, precision=None): """Round coordinates of a geometric object to given precision.""" if geom['type'] == 'Point': (x, y) = geom['coordinates'] (xp, yp) = ([x], [y]) if precision is not None: xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] # depends on [control=['if'], data=['precision']] new_coords = tuple(zip(xp, yp))[0] # depends on [control=['if'], data=[]] if geom['type'] in ['LineString', 'MultiPoint']: (xp, yp) = zip(*geom['coordinates']) if precision is not None: xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] # depends on [control=['if'], data=['precision']] new_coords = tuple(zip(xp, yp)) # depends on [control=['if'], data=[]] elif geom['type'] in ['Polygon', 'MultiLineString']: new_coords = [] for piece in geom['coordinates']: (xp, yp) = zip(*piece) if precision is not None: xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] # depends on [control=['if'], data=['precision']] new_coords.append(tuple(zip(xp, yp))) # depends on [control=['for'], data=['piece']] # depends on [control=['if'], data=[]] elif geom['type'] == 'MultiPolygon': parts = geom['coordinates'] new_coords = [] for part in parts: inner_coords = [] for ring in part: (xp, yp) = zip(*ring) if precision is not None: xp = [round(v, precision) for v in xp] yp = [round(v, precision) for v in yp] # depends on [control=['if'], data=['precision']] inner_coords.append(tuple(zip(xp, yp))) # depends on [control=['for'], data=['ring']] new_coords.append(inner_coords) # depends on [control=['for'], data=['part']] # depends on [control=['if'], data=[]] return {'type': geom['type'], 'coordinates': new_coords}
def stop_loss_replace(self, accountID, orderID, **kwargs): """ Shortcut to replace a pending Stop Loss Order in an Account Args: accountID : The ID of the Account orderID : The ID of the Stop Loss Order to replace kwargs : The arguments to create a StopLossOrderRequest Returns: v20.response.Response containing the results from submitting the request """ return self.replace( accountID, orderID, order=StopLossOrderRequest(**kwargs) )
def function[stop_loss_replace, parameter[self, accountID, orderID]]: constant[ Shortcut to replace a pending Stop Loss Order in an Account Args: accountID : The ID of the Account orderID : The ID of the Stop Loss Order to replace kwargs : The arguments to create a StopLossOrderRequest Returns: v20.response.Response containing the results from submitting the request ] return[call[name[self].replace, parameter[name[accountID], name[orderID]]]]
keyword[def] identifier[stop_loss_replace] ( identifier[self] , identifier[accountID] , identifier[orderID] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[replace] ( identifier[accountID] , identifier[orderID] , identifier[order] = identifier[StopLossOrderRequest] (** identifier[kwargs] ) )
def stop_loss_replace(self, accountID, orderID, **kwargs): """ Shortcut to replace a pending Stop Loss Order in an Account Args: accountID : The ID of the Account orderID : The ID of the Stop Loss Order to replace kwargs : The arguments to create a StopLossOrderRequest Returns: v20.response.Response containing the results from submitting the request """ return self.replace(accountID, orderID, order=StopLossOrderRequest(**kwargs))
def pipe_notebook(notebook, command, fmt='py:percent', update=True, preserve_outputs=True): """Pipe the notebook, in the desired representation, to the given command. Update the notebook with the returned content if desired.""" if command in ['black', 'flake8', 'autopep8']: command = command + ' -' fmt = long_form_one_format(fmt, notebook.metadata) text = writes(notebook, fmt) process = subprocess.Popen(command.split(' '), stdout=subprocess.PIPE, stdin=subprocess.PIPE) cmd_output, err = process.communicate(input=text.encode('utf-8')) if process.returncode: sys.stderr.write("Command '{}' exited with code {}: {}" .format(command, process.returncode, err or cmd_output)) raise SystemExit(process.returncode) if not update: return notebook if not cmd_output: sys.stderr.write("[jupytext] The command '{}' had no output. As a result, the notebook is empty. " "Is this expected? If not, use --check rather than --pipe for this command.".format(command)) piped_notebook = reads(cmd_output.decode('utf-8'), fmt) if preserve_outputs: combine_inputs_with_outputs(piped_notebook, notebook, fmt) # Remove jupytext / text_representation entry piped_notebook.metadata.pop('jupytext') if 'jupytext' in notebook.metadata: piped_notebook.metadata['jupytext'] = notebook.metadata['jupytext'] return piped_notebook
def function[pipe_notebook, parameter[notebook, command, fmt, update, preserve_outputs]]: constant[Pipe the notebook, in the desired representation, to the given command. Update the notebook with the returned content if desired.] if compare[name[command] in list[[<ast.Constant object at 0x7da204962650>, <ast.Constant object at 0x7da204962230>, <ast.Constant object at 0x7da204960160>]]] begin[:] variable[command] assign[=] binary_operation[name[command] + constant[ -]] variable[fmt] assign[=] call[name[long_form_one_format], parameter[name[fmt], name[notebook].metadata]] variable[text] assign[=] call[name[writes], parameter[name[notebook], name[fmt]]] variable[process] assign[=] call[name[subprocess].Popen, parameter[call[name[command].split, parameter[constant[ ]]]]] <ast.Tuple object at 0x7da204962350> assign[=] call[name[process].communicate, parameter[]] if name[process].returncode begin[:] call[name[sys].stderr.write, parameter[call[constant[Command '{}' exited with code {}: {}].format, parameter[name[command], name[process].returncode, <ast.BoolOp object at 0x7da2054a40a0>]]]] <ast.Raise object at 0x7da2054a6140> if <ast.UnaryOp object at 0x7da2054a72e0> begin[:] return[name[notebook]] if <ast.UnaryOp object at 0x7da2054a7580> begin[:] call[name[sys].stderr.write, parameter[call[constant[[jupytext] The command '{}' had no output. As a result, the notebook is empty. Is this expected? If not, use --check rather than --pipe for this command.].format, parameter[name[command]]]]] variable[piped_notebook] assign[=] call[name[reads], parameter[call[name[cmd_output].decode, parameter[constant[utf-8]]], name[fmt]]] if name[preserve_outputs] begin[:] call[name[combine_inputs_with_outputs], parameter[name[piped_notebook], name[notebook], name[fmt]]] call[name[piped_notebook].metadata.pop, parameter[constant[jupytext]]] if compare[constant[jupytext] in name[notebook].metadata] begin[:] call[name[piped_notebook].metadata][constant[jupytext]] assign[=] call[name[notebook].metadata][constant[jupytext]] return[name[piped_notebook]]
keyword[def] identifier[pipe_notebook] ( identifier[notebook] , identifier[command] , identifier[fmt] = literal[string] , identifier[update] = keyword[True] , identifier[preserve_outputs] = keyword[True] ): literal[string] keyword[if] identifier[command] keyword[in] [ literal[string] , literal[string] , literal[string] ]: identifier[command] = identifier[command] + literal[string] identifier[fmt] = identifier[long_form_one_format] ( identifier[fmt] , identifier[notebook] . identifier[metadata] ) identifier[text] = identifier[writes] ( identifier[notebook] , identifier[fmt] ) identifier[process] = identifier[subprocess] . identifier[Popen] ( identifier[command] . identifier[split] ( literal[string] ), identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stdin] = identifier[subprocess] . identifier[PIPE] ) identifier[cmd_output] , identifier[err] = identifier[process] . identifier[communicate] ( identifier[input] = identifier[text] . identifier[encode] ( literal[string] )) keyword[if] identifier[process] . identifier[returncode] : identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] . identifier[format] ( identifier[command] , identifier[process] . identifier[returncode] , identifier[err] keyword[or] identifier[cmd_output] )) keyword[raise] identifier[SystemExit] ( identifier[process] . identifier[returncode] ) keyword[if] keyword[not] identifier[update] : keyword[return] identifier[notebook] keyword[if] keyword[not] identifier[cmd_output] : identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] literal[string] . identifier[format] ( identifier[command] )) identifier[piped_notebook] = identifier[reads] ( identifier[cmd_output] . identifier[decode] ( literal[string] ), identifier[fmt] ) keyword[if] identifier[preserve_outputs] : identifier[combine_inputs_with_outputs] ( identifier[piped_notebook] , identifier[notebook] , identifier[fmt] ) identifier[piped_notebook] . identifier[metadata] . identifier[pop] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[notebook] . identifier[metadata] : identifier[piped_notebook] . identifier[metadata] [ literal[string] ]= identifier[notebook] . identifier[metadata] [ literal[string] ] keyword[return] identifier[piped_notebook]
def pipe_notebook(notebook, command, fmt='py:percent', update=True, preserve_outputs=True): """Pipe the notebook, in the desired representation, to the given command. Update the notebook with the returned content if desired.""" if command in ['black', 'flake8', 'autopep8']: command = command + ' -' # depends on [control=['if'], data=['command']] fmt = long_form_one_format(fmt, notebook.metadata) text = writes(notebook, fmt) process = subprocess.Popen(command.split(' '), stdout=subprocess.PIPE, stdin=subprocess.PIPE) (cmd_output, err) = process.communicate(input=text.encode('utf-8')) if process.returncode: sys.stderr.write("Command '{}' exited with code {}: {}".format(command, process.returncode, err or cmd_output)) raise SystemExit(process.returncode) # depends on [control=['if'], data=[]] if not update: return notebook # depends on [control=['if'], data=[]] if not cmd_output: sys.stderr.write("[jupytext] The command '{}' had no output. As a result, the notebook is empty. Is this expected? If not, use --check rather than --pipe for this command.".format(command)) # depends on [control=['if'], data=[]] piped_notebook = reads(cmd_output.decode('utf-8'), fmt) if preserve_outputs: combine_inputs_with_outputs(piped_notebook, notebook, fmt) # depends on [control=['if'], data=[]] # Remove jupytext / text_representation entry piped_notebook.metadata.pop('jupytext') if 'jupytext' in notebook.metadata: piped_notebook.metadata['jupytext'] = notebook.metadata['jupytext'] # depends on [control=['if'], data=[]] return piped_notebook
def _encrypt_private(self, ret, dictkey, target): ''' The server equivalent of ReqChannel.crypted_transfer_decode_dictentry ''' # encrypt with a specific AES key pubfn = os.path.join(self.opts['pki_dir'], 'minions', target) key = salt.crypt.Crypticle.generate_key_string() pcrypt = salt.crypt.Crypticle( self.opts, key) try: pub = salt.crypt.get_rsa_pub_key(pubfn) except (ValueError, IndexError, TypeError): return self.crypticle.dumps({}) except IOError: log.error('AES key not found') return {'error': 'AES key not found'} pret = {} if not six.PY2: key = salt.utils.stringutils.to_bytes(key) if HAS_M2: pret['key'] = pub.public_encrypt(key, RSA.pkcs1_oaep_padding) else: cipher = PKCS1_OAEP.new(pub) pret['key'] = cipher.encrypt(key) pret[dictkey] = pcrypt.dumps( ret if ret is not False else {} ) return pret
def function[_encrypt_private, parameter[self, ret, dictkey, target]]: constant[ The server equivalent of ReqChannel.crypted_transfer_decode_dictentry ] variable[pubfn] assign[=] call[name[os].path.join, parameter[call[name[self].opts][constant[pki_dir]], constant[minions], name[target]]] variable[key] assign[=] call[name[salt].crypt.Crypticle.generate_key_string, parameter[]] variable[pcrypt] assign[=] call[name[salt].crypt.Crypticle, parameter[name[self].opts, name[key]]] <ast.Try object at 0x7da20c6c6080> variable[pret] assign[=] dictionary[[], []] if <ast.UnaryOp object at 0x7da20c6c7940> begin[:] variable[key] assign[=] call[name[salt].utils.stringutils.to_bytes, parameter[name[key]]] if name[HAS_M2] begin[:] call[name[pret]][constant[key]] assign[=] call[name[pub].public_encrypt, parameter[name[key], name[RSA].pkcs1_oaep_padding]] call[name[pret]][name[dictkey]] assign[=] call[name[pcrypt].dumps, parameter[<ast.IfExp object at 0x7da20c6c42e0>]] return[name[pret]]
keyword[def] identifier[_encrypt_private] ( identifier[self] , identifier[ret] , identifier[dictkey] , identifier[target] ): literal[string] identifier[pubfn] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[opts] [ literal[string] ], literal[string] , identifier[target] ) identifier[key] = identifier[salt] . identifier[crypt] . identifier[Crypticle] . identifier[generate_key_string] () identifier[pcrypt] = identifier[salt] . identifier[crypt] . identifier[Crypticle] ( identifier[self] . identifier[opts] , identifier[key] ) keyword[try] : identifier[pub] = identifier[salt] . identifier[crypt] . identifier[get_rsa_pub_key] ( identifier[pubfn] ) keyword[except] ( identifier[ValueError] , identifier[IndexError] , identifier[TypeError] ): keyword[return] identifier[self] . identifier[crypticle] . identifier[dumps] ({}) keyword[except] identifier[IOError] : identifier[log] . identifier[error] ( literal[string] ) keyword[return] { literal[string] : literal[string] } identifier[pret] ={} keyword[if] keyword[not] identifier[six] . identifier[PY2] : identifier[key] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_bytes] ( identifier[key] ) keyword[if] identifier[HAS_M2] : identifier[pret] [ literal[string] ]= identifier[pub] . identifier[public_encrypt] ( identifier[key] , identifier[RSA] . identifier[pkcs1_oaep_padding] ) keyword[else] : identifier[cipher] = identifier[PKCS1_OAEP] . identifier[new] ( identifier[pub] ) identifier[pret] [ literal[string] ]= identifier[cipher] . identifier[encrypt] ( identifier[key] ) identifier[pret] [ identifier[dictkey] ]= identifier[pcrypt] . identifier[dumps] ( identifier[ret] keyword[if] identifier[ret] keyword[is] keyword[not] keyword[False] keyword[else] {} ) keyword[return] identifier[pret]
def _encrypt_private(self, ret, dictkey, target): """ The server equivalent of ReqChannel.crypted_transfer_decode_dictentry """ # encrypt with a specific AES key pubfn = os.path.join(self.opts['pki_dir'], 'minions', target) key = salt.crypt.Crypticle.generate_key_string() pcrypt = salt.crypt.Crypticle(self.opts, key) try: pub = salt.crypt.get_rsa_pub_key(pubfn) # depends on [control=['try'], data=[]] except (ValueError, IndexError, TypeError): return self.crypticle.dumps({}) # depends on [control=['except'], data=[]] except IOError: log.error('AES key not found') return {'error': 'AES key not found'} # depends on [control=['except'], data=[]] pret = {} if not six.PY2: key = salt.utils.stringutils.to_bytes(key) # depends on [control=['if'], data=[]] if HAS_M2: pret['key'] = pub.public_encrypt(key, RSA.pkcs1_oaep_padding) # depends on [control=['if'], data=[]] else: cipher = PKCS1_OAEP.new(pub) pret['key'] = cipher.encrypt(key) pret[dictkey] = pcrypt.dumps(ret if ret is not False else {}) return pret
def abi_encode_args(method, args): "encode args for method: method_id|data" assert issubclass(method.im_class, NativeABIContract), method.im_class m_abi = method.im_class._get_method_abi(method) return zpad(encode_int(m_abi['id']), 4) + abi.encode_abi(m_abi['arg_types'], args)
def function[abi_encode_args, parameter[method, args]]: constant[encode args for method: method_id|data] assert[call[name[issubclass], parameter[name[method].im_class, name[NativeABIContract]]]] variable[m_abi] assign[=] call[name[method].im_class._get_method_abi, parameter[name[method]]] return[binary_operation[call[name[zpad], parameter[call[name[encode_int], parameter[call[name[m_abi]][constant[id]]]], constant[4]]] + call[name[abi].encode_abi, parameter[call[name[m_abi]][constant[arg_types]], name[args]]]]]
keyword[def] identifier[abi_encode_args] ( identifier[method] , identifier[args] ): literal[string] keyword[assert] identifier[issubclass] ( identifier[method] . identifier[im_class] , identifier[NativeABIContract] ), identifier[method] . identifier[im_class] identifier[m_abi] = identifier[method] . identifier[im_class] . identifier[_get_method_abi] ( identifier[method] ) keyword[return] identifier[zpad] ( identifier[encode_int] ( identifier[m_abi] [ literal[string] ]), literal[int] )+ identifier[abi] . identifier[encode_abi] ( identifier[m_abi] [ literal[string] ], identifier[args] )
def abi_encode_args(method, args): """encode args for method: method_id|data""" assert issubclass(method.im_class, NativeABIContract), method.im_class m_abi = method.im_class._get_method_abi(method) return zpad(encode_int(m_abi['id']), 4) + abi.encode_abi(m_abi['arg_types'], args)
def bfill(arr, dim=None, limit=None): '''backfill missing values''' axis = arr.get_axis_num(dim) # work around for bottleneck 178 _limit = limit if limit is not None else arr.shape[axis] return apply_ufunc(_bfill, arr, dask='parallelized', keep_attrs=True, output_dtypes=[arr.dtype], kwargs=dict(n=_limit, axis=axis)).transpose(*arr.dims)
def function[bfill, parameter[arr, dim, limit]]: constant[backfill missing values] variable[axis] assign[=] call[name[arr].get_axis_num, parameter[name[dim]]] variable[_limit] assign[=] <ast.IfExp object at 0x7da18dc9a710> return[call[call[name[apply_ufunc], parameter[name[_bfill], name[arr]]].transpose, parameter[<ast.Starred object at 0x7da18dc9a800>]]]
keyword[def] identifier[bfill] ( identifier[arr] , identifier[dim] = keyword[None] , identifier[limit] = keyword[None] ): literal[string] identifier[axis] = identifier[arr] . identifier[get_axis_num] ( identifier[dim] ) identifier[_limit] = identifier[limit] keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] keyword[else] identifier[arr] . identifier[shape] [ identifier[axis] ] keyword[return] identifier[apply_ufunc] ( identifier[_bfill] , identifier[arr] , identifier[dask] = literal[string] , identifier[keep_attrs] = keyword[True] , identifier[output_dtypes] =[ identifier[arr] . identifier[dtype] ], identifier[kwargs] = identifier[dict] ( identifier[n] = identifier[_limit] , identifier[axis] = identifier[axis] )). identifier[transpose] (* identifier[arr] . identifier[dims] )
def bfill(arr, dim=None, limit=None): """backfill missing values""" axis = arr.get_axis_num(dim) # work around for bottleneck 178 _limit = limit if limit is not None else arr.shape[axis] return apply_ufunc(_bfill, arr, dask='parallelized', keep_attrs=True, output_dtypes=[arr.dtype], kwargs=dict(n=_limit, axis=axis)).transpose(*arr.dims)
def insert_many(self, rows, chunk_size=1000, ensure=None, types=None): """Add many rows at a time. This is significantly faster than adding them one by one. Per default the rows are processed in chunks of 1000 per commit, unless you specify a different ``chunk_size``. See :py:meth:`insert() <dataset.Table.insert>` for details on the other parameters. :: rows = [dict(name='Dolly')] * 10000 table.insert_many(rows) """ chunk = [] for row in rows: row = self._sync_columns(row, ensure, types=types) chunk.append(row) if len(chunk) == chunk_size: chunk = pad_chunk_columns(chunk) self.table.insert().execute(chunk) chunk = [] if len(chunk): chunk = pad_chunk_columns(chunk) self.table.insert().execute(chunk)
def function[insert_many, parameter[self, rows, chunk_size, ensure, types]]: constant[Add many rows at a time. This is significantly faster than adding them one by one. Per default the rows are processed in chunks of 1000 per commit, unless you specify a different ``chunk_size``. See :py:meth:`insert() <dataset.Table.insert>` for details on the other parameters. :: rows = [dict(name='Dolly')] * 10000 table.insert_many(rows) ] variable[chunk] assign[=] list[[]] for taget[name[row]] in starred[name[rows]] begin[:] variable[row] assign[=] call[name[self]._sync_columns, parameter[name[row], name[ensure]]] call[name[chunk].append, parameter[name[row]]] if compare[call[name[len], parameter[name[chunk]]] equal[==] name[chunk_size]] begin[:] variable[chunk] assign[=] call[name[pad_chunk_columns], parameter[name[chunk]]] call[call[name[self].table.insert, parameter[]].execute, parameter[name[chunk]]] variable[chunk] assign[=] list[[]] if call[name[len], parameter[name[chunk]]] begin[:] variable[chunk] assign[=] call[name[pad_chunk_columns], parameter[name[chunk]]] call[call[name[self].table.insert, parameter[]].execute, parameter[name[chunk]]]
keyword[def] identifier[insert_many] ( identifier[self] , identifier[rows] , identifier[chunk_size] = literal[int] , identifier[ensure] = keyword[None] , identifier[types] = keyword[None] ): literal[string] identifier[chunk] =[] keyword[for] identifier[row] keyword[in] identifier[rows] : identifier[row] = identifier[self] . identifier[_sync_columns] ( identifier[row] , identifier[ensure] , identifier[types] = identifier[types] ) identifier[chunk] . identifier[append] ( identifier[row] ) keyword[if] identifier[len] ( identifier[chunk] )== identifier[chunk_size] : identifier[chunk] = identifier[pad_chunk_columns] ( identifier[chunk] ) identifier[self] . identifier[table] . identifier[insert] (). identifier[execute] ( identifier[chunk] ) identifier[chunk] =[] keyword[if] identifier[len] ( identifier[chunk] ): identifier[chunk] = identifier[pad_chunk_columns] ( identifier[chunk] ) identifier[self] . identifier[table] . identifier[insert] (). identifier[execute] ( identifier[chunk] )
def insert_many(self, rows, chunk_size=1000, ensure=None, types=None): """Add many rows at a time. This is significantly faster than adding them one by one. Per default the rows are processed in chunks of 1000 per commit, unless you specify a different ``chunk_size``. See :py:meth:`insert() <dataset.Table.insert>` for details on the other parameters. :: rows = [dict(name='Dolly')] * 10000 table.insert_many(rows) """ chunk = [] for row in rows: row = self._sync_columns(row, ensure, types=types) chunk.append(row) if len(chunk) == chunk_size: chunk = pad_chunk_columns(chunk) self.table.insert().execute(chunk) chunk = [] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']] if len(chunk): chunk = pad_chunk_columns(chunk) self.table.insert().execute(chunk) # depends on [control=['if'], data=[]]
def _get_ordered_access_keys(self): "Return ordered access keys for inspection. Used for testing." self.access_log_lock.acquire() r = [e.key for e in self.access_log if e.is_valid] self.access_log_lock.release() return r
def function[_get_ordered_access_keys, parameter[self]]: constant[Return ordered access keys for inspection. Used for testing.] call[name[self].access_log_lock.acquire, parameter[]] variable[r] assign[=] <ast.ListComp object at 0x7da1b2581c30> call[name[self].access_log_lock.release, parameter[]] return[name[r]]
keyword[def] identifier[_get_ordered_access_keys] ( identifier[self] ): literal[string] identifier[self] . identifier[access_log_lock] . identifier[acquire] () identifier[r] =[ identifier[e] . identifier[key] keyword[for] identifier[e] keyword[in] identifier[self] . identifier[access_log] keyword[if] identifier[e] . identifier[is_valid] ] identifier[self] . identifier[access_log_lock] . identifier[release] () keyword[return] identifier[r]
def _get_ordered_access_keys(self): """Return ordered access keys for inspection. Used for testing.""" self.access_log_lock.acquire() r = [e.key for e in self.access_log if e.is_valid] self.access_log_lock.release() return r
def start_auth(self, context, internal_request, get_state=stateID): """ :param get_state: Generates a state to be used in authentication call :type get_state: Callable[[str, bytes], str] :type context: satosa.context.Context :type internal_request: satosa.internal.InternalData :rtype satosa.response.Redirect """ request_args = dict( client_id=self.config['client_config']['client_id'], redirect_uri=self.redirect_url, scope=' '.join(self.config['scope']), ) cis = self.consumer.construct_AuthorizationRequest( request_args=request_args) return Redirect(cis.request(self.consumer.authorization_endpoint))
def function[start_auth, parameter[self, context, internal_request, get_state]]: constant[ :param get_state: Generates a state to be used in authentication call :type get_state: Callable[[str, bytes], str] :type context: satosa.context.Context :type internal_request: satosa.internal.InternalData :rtype satosa.response.Redirect ] variable[request_args] assign[=] call[name[dict], parameter[]] variable[cis] assign[=] call[name[self].consumer.construct_AuthorizationRequest, parameter[]] return[call[name[Redirect], parameter[call[name[cis].request, parameter[name[self].consumer.authorization_endpoint]]]]]
keyword[def] identifier[start_auth] ( identifier[self] , identifier[context] , identifier[internal_request] , identifier[get_state] = identifier[stateID] ): literal[string] identifier[request_args] = identifier[dict] ( identifier[client_id] = identifier[self] . identifier[config] [ literal[string] ][ literal[string] ], identifier[redirect_uri] = identifier[self] . identifier[redirect_url] , identifier[scope] = literal[string] . identifier[join] ( identifier[self] . identifier[config] [ literal[string] ]),) identifier[cis] = identifier[self] . identifier[consumer] . identifier[construct_AuthorizationRequest] ( identifier[request_args] = identifier[request_args] ) keyword[return] identifier[Redirect] ( identifier[cis] . identifier[request] ( identifier[self] . identifier[consumer] . identifier[authorization_endpoint] ))
def start_auth(self, context, internal_request, get_state=stateID): """ :param get_state: Generates a state to be used in authentication call :type get_state: Callable[[str, bytes], str] :type context: satosa.context.Context :type internal_request: satosa.internal.InternalData :rtype satosa.response.Redirect """ request_args = dict(client_id=self.config['client_config']['client_id'], redirect_uri=self.redirect_url, scope=' '.join(self.config['scope'])) cis = self.consumer.construct_AuthorizationRequest(request_args=request_args) return Redirect(cis.request(self.consumer.authorization_endpoint))
def open(self, inp, opts=None): """Use this to set what file to read from. """ if isinstance(inp, io.TextIOWrapper): self.input = inp elif isinstance(inp, 'string'.__class__): # FIXME self.name = inp self.input = open(inp, 'r') else: raise IOError("Invalid input type (%s) for %s" % (inp.__class__.__name__, inp)) return
def function[open, parameter[self, inp, opts]]: constant[Use this to set what file to read from. ] if call[name[isinstance], parameter[name[inp], name[io].TextIOWrapper]] begin[:] name[self].input assign[=] name[inp] return[None]
keyword[def] identifier[open] ( identifier[self] , identifier[inp] , identifier[opts] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[inp] , identifier[io] . identifier[TextIOWrapper] ): identifier[self] . identifier[input] = identifier[inp] keyword[elif] identifier[isinstance] ( identifier[inp] , literal[string] . identifier[__class__] ): identifier[self] . identifier[name] = identifier[inp] identifier[self] . identifier[input] = identifier[open] ( identifier[inp] , literal[string] ) keyword[else] : keyword[raise] identifier[IOError] ( literal[string] % ( identifier[inp] . identifier[__class__] . identifier[__name__] , identifier[inp] )) keyword[return]
def open(self, inp, opts=None): """Use this to set what file to read from. """ if isinstance(inp, io.TextIOWrapper): self.input = inp # depends on [control=['if'], data=[]] elif isinstance(inp, 'string'.__class__): # FIXME self.name = inp self.input = open(inp, 'r') # depends on [control=['if'], data=[]] else: raise IOError('Invalid input type (%s) for %s' % (inp.__class__.__name__, inp)) return
def _build_egg(egg, archive_filename, to_dir): """Build Setuptools egg.""" with archive_context(archive_filename): # building an egg log.warn('Building a Setuptools egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.')
def function[_build_egg, parameter[egg, archive_filename, to_dir]]: constant[Build Setuptools egg.] with call[name[archive_context], parameter[name[archive_filename]]] begin[:] call[name[log].warn, parameter[constant[Building a Setuptools egg in %s], name[to_dir]]] call[name[_python_cmd], parameter[constant[setup.py], constant[-q], constant[bdist_egg], constant[--dist-dir], name[to_dir]]] call[name[log].warn, parameter[name[egg]]] if <ast.UnaryOp object at 0x7da1b18c0250> begin[:] <ast.Raise object at 0x7da1b18c2b60>
keyword[def] identifier[_build_egg] ( identifier[egg] , identifier[archive_filename] , identifier[to_dir] ): literal[string] keyword[with] identifier[archive_context] ( identifier[archive_filename] ): identifier[log] . identifier[warn] ( literal[string] , identifier[to_dir] ) identifier[_python_cmd] ( literal[string] , literal[string] , literal[string] , literal[string] , identifier[to_dir] ) identifier[log] . identifier[warn] ( identifier[egg] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[egg] ): keyword[raise] identifier[IOError] ( literal[string] )
def _build_egg(egg, archive_filename, to_dir): """Build Setuptools egg.""" with archive_context(archive_filename): # building an egg log.warn('Building a Setuptools egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) # depends on [control=['with'], data=[]] # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.') # depends on [control=['if'], data=[]]
def _partial(self): """Callback for partial output.""" raw_stdout = self._process.readAllStandardOutput() stdout = handle_qbytearray(raw_stdout, self._get_encoding()) if self._partial_stdout is None: self._partial_stdout = stdout else: self._partial_stdout += stdout self.sig_partial.emit(self, stdout, None)
def function[_partial, parameter[self]]: constant[Callback for partial output.] variable[raw_stdout] assign[=] call[name[self]._process.readAllStandardOutput, parameter[]] variable[stdout] assign[=] call[name[handle_qbytearray], parameter[name[raw_stdout], call[name[self]._get_encoding, parameter[]]]] if compare[name[self]._partial_stdout is constant[None]] begin[:] name[self]._partial_stdout assign[=] name[stdout] call[name[self].sig_partial.emit, parameter[name[self], name[stdout], constant[None]]]
keyword[def] identifier[_partial] ( identifier[self] ): literal[string] identifier[raw_stdout] = identifier[self] . identifier[_process] . identifier[readAllStandardOutput] () identifier[stdout] = identifier[handle_qbytearray] ( identifier[raw_stdout] , identifier[self] . identifier[_get_encoding] ()) keyword[if] identifier[self] . identifier[_partial_stdout] keyword[is] keyword[None] : identifier[self] . identifier[_partial_stdout] = identifier[stdout] keyword[else] : identifier[self] . identifier[_partial_stdout] += identifier[stdout] identifier[self] . identifier[sig_partial] . identifier[emit] ( identifier[self] , identifier[stdout] , keyword[None] )
def _partial(self): """Callback for partial output.""" raw_stdout = self._process.readAllStandardOutput() stdout = handle_qbytearray(raw_stdout, self._get_encoding()) if self._partial_stdout is None: self._partial_stdout = stdout # depends on [control=['if'], data=[]] else: self._partial_stdout += stdout self.sig_partial.emit(self, stdout, None)
def filter_with_english_letters(buf): """ Returns a copy of ``buf`` that retains only the sequences of English alphabet and high byte characters that are not between <> characters. Also retains English alphabet and high byte characters immediately before occurrences of >. This filter can be applied to all scripts which contain both English characters and extended ASCII characters, but is currently only used by ``Latin1Prober``. """ filtered = bytearray() in_tag = False prev = 0 for curr in range(len(buf)): # Slice here to get bytes instead of an int with Python 3 buf_char = buf[curr:curr + 1] # Check if we're coming out of or entering an HTML tag if buf_char == b'>': in_tag = False elif buf_char == b'<': in_tag = True # If current character is not extended-ASCII and not alphabetic... if buf_char < b'\x80' and not buf_char.isalpha(): # ...and we're not in a tag if curr > prev and not in_tag: # Keep everything after last non-extended-ASCII, # non-alphabetic character filtered.extend(buf[prev:curr]) # Output a space to delimit stretch we kept filtered.extend(b' ') prev = curr + 1 # If we're not in a tag... if not in_tag: # Keep everything after last non-extended-ASCII, non-alphabetic # character filtered.extend(buf[prev:]) return filtered
def function[filter_with_english_letters, parameter[buf]]: constant[ Returns a copy of ``buf`` that retains only the sequences of English alphabet and high byte characters that are not between <> characters. Also retains English alphabet and high byte characters immediately before occurrences of >. This filter can be applied to all scripts which contain both English characters and extended ASCII characters, but is currently only used by ``Latin1Prober``. ] variable[filtered] assign[=] call[name[bytearray], parameter[]] variable[in_tag] assign[=] constant[False] variable[prev] assign[=] constant[0] for taget[name[curr]] in starred[call[name[range], parameter[call[name[len], parameter[name[buf]]]]]] begin[:] variable[buf_char] assign[=] call[name[buf]][<ast.Slice object at 0x7da2041d9030>] if compare[name[buf_char] equal[==] constant[b'>']] begin[:] variable[in_tag] assign[=] constant[False] if <ast.BoolOp object at 0x7da2041d9c60> begin[:] if <ast.BoolOp object at 0x7da2041d9570> begin[:] call[name[filtered].extend, parameter[call[name[buf]][<ast.Slice object at 0x7da1b1f80190>]]] call[name[filtered].extend, parameter[constant[b' ']]] variable[prev] assign[=] binary_operation[name[curr] + constant[1]] if <ast.UnaryOp object at 0x7da2044c0be0> begin[:] call[name[filtered].extend, parameter[call[name[buf]][<ast.Slice object at 0x7da20c6c5990>]]] return[name[filtered]]
keyword[def] identifier[filter_with_english_letters] ( identifier[buf] ): literal[string] identifier[filtered] = identifier[bytearray] () identifier[in_tag] = keyword[False] identifier[prev] = literal[int] keyword[for] identifier[curr] keyword[in] identifier[range] ( identifier[len] ( identifier[buf] )): identifier[buf_char] = identifier[buf] [ identifier[curr] : identifier[curr] + literal[int] ] keyword[if] identifier[buf_char] == literal[string] : identifier[in_tag] = keyword[False] keyword[elif] identifier[buf_char] == literal[string] : identifier[in_tag] = keyword[True] keyword[if] identifier[buf_char] < literal[string] keyword[and] keyword[not] identifier[buf_char] . identifier[isalpha] (): keyword[if] identifier[curr] > identifier[prev] keyword[and] keyword[not] identifier[in_tag] : identifier[filtered] . identifier[extend] ( identifier[buf] [ identifier[prev] : identifier[curr] ]) identifier[filtered] . identifier[extend] ( literal[string] ) identifier[prev] = identifier[curr] + literal[int] keyword[if] keyword[not] identifier[in_tag] : identifier[filtered] . identifier[extend] ( identifier[buf] [ identifier[prev] :]) keyword[return] identifier[filtered]
def filter_with_english_letters(buf): """ Returns a copy of ``buf`` that retains only the sequences of English alphabet and high byte characters that are not between <> characters. Also retains English alphabet and high byte characters immediately before occurrences of >. This filter can be applied to all scripts which contain both English characters and extended ASCII characters, but is currently only used by ``Latin1Prober``. """ filtered = bytearray() in_tag = False prev = 0 for curr in range(len(buf)): # Slice here to get bytes instead of an int with Python 3 buf_char = buf[curr:curr + 1] # Check if we're coming out of or entering an HTML tag if buf_char == b'>': in_tag = False # depends on [control=['if'], data=[]] elif buf_char == b'<': in_tag = True # depends on [control=['if'], data=[]] # If current character is not extended-ASCII and not alphabetic... if buf_char < b'\x80' and (not buf_char.isalpha()): # ...and we're not in a tag if curr > prev and (not in_tag): # Keep everything after last non-extended-ASCII, # non-alphabetic character filtered.extend(buf[prev:curr]) # Output a space to delimit stretch we kept filtered.extend(b' ') # depends on [control=['if'], data=[]] prev = curr + 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['curr']] # If we're not in a tag... if not in_tag: # Keep everything after last non-extended-ASCII, non-alphabetic # character filtered.extend(buf[prev:]) # depends on [control=['if'], data=[]] return filtered
def _get_distance_term(self, C, mag, rrup): """ Returns the distance scaling term """ return (C['C4'] + C['C5'] * (mag - 6.3)) *\ np.log(np.sqrt(rrup ** 2. + np.exp(C['H']) ** 2.))
def function[_get_distance_term, parameter[self, C, mag, rrup]]: constant[ Returns the distance scaling term ] return[binary_operation[binary_operation[call[name[C]][constant[C4]] + binary_operation[call[name[C]][constant[C5]] * binary_operation[name[mag] - constant[6.3]]]] * call[name[np].log, parameter[call[name[np].sqrt, parameter[binary_operation[binary_operation[name[rrup] ** constant[2.0]] + binary_operation[call[name[np].exp, parameter[call[name[C]][constant[H]]]] ** constant[2.0]]]]]]]]]
keyword[def] identifier[_get_distance_term] ( identifier[self] , identifier[C] , identifier[mag] , identifier[rrup] ): literal[string] keyword[return] ( identifier[C] [ literal[string] ]+ identifier[C] [ literal[string] ]*( identifier[mag] - literal[int] ))* identifier[np] . identifier[log] ( identifier[np] . identifier[sqrt] ( identifier[rrup] ** literal[int] + identifier[np] . identifier[exp] ( identifier[C] [ literal[string] ])** literal[int] ))
def _get_distance_term(self, C, mag, rrup): """ Returns the distance scaling term """ return (C['C4'] + C['C5'] * (mag - 6.3)) * np.log(np.sqrt(rrup ** 2.0 + np.exp(C['H']) ** 2.0))
def _parse(self, registers): """Parse the response, returning a dictionary.""" result = {'ip': self.ip, 'connected': True} decoder = BinaryPayloadDecoder.fromRegisters(registers, byteorder=Endian.Big, wordorder=Endian.Little) # Register 40001 is a collection of alarm status signals b = [decoder.decode_bits(), decoder.decode_bits()] reg_40001 = b[1] + b[0] # Bits 0-3 map to the monitor state monitor_integer = sum(1 << i for i, b in enumerate(reg_40001[:4]) if b) result['state'] = options['monitor state'][monitor_integer] # Bits 4-5 map to fault status fault_integer = sum(1 << i for i, b in enumerate(reg_40001[4:6]) if b) result['fault'] = {'status': options['fault status'][fault_integer]} # Bits 6 and 7 tell if low and high alarms are active low, high = reg_40001[6:8] result['alarm'] = options['alarm level'][low + high] # Bits 8-10 tell if internal sensor relays 1-3 are energized. Skipping. # Bit 11 is a heartbeat bit that toggles every two seconds. Skipping. # Bit 12 tells if relays are under modbus control. Skipping. # Remaining bits are empty. Skipping. # Register 40002 has a gas ID and a sensor cartridge ID. Skipping. decoder._pointer += 2 # Registers 40003-40004 are the gas concentration as a float result['concentration'] = decoder.decode_32bit_float() # Register 40005 is the concentration as an int. Skipping. decoder._pointer += 2 # Register 40006 is the number of the most important fault. fault_number = decoder.decode_16bit_uint() if fault_number != 0: code = ('m' if fault_number < 30 else 'F') + str(fault_number) result['fault']['code'] = code result['fault'].update(faults[code]) # Register 40007 holds the concentration unit in the second byte # Instead of being an int, it's the position of the up bit unit_bit = decoder.decode_bits().index(True) result['units'] = options['concentration unit'][unit_bit] decoder._pointer += 1 # Register 40008 holds the sensor temperature in Celsius result['temperature'] = decoder.decode_16bit_int() # Register 40009 holds number of hours remaining in cell life result['life'] = decoder.decode_16bit_uint() / 24.0 # Register 40010 holds the number of heartbeats (16 LSB). Skipping. decoder._pointer += 2 # Register 40011 is the sample flow rate in cc / min result['flow'] = decoder.decode_16bit_uint() # Register 40012 is blank. Skipping. decoder._pointer += 2 # Registers 40013-40016 are the alarm concentration thresholds result['low-alarm threshold'] = round(decoder.decode_32bit_float(), 6) result['high-alarm threshold'] = round(decoder.decode_32bit_float(), 6) # Despite what the manual says, thresholds are always reported in ppm. # Let's fix that to match the concentration units. if result['units'] == 'ppb': result['concentration'] *= 1000 result['low-alarm threshold'] *= 1000 result['high-alarm threshold'] *= 1000 return result
def function[_parse, parameter[self, registers]]: constant[Parse the response, returning a dictionary.] variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da20c795690>, <ast.Constant object at 0x7da20c796140>], [<ast.Attribute object at 0x7da20c795d80>, <ast.Constant object at 0x7da20c795240>]] variable[decoder] assign[=] call[name[BinaryPayloadDecoder].fromRegisters, parameter[name[registers]]] variable[b] assign[=] list[[<ast.Call object at 0x7da20c794100>, <ast.Call object at 0x7da20c795ab0>]] variable[reg_40001] assign[=] binary_operation[call[name[b]][constant[1]] + call[name[b]][constant[0]]] variable[monitor_integer] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da20c795a80>]] call[name[result]][constant[state]] assign[=] call[call[name[options]][constant[monitor state]]][name[monitor_integer]] variable[fault_integer] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18fe91f60>]] call[name[result]][constant[fault]] assign[=] dictionary[[<ast.Constant object at 0x7da18fe91540>], [<ast.Subscript object at 0x7da18fe92740>]] <ast.Tuple object at 0x7da18fe93220> assign[=] call[name[reg_40001]][<ast.Slice object at 0x7da18fe90eb0>] call[name[result]][constant[alarm]] assign[=] call[call[name[options]][constant[alarm level]]][binary_operation[name[low] + name[high]]] <ast.AugAssign object at 0x7da18fe91120> call[name[result]][constant[concentration]] assign[=] call[name[decoder].decode_32bit_float, parameter[]] <ast.AugAssign object at 0x7da18fe92620> variable[fault_number] assign[=] call[name[decoder].decode_16bit_uint, parameter[]] if compare[name[fault_number] not_equal[!=] constant[0]] begin[:] variable[code] assign[=] binary_operation[<ast.IfExp object at 0x7da18fe914b0> + call[name[str], parameter[name[fault_number]]]] call[call[name[result]][constant[fault]]][constant[code]] assign[=] name[code] call[call[name[result]][constant[fault]].update, parameter[call[name[faults]][name[code]]]] variable[unit_bit] assign[=] call[call[name[decoder].decode_bits, parameter[]].index, parameter[constant[True]]] call[name[result]][constant[units]] assign[=] call[call[name[options]][constant[concentration unit]]][name[unit_bit]] <ast.AugAssign object at 0x7da18fe913f0> call[name[result]][constant[temperature]] assign[=] call[name[decoder].decode_16bit_int, parameter[]] call[name[result]][constant[life]] assign[=] binary_operation[call[name[decoder].decode_16bit_uint, parameter[]] / constant[24.0]] <ast.AugAssign object at 0x7da18fe91a50> call[name[result]][constant[flow]] assign[=] call[name[decoder].decode_16bit_uint, parameter[]] <ast.AugAssign object at 0x7da18fe919c0> call[name[result]][constant[low-alarm threshold]] assign[=] call[name[round], parameter[call[name[decoder].decode_32bit_float, parameter[]], constant[6]]] call[name[result]][constant[high-alarm threshold]] assign[=] call[name[round], parameter[call[name[decoder].decode_32bit_float, parameter[]], constant[6]]] if compare[call[name[result]][constant[units]] equal[==] constant[ppb]] begin[:] <ast.AugAssign object at 0x7da18c4ce650> <ast.AugAssign object at 0x7da18c4cc790> <ast.AugAssign object at 0x7da18c4cfc10> return[name[result]]
keyword[def] identifier[_parse] ( identifier[self] , identifier[registers] ): literal[string] identifier[result] ={ literal[string] : identifier[self] . identifier[ip] , literal[string] : keyword[True] } identifier[decoder] = identifier[BinaryPayloadDecoder] . identifier[fromRegisters] ( identifier[registers] , identifier[byteorder] = identifier[Endian] . identifier[Big] , identifier[wordorder] = identifier[Endian] . identifier[Little] ) identifier[b] =[ identifier[decoder] . identifier[decode_bits] (), identifier[decoder] . identifier[decode_bits] ()] identifier[reg_40001] = identifier[b] [ literal[int] ]+ identifier[b] [ literal[int] ] identifier[monitor_integer] = identifier[sum] ( literal[int] << identifier[i] keyword[for] identifier[i] , identifier[b] keyword[in] identifier[enumerate] ( identifier[reg_40001] [: literal[int] ]) keyword[if] identifier[b] ) identifier[result] [ literal[string] ]= identifier[options] [ literal[string] ][ identifier[monitor_integer] ] identifier[fault_integer] = identifier[sum] ( literal[int] << identifier[i] keyword[for] identifier[i] , identifier[b] keyword[in] identifier[enumerate] ( identifier[reg_40001] [ literal[int] : literal[int] ]) keyword[if] identifier[b] ) identifier[result] [ literal[string] ]={ literal[string] : identifier[options] [ literal[string] ][ identifier[fault_integer] ]} identifier[low] , identifier[high] = identifier[reg_40001] [ literal[int] : literal[int] ] identifier[result] [ literal[string] ]= identifier[options] [ literal[string] ][ identifier[low] + identifier[high] ] identifier[decoder] . identifier[_pointer] += literal[int] identifier[result] [ literal[string] ]= identifier[decoder] . identifier[decode_32bit_float] () identifier[decoder] . identifier[_pointer] += literal[int] identifier[fault_number] = identifier[decoder] . identifier[decode_16bit_uint] () keyword[if] identifier[fault_number] != literal[int] : identifier[code] =( literal[string] keyword[if] identifier[fault_number] < literal[int] keyword[else] literal[string] )+ identifier[str] ( identifier[fault_number] ) identifier[result] [ literal[string] ][ literal[string] ]= identifier[code] identifier[result] [ literal[string] ]. identifier[update] ( identifier[faults] [ identifier[code] ]) identifier[unit_bit] = identifier[decoder] . identifier[decode_bits] (). identifier[index] ( keyword[True] ) identifier[result] [ literal[string] ]= identifier[options] [ literal[string] ][ identifier[unit_bit] ] identifier[decoder] . identifier[_pointer] += literal[int] identifier[result] [ literal[string] ]= identifier[decoder] . identifier[decode_16bit_int] () identifier[result] [ literal[string] ]= identifier[decoder] . identifier[decode_16bit_uint] ()/ literal[int] identifier[decoder] . identifier[_pointer] += literal[int] identifier[result] [ literal[string] ]= identifier[decoder] . identifier[decode_16bit_uint] () identifier[decoder] . identifier[_pointer] += literal[int] identifier[result] [ literal[string] ]= identifier[round] ( identifier[decoder] . identifier[decode_32bit_float] (), literal[int] ) identifier[result] [ literal[string] ]= identifier[round] ( identifier[decoder] . identifier[decode_32bit_float] (), literal[int] ) keyword[if] identifier[result] [ literal[string] ]== literal[string] : identifier[result] [ literal[string] ]*= literal[int] identifier[result] [ literal[string] ]*= literal[int] identifier[result] [ literal[string] ]*= literal[int] keyword[return] identifier[result]
def _parse(self, registers): """Parse the response, returning a dictionary.""" result = {'ip': self.ip, 'connected': True} decoder = BinaryPayloadDecoder.fromRegisters(registers, byteorder=Endian.Big, wordorder=Endian.Little) # Register 40001 is a collection of alarm status signals b = [decoder.decode_bits(), decoder.decode_bits()] reg_40001 = b[1] + b[0] # Bits 0-3 map to the monitor state monitor_integer = sum((1 << i for (i, b) in enumerate(reg_40001[:4]) if b)) result['state'] = options['monitor state'][monitor_integer] # Bits 4-5 map to fault status fault_integer = sum((1 << i for (i, b) in enumerate(reg_40001[4:6]) if b)) result['fault'] = {'status': options['fault status'][fault_integer]} # Bits 6 and 7 tell if low and high alarms are active (low, high) = reg_40001[6:8] result['alarm'] = options['alarm level'][low + high] # Bits 8-10 tell if internal sensor relays 1-3 are energized. Skipping. # Bit 11 is a heartbeat bit that toggles every two seconds. Skipping. # Bit 12 tells if relays are under modbus control. Skipping. # Remaining bits are empty. Skipping. # Register 40002 has a gas ID and a sensor cartridge ID. Skipping. decoder._pointer += 2 # Registers 40003-40004 are the gas concentration as a float result['concentration'] = decoder.decode_32bit_float() # Register 40005 is the concentration as an int. Skipping. decoder._pointer += 2 # Register 40006 is the number of the most important fault. fault_number = decoder.decode_16bit_uint() if fault_number != 0: code = ('m' if fault_number < 30 else 'F') + str(fault_number) result['fault']['code'] = code result['fault'].update(faults[code]) # depends on [control=['if'], data=['fault_number']] # Register 40007 holds the concentration unit in the second byte # Instead of being an int, it's the position of the up bit unit_bit = decoder.decode_bits().index(True) result['units'] = options['concentration unit'][unit_bit] decoder._pointer += 1 # Register 40008 holds the sensor temperature in Celsius result['temperature'] = decoder.decode_16bit_int() # Register 40009 holds number of hours remaining in cell life result['life'] = decoder.decode_16bit_uint() / 24.0 # Register 40010 holds the number of heartbeats (16 LSB). Skipping. decoder._pointer += 2 # Register 40011 is the sample flow rate in cc / min result['flow'] = decoder.decode_16bit_uint() # Register 40012 is blank. Skipping. decoder._pointer += 2 # Registers 40013-40016 are the alarm concentration thresholds result['low-alarm threshold'] = round(decoder.decode_32bit_float(), 6) result['high-alarm threshold'] = round(decoder.decode_32bit_float(), 6) # Despite what the manual says, thresholds are always reported in ppm. # Let's fix that to match the concentration units. if result['units'] == 'ppb': result['concentration'] *= 1000 result['low-alarm threshold'] *= 1000 result['high-alarm threshold'] *= 1000 # depends on [control=['if'], data=[]] return result
def content_type(self): """Return the value of Content-Type header field. The value for the Content-Type header field is determined from the :attr:`media_type` and :attr:`charset` data attributes. Returns: str: Value of Content-Type header field """ if (self.media_type is not None and self.media_type.startswith('text/') and self.charset is not None): return self.media_type + '; charset=' + self.charset else: return self.media_type
def function[content_type, parameter[self]]: constant[Return the value of Content-Type header field. The value for the Content-Type header field is determined from the :attr:`media_type` and :attr:`charset` data attributes. Returns: str: Value of Content-Type header field ] if <ast.BoolOp object at 0x7da1b01a53f0> begin[:] return[binary_operation[binary_operation[name[self].media_type + constant[; charset=]] + name[self].charset]]
keyword[def] identifier[content_type] ( identifier[self] ): literal[string] keyword[if] ( identifier[self] . identifier[media_type] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[media_type] . identifier[startswith] ( literal[string] ) keyword[and] identifier[self] . identifier[charset] keyword[is] keyword[not] keyword[None] ): keyword[return] identifier[self] . identifier[media_type] + literal[string] + identifier[self] . identifier[charset] keyword[else] : keyword[return] identifier[self] . identifier[media_type]
def content_type(self): """Return the value of Content-Type header field. The value for the Content-Type header field is determined from the :attr:`media_type` and :attr:`charset` data attributes. Returns: str: Value of Content-Type header field """ if self.media_type is not None and self.media_type.startswith('text/') and (self.charset is not None): return self.media_type + '; charset=' + self.charset # depends on [control=['if'], data=[]] else: return self.media_type
def extend(validator, validators=(), version=None, type_checker=None): """ Create a new validator class by extending an existing one. Arguments: validator (jsonschema.IValidator): an existing validator class validators (collections.Mapping): a mapping of new validator callables to extend with, whose structure is as in `create`. .. note:: Any validator callables with the same name as an existing one will (silently) replace the old validator callable entirely, effectively overriding any validation done in the "parent" validator class. If you wish to instead extend the behavior of a parent's validator callable, delegate and call it directly in the new validator function by retrieving it using ``OldValidator.VALIDATORS["validator_name"]``. version (str): a version for the new validator class type_checker (jsonschema.TypeChecker): a type checker, used when applying the :validator:`type` validator. If unprovided, the type checker of the extended `jsonschema.IValidator` will be carried along.` Returns: a new `jsonschema.IValidator` class extending the one provided .. note:: Meta Schemas The new validator class will have its parent's meta schema. If you wish to change or extend the meta schema in the new validator class, modify ``META_SCHEMA`` directly on the returned class. Note that no implicit copying is done, so a copy should likely be made before modifying it, in order to not affect the old validator. """ all_validators = dict(validator.VALIDATORS) all_validators.update(validators) if type_checker is None: type_checker = validator.TYPE_CHECKER elif validator._CREATED_WITH_DEFAULT_TYPES: raise TypeError( "Cannot extend a validator created with default_types " "with a type_checker. Update the validator to use a " "type_checker when created." ) return create( meta_schema=validator.META_SCHEMA, validators=all_validators, version=version, type_checker=type_checker, id_of=validator.ID_OF, )
def function[extend, parameter[validator, validators, version, type_checker]]: constant[ Create a new validator class by extending an existing one. Arguments: validator (jsonschema.IValidator): an existing validator class validators (collections.Mapping): a mapping of new validator callables to extend with, whose structure is as in `create`. .. note:: Any validator callables with the same name as an existing one will (silently) replace the old validator callable entirely, effectively overriding any validation done in the "parent" validator class. If you wish to instead extend the behavior of a parent's validator callable, delegate and call it directly in the new validator function by retrieving it using ``OldValidator.VALIDATORS["validator_name"]``. version (str): a version for the new validator class type_checker (jsonschema.TypeChecker): a type checker, used when applying the :validator:`type` validator. If unprovided, the type checker of the extended `jsonschema.IValidator` will be carried along.` Returns: a new `jsonschema.IValidator` class extending the one provided .. note:: Meta Schemas The new validator class will have its parent's meta schema. If you wish to change or extend the meta schema in the new validator class, modify ``META_SCHEMA`` directly on the returned class. Note that no implicit copying is done, so a copy should likely be made before modifying it, in order to not affect the old validator. ] variable[all_validators] assign[=] call[name[dict], parameter[name[validator].VALIDATORS]] call[name[all_validators].update, parameter[name[validators]]] if compare[name[type_checker] is constant[None]] begin[:] variable[type_checker] assign[=] name[validator].TYPE_CHECKER return[call[name[create], parameter[]]]
keyword[def] identifier[extend] ( identifier[validator] , identifier[validators] =(), identifier[version] = keyword[None] , identifier[type_checker] = keyword[None] ): literal[string] identifier[all_validators] = identifier[dict] ( identifier[validator] . identifier[VALIDATORS] ) identifier[all_validators] . identifier[update] ( identifier[validators] ) keyword[if] identifier[type_checker] keyword[is] keyword[None] : identifier[type_checker] = identifier[validator] . identifier[TYPE_CHECKER] keyword[elif] identifier[validator] . identifier[_CREATED_WITH_DEFAULT_TYPES] : keyword[raise] identifier[TypeError] ( literal[string] literal[string] literal[string] ) keyword[return] identifier[create] ( identifier[meta_schema] = identifier[validator] . identifier[META_SCHEMA] , identifier[validators] = identifier[all_validators] , identifier[version] = identifier[version] , identifier[type_checker] = identifier[type_checker] , identifier[id_of] = identifier[validator] . identifier[ID_OF] , )
def extend(validator, validators=(), version=None, type_checker=None): """ Create a new validator class by extending an existing one. Arguments: validator (jsonschema.IValidator): an existing validator class validators (collections.Mapping): a mapping of new validator callables to extend with, whose structure is as in `create`. .. note:: Any validator callables with the same name as an existing one will (silently) replace the old validator callable entirely, effectively overriding any validation done in the "parent" validator class. If you wish to instead extend the behavior of a parent's validator callable, delegate and call it directly in the new validator function by retrieving it using ``OldValidator.VALIDATORS["validator_name"]``. version (str): a version for the new validator class type_checker (jsonschema.TypeChecker): a type checker, used when applying the :validator:`type` validator. If unprovided, the type checker of the extended `jsonschema.IValidator` will be carried along.` Returns: a new `jsonschema.IValidator` class extending the one provided .. note:: Meta Schemas The new validator class will have its parent's meta schema. If you wish to change or extend the meta schema in the new validator class, modify ``META_SCHEMA`` directly on the returned class. Note that no implicit copying is done, so a copy should likely be made before modifying it, in order to not affect the old validator. """ all_validators = dict(validator.VALIDATORS) all_validators.update(validators) if type_checker is None: type_checker = validator.TYPE_CHECKER # depends on [control=['if'], data=['type_checker']] elif validator._CREATED_WITH_DEFAULT_TYPES: raise TypeError('Cannot extend a validator created with default_types with a type_checker. Update the validator to use a type_checker when created.') # depends on [control=['if'], data=[]] return create(meta_schema=validator.META_SCHEMA, validators=all_validators, version=version, type_checker=type_checker, id_of=validator.ID_OF)
def plot_ndpanel(panel, func=None, xlim='auto', ylim='auto', row_labels='auto', col_labels='auto', row_name='auto', col_name='auto', pass_slicing_meta_to_func=False, subplot_xlabel=None, subplot_ylabel=None, row_name_pad=40.0, col_name_pad=40.0, hspace=0, wspace=0, hide_tick_labels=True, hide_tick_lines=False, legend=None, legend_title=None, grid_kwargs={}, **kwargs): """Use to visualize mutli-dimensional data stored in N-dimensional pandas panels. Given an nd-panel of shape (.., .., .., rows, cols), the function creates a 2d grid of subplot of shape (rows, cols). subplot i, j calls func parameter with an (n-2) nd panel that corresponds to (..., .., .., i, j). Parameters --------------- panel : pandas Panel (3d-5d.. indexing is hard coded at the moment) items : time series generated along this axis major : rows minor : cols func : function that accepts a slice of a panel (two dimensions less than input panel) {_graph_grid_layout} pass_slicing_meta_to_func : [False | True] Changes the arguments that are passed to the provided function. If False: func(data_slice, **kwargs) (Default) If True: func(data_slice, row=row, col=col, row_value=row_value, col_value=col_value, **kwargs) grid_kwargs : dict kwargs to be passed to the create_grid_layout method. See its documentation for further details. legend : None, tuple If provided as tuple, must be a 2-d tuple corresponding to a subplot position. If legend=(2, 4), then the legend will drawn using the labels of the lines provided in subplot in 2nd row and 4th column. A better name could be subplot_source_for_legend? legend_title : str, None If provided, used as title for the legend. Returns --------------- Reference to main axis and to subplot axes. Examples ---------------- if a is a panel: plot_panel(a, func=plot, marker='o'); Code that could be useful --------------------------- # Checks number of arguments function accepts if func.func_code.co_argcount == 1: func(data) else: func(data, ax) """ auto_col_name, auto_col_labels, auto_row_name, auto_row_labels = extract_annotation(panel) shape = panel.values.shape rowNum, colNum = shape[-2], shape[-1] # Last two are used for setting up the size ndim = len(shape) if ndim < 2 or ndim > 5: raise Exception('Only dimensions between 2 and 5 are supported') if row_labels == 'auto': row_labels = auto_row_labels if col_labels == 'auto': col_labels = auto_col_labels # Figure out xlimits and y limits axis = '' # used below to autoscale subplots if xlim == 'auto': xlim = None axis += 'x' if ylim == 'auto': ylim = None axis += 'y' ax_main, ax_subplots = create_grid_layout(rowNum=rowNum, colNum=colNum, row_labels=row_labels, col_labels=col_labels, xlabel=subplot_xlabel, ylabel=subplot_ylabel, hide_tick_labels=hide_tick_labels, hide_tick_lines=hide_tick_lines, xlim=xlim, ylim=ylim, hspace=hspace, wspace=wspace, **grid_kwargs) nrange = arange(ndim) nrange = list(nrange[(nrange - 2) % ndim]) # Moves the last two dimensions to the first two if not isinstance(panel, pandas.DataFrame): panel = panel.transpose(*nrange) for (row, col), ax in numpy.ndenumerate(ax_subplots): plt.sca(ax) data_slice = panel.iloc[row].iloc[col] row_value = panel.axes[0][row] col_value = panel.axes[1][col] if pass_slicing_meta_to_func: func(data_slice, row=row, col=col, row_value=row_value, col_value=col_value, **kwargs) else: func(data_slice, **kwargs) autoscale_subplots(ax_subplots, axis) plt.sca(ax_main) if legend is not None: items, labels = ax_subplots[legend].get_legend_handles_labels() # lines = ax_subplots[legend].lines # l = pl.legend(lines , map(lambda x : x.get_label(), lines), l = pl.legend(items, labels, bbox_to_anchor=(0.9, 0.5), bbox_transform=pl.gcf().transFigure, loc='center left', numpoints=1, frameon=False) if legend_title is not None: l.set_title(legend_title) if row_name == 'auto': row_name = auto_row_name if col_name == 'auto': col_name = auto_col_name if row_name is not None: pl.xlabel(col_name, labelpad=col_name_pad) if col_name is not None: pl.ylabel(row_name, labelpad=row_name_pad) ##### # Placing ticks on the top left subplot ax_label = ax_subplots[0, -1] pl.sca(ax_label) if subplot_xlabel: xticks = numpy.array(pl.xticks()[0], dtype=object) xticks[1::2] = '' ax_label.set_xticklabels(xticks, rotation=90, size='small') if subplot_ylabel: yticks = numpy.array(pl.yticks()[0], dtype=object) yticks[1::2] = '' ax_label.set_yticklabels(yticks, rotation=0, size='small') pl.sca(ax_main) return ax_main, ax_subplots
def function[plot_ndpanel, parameter[panel, func, xlim, ylim, row_labels, col_labels, row_name, col_name, pass_slicing_meta_to_func, subplot_xlabel, subplot_ylabel, row_name_pad, col_name_pad, hspace, wspace, hide_tick_labels, hide_tick_lines, legend, legend_title, grid_kwargs]]: constant[Use to visualize mutli-dimensional data stored in N-dimensional pandas panels. Given an nd-panel of shape (.., .., .., rows, cols), the function creates a 2d grid of subplot of shape (rows, cols). subplot i, j calls func parameter with an (n-2) nd panel that corresponds to (..., .., .., i, j). Parameters --------------- panel : pandas Panel (3d-5d.. indexing is hard coded at the moment) items : time series generated along this axis major : rows minor : cols func : function that accepts a slice of a panel (two dimensions less than input panel) {_graph_grid_layout} pass_slicing_meta_to_func : [False | True] Changes the arguments that are passed to the provided function. If False: func(data_slice, **kwargs) (Default) If True: func(data_slice, row=row, col=col, row_value=row_value, col_value=col_value, **kwargs) grid_kwargs : dict kwargs to be passed to the create_grid_layout method. See its documentation for further details. legend : None, tuple If provided as tuple, must be a 2-d tuple corresponding to a subplot position. If legend=(2, 4), then the legend will drawn using the labels of the lines provided in subplot in 2nd row and 4th column. A better name could be subplot_source_for_legend? legend_title : str, None If provided, used as title for the legend. Returns --------------- Reference to main axis and to subplot axes. Examples ---------------- if a is a panel: plot_panel(a, func=plot, marker='o'); Code that could be useful --------------------------- # Checks number of arguments function accepts if func.func_code.co_argcount == 1: func(data) else: func(data, ax) ] <ast.Tuple object at 0x7da18bc71990> assign[=] call[name[extract_annotation], parameter[name[panel]]] variable[shape] assign[=] name[panel].values.shape <ast.Tuple object at 0x7da18bc73250> assign[=] tuple[[<ast.Subscript object at 0x7da18bc72050>, <ast.Subscript object at 0x7da18bc71db0>]] variable[ndim] assign[=] call[name[len], parameter[name[shape]]] if <ast.BoolOp object at 0x7da18bc73550> begin[:] <ast.Raise object at 0x7da18bc732b0> if compare[name[row_labels] equal[==] constant[auto]] begin[:] variable[row_labels] assign[=] name[auto_row_labels] if compare[name[col_labels] equal[==] constant[auto]] begin[:] variable[col_labels] assign[=] name[auto_col_labels] variable[axis] assign[=] constant[] if compare[name[xlim] equal[==] constant[auto]] begin[:] variable[xlim] assign[=] constant[None] <ast.AugAssign object at 0x7da18bc72470> if compare[name[ylim] equal[==] constant[auto]] begin[:] variable[ylim] assign[=] constant[None] <ast.AugAssign object at 0x7da18bc72e30> <ast.Tuple object at 0x7da18bc71d50> assign[=] call[name[create_grid_layout], parameter[]] variable[nrange] assign[=] call[name[arange], parameter[name[ndim]]] variable[nrange] assign[=] call[name[list], parameter[call[name[nrange]][binary_operation[binary_operation[name[nrange] - constant[2]] <ast.Mod object at 0x7da2590d6920> name[ndim]]]]] if <ast.UnaryOp object at 0x7da18bc722c0> begin[:] variable[panel] assign[=] call[name[panel].transpose, parameter[<ast.Starred object at 0x7da18bc72290>]] for taget[tuple[[<ast.Tuple object at 0x7da18bc70820>, <ast.Name object at 0x7da18bc70cd0>]]] in starred[call[name[numpy].ndenumerate, parameter[name[ax_subplots]]]] begin[:] call[name[plt].sca, parameter[name[ax]]] variable[data_slice] assign[=] call[call[name[panel].iloc][name[row]].iloc][name[col]] variable[row_value] assign[=] call[call[name[panel].axes][constant[0]]][name[row]] variable[col_value] assign[=] call[call[name[panel].axes][constant[1]]][name[col]] if name[pass_slicing_meta_to_func] begin[:] call[name[func], parameter[name[data_slice]]] call[name[autoscale_subplots], parameter[name[ax_subplots], name[axis]]] call[name[plt].sca, parameter[name[ax_main]]] if compare[name[legend] is_not constant[None]] begin[:] <ast.Tuple object at 0x7da18bc71f60> assign[=] call[call[name[ax_subplots]][name[legend]].get_legend_handles_labels, parameter[]] variable[l] assign[=] call[name[pl].legend, parameter[name[items], name[labels]]] if compare[name[legend_title] is_not constant[None]] begin[:] call[name[l].set_title, parameter[name[legend_title]]] if compare[name[row_name] equal[==] constant[auto]] begin[:] variable[row_name] assign[=] name[auto_row_name] if compare[name[col_name] equal[==] constant[auto]] begin[:] variable[col_name] assign[=] name[auto_col_name] if compare[name[row_name] is_not constant[None]] begin[:] call[name[pl].xlabel, parameter[name[col_name]]] if compare[name[col_name] is_not constant[None]] begin[:] call[name[pl].ylabel, parameter[name[row_name]]] variable[ax_label] assign[=] call[name[ax_subplots]][tuple[[<ast.Constant object at 0x7da20c7cbca0>, <ast.UnaryOp object at 0x7da20c7c9210>]]] call[name[pl].sca, parameter[name[ax_label]]] if name[subplot_xlabel] begin[:] variable[xticks] assign[=] call[name[numpy].array, parameter[call[call[name[pl].xticks, parameter[]]][constant[0]]]] call[name[xticks]][<ast.Slice object at 0x7da20c7cb070>] assign[=] constant[] call[name[ax_label].set_xticklabels, parameter[name[xticks]]] if name[subplot_ylabel] begin[:] variable[yticks] assign[=] call[name[numpy].array, parameter[call[call[name[pl].yticks, parameter[]]][constant[0]]]] call[name[yticks]][<ast.Slice object at 0x7da20c7ca590>] assign[=] constant[] call[name[ax_label].set_yticklabels, parameter[name[yticks]]] call[name[pl].sca, parameter[name[ax_main]]] return[tuple[[<ast.Name object at 0x7da20c7cae30>, <ast.Name object at 0x7da20c7cb400>]]]
keyword[def] identifier[plot_ndpanel] ( identifier[panel] , identifier[func] = keyword[None] , identifier[xlim] = literal[string] , identifier[ylim] = literal[string] , identifier[row_labels] = literal[string] , identifier[col_labels] = literal[string] , identifier[row_name] = literal[string] , identifier[col_name] = literal[string] , identifier[pass_slicing_meta_to_func] = keyword[False] , identifier[subplot_xlabel] = keyword[None] , identifier[subplot_ylabel] = keyword[None] , identifier[row_name_pad] = literal[int] , identifier[col_name_pad] = literal[int] , identifier[hspace] = literal[int] , identifier[wspace] = literal[int] , identifier[hide_tick_labels] = keyword[True] , identifier[hide_tick_lines] = keyword[False] , identifier[legend] = keyword[None] , identifier[legend_title] = keyword[None] , identifier[grid_kwargs] ={}, ** identifier[kwargs] ): literal[string] identifier[auto_col_name] , identifier[auto_col_labels] , identifier[auto_row_name] , identifier[auto_row_labels] = identifier[extract_annotation] ( identifier[panel] ) identifier[shape] = identifier[panel] . identifier[values] . identifier[shape] identifier[rowNum] , identifier[colNum] = identifier[shape] [- literal[int] ], identifier[shape] [- literal[int] ] identifier[ndim] = identifier[len] ( identifier[shape] ) keyword[if] identifier[ndim] < literal[int] keyword[or] identifier[ndim] > literal[int] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[if] identifier[row_labels] == literal[string] : identifier[row_labels] = identifier[auto_row_labels] keyword[if] identifier[col_labels] == literal[string] : identifier[col_labels] = identifier[auto_col_labels] identifier[axis] = literal[string] keyword[if] identifier[xlim] == literal[string] : identifier[xlim] = keyword[None] identifier[axis] += literal[string] keyword[if] identifier[ylim] == literal[string] : identifier[ylim] = keyword[None] identifier[axis] += literal[string] identifier[ax_main] , identifier[ax_subplots] = identifier[create_grid_layout] ( identifier[rowNum] = identifier[rowNum] , identifier[colNum] = identifier[colNum] , identifier[row_labels] = identifier[row_labels] , identifier[col_labels] = identifier[col_labels] , identifier[xlabel] = identifier[subplot_xlabel] , identifier[ylabel] = identifier[subplot_ylabel] , identifier[hide_tick_labels] = identifier[hide_tick_labels] , identifier[hide_tick_lines] = identifier[hide_tick_lines] , identifier[xlim] = identifier[xlim] , identifier[ylim] = identifier[ylim] , identifier[hspace] = identifier[hspace] , identifier[wspace] = identifier[wspace] , ** identifier[grid_kwargs] ) identifier[nrange] = identifier[arange] ( identifier[ndim] ) identifier[nrange] = identifier[list] ( identifier[nrange] [( identifier[nrange] - literal[int] )% identifier[ndim] ]) keyword[if] keyword[not] identifier[isinstance] ( identifier[panel] , identifier[pandas] . identifier[DataFrame] ): identifier[panel] = identifier[panel] . identifier[transpose] (* identifier[nrange] ) keyword[for] ( identifier[row] , identifier[col] ), identifier[ax] keyword[in] identifier[numpy] . identifier[ndenumerate] ( identifier[ax_subplots] ): identifier[plt] . identifier[sca] ( identifier[ax] ) identifier[data_slice] = identifier[panel] . identifier[iloc] [ identifier[row] ]. identifier[iloc] [ identifier[col] ] identifier[row_value] = identifier[panel] . identifier[axes] [ literal[int] ][ identifier[row] ] identifier[col_value] = identifier[panel] . identifier[axes] [ literal[int] ][ identifier[col] ] keyword[if] identifier[pass_slicing_meta_to_func] : identifier[func] ( identifier[data_slice] , identifier[row] = identifier[row] , identifier[col] = identifier[col] , identifier[row_value] = identifier[row_value] , identifier[col_value] = identifier[col_value] ,** identifier[kwargs] ) keyword[else] : identifier[func] ( identifier[data_slice] ,** identifier[kwargs] ) identifier[autoscale_subplots] ( identifier[ax_subplots] , identifier[axis] ) identifier[plt] . identifier[sca] ( identifier[ax_main] ) keyword[if] identifier[legend] keyword[is] keyword[not] keyword[None] : identifier[items] , identifier[labels] = identifier[ax_subplots] [ identifier[legend] ]. identifier[get_legend_handles_labels] () identifier[l] = identifier[pl] . identifier[legend] ( identifier[items] , identifier[labels] , identifier[bbox_to_anchor] =( literal[int] , literal[int] ), identifier[bbox_transform] = identifier[pl] . identifier[gcf] (). identifier[transFigure] , identifier[loc] = literal[string] , identifier[numpoints] = literal[int] , identifier[frameon] = keyword[False] ) keyword[if] identifier[legend_title] keyword[is] keyword[not] keyword[None] : identifier[l] . identifier[set_title] ( identifier[legend_title] ) keyword[if] identifier[row_name] == literal[string] : identifier[row_name] = identifier[auto_row_name] keyword[if] identifier[col_name] == literal[string] : identifier[col_name] = identifier[auto_col_name] keyword[if] identifier[row_name] keyword[is] keyword[not] keyword[None] : identifier[pl] . identifier[xlabel] ( identifier[col_name] , identifier[labelpad] = identifier[col_name_pad] ) keyword[if] identifier[col_name] keyword[is] keyword[not] keyword[None] : identifier[pl] . identifier[ylabel] ( identifier[row_name] , identifier[labelpad] = identifier[row_name_pad] ) identifier[ax_label] = identifier[ax_subplots] [ literal[int] ,- literal[int] ] identifier[pl] . identifier[sca] ( identifier[ax_label] ) keyword[if] identifier[subplot_xlabel] : identifier[xticks] = identifier[numpy] . identifier[array] ( identifier[pl] . identifier[xticks] ()[ literal[int] ], identifier[dtype] = identifier[object] ) identifier[xticks] [ literal[int] :: literal[int] ]= literal[string] identifier[ax_label] . identifier[set_xticklabels] ( identifier[xticks] , identifier[rotation] = literal[int] , identifier[size] = literal[string] ) keyword[if] identifier[subplot_ylabel] : identifier[yticks] = identifier[numpy] . identifier[array] ( identifier[pl] . identifier[yticks] ()[ literal[int] ], identifier[dtype] = identifier[object] ) identifier[yticks] [ literal[int] :: literal[int] ]= literal[string] identifier[ax_label] . identifier[set_yticklabels] ( identifier[yticks] , identifier[rotation] = literal[int] , identifier[size] = literal[string] ) identifier[pl] . identifier[sca] ( identifier[ax_main] ) keyword[return] identifier[ax_main] , identifier[ax_subplots]
def plot_ndpanel(panel, func=None, xlim='auto', ylim='auto', row_labels='auto', col_labels='auto', row_name='auto', col_name='auto', pass_slicing_meta_to_func=False, subplot_xlabel=None, subplot_ylabel=None, row_name_pad=40.0, col_name_pad=40.0, hspace=0, wspace=0, hide_tick_labels=True, hide_tick_lines=False, legend=None, legend_title=None, grid_kwargs={}, **kwargs): """Use to visualize mutli-dimensional data stored in N-dimensional pandas panels. Given an nd-panel of shape (.., .., .., rows, cols), the function creates a 2d grid of subplot of shape (rows, cols). subplot i, j calls func parameter with an (n-2) nd panel that corresponds to (..., .., .., i, j). Parameters --------------- panel : pandas Panel (3d-5d.. indexing is hard coded at the moment) items : time series generated along this axis major : rows minor : cols func : function that accepts a slice of a panel (two dimensions less than input panel) {_graph_grid_layout} pass_slicing_meta_to_func : [False | True] Changes the arguments that are passed to the provided function. If False: func(data_slice, **kwargs) (Default) If True: func(data_slice, row=row, col=col, row_value=row_value, col_value=col_value, **kwargs) grid_kwargs : dict kwargs to be passed to the create_grid_layout method. See its documentation for further details. legend : None, tuple If provided as tuple, must be a 2-d tuple corresponding to a subplot position. If legend=(2, 4), then the legend will drawn using the labels of the lines provided in subplot in 2nd row and 4th column. A better name could be subplot_source_for_legend? legend_title : str, None If provided, used as title for the legend. Returns --------------- Reference to main axis and to subplot axes. Examples ---------------- if a is a panel: plot_panel(a, func=plot, marker='o'); Code that could be useful --------------------------- # Checks number of arguments function accepts if func.func_code.co_argcount == 1: func(data) else: func(data, ax) """ (auto_col_name, auto_col_labels, auto_row_name, auto_row_labels) = extract_annotation(panel) shape = panel.values.shape (rowNum, colNum) = (shape[-2], shape[-1]) # Last two are used for setting up the size ndim = len(shape) if ndim < 2 or ndim > 5: raise Exception('Only dimensions between 2 and 5 are supported') # depends on [control=['if'], data=[]] if row_labels == 'auto': row_labels = auto_row_labels # depends on [control=['if'], data=['row_labels']] if col_labels == 'auto': col_labels = auto_col_labels # depends on [control=['if'], data=['col_labels']] # Figure out xlimits and y limits axis = '' # used below to autoscale subplots if xlim == 'auto': xlim = None axis += 'x' # depends on [control=['if'], data=['xlim']] if ylim == 'auto': ylim = None axis += 'y' # depends on [control=['if'], data=['ylim']] (ax_main, ax_subplots) = create_grid_layout(rowNum=rowNum, colNum=colNum, row_labels=row_labels, col_labels=col_labels, xlabel=subplot_xlabel, ylabel=subplot_ylabel, hide_tick_labels=hide_tick_labels, hide_tick_lines=hide_tick_lines, xlim=xlim, ylim=ylim, hspace=hspace, wspace=wspace, **grid_kwargs) nrange = arange(ndim) nrange = list(nrange[(nrange - 2) % ndim]) # Moves the last two dimensions to the first two if not isinstance(panel, pandas.DataFrame): panel = panel.transpose(*nrange) # depends on [control=['if'], data=[]] for ((row, col), ax) in numpy.ndenumerate(ax_subplots): plt.sca(ax) data_slice = panel.iloc[row].iloc[col] row_value = panel.axes[0][row] col_value = panel.axes[1][col] if pass_slicing_meta_to_func: func(data_slice, row=row, col=col, row_value=row_value, col_value=col_value, **kwargs) # depends on [control=['if'], data=[]] else: func(data_slice, **kwargs) # depends on [control=['for'], data=[]] autoscale_subplots(ax_subplots, axis) plt.sca(ax_main) if legend is not None: (items, labels) = ax_subplots[legend].get_legend_handles_labels() # lines = ax_subplots[legend].lines # l = pl.legend(lines , map(lambda x : x.get_label(), lines), l = pl.legend(items, labels, bbox_to_anchor=(0.9, 0.5), bbox_transform=pl.gcf().transFigure, loc='center left', numpoints=1, frameon=False) if legend_title is not None: l.set_title(legend_title) # depends on [control=['if'], data=['legend_title']] # depends on [control=['if'], data=['legend']] if row_name == 'auto': row_name = auto_row_name # depends on [control=['if'], data=['row_name']] if col_name == 'auto': col_name = auto_col_name # depends on [control=['if'], data=['col_name']] if row_name is not None: pl.xlabel(col_name, labelpad=col_name_pad) # depends on [control=['if'], data=[]] if col_name is not None: pl.ylabel(row_name, labelpad=row_name_pad) # depends on [control=['if'], data=[]] ##### # Placing ticks on the top left subplot ax_label = ax_subplots[0, -1] pl.sca(ax_label) if subplot_xlabel: xticks = numpy.array(pl.xticks()[0], dtype=object) xticks[1::2] = '' ax_label.set_xticklabels(xticks, rotation=90, size='small') # depends on [control=['if'], data=[]] if subplot_ylabel: yticks = numpy.array(pl.yticks()[0], dtype=object) yticks[1::2] = '' ax_label.set_yticklabels(yticks, rotation=0, size='small') # depends on [control=['if'], data=[]] pl.sca(ax_main) return (ax_main, ax_subplots)
def invert_features(features): """Make a dict in the form source column : set of transforms. Note that the key transform is removed. """ inverted_features = collections.defaultdict(list) for transform in six.itervalues(features): source_column = transform['source_column'] inverted_features[source_column].append(transform) return dict(inverted_features)
def function[invert_features, parameter[features]]: constant[Make a dict in the form source column : set of transforms. Note that the key transform is removed. ] variable[inverted_features] assign[=] call[name[collections].defaultdict, parameter[name[list]]] for taget[name[transform]] in starred[call[name[six].itervalues, parameter[name[features]]]] begin[:] variable[source_column] assign[=] call[name[transform]][constant[source_column]] call[call[name[inverted_features]][name[source_column]].append, parameter[name[transform]]] return[call[name[dict], parameter[name[inverted_features]]]]
keyword[def] identifier[invert_features] ( identifier[features] ): literal[string] identifier[inverted_features] = identifier[collections] . identifier[defaultdict] ( identifier[list] ) keyword[for] identifier[transform] keyword[in] identifier[six] . identifier[itervalues] ( identifier[features] ): identifier[source_column] = identifier[transform] [ literal[string] ] identifier[inverted_features] [ identifier[source_column] ]. identifier[append] ( identifier[transform] ) keyword[return] identifier[dict] ( identifier[inverted_features] )
def invert_features(features): """Make a dict in the form source column : set of transforms. Note that the key transform is removed. """ inverted_features = collections.defaultdict(list) for transform in six.itervalues(features): source_column = transform['source_column'] inverted_features[source_column].append(transform) # depends on [control=['for'], data=['transform']] return dict(inverted_features)
def _find_neighbors(self, inst, avg_dist): """ Identify nearest as well as farthest hits and misses within radius defined by average distance over whole distance array. This works the same regardless of endpoint type. """ NN_near = [] NN_far = [] min_indices = [] max_indices = [] for i in range(self._datalen): if inst != i: locator = [inst, i] if i > inst: locator.reverse() d = self._distance_array[locator[0]][locator[1]] if d < avg_dist: min_indices.append(i) if d > avg_dist: max_indices.append(i) for i in range(len(min_indices)): NN_near.append(min_indices[i]) for i in range(len(max_indices)): NN_far.append(max_indices[i]) return np.array(NN_near, dtype=np.int32), np.array(NN_far, dtype=np.int32)
def function[_find_neighbors, parameter[self, inst, avg_dist]]: constant[ Identify nearest as well as farthest hits and misses within radius defined by average distance over whole distance array. This works the same regardless of endpoint type. ] variable[NN_near] assign[=] list[[]] variable[NN_far] assign[=] list[[]] variable[min_indices] assign[=] list[[]] variable[max_indices] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[name[self]._datalen]]] begin[:] if compare[name[inst] not_equal[!=] name[i]] begin[:] variable[locator] assign[=] list[[<ast.Name object at 0x7da20e961b70>, <ast.Name object at 0x7da20e9625c0>]] if compare[name[i] greater[>] name[inst]] begin[:] call[name[locator].reverse, parameter[]] variable[d] assign[=] call[call[name[self]._distance_array][call[name[locator]][constant[0]]]][call[name[locator]][constant[1]]] if compare[name[d] less[<] name[avg_dist]] begin[:] call[name[min_indices].append, parameter[name[i]]] if compare[name[d] greater[>] name[avg_dist]] begin[:] call[name[max_indices].append, parameter[name[i]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[min_indices]]]]]] begin[:] call[name[NN_near].append, parameter[call[name[min_indices]][name[i]]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[max_indices]]]]]] begin[:] call[name[NN_far].append, parameter[call[name[max_indices]][name[i]]]] return[tuple[[<ast.Call object at 0x7da1b0c8bc40>, <ast.Call object at 0x7da1b0c8b790>]]]
keyword[def] identifier[_find_neighbors] ( identifier[self] , identifier[inst] , identifier[avg_dist] ): literal[string] identifier[NN_near] =[] identifier[NN_far] =[] identifier[min_indices] =[] identifier[max_indices] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[_datalen] ): keyword[if] identifier[inst] != identifier[i] : identifier[locator] =[ identifier[inst] , identifier[i] ] keyword[if] identifier[i] > identifier[inst] : identifier[locator] . identifier[reverse] () identifier[d] = identifier[self] . identifier[_distance_array] [ identifier[locator] [ literal[int] ]][ identifier[locator] [ literal[int] ]] keyword[if] identifier[d] < identifier[avg_dist] : identifier[min_indices] . identifier[append] ( identifier[i] ) keyword[if] identifier[d] > identifier[avg_dist] : identifier[max_indices] . identifier[append] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[min_indices] )): identifier[NN_near] . identifier[append] ( identifier[min_indices] [ identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[max_indices] )): identifier[NN_far] . identifier[append] ( identifier[max_indices] [ identifier[i] ]) keyword[return] identifier[np] . identifier[array] ( identifier[NN_near] , identifier[dtype] = identifier[np] . identifier[int32] ), identifier[np] . identifier[array] ( identifier[NN_far] , identifier[dtype] = identifier[np] . identifier[int32] )
def _find_neighbors(self, inst, avg_dist): """ Identify nearest as well as farthest hits and misses within radius defined by average distance over whole distance array. This works the same regardless of endpoint type. """ NN_near = [] NN_far = [] min_indices = [] max_indices = [] for i in range(self._datalen): if inst != i: locator = [inst, i] if i > inst: locator.reverse() # depends on [control=['if'], data=[]] d = self._distance_array[locator[0]][locator[1]] if d < avg_dist: min_indices.append(i) # depends on [control=['if'], data=[]] if d > avg_dist: max_indices.append(i) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['inst', 'i']] # depends on [control=['for'], data=['i']] for i in range(len(min_indices)): NN_near.append(min_indices[i]) # depends on [control=['for'], data=['i']] for i in range(len(max_indices)): NN_far.append(max_indices[i]) # depends on [control=['for'], data=['i']] return (np.array(NN_near, dtype=np.int32), np.array(NN_far, dtype=np.int32))
def bake(self, start_response): """Bakes the response and returns the content. Args: start_response (:obj:`callable`): Callback method that accepts status code and a list of tuples (pairs) containing headers' key and value respectively. """ if isinstance(self._content, six.text_type): self._content = self._content.encode('utf8') if self._content_length is None: self._content_length = len(self._content) self._headers[HttpResponseHeaders.CONTENT_LENGTH] = \ str(self._content_length) headers = list(self._headers.items()) cookies = [(HttpResponseHeaders.SET_COOKIE, v.OutputString()) for _, v in self._cookies.items()] if len(cookies): headers = list(headers) + cookies start_response(self._status_code, headers) if isinstance(self._content, six.binary_type): return [self._content] return self._content
def function[bake, parameter[self, start_response]]: constant[Bakes the response and returns the content. Args: start_response (:obj:`callable`): Callback method that accepts status code and a list of tuples (pairs) containing headers' key and value respectively. ] if call[name[isinstance], parameter[name[self]._content, name[six].text_type]] begin[:] name[self]._content assign[=] call[name[self]._content.encode, parameter[constant[utf8]]] if compare[name[self]._content_length is constant[None]] begin[:] name[self]._content_length assign[=] call[name[len], parameter[name[self]._content]] call[name[self]._headers][name[HttpResponseHeaders].CONTENT_LENGTH] assign[=] call[name[str], parameter[name[self]._content_length]] variable[headers] assign[=] call[name[list], parameter[call[name[self]._headers.items, parameter[]]]] variable[cookies] assign[=] <ast.ListComp object at 0x7da20e9b3d60> if call[name[len], parameter[name[cookies]]] begin[:] variable[headers] assign[=] binary_operation[call[name[list], parameter[name[headers]]] + name[cookies]] call[name[start_response], parameter[name[self]._status_code, name[headers]]] if call[name[isinstance], parameter[name[self]._content, name[six].binary_type]] begin[:] return[list[[<ast.Attribute object at 0x7da18eb563b0>]]] return[name[self]._content]
keyword[def] identifier[bake] ( identifier[self] , identifier[start_response] ): literal[string] keyword[if] identifier[isinstance] ( identifier[self] . identifier[_content] , identifier[six] . identifier[text_type] ): identifier[self] . identifier[_content] = identifier[self] . identifier[_content] . identifier[encode] ( literal[string] ) keyword[if] identifier[self] . identifier[_content_length] keyword[is] keyword[None] : identifier[self] . identifier[_content_length] = identifier[len] ( identifier[self] . identifier[_content] ) identifier[self] . identifier[_headers] [ identifier[HttpResponseHeaders] . identifier[CONTENT_LENGTH] ]= identifier[str] ( identifier[self] . identifier[_content_length] ) identifier[headers] = identifier[list] ( identifier[self] . identifier[_headers] . identifier[items] ()) identifier[cookies] =[( identifier[HttpResponseHeaders] . identifier[SET_COOKIE] , identifier[v] . identifier[OutputString] ()) keyword[for] identifier[_] , identifier[v] keyword[in] identifier[self] . identifier[_cookies] . identifier[items] ()] keyword[if] identifier[len] ( identifier[cookies] ): identifier[headers] = identifier[list] ( identifier[headers] )+ identifier[cookies] identifier[start_response] ( identifier[self] . identifier[_status_code] , identifier[headers] ) keyword[if] identifier[isinstance] ( identifier[self] . identifier[_content] , identifier[six] . identifier[binary_type] ): keyword[return] [ identifier[self] . identifier[_content] ] keyword[return] identifier[self] . identifier[_content]
def bake(self, start_response): """Bakes the response and returns the content. Args: start_response (:obj:`callable`): Callback method that accepts status code and a list of tuples (pairs) containing headers' key and value respectively. """ if isinstance(self._content, six.text_type): self._content = self._content.encode('utf8') # depends on [control=['if'], data=[]] if self._content_length is None: self._content_length = len(self._content) # depends on [control=['if'], data=[]] self._headers[HttpResponseHeaders.CONTENT_LENGTH] = str(self._content_length) headers = list(self._headers.items()) cookies = [(HttpResponseHeaders.SET_COOKIE, v.OutputString()) for (_, v) in self._cookies.items()] if len(cookies): headers = list(headers) + cookies # depends on [control=['if'], data=[]] start_response(self._status_code, headers) if isinstance(self._content, six.binary_type): return [self._content] # depends on [control=['if'], data=[]] return self._content
def verify_token(token, public_key_or_address, signing_algorithm="ES256K"): """ A function for validating an individual token. """ decoded_token = decode_token(token) decoded_token_payload = decoded_token["payload"] if "subject" not in decoded_token_payload: raise ValueError("Token doesn't have a subject") if "publicKey" not in decoded_token_payload["subject"]: raise ValueError("Token doesn't have a subject public key") if "issuer" not in decoded_token_payload: raise ValueError("Token doesn't have an issuer") if "publicKey" not in decoded_token_payload["issuer"]: raise ValueError("Token doesn't have an issuer public key") if "claim" not in decoded_token_payload: raise ValueError("Token doesn't have a claim") issuer_public_key = str(decoded_token_payload["issuer"]["publicKey"]) public_key_object = ECPublicKey(issuer_public_key) compressed_public_key = compress(issuer_public_key) decompressed_public_key = decompress(issuer_public_key) if public_key_object._type == PubkeyType.compressed: compressed_address = public_key_object.address() uncompressed_address = bin_hash160_to_address( bin_hash160( decompress(public_key_object.to_bin()) ) ) elif public_key_object._type == PubkeyType.uncompressed: compressed_address = bin_hash160_to_address( bin_hash160( compress(public_key_object.to_bin()) ) ) uncompressed_address = public_key_object.address() else: raise ValueError("Invalid issuer public key format") if public_key_or_address == compressed_public_key: pass elif public_key_or_address == decompressed_public_key: pass elif public_key_or_address == compressed_address: pass elif public_key_or_address == uncompressed_address: pass else: raise ValueError("Token public key doesn't match the verifying value") token_verifier = TokenVerifier() if not token_verifier.verify(token, public_key_object.to_pem()): raise ValueError("Token was not signed by the issuer public key") return decoded_token
def function[verify_token, parameter[token, public_key_or_address, signing_algorithm]]: constant[ A function for validating an individual token. ] variable[decoded_token] assign[=] call[name[decode_token], parameter[name[token]]] variable[decoded_token_payload] assign[=] call[name[decoded_token]][constant[payload]] if compare[constant[subject] <ast.NotIn object at 0x7da2590d7190> name[decoded_token_payload]] begin[:] <ast.Raise object at 0x7da2044c3190> if compare[constant[publicKey] <ast.NotIn object at 0x7da2590d7190> call[name[decoded_token_payload]][constant[subject]]] begin[:] <ast.Raise object at 0x7da2044c2800> if compare[constant[issuer] <ast.NotIn object at 0x7da2590d7190> name[decoded_token_payload]] begin[:] <ast.Raise object at 0x7da2044c1120> if compare[constant[publicKey] <ast.NotIn object at 0x7da2590d7190> call[name[decoded_token_payload]][constant[issuer]]] begin[:] <ast.Raise object at 0x7da2044c3010> if compare[constant[claim] <ast.NotIn object at 0x7da2590d7190> name[decoded_token_payload]] begin[:] <ast.Raise object at 0x7da2044c18a0> variable[issuer_public_key] assign[=] call[name[str], parameter[call[call[name[decoded_token_payload]][constant[issuer]]][constant[publicKey]]]] variable[public_key_object] assign[=] call[name[ECPublicKey], parameter[name[issuer_public_key]]] variable[compressed_public_key] assign[=] call[name[compress], parameter[name[issuer_public_key]]] variable[decompressed_public_key] assign[=] call[name[decompress], parameter[name[issuer_public_key]]] if compare[name[public_key_object]._type equal[==] name[PubkeyType].compressed] begin[:] variable[compressed_address] assign[=] call[name[public_key_object].address, parameter[]] variable[uncompressed_address] assign[=] call[name[bin_hash160_to_address], parameter[call[name[bin_hash160], parameter[call[name[decompress], parameter[call[name[public_key_object].to_bin, parameter[]]]]]]]] if compare[name[public_key_or_address] equal[==] name[compressed_public_key]] begin[:] pass variable[token_verifier] assign[=] call[name[TokenVerifier], parameter[]] if <ast.UnaryOp object at 0x7da20c6a8610> begin[:] <ast.Raise object at 0x7da20c6a99c0> return[name[decoded_token]]
keyword[def] identifier[verify_token] ( identifier[token] , identifier[public_key_or_address] , identifier[signing_algorithm] = literal[string] ): literal[string] identifier[decoded_token] = identifier[decode_token] ( identifier[token] ) identifier[decoded_token_payload] = identifier[decoded_token] [ literal[string] ] keyword[if] literal[string] keyword[not] keyword[in] identifier[decoded_token_payload] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[decoded_token_payload] [ literal[string] ]: keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[decoded_token_payload] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[decoded_token_payload] [ literal[string] ]: keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[decoded_token_payload] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[issuer_public_key] = identifier[str] ( identifier[decoded_token_payload] [ literal[string] ][ literal[string] ]) identifier[public_key_object] = identifier[ECPublicKey] ( identifier[issuer_public_key] ) identifier[compressed_public_key] = identifier[compress] ( identifier[issuer_public_key] ) identifier[decompressed_public_key] = identifier[decompress] ( identifier[issuer_public_key] ) keyword[if] identifier[public_key_object] . identifier[_type] == identifier[PubkeyType] . identifier[compressed] : identifier[compressed_address] = identifier[public_key_object] . identifier[address] () identifier[uncompressed_address] = identifier[bin_hash160_to_address] ( identifier[bin_hash160] ( identifier[decompress] ( identifier[public_key_object] . identifier[to_bin] ()) ) ) keyword[elif] identifier[public_key_object] . identifier[_type] == identifier[PubkeyType] . identifier[uncompressed] : identifier[compressed_address] = identifier[bin_hash160_to_address] ( identifier[bin_hash160] ( identifier[compress] ( identifier[public_key_object] . identifier[to_bin] ()) ) ) identifier[uncompressed_address] = identifier[public_key_object] . identifier[address] () keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[public_key_or_address] == identifier[compressed_public_key] : keyword[pass] keyword[elif] identifier[public_key_or_address] == identifier[decompressed_public_key] : keyword[pass] keyword[elif] identifier[public_key_or_address] == identifier[compressed_address] : keyword[pass] keyword[elif] identifier[public_key_or_address] == identifier[uncompressed_address] : keyword[pass] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[token_verifier] = identifier[TokenVerifier] () keyword[if] keyword[not] identifier[token_verifier] . identifier[verify] ( identifier[token] , identifier[public_key_object] . identifier[to_pem] ()): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[decoded_token]
def verify_token(token, public_key_or_address, signing_algorithm='ES256K'): """ A function for validating an individual token. """ decoded_token = decode_token(token) decoded_token_payload = decoded_token['payload'] if 'subject' not in decoded_token_payload: raise ValueError("Token doesn't have a subject") # depends on [control=['if'], data=[]] if 'publicKey' not in decoded_token_payload['subject']: raise ValueError("Token doesn't have a subject public key") # depends on [control=['if'], data=[]] if 'issuer' not in decoded_token_payload: raise ValueError("Token doesn't have an issuer") # depends on [control=['if'], data=[]] if 'publicKey' not in decoded_token_payload['issuer']: raise ValueError("Token doesn't have an issuer public key") # depends on [control=['if'], data=[]] if 'claim' not in decoded_token_payload: raise ValueError("Token doesn't have a claim") # depends on [control=['if'], data=[]] issuer_public_key = str(decoded_token_payload['issuer']['publicKey']) public_key_object = ECPublicKey(issuer_public_key) compressed_public_key = compress(issuer_public_key) decompressed_public_key = decompress(issuer_public_key) if public_key_object._type == PubkeyType.compressed: compressed_address = public_key_object.address() uncompressed_address = bin_hash160_to_address(bin_hash160(decompress(public_key_object.to_bin()))) # depends on [control=['if'], data=[]] elif public_key_object._type == PubkeyType.uncompressed: compressed_address = bin_hash160_to_address(bin_hash160(compress(public_key_object.to_bin()))) uncompressed_address = public_key_object.address() # depends on [control=['if'], data=[]] else: raise ValueError('Invalid issuer public key format') if public_key_or_address == compressed_public_key: pass # depends on [control=['if'], data=[]] elif public_key_or_address == decompressed_public_key: pass # depends on [control=['if'], data=[]] elif public_key_or_address == compressed_address: pass # depends on [control=['if'], data=[]] elif public_key_or_address == uncompressed_address: pass # depends on [control=['if'], data=[]] else: raise ValueError("Token public key doesn't match the verifying value") token_verifier = TokenVerifier() if not token_verifier.verify(token, public_key_object.to_pem()): raise ValueError('Token was not signed by the issuer public key') # depends on [control=['if'], data=[]] return decoded_token
def upgrade(refresh=True, dist_upgrade=False, **kwargs): ''' .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0 On minions running systemd>=205, `systemd-run(1)`_ is now used to isolate commands which modify installed packages from the ``salt-minion`` daemon's control group. This is done to keep systemd from killing any apt-get/dpkg commands spawned by Salt when the ``salt-minion`` service is restarted. (see ``KillMode`` in the `systemd.kill(5)`_ manpage for more information). If desired, usage of `systemd-run(1)`_ can be suppressed by setting a :mod:`config option <salt.modules.config.get>` called ``systemd.scope``, with a value of ``False`` (no quotes). .. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html .. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html Upgrades all packages via ``apt-get upgrade`` or ``apt-get dist-upgrade`` if ``dist_upgrade`` is ``True``. Returns a dictionary containing the changes: .. code-block:: python {'<package>': {'old': '<old-version>', 'new': '<new-version>'}} dist_upgrade Whether to perform the upgrade using dist-upgrade vs upgrade. Default is to use upgrade. .. versionadded:: 2014.7.0 cache_valid_time .. versionadded:: 2016.11.0 Skip refreshing the package database if refresh has already occurred within <value> seconds download_only Only download the packages, don't unpack or install them .. versionadded:: 2018.3.0 force_conf_new Always install the new version of any configuration files. .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt '*' pkg.upgrade ''' cache_valid_time = kwargs.pop('cache_valid_time', 0) if salt.utils.data.is_true(refresh): refresh_db(cache_valid_time) old = list_pkgs() if 'force_conf_new' in kwargs and kwargs['force_conf_new']: force_conf = '--force-confnew' else: force_conf = '--force-confold' cmd = ['apt-get', '-q', '-y', '-o', 'DPkg::Options::={0}'.format(force_conf), '-o', 'DPkg::Options::=--force-confdef'] if kwargs.get('force_yes', False): cmd.append('--force-yes') if kwargs.get('skip_verify', False): cmd.append('--allow-unauthenticated') if kwargs.get('download_only', False): cmd.append('--download-only') cmd.append('dist-upgrade' if dist_upgrade else 'upgrade') result = _call_apt(cmd, env=DPKG_ENV_VARS.copy()) __context__.pop('pkg.list_pkgs', None) new = list_pkgs() ret = salt.utils.data.compare_dicts(old, new) if result['retcode'] != 0: raise CommandExecutionError( 'Problem encountered upgrading packages', info={'changes': ret, 'result': result} ) return ret
def function[upgrade, parameter[refresh, dist_upgrade]]: constant[ .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0 On minions running systemd>=205, `systemd-run(1)`_ is now used to isolate commands which modify installed packages from the ``salt-minion`` daemon's control group. This is done to keep systemd from killing any apt-get/dpkg commands spawned by Salt when the ``salt-minion`` service is restarted. (see ``KillMode`` in the `systemd.kill(5)`_ manpage for more information). If desired, usage of `systemd-run(1)`_ can be suppressed by setting a :mod:`config option <salt.modules.config.get>` called ``systemd.scope``, with a value of ``False`` (no quotes). .. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html .. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html Upgrades all packages via ``apt-get upgrade`` or ``apt-get dist-upgrade`` if ``dist_upgrade`` is ``True``. Returns a dictionary containing the changes: .. code-block:: python {'<package>': {'old': '<old-version>', 'new': '<new-version>'}} dist_upgrade Whether to perform the upgrade using dist-upgrade vs upgrade. Default is to use upgrade. .. versionadded:: 2014.7.0 cache_valid_time .. versionadded:: 2016.11.0 Skip refreshing the package database if refresh has already occurred within <value> seconds download_only Only download the packages, don't unpack or install them .. versionadded:: 2018.3.0 force_conf_new Always install the new version of any configuration files. .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt '*' pkg.upgrade ] variable[cache_valid_time] assign[=] call[name[kwargs].pop, parameter[constant[cache_valid_time], constant[0]]] if call[name[salt].utils.data.is_true, parameter[name[refresh]]] begin[:] call[name[refresh_db], parameter[name[cache_valid_time]]] variable[old] assign[=] call[name[list_pkgs], parameter[]] if <ast.BoolOp object at 0x7da1b2186e00> begin[:] variable[force_conf] assign[=] constant[--force-confnew] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b21863e0>, <ast.Constant object at 0x7da1b21863b0>, <ast.Constant object at 0x7da1b2186380>, <ast.Constant object at 0x7da1b2186350>, <ast.Call object at 0x7da1b2186320>, <ast.Constant object at 0x7da1b2187e50>, <ast.Constant object at 0x7da1b2187e20>]] if call[name[kwargs].get, parameter[constant[force_yes], constant[False]]] begin[:] call[name[cmd].append, parameter[constant[--force-yes]]] if call[name[kwargs].get, parameter[constant[skip_verify], constant[False]]] begin[:] call[name[cmd].append, parameter[constant[--allow-unauthenticated]]] if call[name[kwargs].get, parameter[constant[download_only], constant[False]]] begin[:] call[name[cmd].append, parameter[constant[--download-only]]] call[name[cmd].append, parameter[<ast.IfExp object at 0x7da20c6ab430>]] variable[result] assign[=] call[name[_call_apt], parameter[name[cmd]]] call[name[__context__].pop, parameter[constant[pkg.list_pkgs], constant[None]]] variable[new] assign[=] call[name[list_pkgs], parameter[]] variable[ret] assign[=] call[name[salt].utils.data.compare_dicts, parameter[name[old], name[new]]] if compare[call[name[result]][constant[retcode]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da20c6a8040> return[name[ret]]
keyword[def] identifier[upgrade] ( identifier[refresh] = keyword[True] , identifier[dist_upgrade] = keyword[False] ,** identifier[kwargs] ): literal[string] identifier[cache_valid_time] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[int] ) keyword[if] identifier[salt] . identifier[utils] . identifier[data] . identifier[is_true] ( identifier[refresh] ): identifier[refresh_db] ( identifier[cache_valid_time] ) identifier[old] = identifier[list_pkgs] () keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] identifier[kwargs] [ literal[string] ]: identifier[force_conf] = literal[string] keyword[else] : identifier[force_conf] = literal[string] identifier[cmd] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] . identifier[format] ( identifier[force_conf] ), literal[string] , literal[string] ] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ): identifier[cmd] . identifier[append] ( literal[string] ) keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ): identifier[cmd] . identifier[append] ( literal[string] ) keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ): identifier[cmd] . identifier[append] ( literal[string] ) identifier[cmd] . identifier[append] ( literal[string] keyword[if] identifier[dist_upgrade] keyword[else] literal[string] ) identifier[result] = identifier[_call_apt] ( identifier[cmd] , identifier[env] = identifier[DPKG_ENV_VARS] . identifier[copy] ()) identifier[__context__] . identifier[pop] ( literal[string] , keyword[None] ) identifier[new] = identifier[list_pkgs] () identifier[ret] = identifier[salt] . identifier[utils] . identifier[data] . identifier[compare_dicts] ( identifier[old] , identifier[new] ) keyword[if] identifier[result] [ literal[string] ]!= literal[int] : keyword[raise] identifier[CommandExecutionError] ( literal[string] , identifier[info] ={ literal[string] : identifier[ret] , literal[string] : identifier[result] } ) keyword[return] identifier[ret]
def upgrade(refresh=True, dist_upgrade=False, **kwargs): """ .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0 On minions running systemd>=205, `systemd-run(1)`_ is now used to isolate commands which modify installed packages from the ``salt-minion`` daemon's control group. This is done to keep systemd from killing any apt-get/dpkg commands spawned by Salt when the ``salt-minion`` service is restarted. (see ``KillMode`` in the `systemd.kill(5)`_ manpage for more information). If desired, usage of `systemd-run(1)`_ can be suppressed by setting a :mod:`config option <salt.modules.config.get>` called ``systemd.scope``, with a value of ``False`` (no quotes). .. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html .. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html Upgrades all packages via ``apt-get upgrade`` or ``apt-get dist-upgrade`` if ``dist_upgrade`` is ``True``. Returns a dictionary containing the changes: .. code-block:: python {'<package>': {'old': '<old-version>', 'new': '<new-version>'}} dist_upgrade Whether to perform the upgrade using dist-upgrade vs upgrade. Default is to use upgrade. .. versionadded:: 2014.7.0 cache_valid_time .. versionadded:: 2016.11.0 Skip refreshing the package database if refresh has already occurred within <value> seconds download_only Only download the packages, don't unpack or install them .. versionadded:: 2018.3.0 force_conf_new Always install the new version of any configuration files. .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt '*' pkg.upgrade """ cache_valid_time = kwargs.pop('cache_valid_time', 0) if salt.utils.data.is_true(refresh): refresh_db(cache_valid_time) # depends on [control=['if'], data=[]] old = list_pkgs() if 'force_conf_new' in kwargs and kwargs['force_conf_new']: force_conf = '--force-confnew' # depends on [control=['if'], data=[]] else: force_conf = '--force-confold' cmd = ['apt-get', '-q', '-y', '-o', 'DPkg::Options::={0}'.format(force_conf), '-o', 'DPkg::Options::=--force-confdef'] if kwargs.get('force_yes', False): cmd.append('--force-yes') # depends on [control=['if'], data=[]] if kwargs.get('skip_verify', False): cmd.append('--allow-unauthenticated') # depends on [control=['if'], data=[]] if kwargs.get('download_only', False): cmd.append('--download-only') # depends on [control=['if'], data=[]] cmd.append('dist-upgrade' if dist_upgrade else 'upgrade') result = _call_apt(cmd, env=DPKG_ENV_VARS.copy()) __context__.pop('pkg.list_pkgs', None) new = list_pkgs() ret = salt.utils.data.compare_dicts(old, new) if result['retcode'] != 0: raise CommandExecutionError('Problem encountered upgrading packages', info={'changes': ret, 'result': result}) # depends on [control=['if'], data=[]] return ret
def _get_record(self): """Fetch a given record. Handles fetching a record from the database along with throwing an appropriate instance of ``AJAXError`. """ if not self.pk: raise AJAXError(400, _('Invalid request for record.')) try: return self.model.objects.get(pk=self.pk) except self.model.DoesNotExist: raise AJAXError(404, _('%s with id of "%s" not found.') % ( self.model.__name__, self.pk))
def function[_get_record, parameter[self]]: constant[Fetch a given record. Handles fetching a record from the database along with throwing an appropriate instance of ``AJAXError`. ] if <ast.UnaryOp object at 0x7da1b1150a90> begin[:] <ast.Raise object at 0x7da1b1152bf0> <ast.Try object at 0x7da1b1152b00>
keyword[def] identifier[_get_record] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[pk] : keyword[raise] identifier[AJAXError] ( literal[int] , identifier[_] ( literal[string] )) keyword[try] : keyword[return] identifier[self] . identifier[model] . identifier[objects] . identifier[get] ( identifier[pk] = identifier[self] . identifier[pk] ) keyword[except] identifier[self] . identifier[model] . identifier[DoesNotExist] : keyword[raise] identifier[AJAXError] ( literal[int] , identifier[_] ( literal[string] )%( identifier[self] . identifier[model] . identifier[__name__] , identifier[self] . identifier[pk] ))
def _get_record(self): """Fetch a given record. Handles fetching a record from the database along with throwing an appropriate instance of ``AJAXError`. """ if not self.pk: raise AJAXError(400, _('Invalid request for record.')) # depends on [control=['if'], data=[]] try: return self.model.objects.get(pk=self.pk) # depends on [control=['try'], data=[]] except self.model.DoesNotExist: raise AJAXError(404, _('%s with id of "%s" not found.') % (self.model.__name__, self.pk)) # depends on [control=['except'], data=[]]
def _get_cached_style_urls(self, asset_url_path): """ Gets the URLs of the cached styles. """ try: cached_styles = os.listdir(self.cache_path) except IOError as ex: if ex.errno != errno.ENOENT and ex.errno != errno.ESRCH: raise return [] except OSError: return [] return [posixpath.join(asset_url_path, style) for style in cached_styles if style.endswith('.css')]
def function[_get_cached_style_urls, parameter[self, asset_url_path]]: constant[ Gets the URLs of the cached styles. ] <ast.Try object at 0x7da1b1dc5270> return[<ast.ListComp object at 0x7da1b1dc4fa0>]
keyword[def] identifier[_get_cached_style_urls] ( identifier[self] , identifier[asset_url_path] ): literal[string] keyword[try] : identifier[cached_styles] = identifier[os] . identifier[listdir] ( identifier[self] . identifier[cache_path] ) keyword[except] identifier[IOError] keyword[as] identifier[ex] : keyword[if] identifier[ex] . identifier[errno] != identifier[errno] . identifier[ENOENT] keyword[and] identifier[ex] . identifier[errno] != identifier[errno] . identifier[ESRCH] : keyword[raise] keyword[return] [] keyword[except] identifier[OSError] : keyword[return] [] keyword[return] [ identifier[posixpath] . identifier[join] ( identifier[asset_url_path] , identifier[style] ) keyword[for] identifier[style] keyword[in] identifier[cached_styles] keyword[if] identifier[style] . identifier[endswith] ( literal[string] )]
def _get_cached_style_urls(self, asset_url_path): """ Gets the URLs of the cached styles. """ try: cached_styles = os.listdir(self.cache_path) # depends on [control=['try'], data=[]] except IOError as ex: if ex.errno != errno.ENOENT and ex.errno != errno.ESRCH: raise # depends on [control=['if'], data=[]] return [] # depends on [control=['except'], data=['ex']] except OSError: return [] # depends on [control=['except'], data=[]] return [posixpath.join(asset_url_path, style) for style in cached_styles if style.endswith('.css')]