code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def convert_words_to_float(high_word, low_word):
    """Convert two words to an IEEE-754 single-precision float.

    The two 16-bit words are first combined into a 32-bit unsigned
    integer, whose byte pattern is then reinterpreted as a big-endian
    float.

    Args:
        high_word: Most-significant 16-bit word.
        low_word: Least-significant 16-bit word.

    Returns:
        Tuple of (value, success). On any failure the result is
        (0.0, False).
    """
    number, retval = convert_words_to_uint(high_word, low_word)
    if not retval:
        return 0.0, False
    try:
        # Pack the integer and unpack the same bytes as a float
        # (both big-endian) to reinterpret the bit pattern.
        packed_float = struct.pack('>l', number)
        return struct.unpack('>f', packed_float)[0], True
    except struct.error:
        # '>l' is signed, so values >= 2**31 overflow; a bare `except:`
        # previously hid unrelated errors as well.
        return 0.0, False
def function[convert_words_to_float, parameter[high_word, low_word]]: constant[Convert two words to a floating point] <ast.Tuple object at 0x7da207f019c0> assign[=] call[name[convert_words_to_uint], parameter[name[high_word], name[low_word]]] if <ast.UnaryOp object at 0x7da207f01bd0> begin[:] return[tuple[[<ast.Constant object at 0x7da207f03520>, <ast.Constant object at 0x7da207f029e0>]]] <ast.Try object at 0x7da207f03ca0>
keyword[def] identifier[convert_words_to_float] ( identifier[high_word] , identifier[low_word] ): literal[string] identifier[number] , identifier[retval] = identifier[convert_words_to_uint] ( identifier[high_word] , identifier[low_word] ) keyword[if] keyword[not] identifier[retval] : keyword[return] literal[int] , keyword[False] keyword[try] : identifier[packed_float] = identifier[struct] . identifier[pack] ( literal[string] , identifier[number] ) keyword[return] identifier[struct] . identifier[unpack] ( literal[string] , identifier[packed_float] )[ literal[int] ], keyword[True] keyword[except] : keyword[return] literal[int] , keyword[False]
def convert_words_to_float(high_word, low_word): """Convert two words to a floating point""" (number, retval) = convert_words_to_uint(high_word, low_word) if not retval: return (0.0, False) # depends on [control=['if'], data=[]] try: packed_float = struct.pack('>l', number) return (struct.unpack('>f', packed_float)[0], True) # depends on [control=['try'], data=[]] except: return (0.0, False) # depends on [control=['except'], data=[]]
def is_dir_exists(dirpath):
    # type: (AnyStr) -> bool
    """Check the existence of folder path.

    Args:
        dirpath: Path to test; may be None.

    Returns:
        True if *dirpath* names an existing directory, False otherwise
        (including when *dirpath* is None).
    """
    # os.path.isdir already returns False for nonexistent paths, so the
    # separate os.path.exists check was redundant. The None guard is
    # still required because os.path.isdir(None) raises TypeError.
    return dirpath is not None and os.path.isdir(dirpath)
def function[is_dir_exists, parameter[dirpath]]: constant[Check the existence of folder path.] if <ast.BoolOp object at 0x7da1b25869b0> begin[:] return[constant[False]]
keyword[def] identifier[is_dir_exists] ( identifier[dirpath] ): literal[string] keyword[if] identifier[dirpath] keyword[is] keyword[None] keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dirpath] ) keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dirpath] ): keyword[return] keyword[False] keyword[else] : keyword[return] keyword[True]
def is_dir_exists(dirpath): # type: (AnyStr) -> bool 'Check the existence of folder path.' if dirpath is None or not os.path.exists(dirpath) or (not os.path.isdir(dirpath)): return False # depends on [control=['if'], data=[]] else: return True
def unmount(self):
    """Unmounts the sftp system if it's currently mounted."""
    if not self.mounted:
        return

    # First attempt: ask FUSE to release the mount point cleanly.
    unmount_cmd = 'fusermount -u %s' % self.mount_point_local
    shell_exec(unmount_cmd)

    if self.mounted:
        # The filesystem is probably still in use: kill sshfs, after
        # which the very same fusermount command will succeed.
        self._kill()
        shell_exec(unmount_cmd)

    self._mount_point_local_delete()
def function[unmount, parameter[self]]: constant[Unmounts the sftp system if it's currently mounted.] if <ast.UnaryOp object at 0x7da20e9b0520> begin[:] return[None] variable[cmd] assign[=] binary_operation[constant[fusermount -u %s] <ast.Mod object at 0x7da2590d6920> name[self].mount_point_local] call[name[shell_exec], parameter[name[cmd]]] if name[self].mounted begin[:] call[name[self]._kill, parameter[]] call[name[shell_exec], parameter[name[cmd]]] call[name[self]._mount_point_local_delete, parameter[]]
keyword[def] identifier[unmount] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[mounted] : keyword[return] identifier[cmd] = literal[string] % identifier[self] . identifier[mount_point_local] identifier[shell_exec] ( identifier[cmd] ) keyword[if] identifier[self] . identifier[mounted] : identifier[self] . identifier[_kill] () identifier[shell_exec] ( identifier[cmd] ) identifier[self] . identifier[_mount_point_local_delete] ()
def unmount(self): """Unmounts the sftp system if it's currently mounted.""" if not self.mounted: return # depends on [control=['if'], data=[]] # Try to unmount properly. cmd = 'fusermount -u %s' % self.mount_point_local shell_exec(cmd) # The filesystem is probably still in use. # kill sshfs and re-run this same command (which will work then). if self.mounted: self._kill() shell_exec(cmd) # depends on [control=['if'], data=[]] self._mount_point_local_delete()
def text_path(self, text):
    """Adds closed paths for text to the current path.

    The generated path, if filled, achieves an effect similar to that
    of :meth:`show_text`. Text conversion and positioning is done
    similar to :meth:`show_text`.

    Like :meth:`show_text`, after this call the current point is moved
    to the origin of where the next glyph would be placed in this same
    progression: the current point will be at the origin of the final
    glyph offset by its advance values. This allows chaining multiple
    calls to :meth:`text_path` without setting the current point in
    between.

    :param text: The text to show, as an Unicode or UTF-8 string.

    .. note::

        The :meth:`text_path` method is part of what the cairo
        designers call the "toy" text API. It is convenient for short
        demos and simple programs, but it is not expected to be
        adequate for serious text-using applications. See :ref:`fonts`
        for details, and :meth:`glyph_path` for the "real" text path
        API in cairo.

    """
    encoded = _encode_string(text)
    cairo.cairo_text_path(self._pointer, encoded)
    self._check_status()
def function[text_path, parameter[self, text]]: constant[Adds closed paths for text to the current path. The generated path if filled, achieves an effect similar to that of :meth:`show_text`. Text conversion and positioning is done similar to :meth:`show_text`. Like :meth:`show_text`, after this call the current point is moved to the origin of where the next glyph would be placed in this same progression. That is, the current point will be at the origin of the final glyph offset by its advance values. This allows for chaining multiple calls to to :meth:`text_path` without having to set current point in between. :param text: The text to show, as an Unicode or UTF-8 string. .. note:: The :meth:`text_path` method is part of what the cairo designers call the "toy" text API. It is convenient for short demos and simple programs, but it is not expected to be adequate for serious text-using applications. See :ref:`fonts` for details, and :meth:`glyph_path` for the "real" text path API in cairo. ] call[name[cairo].cairo_text_path, parameter[name[self]._pointer, call[name[_encode_string], parameter[name[text]]]]] call[name[self]._check_status, parameter[]]
keyword[def] identifier[text_path] ( identifier[self] , identifier[text] ): literal[string] identifier[cairo] . identifier[cairo_text_path] ( identifier[self] . identifier[_pointer] , identifier[_encode_string] ( identifier[text] )) identifier[self] . identifier[_check_status] ()
def text_path(self, text): """Adds closed paths for text to the current path. The generated path if filled, achieves an effect similar to that of :meth:`show_text`. Text conversion and positioning is done similar to :meth:`show_text`. Like :meth:`show_text`, after this call the current point is moved to the origin of where the next glyph would be placed in this same progression. That is, the current point will be at the origin of the final glyph offset by its advance values. This allows for chaining multiple calls to to :meth:`text_path` without having to set current point in between. :param text: The text to show, as an Unicode or UTF-8 string. .. note:: The :meth:`text_path` method is part of what the cairo designers call the "toy" text API. It is convenient for short demos and simple programs, but it is not expected to be adequate for serious text-using applications. See :ref:`fonts` for details, and :meth:`glyph_path` for the "real" text path API in cairo. """ cairo.cairo_text_path(self._pointer, _encode_string(text)) self._check_status()
def create_placeholder_weld_object(data):
    """Helper method that creates a WeldObject that evaluates to itself.

    Parameters
    ----------
    data : numpy.ndarray or WeldObject or str
        Data to wrap around. If str, it is a placeholder or 'str' literal.

    Returns
    -------
    WeldObject
        WeldObject wrapped around data.

    """
    obj_id, weld_obj = create_weld_object(data)
    # str.format already stringifies its argument, so the explicit
    # str() wrapper in the original was redundant.
    weld_obj.weld_code = '{}'.format(obj_id)
    return weld_obj
def function[create_placeholder_weld_object, parameter[data]]: constant[Helper method that creates a WeldObject that evaluates to itself. Parameters ---------- data : numpy.ndarray or WeldObject or str Data to wrap around. If str, it is a placeholder or 'str' literal. Returns ------- WeldObject WeldObject wrapped around data. ] <ast.Tuple object at 0x7da1b0ad9ba0> assign[=] call[name[create_weld_object], parameter[name[data]]] name[weld_obj].weld_code assign[=] call[constant[{}].format, parameter[call[name[str], parameter[name[obj_id]]]]] return[name[weld_obj]]
keyword[def] identifier[create_placeholder_weld_object] ( identifier[data] ): literal[string] identifier[obj_id] , identifier[weld_obj] = identifier[create_weld_object] ( identifier[data] ) identifier[weld_obj] . identifier[weld_code] = literal[string] . identifier[format] ( identifier[str] ( identifier[obj_id] )) keyword[return] identifier[weld_obj]
def create_placeholder_weld_object(data): """Helper method that creates a WeldObject that evaluates to itself. Parameters ---------- data : numpy.ndarray or WeldObject or str Data to wrap around. If str, it is a placeholder or 'str' literal. Returns ------- WeldObject WeldObject wrapped around data. """ (obj_id, weld_obj) = create_weld_object(data) weld_obj.weld_code = '{}'.format(str(obj_id)) return weld_obj
def do_action(self, target, dry_run=False):
    """
    :param target: Full path and filename
    :param dry_run: True - don't actually perform action. False: perform action.
                    No effect for this rule.
    :return: filename: Full path and filename after action completes
    """
    if dry_run is False:
        try:
            name = os.path.basename(target)
            byte_count = os.path.getsize(target)
        except OSError:
            # File vanished or is unreadable — log and carry on.
            self.logger.error("Error getting size for file: {0}".format(target))
        else:
            print("{0}\t{1}".format(name, byte_count))
    return target
def function[do_action, parameter[self, target, dry_run]]: constant[ :param target: Full path and filename :param dry_run: True - don't actually perform action. False: perform action. No effect for this rule. :return: filename: Full path and filename after action completes ] if compare[name[dry_run] is constant[False]] begin[:] <ast.Try object at 0x7da1b162a710> return[name[target]]
keyword[def] identifier[do_action] ( identifier[self] , identifier[target] , identifier[dry_run] = keyword[False] ): literal[string] keyword[if] identifier[dry_run] keyword[is] keyword[False] : keyword[try] : identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[target] ) identifier[size] = identifier[os] . identifier[path] . identifier[getsize] ( identifier[target] ) identifier[print] ( literal[string] . identifier[format] ( identifier[filename] , identifier[size] )) keyword[except] identifier[OSError] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[target] )) keyword[return] identifier[target]
def do_action(self, target, dry_run=False): """ :param target: Full path and filename :param dry_run: True - don't actually perform action. False: perform action. No effect for this rule. :return: filename: Full path and filename after action completes """ if dry_run is False: try: filename = os.path.basename(target) size = os.path.getsize(target) print('{0}\t{1}'.format(filename, size)) # depends on [control=['try'], data=[]] except OSError: self.logger.error('Error getting size for file: {0}'.format(target)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] return target
def __add_stack(self, span, limit=None):
    """ Adds a backtrace to this span

    :param span: span object whose ``stack`` attribute is (re)populated
    :param limit: optional maximum number of frames to record; ``None``
        records every non-excluded frame
    """
    span.stack = []
    frame_count = 0
    tb = traceback.extract_stack()
    # extract_stack returns oldest-first; reverse so the innermost
    # frame comes first in the recorded stack.
    tb.reverse()
    for frame in tb:
        if limit is not None and frame_count >= limit:
            break

        # Exclude Instana frames unless we're in dev mode
        if "INSTANA_DEV" not in os.environ:
            # frame tuple layout: (filename, lineno, name, line);
            # filter by filename and by function name.
            if re_tracer_frame.search(frame[0]) is not None:
                continue

            if re_with_stan_frame.search(frame[2]) is not None:
                continue

        span.stack.append({
            "c": frame[0],
            "n": frame[1],
            "m": frame[2]
        })
        # NOTE(review): frame_count only advances when limit is set, so
        # an unlimited call never increments it — appears intentional.
        if limit is not None:
            frame_count += 1
def function[__add_stack, parameter[self, span, limit]]: constant[ Adds a backtrace to this span ] name[span].stack assign[=] list[[]] variable[frame_count] assign[=] constant[0] variable[tb] assign[=] call[name[traceback].extract_stack, parameter[]] call[name[tb].reverse, parameter[]] for taget[name[frame]] in starred[name[tb]] begin[:] if <ast.BoolOp object at 0x7da1b032bf10> begin[:] break if compare[constant[INSTANA_DEV] <ast.NotIn object at 0x7da2590d7190> name[os].environ] begin[:] if compare[call[name[re_tracer_frame].search, parameter[call[name[frame]][constant[0]]]] is_not constant[None]] begin[:] continue if compare[call[name[re_with_stan_frame].search, parameter[call[name[frame]][constant[2]]]] is_not constant[None]] begin[:] continue call[name[span].stack.append, parameter[dictionary[[<ast.Constant object at 0x7da1b0301090>, <ast.Constant object at 0x7da1b0302050>, <ast.Constant object at 0x7da1b03030d0>], [<ast.Subscript object at 0x7da1b0302d40>, <ast.Subscript object at 0x7da1b03034f0>, <ast.Subscript object at 0x7da1b03035b0>]]]] if compare[name[limit] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0300a00>
keyword[def] identifier[__add_stack] ( identifier[self] , identifier[span] , identifier[limit] = keyword[None] ): literal[string] identifier[span] . identifier[stack] =[] identifier[frame_count] = literal[int] identifier[tb] = identifier[traceback] . identifier[extract_stack] () identifier[tb] . identifier[reverse] () keyword[for] identifier[frame] keyword[in] identifier[tb] : keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] keyword[and] identifier[frame_count] >= identifier[limit] : keyword[break] keyword[if] literal[string] keyword[not] keyword[in] identifier[os] . identifier[environ] : keyword[if] identifier[re_tracer_frame] . identifier[search] ( identifier[frame] [ literal[int] ]) keyword[is] keyword[not] keyword[None] : keyword[continue] keyword[if] identifier[re_with_stan_frame] . identifier[search] ( identifier[frame] [ literal[int] ]) keyword[is] keyword[not] keyword[None] : keyword[continue] identifier[span] . identifier[stack] . identifier[append] ({ literal[string] : identifier[frame] [ literal[int] ], literal[string] : identifier[frame] [ literal[int] ], literal[string] : identifier[frame] [ literal[int] ] }) keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] : identifier[frame_count] += literal[int]
def __add_stack(self, span, limit=None): """ Adds a backtrace to this span """ span.stack = [] frame_count = 0 tb = traceback.extract_stack() tb.reverse() for frame in tb: if limit is not None and frame_count >= limit: break # depends on [control=['if'], data=[]] # Exclude Instana frames unless we're in dev mode if 'INSTANA_DEV' not in os.environ: if re_tracer_frame.search(frame[0]) is not None: continue # depends on [control=['if'], data=[]] if re_with_stan_frame.search(frame[2]) is not None: continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] span.stack.append({'c': frame[0], 'n': frame[1], 'm': frame[2]}) if limit is not None: frame_count += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['frame']]
def RunValidationFromOptions(feed, options):
  """Validate feed, run in profiler if in options, and return an exit code."""
  # Select the runner up front; both share the same (feed, options)
  # calling convention.
  if options.performance:
    runner = ProfileRunValidationOutputFromOptions
  else:
    runner = RunValidationOutputFromOptions
  return runner(feed, options)
def function[RunValidationFromOptions, parameter[feed, options]]: constant[Validate feed, run in profiler if in options, and return an exit code.] if name[options].performance begin[:] return[call[name[ProfileRunValidationOutputFromOptions], parameter[name[feed], name[options]]]]
keyword[def] identifier[RunValidationFromOptions] ( identifier[feed] , identifier[options] ): literal[string] keyword[if] identifier[options] . identifier[performance] : keyword[return] identifier[ProfileRunValidationOutputFromOptions] ( identifier[feed] , identifier[options] ) keyword[else] : keyword[return] identifier[RunValidationOutputFromOptions] ( identifier[feed] , identifier[options] )
def RunValidationFromOptions(feed, options): """Validate feed, run in profiler if in options, and return an exit code.""" if options.performance: return ProfileRunValidationOutputFromOptions(feed, options) # depends on [control=['if'], data=[]] else: return RunValidationOutputFromOptions(feed, options)
def _LoadFlows(self, client_id, min_create_time, token):
  """Yields all flows for the given client_id and time range.

  Args:
    client_id: client URN
    min_create_time: minimum creation time (inclusive)
    token: acl token

  Yields:
    flow_objects.Flow objects
  """
  if data_store.RelationalDBEnabled():
    # Relational path expects a plain client id string, not a URN.
    if isinstance(client_id, rdfvalue.RDFURN):
      client_id = client_id.Basename()

    flow_list = data_store.REL_DB.ReadAllFlowObjects(
        client_id=client_id,
        min_create_time=min_create_time,
        include_child_flows=False)
    for flow_obj in flow_list:
      yield flow_obj
  else:
    # Legacy AFF4 path: list children of <client>/flows within the
    # [min_create_time, now] window, then open them in bulk.
    now = rdfvalue.RDFDatetime.Now()
    client_id_urn = rdf_client.ClientURN(client_id)
    flows_dir = aff4.FACTORY.Open(client_id_urn.Add("flows"), token=token)
    # Save DB roundtrips by checking both conditions at once.
    flow_list = flows_dir.ListChildren(
        age=(min_create_time.AsMicrosecondsSinceEpoch(),
             now.AsMicrosecondsSinceEpoch()))
    for flow_obj in aff4.FACTORY.MultiOpen(flow_list, token=token):
      # Re-wrap the AFF4 flow into the relational Flow value so both
      # branches yield the same type.
      yield rdf_flow_objects.Flow(
          args=flow_obj.args,
          flow_class_name=flow_obj.runner_args.flow_name,
          flow_id=flow_obj.urn.Basename(),
          create_time=flow_obj.context.create_time,
          creator=flow_obj.creator,
      )
def function[_LoadFlows, parameter[self, client_id, min_create_time, token]]: constant[Yields all flows for the given client_id and time range. Args: client_id: client URN min_create_time: minimum creation time (inclusive) token: acl token Yields: flow_objects.Flow objects ] if call[name[data_store].RelationalDBEnabled, parameter[]] begin[:] if call[name[isinstance], parameter[name[client_id], name[rdfvalue].RDFURN]] begin[:] variable[client_id] assign[=] call[name[client_id].Basename, parameter[]] variable[flow_list] assign[=] call[name[data_store].REL_DB.ReadAllFlowObjects, parameter[]] for taget[name[flow_obj]] in starred[name[flow_list]] begin[:] <ast.Yield object at 0x7da1b1c1b940>
keyword[def] identifier[_LoadFlows] ( identifier[self] , identifier[client_id] , identifier[min_create_time] , identifier[token] ): literal[string] keyword[if] identifier[data_store] . identifier[RelationalDBEnabled] (): keyword[if] identifier[isinstance] ( identifier[client_id] , identifier[rdfvalue] . identifier[RDFURN] ): identifier[client_id] = identifier[client_id] . identifier[Basename] () identifier[flow_list] = identifier[data_store] . identifier[REL_DB] . identifier[ReadAllFlowObjects] ( identifier[client_id] = identifier[client_id] , identifier[min_create_time] = identifier[min_create_time] , identifier[include_child_flows] = keyword[False] ) keyword[for] identifier[flow_obj] keyword[in] identifier[flow_list] : keyword[yield] identifier[flow_obj] keyword[else] : identifier[now] = identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] () identifier[client_id_urn] = identifier[rdf_client] . identifier[ClientURN] ( identifier[client_id] ) identifier[flows_dir] = identifier[aff4] . identifier[FACTORY] . identifier[Open] ( identifier[client_id_urn] . identifier[Add] ( literal[string] ), identifier[token] = identifier[token] ) identifier[flow_list] = identifier[flows_dir] . identifier[ListChildren] ( identifier[age] =( identifier[min_create_time] . identifier[AsMicrosecondsSinceEpoch] (), identifier[now] . identifier[AsMicrosecondsSinceEpoch] ())) keyword[for] identifier[flow_obj] keyword[in] identifier[aff4] . identifier[FACTORY] . identifier[MultiOpen] ( identifier[flow_list] , identifier[token] = identifier[token] ): keyword[yield] identifier[rdf_flow_objects] . identifier[Flow] ( identifier[args] = identifier[flow_obj] . identifier[args] , identifier[flow_class_name] = identifier[flow_obj] . identifier[runner_args] . identifier[flow_name] , identifier[flow_id] = identifier[flow_obj] . identifier[urn] . identifier[Basename] (), identifier[create_time] = identifier[flow_obj] . identifier[context] . 
identifier[create_time] , identifier[creator] = identifier[flow_obj] . identifier[creator] , )
def _LoadFlows(self, client_id, min_create_time, token): """Yields all flows for the given client_id and time range. Args: client_id: client URN min_create_time: minimum creation time (inclusive) token: acl token Yields: flow_objects.Flow objects """ if data_store.RelationalDBEnabled(): if isinstance(client_id, rdfvalue.RDFURN): client_id = client_id.Basename() # depends on [control=['if'], data=[]] flow_list = data_store.REL_DB.ReadAllFlowObjects(client_id=client_id, min_create_time=min_create_time, include_child_flows=False) for flow_obj in flow_list: yield flow_obj # depends on [control=['for'], data=['flow_obj']] # depends on [control=['if'], data=[]] else: now = rdfvalue.RDFDatetime.Now() client_id_urn = rdf_client.ClientURN(client_id) flows_dir = aff4.FACTORY.Open(client_id_urn.Add('flows'), token=token) # Save DB roundtrips by checking both conditions at once. flow_list = flows_dir.ListChildren(age=(min_create_time.AsMicrosecondsSinceEpoch(), now.AsMicrosecondsSinceEpoch())) for flow_obj in aff4.FACTORY.MultiOpen(flow_list, token=token): yield rdf_flow_objects.Flow(args=flow_obj.args, flow_class_name=flow_obj.runner_args.flow_name, flow_id=flow_obj.urn.Basename(), create_time=flow_obj.context.create_time, creator=flow_obj.creator) # depends on [control=['for'], data=['flow_obj']]
async def log_transaction(self, **params):
    """Writing transaction to database

    Accepts either the transaction fields directly as keyword arguments,
    or a JSON-encoded ``message`` keyword that contains them.

    Returns:
        ``{"success": True}`` on success, or an ``{"error": ..., "reason": ...}``
        dict describing the validation failure.
    """
    if params.get("message"):
        params = json.loads(params.get("message", "{}"))
    if not params:
        return {"error": 400, "reason": "Missed required fields"}

    coinid = params.get("coinid")
    # `x not in seq` is the idiomatic (and equivalent) spelling of the
    # original `not x in seq`.
    if coinid not in ["QTUM", "PUT"]:
        return {"error": 400, "reason": "Missed or invalid coinid"}

    database = client[settings.TXS]
    source_collection = database[coinid]

    # Update the stored transaction (matched by txid) with block data.
    await source_collection.find_one_and_update(
        {"txid": params.get("txid")},
        {"$set": {
            "blocknumber": params.get("blocknumber"),
            "blockhash": params.get("blockhash"),
            "gasLimit": params.get("gasLimit"),
            "gasPrice": params.get("gasPrice"),
        }})

    return {"success": True}
<ast.AsyncFunctionDef object at 0x7da20e957be0>
keyword[async] keyword[def] identifier[log_transaction] ( identifier[self] ,** identifier[params] ): literal[string] keyword[if] identifier[params] . identifier[get] ( literal[string] ): identifier[params] = identifier[json] . identifier[loads] ( identifier[params] . identifier[get] ( literal[string] , literal[string] )) keyword[if] keyword[not] identifier[params] : keyword[return] { literal[string] : literal[int] , literal[string] : literal[string] } identifier[coinid] = identifier[params] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[coinid] keyword[in] [ literal[string] , literal[string] ]: keyword[return] { literal[string] : literal[int] , literal[string] : literal[string] } identifier[database] = identifier[client] [ identifier[settings] . identifier[TXS] ] identifier[source_collection] = identifier[database] [ identifier[coinid] ] keyword[await] identifier[source_collection] . identifier[find_one_and_update] ({ literal[string] : identifier[params] . identifier[get] ( literal[string] )},{ literal[string] :{ literal[string] : identifier[params] . identifier[get] ( literal[string] ), literal[string] : identifier[params] . identifier[get] ( literal[string] ), literal[string] : identifier[params] . identifier[get] ( literal[string] ), literal[string] : identifier[params] . identifier[get] ( literal[string] ), }}) keyword[return] { literal[string] : keyword[True] }
async def log_transaction(self, **params): """Writing transaction to database """ if params.get('message'): params = json.loads(params.get('message', '{}')) # depends on [control=['if'], data=[]] if not params: return {'error': 400, 'reason': 'Missed required fields'} # depends on [control=['if'], data=[]] coinid = params.get('coinid') if not coinid in ['QTUM', 'PUT']: return {'error': 400, 'reason': 'Missed or invalid coinid'} # depends on [control=['if'], data=[]] database = client[settings.TXS] source_collection = database[coinid] await source_collection.find_one_and_update({'txid': params.get('txid')}, {'$set': {'blocknumber': params.get('blocknumber'), 'blockhash': params.get('blockhash'), 'gasLimit': params.get('gasLimit'), 'gasPrice': params.get('gasPrice')}}) return {'success': True}
def gumbel_sample(shape):
  """Sample from the Gumbel distribution, protect from overflows.

  Args:
    shape: Shape of Gumbel samples.

  Returns:
    Noise drawn from Gumbel distribution.
  """
  # Clamp the uniform draw away from 0 and 1 so neither log() blows up.
  u = tf.random_uniform(shape, minval=0.00001, maxval=0.99998)
  return -tf.log(-tf.log(u))
def function[gumbel_sample, parameter[shape]]: constant[Sample from the Gumbel distribution, protect from overflows. Args: shape: Shape of Gumbel samples. Returns: Noise drawn from Gumbel distribution. ] variable[uniform_samples] assign[=] call[name[tf].random_uniform, parameter[name[shape]]] return[<ast.UnaryOp object at 0x7da1b1f970d0>]
keyword[def] identifier[gumbel_sample] ( identifier[shape] ): literal[string] identifier[uniform_samples] = identifier[tf] . identifier[random_uniform] ( identifier[shape] , identifier[minval] = literal[int] , identifier[maxval] = literal[int] ) keyword[return] - identifier[tf] . identifier[log] (- identifier[tf] . identifier[log] ( identifier[uniform_samples] ))
def gumbel_sample(shape): """Sample from the Gumbel distribution, protect from overflows. Args: shape: Shape of Gumbel samples. Returns: Noise drawn from Gumbel distribution. """ uniform_samples = tf.random_uniform(shape, minval=1e-05, maxval=0.99998) return -tf.log(-tf.log(uniform_samples))
def step(self, action):
    """gym api step

    Sends the chosen action to the Malmo server and loops until a
    non-empty observation (and, when requested, info) arrives for our
    turn, then returns the usual gym 4-tuple.

    :param action: index into ``self.action_space``
    :return: (obs, reward, done, info)
    """
    obs = None
    reward = None
    info = None
    turn = True
    # step_options encodes the wire protocol variant: <2 exchanges a
    # turn key; 0 or 2 also exchanges an info string.
    withturnkey = self.step_options < 2
    withinfo = self.step_options == 0 or self.step_options == 2
    # Keep resending the step until we have an observation, the info
    # (when expected), and it is actually our turn to act.
    while not self.done and \
        ((obs is None or len(obs) == 0) or
         (withinfo and info is None) or turn):
        step_message = "<Step" + str(self.step_options) + ">" + \
                       self.action_space[action] + \
                       "</Step" + str(self.step_options) + " >"
        comms.send_message(self.client_socket, step_message.encode())
        if withturnkey:
            comms.send_message(self.client_socket, self.turn_key.encode())
        obs = comms.recv_message(self.client_socket)

        # Reply packs reward (double), done flag and sent flag.
        reply = comms.recv_message(self.client_socket)
        reward, done, sent = struct.unpack('!dbb', reply)
        self.done = done == 1
        if withinfo:
            info = comms.recv_message(self.client_socket).decode('utf-8')

        turn_key = comms.recv_message(self.client_socket).decode('utf-8') if withturnkey else ""
        # print("[" + str(self.role) + "] TK " + turn_key + " self.TK " + str(self.turn_key))
        if turn_key != "":
            if sent != 0:
                turn = False
            # Done turns if:
            turn = self.turn_key == turn_key
            self.turn_key = turn_key
        else:
            turn = sent == 0

        if (obs is None or len(obs) == 0) or turn:
            # Not our turn yet / nothing received: back off briefly.
            time.sleep(0.1)

    obs = np.frombuffer(obs, dtype=np.uint8)
    return obs, reward, self.done, info
def function[step, parameter[self, action]]: constant[gym api step] variable[obs] assign[=] constant[None] variable[reward] assign[=] constant[None] variable[info] assign[=] constant[None] variable[turn] assign[=] constant[True] variable[withturnkey] assign[=] compare[name[self].step_options less[<] constant[2]] variable[withinfo] assign[=] <ast.BoolOp object at 0x7da1b1a2c9d0> while <ast.BoolOp object at 0x7da1b1a2cdc0> begin[:] variable[step_message] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[<Step] + call[name[str], parameter[name[self].step_options]]] + constant[>]] + call[name[self].action_space][name[action]]] + constant[</Step]] + call[name[str], parameter[name[self].step_options]]] + constant[ >]] call[name[comms].send_message, parameter[name[self].client_socket, call[name[step_message].encode, parameter[]]]] if name[withturnkey] begin[:] call[name[comms].send_message, parameter[name[self].client_socket, call[name[self].turn_key.encode, parameter[]]]] variable[obs] assign[=] call[name[comms].recv_message, parameter[name[self].client_socket]] variable[reply] assign[=] call[name[comms].recv_message, parameter[name[self].client_socket]] <ast.Tuple object at 0x7da1b1b124a0> assign[=] call[name[struct].unpack, parameter[constant[!dbb], name[reply]]] name[self].done assign[=] compare[name[done] equal[==] constant[1]] if name[withinfo] begin[:] variable[info] assign[=] call[call[name[comms].recv_message, parameter[name[self].client_socket]].decode, parameter[constant[utf-8]]] variable[turn_key] assign[=] <ast.IfExp object at 0x7da1b1a426e0> if compare[name[turn_key] not_equal[!=] constant[]] begin[:] if compare[name[sent] not_equal[!=] constant[0]] begin[:] variable[turn] assign[=] constant[False] name[self].turn_key assign[=] name[turn_key] if <ast.BoolOp object at 0x7da1b1a48400> begin[:] call[name[time].sleep, parameter[constant[0.1]]] variable[obs] assign[=] call[name[np].frombuffer, 
parameter[name[obs]]] return[tuple[[<ast.Name object at 0x7da1b1a48ee0>, <ast.Name object at 0x7da1b1a48f10>, <ast.Attribute object at 0x7da1b1a48f40>, <ast.Name object at 0x7da1b1a48fa0>]]]
keyword[def] identifier[step] ( identifier[self] , identifier[action] ): literal[string] identifier[obs] = keyword[None] identifier[reward] = keyword[None] identifier[info] = keyword[None] identifier[turn] = keyword[True] identifier[withturnkey] = identifier[self] . identifier[step_options] < literal[int] identifier[withinfo] = identifier[self] . identifier[step_options] == literal[int] keyword[or] identifier[self] . identifier[step_options] == literal[int] keyword[while] keyword[not] identifier[self] . identifier[done] keyword[and] (( identifier[obs] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[obs] )== literal[int] ) keyword[or] ( identifier[withinfo] keyword[and] identifier[info] keyword[is] keyword[None] ) keyword[or] identifier[turn] ): identifier[step_message] = literal[string] + identifier[str] ( identifier[self] . identifier[step_options] )+ literal[string] + identifier[self] . identifier[action_space] [ identifier[action] ]+ literal[string] + identifier[str] ( identifier[self] . identifier[step_options] )+ literal[string] identifier[comms] . identifier[send_message] ( identifier[self] . identifier[client_socket] , identifier[step_message] . identifier[encode] ()) keyword[if] identifier[withturnkey] : identifier[comms] . identifier[send_message] ( identifier[self] . identifier[client_socket] , identifier[self] . identifier[turn_key] . identifier[encode] ()) identifier[obs] = identifier[comms] . identifier[recv_message] ( identifier[self] . identifier[client_socket] ) identifier[reply] = identifier[comms] . identifier[recv_message] ( identifier[self] . identifier[client_socket] ) identifier[reward] , identifier[done] , identifier[sent] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[reply] ) identifier[self] . identifier[done] = identifier[done] == literal[int] keyword[if] identifier[withinfo] : identifier[info] = identifier[comms] . identifier[recv_message] ( identifier[self] . identifier[client_socket] ). 
identifier[decode] ( literal[string] ) identifier[turn_key] = identifier[comms] . identifier[recv_message] ( identifier[self] . identifier[client_socket] ). identifier[decode] ( literal[string] ) keyword[if] identifier[withturnkey] keyword[else] literal[string] keyword[if] identifier[turn_key] != literal[string] : keyword[if] identifier[sent] != literal[int] : identifier[turn] = keyword[False] identifier[self] . identifier[turn_key] = identifier[turn_key] keyword[else] : identifier[turn] = identifier[sent] == literal[int] keyword[if] ( identifier[obs] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[obs] )== literal[int] ) keyword[or] identifier[turn] : identifier[time] . identifier[sleep] ( literal[int] ) identifier[obs] = identifier[np] . identifier[frombuffer] ( identifier[obs] , identifier[dtype] = identifier[np] . identifier[uint8] ) keyword[return] identifier[obs] , identifier[reward] , identifier[self] . identifier[done] , identifier[info]
def step(self, action): """gym api step""" obs = None reward = None info = None turn = True withturnkey = self.step_options < 2 withinfo = self.step_options == 0 or self.step_options == 2 while not self.done and ((obs is None or len(obs) == 0) or (withinfo and info is None) or turn): step_message = '<Step' + str(self.step_options) + '>' + self.action_space[action] + '</Step' + str(self.step_options) + ' >' comms.send_message(self.client_socket, step_message.encode()) if withturnkey: comms.send_message(self.client_socket, self.turn_key.encode()) # depends on [control=['if'], data=[]] obs = comms.recv_message(self.client_socket) reply = comms.recv_message(self.client_socket) (reward, done, sent) = struct.unpack('!dbb', reply) self.done = done == 1 if withinfo: info = comms.recv_message(self.client_socket).decode('utf-8') # depends on [control=['if'], data=[]] turn_key = comms.recv_message(self.client_socket).decode('utf-8') if withturnkey else '' # print("[" + str(self.role) + "] TK " + turn_key + " self.TK " + str(self.turn_key)) if turn_key != '': if sent != 0: turn = False # depends on [control=['if'], data=[]] # Done turns if: turn = self.turn_key == turn_key self.turn_key = turn_key # depends on [control=['if'], data=['turn_key']] else: turn = sent == 0 if (obs is None or len(obs) == 0) or turn: time.sleep(0.1) # depends on [control=['if'], data=[]] obs = np.frombuffer(obs, dtype=np.uint8) # depends on [control=['while'], data=[]] return (obs, reward, self.done, info)
def getDwordAtOffset(self, offset): """ Returns a C{DWORD} from a given offset. @type offset: int @param offset: The offset to get the C{DWORD} from. @rtype: L{DWORD} @return: The L{DWORD} obtained at the given offset. """ return datatypes.DWORD.parse(utils.ReadData(self.getDataAtOffset(offset, 4)))
def function[getDwordAtOffset, parameter[self, offset]]: constant[ Returns a C{DWORD} from a given offset. @type offset: int @param offset: The offset to get the C{DWORD} from. @rtype: L{DWORD} @return: The L{DWORD} obtained at the given offset. ] return[call[name[datatypes].DWORD.parse, parameter[call[name[utils].ReadData, parameter[call[name[self].getDataAtOffset, parameter[name[offset], constant[4]]]]]]]]
keyword[def] identifier[getDwordAtOffset] ( identifier[self] , identifier[offset] ): literal[string] keyword[return] identifier[datatypes] . identifier[DWORD] . identifier[parse] ( identifier[utils] . identifier[ReadData] ( identifier[self] . identifier[getDataAtOffset] ( identifier[offset] , literal[int] )))
def getDwordAtOffset(self, offset): """ Returns a C{DWORD} from a given offset. @type offset: int @param offset: The offset to get the C{DWORD} from. @rtype: L{DWORD} @return: The L{DWORD} obtained at the given offset. """ return datatypes.DWORD.parse(utils.ReadData(self.getDataAtOffset(offset, 4)))
def handle_market_close(self, dt, data_portal): """Handles the close of the given day. Parameters ---------- dt : Timestamp The most recently completed simulation datetime. data_portal : DataPortal The current data portal. Returns ------- A daily perf packet. """ completed_session = self._current_session if self.emission_rate == 'daily': # this method is called for both minutely and daily emissions, but # this chunk of code here only applies for daily emissions. (since # it's done every minute, elsewhere, for minutely emission). self.sync_last_sale_prices(dt, data_portal) session_ix = self._session_count # increment the day counter before we move markers forward. self._session_count += 1 packet = { 'period_start': self._first_session, 'period_end': self._last_session, 'capital_base': self._capital_base, 'daily_perf': { 'period_open': self._market_open, 'period_close': dt, }, 'cumulative_perf': { 'period_open': self._first_session, 'period_close': self._last_session, }, 'progress': self._progress(self), 'cumulative_risk_metrics': {}, } ledger = self._ledger ledger.end_of_session(session_ix) self.end_of_session( packet, ledger, completed_session, session_ix, data_portal, ) return packet
def function[handle_market_close, parameter[self, dt, data_portal]]: constant[Handles the close of the given day. Parameters ---------- dt : Timestamp The most recently completed simulation datetime. data_portal : DataPortal The current data portal. Returns ------- A daily perf packet. ] variable[completed_session] assign[=] name[self]._current_session if compare[name[self].emission_rate equal[==] constant[daily]] begin[:] call[name[self].sync_last_sale_prices, parameter[name[dt], name[data_portal]]] variable[session_ix] assign[=] name[self]._session_count <ast.AugAssign object at 0x7da1b1e8ee00> variable[packet] assign[=] dictionary[[<ast.Constant object at 0x7da1b1e8e890>, <ast.Constant object at 0x7da1b1e8c310>, <ast.Constant object at 0x7da1b1e8db70>, <ast.Constant object at 0x7da1b1e8f970>, <ast.Constant object at 0x7da1b1e8c790>, <ast.Constant object at 0x7da1b1e8fe80>, <ast.Constant object at 0x7da1b1e8c610>], [<ast.Attribute object at 0x7da1b1e8e110>, <ast.Attribute object at 0x7da1b1e8c6d0>, <ast.Attribute object at 0x7da1b1e8fa00>, <ast.Dict object at 0x7da1b1e8c7f0>, <ast.Dict object at 0x7da1b1e8dae0>, <ast.Call object at 0x7da1b1e8e8f0>, <ast.Dict object at 0x7da1b1e8d240>]] variable[ledger] assign[=] name[self]._ledger call[name[ledger].end_of_session, parameter[name[session_ix]]] call[name[self].end_of_session, parameter[name[packet], name[ledger], name[completed_session], name[session_ix], name[data_portal]]] return[name[packet]]
keyword[def] identifier[handle_market_close] ( identifier[self] , identifier[dt] , identifier[data_portal] ): literal[string] identifier[completed_session] = identifier[self] . identifier[_current_session] keyword[if] identifier[self] . identifier[emission_rate] == literal[string] : identifier[self] . identifier[sync_last_sale_prices] ( identifier[dt] , identifier[data_portal] ) identifier[session_ix] = identifier[self] . identifier[_session_count] identifier[self] . identifier[_session_count] += literal[int] identifier[packet] ={ literal[string] : identifier[self] . identifier[_first_session] , literal[string] : identifier[self] . identifier[_last_session] , literal[string] : identifier[self] . identifier[_capital_base] , literal[string] :{ literal[string] : identifier[self] . identifier[_market_open] , literal[string] : identifier[dt] , }, literal[string] :{ literal[string] : identifier[self] . identifier[_first_session] , literal[string] : identifier[self] . identifier[_last_session] , }, literal[string] : identifier[self] . identifier[_progress] ( identifier[self] ), literal[string] :{}, } identifier[ledger] = identifier[self] . identifier[_ledger] identifier[ledger] . identifier[end_of_session] ( identifier[session_ix] ) identifier[self] . identifier[end_of_session] ( identifier[packet] , identifier[ledger] , identifier[completed_session] , identifier[session_ix] , identifier[data_portal] , ) keyword[return] identifier[packet]
def handle_market_close(self, dt, data_portal): """Handles the close of the given day. Parameters ---------- dt : Timestamp The most recently completed simulation datetime. data_portal : DataPortal The current data portal. Returns ------- A daily perf packet. """ completed_session = self._current_session if self.emission_rate == 'daily': # this method is called for both minutely and daily emissions, but # this chunk of code here only applies for daily emissions. (since # it's done every minute, elsewhere, for minutely emission). self.sync_last_sale_prices(dt, data_portal) # depends on [control=['if'], data=[]] session_ix = self._session_count # increment the day counter before we move markers forward. self._session_count += 1 packet = {'period_start': self._first_session, 'period_end': self._last_session, 'capital_base': self._capital_base, 'daily_perf': {'period_open': self._market_open, 'period_close': dt}, 'cumulative_perf': {'period_open': self._first_session, 'period_close': self._last_session}, 'progress': self._progress(self), 'cumulative_risk_metrics': {}} ledger = self._ledger ledger.end_of_session(session_ix) self.end_of_session(packet, ledger, completed_session, session_ix, data_portal) return packet
def timing(self, stat, value, tags=None): """Measure a timing for statistical distribution.""" self.client.timing(metric=stat, value=value, tags=tags)
def function[timing, parameter[self, stat, value, tags]]: constant[Measure a timing for statistical distribution.] call[name[self].client.timing, parameter[]]
keyword[def] identifier[timing] ( identifier[self] , identifier[stat] , identifier[value] , identifier[tags] = keyword[None] ): literal[string] identifier[self] . identifier[client] . identifier[timing] ( identifier[metric] = identifier[stat] , identifier[value] = identifier[value] , identifier[tags] = identifier[tags] )
def timing(self, stat, value, tags=None): """Measure a timing for statistical distribution.""" self.client.timing(metric=stat, value=value, tags=tags)
def _hanging_indent_after_bracket(self, bracket, position): """Extracts indentation information for a hanging indent Case of hanging indent after a bracket (including parenthesis) :param str bracket: bracket in question :param int position: Position of bracket in self._tokens :returns: the state and valid positions for hanging indentation :rtype: _ContinuedIndent """ indentation = self._tokens.line_indent(position) if ( self._is_block_opener and self._continuation_string == self._block_indent_string ): return _ContinuedIndent( HANGING_BLOCK, bracket, position, _Indentations(indentation + self._continuation_string, indentation), _BeforeBlockIndentations( indentation + self._continuation_string, indentation + self._continuation_string * 2, ), ) if bracket == ":": # If the dict key was on the same line as the open brace, the new # correct indent should be relative to the key instead of the # current indent level paren_align = self._cont_stack[-1].valid_outdent_strings next_align = self._cont_stack[-1].valid_continuation_strings.copy() next_align_keys = list(next_align.keys()) next_align[next_align_keys[0] + self._continuation_string] = True # Note that the continuation of # d = { # 'a': 'b' # 'c' # } # is handled by the special-casing for hanging continued string indents. return _ContinuedIndent( HANGING_DICT_VALUE, bracket, position, paren_align, next_align ) return _ContinuedIndent( HANGING, bracket, position, _Indentations(indentation, indentation + self._continuation_string), _Indentations(indentation + self._continuation_string), )
def function[_hanging_indent_after_bracket, parameter[self, bracket, position]]: constant[Extracts indentation information for a hanging indent Case of hanging indent after a bracket (including parenthesis) :param str bracket: bracket in question :param int position: Position of bracket in self._tokens :returns: the state and valid positions for hanging indentation :rtype: _ContinuedIndent ] variable[indentation] assign[=] call[name[self]._tokens.line_indent, parameter[name[position]]] if <ast.BoolOp object at 0x7da1b0350940> begin[:] return[call[name[_ContinuedIndent], parameter[name[HANGING_BLOCK], name[bracket], name[position], call[name[_Indentations], parameter[binary_operation[name[indentation] + name[self]._continuation_string], name[indentation]]], call[name[_BeforeBlockIndentations], parameter[binary_operation[name[indentation] + name[self]._continuation_string], binary_operation[name[indentation] + binary_operation[name[self]._continuation_string * constant[2]]]]]]]] if compare[name[bracket] equal[==] constant[:]] begin[:] variable[paren_align] assign[=] call[name[self]._cont_stack][<ast.UnaryOp object at 0x7da1b024d570>].valid_outdent_strings variable[next_align] assign[=] call[call[name[self]._cont_stack][<ast.UnaryOp object at 0x7da1b024cac0>].valid_continuation_strings.copy, parameter[]] variable[next_align_keys] assign[=] call[name[list], parameter[call[name[next_align].keys, parameter[]]]] call[name[next_align]][binary_operation[call[name[next_align_keys]][constant[0]] + name[self]._continuation_string]] assign[=] constant[True] return[call[name[_ContinuedIndent], parameter[name[HANGING_DICT_VALUE], name[bracket], name[position], name[paren_align], name[next_align]]]] return[call[name[_ContinuedIndent], parameter[name[HANGING], name[bracket], name[position], call[name[_Indentations], parameter[name[indentation], binary_operation[name[indentation] + name[self]._continuation_string]]], call[name[_Indentations], 
parameter[binary_operation[name[indentation] + name[self]._continuation_string]]]]]]
keyword[def] identifier[_hanging_indent_after_bracket] ( identifier[self] , identifier[bracket] , identifier[position] ): literal[string] identifier[indentation] = identifier[self] . identifier[_tokens] . identifier[line_indent] ( identifier[position] ) keyword[if] ( identifier[self] . identifier[_is_block_opener] keyword[and] identifier[self] . identifier[_continuation_string] == identifier[self] . identifier[_block_indent_string] ): keyword[return] identifier[_ContinuedIndent] ( identifier[HANGING_BLOCK] , identifier[bracket] , identifier[position] , identifier[_Indentations] ( identifier[indentation] + identifier[self] . identifier[_continuation_string] , identifier[indentation] ), identifier[_BeforeBlockIndentations] ( identifier[indentation] + identifier[self] . identifier[_continuation_string] , identifier[indentation] + identifier[self] . identifier[_continuation_string] * literal[int] , ), ) keyword[if] identifier[bracket] == literal[string] : identifier[paren_align] = identifier[self] . identifier[_cont_stack] [- literal[int] ]. identifier[valid_outdent_strings] identifier[next_align] = identifier[self] . identifier[_cont_stack] [- literal[int] ]. identifier[valid_continuation_strings] . identifier[copy] () identifier[next_align_keys] = identifier[list] ( identifier[next_align] . identifier[keys] ()) identifier[next_align] [ identifier[next_align_keys] [ literal[int] ]+ identifier[self] . identifier[_continuation_string] ]= keyword[True] keyword[return] identifier[_ContinuedIndent] ( identifier[HANGING_DICT_VALUE] , identifier[bracket] , identifier[position] , identifier[paren_align] , identifier[next_align] ) keyword[return] identifier[_ContinuedIndent] ( identifier[HANGING] , identifier[bracket] , identifier[position] , identifier[_Indentations] ( identifier[indentation] , identifier[indentation] + identifier[self] . identifier[_continuation_string] ), identifier[_Indentations] ( identifier[indentation] + identifier[self] . 
identifier[_continuation_string] ), )
def _hanging_indent_after_bracket(self, bracket, position): """Extracts indentation information for a hanging indent Case of hanging indent after a bracket (including parenthesis) :param str bracket: bracket in question :param int position: Position of bracket in self._tokens :returns: the state and valid positions for hanging indentation :rtype: _ContinuedIndent """ indentation = self._tokens.line_indent(position) if self._is_block_opener and self._continuation_string == self._block_indent_string: return _ContinuedIndent(HANGING_BLOCK, bracket, position, _Indentations(indentation + self._continuation_string, indentation), _BeforeBlockIndentations(indentation + self._continuation_string, indentation + self._continuation_string * 2)) # depends on [control=['if'], data=[]] if bracket == ':': # If the dict key was on the same line as the open brace, the new # correct indent should be relative to the key instead of the # current indent level paren_align = self._cont_stack[-1].valid_outdent_strings next_align = self._cont_stack[-1].valid_continuation_strings.copy() next_align_keys = list(next_align.keys()) next_align[next_align_keys[0] + self._continuation_string] = True # Note that the continuation of # d = { # 'a': 'b' # 'c' # } # is handled by the special-casing for hanging continued string indents. return _ContinuedIndent(HANGING_DICT_VALUE, bracket, position, paren_align, next_align) # depends on [control=['if'], data=['bracket']] return _ContinuedIndent(HANGING, bracket, position, _Indentations(indentation, indentation + self._continuation_string), _Indentations(indentation + self._continuation_string))
def transform(self, X, y=None, copy=None): """ Perform standardization by centering and scaling using the parameters. :param X: Data matrix to scale. :type X: numpy.ndarray, shape [n_samples, n_features] :param y: Passthrough for scikit-learn ``Pipeline`` compatibility. :type y: None :param bool copy: Copy the X matrix. :return: Scaled version of the X data matrix. :rtype: numpy.ndarray, shape [n_samples, n_features] """ check_is_fitted(self, 'scale_') copy = copy if copy is not None else self.copy X = check_array(X, accept_sparse='csr', copy=copy, warn_on_dtype=True, estimator=self, dtype=FLOAT_DTYPES) if sparse.issparse(X): if self.with_mean: raise ValueError( "Cannot center sparse matrices: pass `with_mean=False` " "instead. See docstring for motivation and alternatives.") if self.scale_ is not None: inplace_column_scale(X, 1 / self.scale_) else: if self.with_mean: X -= self.mean_ if self.with_std: X /= self.scale_ return X
def function[transform, parameter[self, X, y, copy]]: constant[ Perform standardization by centering and scaling using the parameters. :param X: Data matrix to scale. :type X: numpy.ndarray, shape [n_samples, n_features] :param y: Passthrough for scikit-learn ``Pipeline`` compatibility. :type y: None :param bool copy: Copy the X matrix. :return: Scaled version of the X data matrix. :rtype: numpy.ndarray, shape [n_samples, n_features] ] call[name[check_is_fitted], parameter[name[self], constant[scale_]]] variable[copy] assign[=] <ast.IfExp object at 0x7da20cabfa30> variable[X] assign[=] call[name[check_array], parameter[name[X]]] if call[name[sparse].issparse, parameter[name[X]]] begin[:] if name[self].with_mean begin[:] <ast.Raise object at 0x7da20cabdf90> if compare[name[self].scale_ is_not constant[None]] begin[:] call[name[inplace_column_scale], parameter[name[X], binary_operation[constant[1] / name[self].scale_]]] return[name[X]]
keyword[def] identifier[transform] ( identifier[self] , identifier[X] , identifier[y] = keyword[None] , identifier[copy] = keyword[None] ): literal[string] identifier[check_is_fitted] ( identifier[self] , literal[string] ) identifier[copy] = identifier[copy] keyword[if] identifier[copy] keyword[is] keyword[not] keyword[None] keyword[else] identifier[self] . identifier[copy] identifier[X] = identifier[check_array] ( identifier[X] , identifier[accept_sparse] = literal[string] , identifier[copy] = identifier[copy] , identifier[warn_on_dtype] = keyword[True] , identifier[estimator] = identifier[self] , identifier[dtype] = identifier[FLOAT_DTYPES] ) keyword[if] identifier[sparse] . identifier[issparse] ( identifier[X] ): keyword[if] identifier[self] . identifier[with_mean] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) keyword[if] identifier[self] . identifier[scale_] keyword[is] keyword[not] keyword[None] : identifier[inplace_column_scale] ( identifier[X] , literal[int] / identifier[self] . identifier[scale_] ) keyword[else] : keyword[if] identifier[self] . identifier[with_mean] : identifier[X] -= identifier[self] . identifier[mean_] keyword[if] identifier[self] . identifier[with_std] : identifier[X] /= identifier[self] . identifier[scale_] keyword[return] identifier[X]
def transform(self, X, y=None, copy=None): """ Perform standardization by centering and scaling using the parameters. :param X: Data matrix to scale. :type X: numpy.ndarray, shape [n_samples, n_features] :param y: Passthrough for scikit-learn ``Pipeline`` compatibility. :type y: None :param bool copy: Copy the X matrix. :return: Scaled version of the X data matrix. :rtype: numpy.ndarray, shape [n_samples, n_features] """ check_is_fitted(self, 'scale_') copy = copy if copy is not None else self.copy X = check_array(X, accept_sparse='csr', copy=copy, warn_on_dtype=True, estimator=self, dtype=FLOAT_DTYPES) if sparse.issparse(X): if self.with_mean: raise ValueError('Cannot center sparse matrices: pass `with_mean=False` instead. See docstring for motivation and alternatives.') # depends on [control=['if'], data=[]] if self.scale_ is not None: inplace_column_scale(X, 1 / self.scale_) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: if self.with_mean: X -= self.mean_ # depends on [control=['if'], data=[]] if self.with_std: X /= self.scale_ # depends on [control=['if'], data=[]] return X
def parse_away_fo(self): """ Parse face-off info for away team. :returns: ``self`` on success, ``None`` otherwise """ try: self.__set_fo_tables() self.face_offs['away'] = self.__parse_fo_table(self.__away_fo) return self except: return None
def function[parse_away_fo, parameter[self]]: constant[ Parse face-off info for away team. :returns: ``self`` on success, ``None`` otherwise ] <ast.Try object at 0x7da18dc07f10>
keyword[def] identifier[parse_away_fo] ( identifier[self] ): literal[string] keyword[try] : identifier[self] . identifier[__set_fo_tables] () identifier[self] . identifier[face_offs] [ literal[string] ]= identifier[self] . identifier[__parse_fo_table] ( identifier[self] . identifier[__away_fo] ) keyword[return] identifier[self] keyword[except] : keyword[return] keyword[None]
def parse_away_fo(self): """ Parse face-off info for away team. :returns: ``self`` on success, ``None`` otherwise """ try: self.__set_fo_tables() self.face_offs['away'] = self.__parse_fo_table(self.__away_fo) return self # depends on [control=['try'], data=[]] except: return None # depends on [control=['except'], data=[]]
def add_state(self): """This function adds a new state""" sid = len(self.states) self.states.append(SFAState(sid))
def function[add_state, parameter[self]]: constant[This function adds a new state] variable[sid] assign[=] call[name[len], parameter[name[self].states]] call[name[self].states.append, parameter[call[name[SFAState], parameter[name[sid]]]]]
keyword[def] identifier[add_state] ( identifier[self] ): literal[string] identifier[sid] = identifier[len] ( identifier[self] . identifier[states] ) identifier[self] . identifier[states] . identifier[append] ( identifier[SFAState] ( identifier[sid] ))
def add_state(self): """This function adds a new state""" sid = len(self.states) self.states.append(SFAState(sid))
def tournament(self,individuals,tourn_size, num_selections=None): """conducts tournament selection of size tourn_size""" winners = [] locs = [] if num_selections is None: num_selections = len(individuals) for i in np.arange(num_selections): # sample pool with replacement pool_i = self.random_state.choice(len(individuals),size=tourn_size) pool = [] for i in pool_i: pool.append(np.mean(individuals[i].fitness)) # winner locs.append(pool_i[np.argmin(pool)]) winners.append(copy.deepcopy(individuals[locs[-1]])) return winners,locs
def function[tournament, parameter[self, individuals, tourn_size, num_selections]]: constant[conducts tournament selection of size tourn_size] variable[winners] assign[=] list[[]] variable[locs] assign[=] list[[]] if compare[name[num_selections] is constant[None]] begin[:] variable[num_selections] assign[=] call[name[len], parameter[name[individuals]]] for taget[name[i]] in starred[call[name[np].arange, parameter[name[num_selections]]]] begin[:] variable[pool_i] assign[=] call[name[self].random_state.choice, parameter[call[name[len], parameter[name[individuals]]]]] variable[pool] assign[=] list[[]] for taget[name[i]] in starred[name[pool_i]] begin[:] call[name[pool].append, parameter[call[name[np].mean, parameter[call[name[individuals]][name[i]].fitness]]]] call[name[locs].append, parameter[call[name[pool_i]][call[name[np].argmin, parameter[name[pool]]]]]] call[name[winners].append, parameter[call[name[copy].deepcopy, parameter[call[name[individuals]][call[name[locs]][<ast.UnaryOp object at 0x7da1b18018d0>]]]]]] return[tuple[[<ast.Name object at 0x7da1b18027a0>, <ast.Name object at 0x7da1b1802f80>]]]
keyword[def] identifier[tournament] ( identifier[self] , identifier[individuals] , identifier[tourn_size] , identifier[num_selections] = keyword[None] ): literal[string] identifier[winners] =[] identifier[locs] =[] keyword[if] identifier[num_selections] keyword[is] keyword[None] : identifier[num_selections] = identifier[len] ( identifier[individuals] ) keyword[for] identifier[i] keyword[in] identifier[np] . identifier[arange] ( identifier[num_selections] ): identifier[pool_i] = identifier[self] . identifier[random_state] . identifier[choice] ( identifier[len] ( identifier[individuals] ), identifier[size] = identifier[tourn_size] ) identifier[pool] =[] keyword[for] identifier[i] keyword[in] identifier[pool_i] : identifier[pool] . identifier[append] ( identifier[np] . identifier[mean] ( identifier[individuals] [ identifier[i] ]. identifier[fitness] )) identifier[locs] . identifier[append] ( identifier[pool_i] [ identifier[np] . identifier[argmin] ( identifier[pool] )]) identifier[winners] . identifier[append] ( identifier[copy] . identifier[deepcopy] ( identifier[individuals] [ identifier[locs] [- literal[int] ]])) keyword[return] identifier[winners] , identifier[locs]
def tournament(self, individuals, tourn_size, num_selections=None): """conducts tournament selection of size tourn_size""" winners = [] locs = [] if num_selections is None: num_selections = len(individuals) # depends on [control=['if'], data=['num_selections']] for i in np.arange(num_selections): # sample pool with replacement pool_i = self.random_state.choice(len(individuals), size=tourn_size) pool = [] for i in pool_i: pool.append(np.mean(individuals[i].fitness)) # depends on [control=['for'], data=['i']] # winner locs.append(pool_i[np.argmin(pool)]) winners.append(copy.deepcopy(individuals[locs[-1]])) # depends on [control=['for'], data=['i']] return (winners, locs)
def setActiveState(self, active): """ Use this to enable or disable (grey out) a parameter. """ st = DISABLED if active: st = NORMAL self.entry.configure(state=st) self.inputLabel.configure(state=st) self.promptLabel.configure(state=st)
def function[setActiveState, parameter[self, active]]: constant[ Use this to enable or disable (grey out) a parameter. ] variable[st] assign[=] name[DISABLED] if name[active] begin[:] variable[st] assign[=] name[NORMAL] call[name[self].entry.configure, parameter[]] call[name[self].inputLabel.configure, parameter[]] call[name[self].promptLabel.configure, parameter[]]
keyword[def] identifier[setActiveState] ( identifier[self] , identifier[active] ): literal[string] identifier[st] = identifier[DISABLED] keyword[if] identifier[active] : identifier[st] = identifier[NORMAL] identifier[self] . identifier[entry] . identifier[configure] ( identifier[state] = identifier[st] ) identifier[self] . identifier[inputLabel] . identifier[configure] ( identifier[state] = identifier[st] ) identifier[self] . identifier[promptLabel] . identifier[configure] ( identifier[state] = identifier[st] )
def setActiveState(self, active): """ Use this to enable or disable (grey out) a parameter. """ st = DISABLED if active: st = NORMAL # depends on [control=['if'], data=[]] self.entry.configure(state=st) self.inputLabel.configure(state=st) self.promptLabel.configure(state=st)
def find_by(cls, payload, require=False): """ Searches the model in question by AND joining the query parameters. Implements a Railsy way of looking for a record using a method by the same name and passing in the query as a dict. as well. Only the first hit is returned, and there is no particular ordering specified in the server-side API method. Args: payload: `dict`. The attributes of a record to restrict the search to. require: `bool`. True means to raise a `pulsarpy.models.RecordNotFound` exception if no record is found. Returns: `dict`: The JSON serialization of the record, if any, found by the API call. `None`: If the API call didnt' return any results. Raises: `pulsarpy.models.RecordNotFound`: No records were found, and the `require` parameter is True. """ if not isinstance(payload, dict): raise ValueError("The 'payload' parameter must be provided a dictionary object.") url = os.path.join(cls.URL, "find_by") payload = {"find_by": payload} cls.debug_logger.debug("Searching Pulsar {} for {}".format(cls.__name__, json.dumps(payload, indent=4))) res = requests.post(url=url, json=payload, headers=HEADERS, verify=False) #cls.write_response_html_to_file(res,"bob.html") res.raise_for_status() res_json = res.json() if res_json: try: res_json = res_json[cls.MODEL_NAME] except KeyError: # Key won't be present if there isn't a serializer for it on the server. pass else: if require: raise RecordNotFound("Can't find any {} records with search criteria: '{}'.".format(cls.__name__, payload)) return res_json
def function[find_by, parameter[cls, payload, require]]: constant[ Searches the model in question by AND joining the query parameters. Implements a Railsy way of looking for a record using a method by the same name and passing in the query as a dict. as well. Only the first hit is returned, and there is no particular ordering specified in the server-side API method. Args: payload: `dict`. The attributes of a record to restrict the search to. require: `bool`. True means to raise a `pulsarpy.models.RecordNotFound` exception if no record is found. Returns: `dict`: The JSON serialization of the record, if any, found by the API call. `None`: If the API call didnt' return any results. Raises: `pulsarpy.models.RecordNotFound`: No records were found, and the `require` parameter is True. ] if <ast.UnaryOp object at 0x7da1b10d67a0> begin[:] <ast.Raise object at 0x7da1b10d7d90> variable[url] assign[=] call[name[os].path.join, parameter[name[cls].URL, constant[find_by]]] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b10d5c60>], [<ast.Name object at 0x7da1b10d6650>]] call[name[cls].debug_logger.debug, parameter[call[constant[Searching Pulsar {} for {}].format, parameter[name[cls].__name__, call[name[json].dumps, parameter[name[payload]]]]]]] variable[res] assign[=] call[name[requests].post, parameter[]] call[name[res].raise_for_status, parameter[]] variable[res_json] assign[=] call[name[res].json, parameter[]] if name[res_json] begin[:] <ast.Try object at 0x7da1b10d4280> return[name[res_json]]
keyword[def] identifier[find_by] ( identifier[cls] , identifier[payload] , identifier[require] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[payload] , identifier[dict] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[url] = identifier[os] . identifier[path] . identifier[join] ( identifier[cls] . identifier[URL] , literal[string] ) identifier[payload] ={ literal[string] : identifier[payload] } identifier[cls] . identifier[debug_logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[cls] . identifier[__name__] , identifier[json] . identifier[dumps] ( identifier[payload] , identifier[indent] = literal[int] ))) identifier[res] = identifier[requests] . identifier[post] ( identifier[url] = identifier[url] , identifier[json] = identifier[payload] , identifier[headers] = identifier[HEADERS] , identifier[verify] = keyword[False] ) identifier[res] . identifier[raise_for_status] () identifier[res_json] = identifier[res] . identifier[json] () keyword[if] identifier[res_json] : keyword[try] : identifier[res_json] = identifier[res_json] [ identifier[cls] . identifier[MODEL_NAME] ] keyword[except] identifier[KeyError] : keyword[pass] keyword[else] : keyword[if] identifier[require] : keyword[raise] identifier[RecordNotFound] ( literal[string] . identifier[format] ( identifier[cls] . identifier[__name__] , identifier[payload] )) keyword[return] identifier[res_json]
def find_by(cls, payload, require=False): """ Searches the model in question by AND joining the query parameters. Implements a Railsy way of looking for a record using a method by the same name and passing in the query as a dict. as well. Only the first hit is returned, and there is no particular ordering specified in the server-side API method. Args: payload: `dict`. The attributes of a record to restrict the search to. require: `bool`. True means to raise a `pulsarpy.models.RecordNotFound` exception if no record is found. Returns: `dict`: The JSON serialization of the record, if any, found by the API call. `None`: If the API call didnt' return any results. Raises: `pulsarpy.models.RecordNotFound`: No records were found, and the `require` parameter is True. """ if not isinstance(payload, dict): raise ValueError("The 'payload' parameter must be provided a dictionary object.") # depends on [control=['if'], data=[]] url = os.path.join(cls.URL, 'find_by') payload = {'find_by': payload} cls.debug_logger.debug('Searching Pulsar {} for {}'.format(cls.__name__, json.dumps(payload, indent=4))) res = requests.post(url=url, json=payload, headers=HEADERS, verify=False) #cls.write_response_html_to_file(res,"bob.html") res.raise_for_status() res_json = res.json() if res_json: try: res_json = res_json[cls.MODEL_NAME] # depends on [control=['try'], data=[]] except KeyError: # Key won't be present if there isn't a serializer for it on the server. pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif require: raise RecordNotFound("Can't find any {} records with search criteria: '{}'.".format(cls.__name__, payload)) # depends on [control=['if'], data=[]] return res_json
def pyVersionStr(self): """Version of Python running my script Returns ------- str A descriptive string containing the version of Python running this script. """ from sys import version_info return "Python Interpreter Version: {}.{}.{}".format(version_info.major, version_info.minor, version_info.micro)
def function[pyVersionStr, parameter[self]]: constant[Version of Python running my script Returns ------- str A descriptive string containing the version of Python running this script. ] from relative_module[sys] import module[version_info] return[call[constant[Python Interpreter Version: {}.{}.{}].format, parameter[name[version_info].major, name[version_info].minor, name[version_info].micro]]]
keyword[def] identifier[pyVersionStr] ( identifier[self] ): literal[string] keyword[from] identifier[sys] keyword[import] identifier[version_info] keyword[return] literal[string] . identifier[format] ( identifier[version_info] . identifier[major] , identifier[version_info] . identifier[minor] , identifier[version_info] . identifier[micro] )
def pyVersionStr(self): """Version of Python running my script Returns ------- str A descriptive string containing the version of Python running this script. """ from sys import version_info return 'Python Interpreter Version: {}.{}.{}'.format(version_info.major, version_info.minor, version_info.micro)
def disable_detailed_monitoring(name, call=None): ''' Enable/disable detailed monitoring on a node CLI Example: ''' if call != 'action': raise SaltCloudSystemExit( 'The enable_term_protect action must be called with ' '-a or --action.' ) instance_id = _get_node(name)['instanceId'] params = {'Action': 'UnmonitorInstances', 'InstanceId.1': instance_id} result = aws.query(params, location=get_location(), provider=get_provider(), return_root=True, opts=__opts__, sigver='4') return show_detailed_monitoring(name=name, instance_id=instance_id, call='action')
def function[disable_detailed_monitoring, parameter[name, call]]: constant[ Enable/disable detailed monitoring on a node CLI Example: ] if compare[name[call] not_equal[!=] constant[action]] begin[:] <ast.Raise object at 0x7da18dc06da0> variable[instance_id] assign[=] call[call[name[_get_node], parameter[name[name]]]][constant[instanceId]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18dc06800>, <ast.Constant object at 0x7da18dc07d00>], [<ast.Constant object at 0x7da18dc05870>, <ast.Name object at 0x7da18dc06fe0>]] variable[result] assign[=] call[name[aws].query, parameter[name[params]]] return[call[name[show_detailed_monitoring], parameter[]]]
keyword[def] identifier[disable_detailed_monitoring] ( identifier[name] , identifier[call] = keyword[None] ): literal[string] keyword[if] identifier[call] != literal[string] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] literal[string] ) identifier[instance_id] = identifier[_get_node] ( identifier[name] )[ literal[string] ] identifier[params] ={ literal[string] : literal[string] , literal[string] : identifier[instance_id] } identifier[result] = identifier[aws] . identifier[query] ( identifier[params] , identifier[location] = identifier[get_location] (), identifier[provider] = identifier[get_provider] (), identifier[return_root] = keyword[True] , identifier[opts] = identifier[__opts__] , identifier[sigver] = literal[string] ) keyword[return] identifier[show_detailed_monitoring] ( identifier[name] = identifier[name] , identifier[instance_id] = identifier[instance_id] , identifier[call] = literal[string] )
def disable_detailed_monitoring(name, call=None): """ Enable/disable detailed monitoring on a node CLI Example: """ if call != 'action': raise SaltCloudSystemExit('The enable_term_protect action must be called with -a or --action.') # depends on [control=['if'], data=[]] instance_id = _get_node(name)['instanceId'] params = {'Action': 'UnmonitorInstances', 'InstanceId.1': instance_id} result = aws.query(params, location=get_location(), provider=get_provider(), return_root=True, opts=__opts__, sigver='4') return show_detailed_monitoring(name=name, instance_id=instance_id, call='action')
def get_task(self, course, taskid): """ :param course: a Course object :param taskid: the task id of the task :raise InvalidNameException, TaskNotFoundException, TaskUnreadableException :return: an object representing the task, of the type given in the constructor """ if not id_checker(taskid): raise InvalidNameException("Task with invalid name: " + taskid) if self._cache_update_needed(course, taskid): self._update_cache(course, taskid) return self._cache[(course.get_id(), taskid)][0]
def function[get_task, parameter[self, course, taskid]]: constant[ :param course: a Course object :param taskid: the task id of the task :raise InvalidNameException, TaskNotFoundException, TaskUnreadableException :return: an object representing the task, of the type given in the constructor ] if <ast.UnaryOp object at 0x7da18bc72830> begin[:] <ast.Raise object at 0x7da18bc73310> if call[name[self]._cache_update_needed, parameter[name[course], name[taskid]]] begin[:] call[name[self]._update_cache, parameter[name[course], name[taskid]]] return[call[call[name[self]._cache][tuple[[<ast.Call object at 0x7da18bc72200>, <ast.Name object at 0x7da18bc72770>]]]][constant[0]]]
keyword[def] identifier[get_task] ( identifier[self] , identifier[course] , identifier[taskid] ): literal[string] keyword[if] keyword[not] identifier[id_checker] ( identifier[taskid] ): keyword[raise] identifier[InvalidNameException] ( literal[string] + identifier[taskid] ) keyword[if] identifier[self] . identifier[_cache_update_needed] ( identifier[course] , identifier[taskid] ): identifier[self] . identifier[_update_cache] ( identifier[course] , identifier[taskid] ) keyword[return] identifier[self] . identifier[_cache] [( identifier[course] . identifier[get_id] (), identifier[taskid] )][ literal[int] ]
def get_task(self, course, taskid): """ :param course: a Course object :param taskid: the task id of the task :raise InvalidNameException, TaskNotFoundException, TaskUnreadableException :return: an object representing the task, of the type given in the constructor """ if not id_checker(taskid): raise InvalidNameException('Task with invalid name: ' + taskid) # depends on [control=['if'], data=[]] if self._cache_update_needed(course, taskid): self._update_cache(course, taskid) # depends on [control=['if'], data=[]] return self._cache[course.get_id(), taskid][0]
def agent_create(self, data, **kwargs): "https://developer.zendesk.com/rest_api/docs/chat/agents#create-agent" api_path = "/api/v2/agents" return self.call(api_path, method="POST", data=data, **kwargs)
def function[agent_create, parameter[self, data]]: constant[https://developer.zendesk.com/rest_api/docs/chat/agents#create-agent] variable[api_path] assign[=] constant[/api/v2/agents] return[call[name[self].call, parameter[name[api_path]]]]
keyword[def] identifier[agent_create] ( identifier[self] , identifier[data] ,** identifier[kwargs] ): literal[string] identifier[api_path] = literal[string] keyword[return] identifier[self] . identifier[call] ( identifier[api_path] , identifier[method] = literal[string] , identifier[data] = identifier[data] ,** identifier[kwargs] )
def agent_create(self, data, **kwargs): """https://developer.zendesk.com/rest_api/docs/chat/agents#create-agent""" api_path = '/api/v2/agents' return self.call(api_path, method='POST', data=data, **kwargs)
def set(self, value, pos=None): """Set one or many bits to 1 or 0. value -- If True bits are set to 1, otherwise they are set to 0. pos -- Either a single bit position or an iterable of bit positions. Negative numbers are treated in the same way as slice indices. Defaults to the entire bitstring. Raises IndexError if pos < -self.len or pos >= self.len. """ f = self._set if value else self._unset if pos is None: pos = xrange(self.len) try: length = self.len for p in pos: if p < 0: p += length if not 0 <= p < length: raise IndexError("Bit position {0} out of range.".format(p)) f(p) except TypeError: # Single pos if pos < 0: pos += self.len if not 0 <= pos < length: raise IndexError("Bit position {0} out of range.".format(pos)) f(pos)
def function[set, parameter[self, value, pos]]: constant[Set one or many bits to 1 or 0. value -- If True bits are set to 1, otherwise they are set to 0. pos -- Either a single bit position or an iterable of bit positions. Negative numbers are treated in the same way as slice indices. Defaults to the entire bitstring. Raises IndexError if pos < -self.len or pos >= self.len. ] variable[f] assign[=] <ast.IfExp object at 0x7da1b1043310> if compare[name[pos] is constant[None]] begin[:] variable[pos] assign[=] call[name[xrange], parameter[name[self].len]] <ast.Try object at 0x7da1b1043280>
keyword[def] identifier[set] ( identifier[self] , identifier[value] , identifier[pos] = keyword[None] ): literal[string] identifier[f] = identifier[self] . identifier[_set] keyword[if] identifier[value] keyword[else] identifier[self] . identifier[_unset] keyword[if] identifier[pos] keyword[is] keyword[None] : identifier[pos] = identifier[xrange] ( identifier[self] . identifier[len] ) keyword[try] : identifier[length] = identifier[self] . identifier[len] keyword[for] identifier[p] keyword[in] identifier[pos] : keyword[if] identifier[p] < literal[int] : identifier[p] += identifier[length] keyword[if] keyword[not] literal[int] <= identifier[p] < identifier[length] : keyword[raise] identifier[IndexError] ( literal[string] . identifier[format] ( identifier[p] )) identifier[f] ( identifier[p] ) keyword[except] identifier[TypeError] : keyword[if] identifier[pos] < literal[int] : identifier[pos] += identifier[self] . identifier[len] keyword[if] keyword[not] literal[int] <= identifier[pos] < identifier[length] : keyword[raise] identifier[IndexError] ( literal[string] . identifier[format] ( identifier[pos] )) identifier[f] ( identifier[pos] )
def set(self, value, pos=None): """Set one or many bits to 1 or 0. value -- If True bits are set to 1, otherwise they are set to 0. pos -- Either a single bit position or an iterable of bit positions. Negative numbers are treated in the same way as slice indices. Defaults to the entire bitstring. Raises IndexError if pos < -self.len or pos >= self.len. """ f = self._set if value else self._unset if pos is None: pos = xrange(self.len) # depends on [control=['if'], data=['pos']] try: length = self.len for p in pos: if p < 0: p += length # depends on [control=['if'], data=['p']] if not 0 <= p < length: raise IndexError('Bit position {0} out of range.'.format(p)) # depends on [control=['if'], data=[]] f(p) # depends on [control=['for'], data=['p']] # depends on [control=['try'], data=[]] except TypeError: # Single pos if pos < 0: pos += self.len # depends on [control=['if'], data=['pos']] if not 0 <= pos < length: raise IndexError('Bit position {0} out of range.'.format(pos)) # depends on [control=['if'], data=[]] f(pos) # depends on [control=['except'], data=[]]
def load_buffer_six_to_ten(self, out_buffer): """ Loads second program buffer (0x94) with everything but first three bytes and checksum """ offset = 3 for ind, step in enumerate(self.partial_steps_data(5)): struct.pack_into(b"< 2H", out_buffer, offset + ind*4, step[0], step[1]) struct.pack_into(b"< B x", out_buffer, 23, self._program_mode)
def function[load_buffer_six_to_ten, parameter[self, out_buffer]]: constant[ Loads second program buffer (0x94) with everything but first three bytes and checksum ] variable[offset] assign[=] constant[3] for taget[tuple[[<ast.Name object at 0x7da18f09d3f0>, <ast.Name object at 0x7da18f09dcf0>]]] in starred[call[name[enumerate], parameter[call[name[self].partial_steps_data, parameter[constant[5]]]]]] begin[:] call[name[struct].pack_into, parameter[constant[b'< 2H'], name[out_buffer], binary_operation[name[offset] + binary_operation[name[ind] * constant[4]]], call[name[step]][constant[0]], call[name[step]][constant[1]]]] call[name[struct].pack_into, parameter[constant[b'< B x'], name[out_buffer], constant[23], name[self]._program_mode]]
keyword[def] identifier[load_buffer_six_to_ten] ( identifier[self] , identifier[out_buffer] ): literal[string] identifier[offset] = literal[int] keyword[for] identifier[ind] , identifier[step] keyword[in] identifier[enumerate] ( identifier[self] . identifier[partial_steps_data] ( literal[int] )): identifier[struct] . identifier[pack_into] ( literal[string] , identifier[out_buffer] , identifier[offset] + identifier[ind] * literal[int] , identifier[step] [ literal[int] ], identifier[step] [ literal[int] ]) identifier[struct] . identifier[pack_into] ( literal[string] , identifier[out_buffer] , literal[int] , identifier[self] . identifier[_program_mode] )
def load_buffer_six_to_ten(self, out_buffer): """ Loads second program buffer (0x94) with everything but first three bytes and checksum """ offset = 3 for (ind, step) in enumerate(self.partial_steps_data(5)): struct.pack_into(b'< 2H', out_buffer, offset + ind * 4, step[0], step[1]) # depends on [control=['for'], data=[]] struct.pack_into(b'< B x', out_buffer, 23, self._program_mode)
def hard_target_update(self): """ Update model parameters every 'target_update' time steps See https://arxiv.org/abs/1312.5602 :param target_update: target update interval """ if self.steps % self.target_update == 0: self.target.load_state_dict(self.local.state_dict())
def function[hard_target_update, parameter[self]]: constant[ Update model parameters every 'target_update' time steps See https://arxiv.org/abs/1312.5602 :param target_update: target update interval ] if compare[binary_operation[name[self].steps <ast.Mod object at 0x7da2590d6920> name[self].target_update] equal[==] constant[0]] begin[:] call[name[self].target.load_state_dict, parameter[call[name[self].local.state_dict, parameter[]]]]
keyword[def] identifier[hard_target_update] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[steps] % identifier[self] . identifier[target_update] == literal[int] : identifier[self] . identifier[target] . identifier[load_state_dict] ( identifier[self] . identifier[local] . identifier[state_dict] ())
def hard_target_update(self): """ Update model parameters every 'target_update' time steps See https://arxiv.org/abs/1312.5602 :param target_update: target update interval """ if self.steps % self.target_update == 0: self.target.load_state_dict(self.local.state_dict()) # depends on [control=['if'], data=[]]
def regroup_vectorized(srccat, eps, far=None, dist=norm_dist): """ Regroup the islands of a catalog according to their normalised distance. Assumes srccat is recarray-like for efficiency. Return a list of island groups. Parameters ---------- srccat : np.rec.arry or pd.DataFrame Should have the following fields[units]: ra[deg],dec[deg], a[arcsec],b[arcsec],pa[deg], peak_flux[any] eps : float maximum normalised distance within which sources are considered to be grouped far : float (degrees) sources that are further than this distance apart will not be grouped, and will not be tested. Default = 0.5. dist : func a function that calculates the distance between a source and each element of an array of sources. Default = :func:`AegeanTools.cluster.norm_dist` Returns ------- islands : list of lists Each island contians integer indices for members from srccat (in descending dec order). """ if far is None: far = 0.5 # 10*max(a.a/3600 for a in srccat) # most negative declination first # XXX: kind='mergesort' ensures stable sorting for determinism. # Do we need this? order = np.argsort(srccat.dec, kind='mergesort')[::-1] # TODO: is it better to store groups as arrays even if appends are more # costly? groups = [[order[0]]] for idx in order[1:]: rec = srccat[idx] # TODO: Find out if groups are big enough for this to give us a speed # gain. If not, get distance to all entries in groups above # decmin simultaneously. 
decmin = rec.dec - far for group in reversed(groups): # when an island's largest (last) declination is smaller than # decmin, we don't need to look at any more islands if srccat.dec[group[-1]] < decmin: # new group groups.append([idx]) rafar = far / np.cos(np.radians(rec.dec)) group_recs = np.take(srccat, group, mode='clip') group_recs = group_recs[abs(rec.ra - group_recs.ra) <= rafar] if len(group_recs) and dist(rec, group_recs).min() < eps: group.append(idx) break else: # new group groups.append([idx]) # TODO?: a more numpy-like interface would return only an array providing # the mapping: # group_idx = np.empty(len(srccat), dtype=int) # for i, group in enumerate(groups): # group_idx[group] = i # return group_idx return groups
def function[regroup_vectorized, parameter[srccat, eps, far, dist]]: constant[ Regroup the islands of a catalog according to their normalised distance. Assumes srccat is recarray-like for efficiency. Return a list of island groups. Parameters ---------- srccat : np.rec.arry or pd.DataFrame Should have the following fields[units]: ra[deg],dec[deg], a[arcsec],b[arcsec],pa[deg], peak_flux[any] eps : float maximum normalised distance within which sources are considered to be grouped far : float (degrees) sources that are further than this distance apart will not be grouped, and will not be tested. Default = 0.5. dist : func a function that calculates the distance between a source and each element of an array of sources. Default = :func:`AegeanTools.cluster.norm_dist` Returns ------- islands : list of lists Each island contians integer indices for members from srccat (in descending dec order). ] if compare[name[far] is constant[None]] begin[:] variable[far] assign[=] constant[0.5] variable[order] assign[=] call[call[name[np].argsort, parameter[name[srccat].dec]]][<ast.Slice object at 0x7da20e957d60>] variable[groups] assign[=] list[[<ast.List object at 0x7da20e9573a0>]] for taget[name[idx]] in starred[call[name[order]][<ast.Slice object at 0x7da1b2346f20>]] begin[:] variable[rec] assign[=] call[name[srccat]][name[idx]] variable[decmin] assign[=] binary_operation[name[rec].dec - name[far]] for taget[name[group]] in starred[call[name[reversed], parameter[name[groups]]]] begin[:] if compare[call[name[srccat].dec][call[name[group]][<ast.UnaryOp object at 0x7da18f00ffd0>]] less[<] name[decmin]] begin[:] call[name[groups].append, parameter[list[[<ast.Name object at 0x7da18f00fa60>]]]] variable[rafar] assign[=] binary_operation[name[far] / call[name[np].cos, parameter[call[name[np].radians, parameter[name[rec].dec]]]]] variable[group_recs] assign[=] call[name[np].take, parameter[name[srccat], name[group]]] variable[group_recs] assign[=] 
call[name[group_recs]][compare[call[name[abs], parameter[binary_operation[name[rec].ra - name[group_recs].ra]]] less_or_equal[<=] name[rafar]]] if <ast.BoolOp object at 0x7da18f00f940> begin[:] call[name[group].append, parameter[name[idx]]] break return[name[groups]]
keyword[def] identifier[regroup_vectorized] ( identifier[srccat] , identifier[eps] , identifier[far] = keyword[None] , identifier[dist] = identifier[norm_dist] ): literal[string] keyword[if] identifier[far] keyword[is] keyword[None] : identifier[far] = literal[int] identifier[order] = identifier[np] . identifier[argsort] ( identifier[srccat] . identifier[dec] , identifier[kind] = literal[string] )[::- literal[int] ] identifier[groups] =[[ identifier[order] [ literal[int] ]]] keyword[for] identifier[idx] keyword[in] identifier[order] [ literal[int] :]: identifier[rec] = identifier[srccat] [ identifier[idx] ] identifier[decmin] = identifier[rec] . identifier[dec] - identifier[far] keyword[for] identifier[group] keyword[in] identifier[reversed] ( identifier[groups] ): keyword[if] identifier[srccat] . identifier[dec] [ identifier[group] [- literal[int] ]]< identifier[decmin] : identifier[groups] . identifier[append] ([ identifier[idx] ]) identifier[rafar] = identifier[far] / identifier[np] . identifier[cos] ( identifier[np] . identifier[radians] ( identifier[rec] . identifier[dec] )) identifier[group_recs] = identifier[np] . identifier[take] ( identifier[srccat] , identifier[group] , identifier[mode] = literal[string] ) identifier[group_recs] = identifier[group_recs] [ identifier[abs] ( identifier[rec] . identifier[ra] - identifier[group_recs] . identifier[ra] )<= identifier[rafar] ] keyword[if] identifier[len] ( identifier[group_recs] ) keyword[and] identifier[dist] ( identifier[rec] , identifier[group_recs] ). identifier[min] ()< identifier[eps] : identifier[group] . identifier[append] ( identifier[idx] ) keyword[break] keyword[else] : identifier[groups] . identifier[append] ([ identifier[idx] ]) keyword[return] identifier[groups]
def regroup_vectorized(srccat, eps, far=None, dist=norm_dist): """ Regroup the islands of a catalog according to their normalised distance. Assumes srccat is recarray-like for efficiency. Return a list of island groups. Parameters ---------- srccat : np.rec.arry or pd.DataFrame Should have the following fields[units]: ra[deg],dec[deg], a[arcsec],b[arcsec],pa[deg], peak_flux[any] eps : float maximum normalised distance within which sources are considered to be grouped far : float (degrees) sources that are further than this distance apart will not be grouped, and will not be tested. Default = 0.5. dist : func a function that calculates the distance between a source and each element of an array of sources. Default = :func:`AegeanTools.cluster.norm_dist` Returns ------- islands : list of lists Each island contians integer indices for members from srccat (in descending dec order). """ if far is None: far = 0.5 # 10*max(a.a/3600 for a in srccat) # depends on [control=['if'], data=['far']] # most negative declination first # XXX: kind='mergesort' ensures stable sorting for determinism. # Do we need this? order = np.argsort(srccat.dec, kind='mergesort')[::-1] # TODO: is it better to store groups as arrays even if appends are more # costly? groups = [[order[0]]] for idx in order[1:]: rec = srccat[idx] # TODO: Find out if groups are big enough for this to give us a speed # gain. If not, get distance to all entries in groups above # decmin simultaneously. 
decmin = rec.dec - far for group in reversed(groups): # when an island's largest (last) declination is smaller than # decmin, we don't need to look at any more islands if srccat.dec[group[-1]] < decmin: # new group groups.append([idx]) # depends on [control=['if'], data=[]] rafar = far / np.cos(np.radians(rec.dec)) group_recs = np.take(srccat, group, mode='clip') group_recs = group_recs[abs(rec.ra - group_recs.ra) <= rafar] if len(group_recs) and dist(rec, group_recs).min() < eps: group.append(idx) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['group']] else: # new group groups.append([idx]) # depends on [control=['for'], data=['idx']] # TODO?: a more numpy-like interface would return only an array providing # the mapping: # group_idx = np.empty(len(srccat), dtype=int) # for i, group in enumerate(groups): # group_idx[group] = i # return group_idx return groups
def log_player_plays_road_builder(self, player, location1, location2): """ :param player: catan.game.Player :param location1: string, see hexgrid.location() :param location2: string, see hexgrid.location() """ self._logln('{0} plays road builder, builds at {1} and {2}'.format( player.color, location1, location2 ))
def function[log_player_plays_road_builder, parameter[self, player, location1, location2]]: constant[ :param player: catan.game.Player :param location1: string, see hexgrid.location() :param location2: string, see hexgrid.location() ] call[name[self]._logln, parameter[call[constant[{0} plays road builder, builds at {1} and {2}].format, parameter[name[player].color, name[location1], name[location2]]]]]
keyword[def] identifier[log_player_plays_road_builder] ( identifier[self] , identifier[player] , identifier[location1] , identifier[location2] ): literal[string] identifier[self] . identifier[_logln] ( literal[string] . identifier[format] ( identifier[player] . identifier[color] , identifier[location1] , identifier[location2] ))
def log_player_plays_road_builder(self, player, location1, location2): """ :param player: catan.game.Player :param location1: string, see hexgrid.location() :param location2: string, see hexgrid.location() """ self._logln('{0} plays road builder, builds at {1} and {2}'.format(player.color, location1, location2))
def setlocale(name): """ Context manager with threading lock for set locale on enter, and set it back to original state on exit. :: >>> with setlocale("C"): ... ... """ with LOCALE_LOCK: old_locale = locale.setlocale(locale.LC_ALL) try: yield locale.setlocale(locale.LC_ALL, name) finally: locale.setlocale(locale.LC_ALL, old_locale)
def function[setlocale, parameter[name]]: constant[ Context manager with threading lock for set locale on enter, and set it back to original state on exit. :: >>> with setlocale("C"): ... ... ] with name[LOCALE_LOCK] begin[:] variable[old_locale] assign[=] call[name[locale].setlocale, parameter[name[locale].LC_ALL]] <ast.Try object at 0x7da1b00b5420>
keyword[def] identifier[setlocale] ( identifier[name] ): literal[string] keyword[with] identifier[LOCALE_LOCK] : identifier[old_locale] = identifier[locale] . identifier[setlocale] ( identifier[locale] . identifier[LC_ALL] ) keyword[try] : keyword[yield] identifier[locale] . identifier[setlocale] ( identifier[locale] . identifier[LC_ALL] , identifier[name] ) keyword[finally] : identifier[locale] . identifier[setlocale] ( identifier[locale] . identifier[LC_ALL] , identifier[old_locale] )
def setlocale(name): """ Context manager with threading lock for set locale on enter, and set it back to original state on exit. :: >>> with setlocale("C"): ... ... """ with LOCALE_LOCK: old_locale = locale.setlocale(locale.LC_ALL) try: yield locale.setlocale(locale.LC_ALL, name) # depends on [control=['try'], data=[]] finally: locale.setlocale(locale.LC_ALL, old_locale) # depends on [control=['with'], data=[]]
def check_member_pool(lb, member, pool_name): ''' Check a pool member exists in a specific pool CLI Examples: .. code-block:: bash salt-run f5.check_member_pool load_balancer 10.0.0.1 my_pool ''' if __opts__['load_balancers'].get(lb, None): (username, password) = list(__opts__['load_balancers'][lb].values()) else: raise Exception('Unable to find `{0}` load balancer'.format(lb)) F5 = F5Mgmt(lb, username, password) return F5.check_member_pool(member, pool_name)
def function[check_member_pool, parameter[lb, member, pool_name]]: constant[ Check a pool member exists in a specific pool CLI Examples: .. code-block:: bash salt-run f5.check_member_pool load_balancer 10.0.0.1 my_pool ] if call[call[name[__opts__]][constant[load_balancers]].get, parameter[name[lb], constant[None]]] begin[:] <ast.Tuple object at 0x7da1b1c22740> assign[=] call[name[list], parameter[call[call[call[name[__opts__]][constant[load_balancers]]][name[lb]].values, parameter[]]]] variable[F5] assign[=] call[name[F5Mgmt], parameter[name[lb], name[username], name[password]]] return[call[name[F5].check_member_pool, parameter[name[member], name[pool_name]]]]
keyword[def] identifier[check_member_pool] ( identifier[lb] , identifier[member] , identifier[pool_name] ): literal[string] keyword[if] identifier[__opts__] [ literal[string] ]. identifier[get] ( identifier[lb] , keyword[None] ): ( identifier[username] , identifier[password] )= identifier[list] ( identifier[__opts__] [ literal[string] ][ identifier[lb] ]. identifier[values] ()) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[lb] )) identifier[F5] = identifier[F5Mgmt] ( identifier[lb] , identifier[username] , identifier[password] ) keyword[return] identifier[F5] . identifier[check_member_pool] ( identifier[member] , identifier[pool_name] )
def check_member_pool(lb, member, pool_name): """ Check a pool member exists in a specific pool CLI Examples: .. code-block:: bash salt-run f5.check_member_pool load_balancer 10.0.0.1 my_pool """ if __opts__['load_balancers'].get(lb, None): (username, password) = list(__opts__['load_balancers'][lb].values()) # depends on [control=['if'], data=[]] else: raise Exception('Unable to find `{0}` load balancer'.format(lb)) F5 = F5Mgmt(lb, username, password) return F5.check_member_pool(member, pool_name)
def query(self, sql: str, args: tuple = None): """Execute a SQL query with a return value.""" with self._cursor() as cursor: log.debug('Running SQL: ' + str((sql, args))) cursor.execute(sql, args) return cursor.fetchall()
def function[query, parameter[self, sql, args]]: constant[Execute a SQL query with a return value.] with call[name[self]._cursor, parameter[]] begin[:] call[name[log].debug, parameter[binary_operation[constant[Running SQL: ] + call[name[str], parameter[tuple[[<ast.Name object at 0x7da18fe92dd0>, <ast.Name object at 0x7da18fe90040>]]]]]]] call[name[cursor].execute, parameter[name[sql], name[args]]] return[call[name[cursor].fetchall, parameter[]]]
keyword[def] identifier[query] ( identifier[self] , identifier[sql] : identifier[str] , identifier[args] : identifier[tuple] = keyword[None] ): literal[string] keyword[with] identifier[self] . identifier[_cursor] () keyword[as] identifier[cursor] : identifier[log] . identifier[debug] ( literal[string] + identifier[str] (( identifier[sql] , identifier[args] ))) identifier[cursor] . identifier[execute] ( identifier[sql] , identifier[args] ) keyword[return] identifier[cursor] . identifier[fetchall] ()
def query(self, sql: str, args: tuple=None): """Execute a SQL query with a return value.""" with self._cursor() as cursor: log.debug('Running SQL: ' + str((sql, args))) cursor.execute(sql, args) return cursor.fetchall() # depends on [control=['with'], data=['cursor']]
def calculate_mean_vectors(X, y): """Calculates the mean samples per class Parameters: ----------- X : array-like, shape (m, n) - the samples y : array-like, shape (m, ) - the class labels Returns: -------- mean_vectors : array-like, shape (k, ) Those are the mean samples from each k classes. """ return [np.mean(X[y == cl, :], axis=0) for cl in np.unique(y)]
def function[calculate_mean_vectors, parameter[X, y]]: constant[Calculates the mean samples per class Parameters: ----------- X : array-like, shape (m, n) - the samples y : array-like, shape (m, ) - the class labels Returns: -------- mean_vectors : array-like, shape (k, ) Those are the mean samples from each k classes. ] return[<ast.ListComp object at 0x7da1b209dba0>]
keyword[def] identifier[calculate_mean_vectors] ( identifier[X] , identifier[y] ): literal[string] keyword[return] [ identifier[np] . identifier[mean] ( identifier[X] [ identifier[y] == identifier[cl] ,:], identifier[axis] = literal[int] ) keyword[for] identifier[cl] keyword[in] identifier[np] . identifier[unique] ( identifier[y] )]
def calculate_mean_vectors(X, y): """Calculates the mean samples per class Parameters: ----------- X : array-like, shape (m, n) - the samples y : array-like, shape (m, ) - the class labels Returns: -------- mean_vectors : array-like, shape (k, ) Those are the mean samples from each k classes. """ return [np.mean(X[y == cl, :], axis=0) for cl in np.unique(y)]
def visit(self, node): """Visit a node. This method is largely modelled after the ast.NodeTransformer class. Args: node: The node to visit. Returns: A tuple of the primal and adjoint, each of which is a node or a list of nodes. """ method = 'visit_' + node.__class__.__name__ if not hasattr(self, method): raise ValueError('Unknown node type: %s' % node.__class__.__name__) visitor = getattr(self, method) # If this node is a statement, inform all child nodes what the active # variables in this statement are if anno.hasanno(node, 'active_in'): self.active_variables = anno.getanno(node, 'active_in') pri, adj = visitor(node) # Annotate primal and adjoint statements if isinstance(pri, gast.AST): anno.setdefaultanno(pri, 'adj', adj) else: for node in pri: anno.setdefaultanno(node, 'adj', adj) if isinstance(adj, gast.AST): anno.setdefaultanno(adj, 'pri', pri) else: for node in adj: anno.setdefaultanno(node, 'pri', pri) return pri, adj
def function[visit, parameter[self, node]]: constant[Visit a node. This method is largely modelled after the ast.NodeTransformer class. Args: node: The node to visit. Returns: A tuple of the primal and adjoint, each of which is a node or a list of nodes. ] variable[method] assign[=] binary_operation[constant[visit_] + name[node].__class__.__name__] if <ast.UnaryOp object at 0x7da18bccbfa0> begin[:] <ast.Raise object at 0x7da18bcca1a0> variable[visitor] assign[=] call[name[getattr], parameter[name[self], name[method]]] if call[name[anno].hasanno, parameter[name[node], constant[active_in]]] begin[:] name[self].active_variables assign[=] call[name[anno].getanno, parameter[name[node], constant[active_in]]] <ast.Tuple object at 0x7da18bcc91e0> assign[=] call[name[visitor], parameter[name[node]]] if call[name[isinstance], parameter[name[pri], name[gast].AST]] begin[:] call[name[anno].setdefaultanno, parameter[name[pri], constant[adj], name[adj]]] if call[name[isinstance], parameter[name[adj], name[gast].AST]] begin[:] call[name[anno].setdefaultanno, parameter[name[adj], constant[pri], name[pri]]] return[tuple[[<ast.Name object at 0x7da20cabdb70>, <ast.Name object at 0x7da20cabc2e0>]]]
keyword[def] identifier[visit] ( identifier[self] , identifier[node] ): literal[string] identifier[method] = literal[string] + identifier[node] . identifier[__class__] . identifier[__name__] keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , identifier[method] ): keyword[raise] identifier[ValueError] ( literal[string] % identifier[node] . identifier[__class__] . identifier[__name__] ) identifier[visitor] = identifier[getattr] ( identifier[self] , identifier[method] ) keyword[if] identifier[anno] . identifier[hasanno] ( identifier[node] , literal[string] ): identifier[self] . identifier[active_variables] = identifier[anno] . identifier[getanno] ( identifier[node] , literal[string] ) identifier[pri] , identifier[adj] = identifier[visitor] ( identifier[node] ) keyword[if] identifier[isinstance] ( identifier[pri] , identifier[gast] . identifier[AST] ): identifier[anno] . identifier[setdefaultanno] ( identifier[pri] , literal[string] , identifier[adj] ) keyword[else] : keyword[for] identifier[node] keyword[in] identifier[pri] : identifier[anno] . identifier[setdefaultanno] ( identifier[node] , literal[string] , identifier[adj] ) keyword[if] identifier[isinstance] ( identifier[adj] , identifier[gast] . identifier[AST] ): identifier[anno] . identifier[setdefaultanno] ( identifier[adj] , literal[string] , identifier[pri] ) keyword[else] : keyword[for] identifier[node] keyword[in] identifier[adj] : identifier[anno] . identifier[setdefaultanno] ( identifier[node] , literal[string] , identifier[pri] ) keyword[return] identifier[pri] , identifier[adj]
def visit(self, node): """Visit a node. This method is largely modelled after the ast.NodeTransformer class. Args: node: The node to visit. Returns: A tuple of the primal and adjoint, each of which is a node or a list of nodes. """ method = 'visit_' + node.__class__.__name__ if not hasattr(self, method): raise ValueError('Unknown node type: %s' % node.__class__.__name__) # depends on [control=['if'], data=[]] visitor = getattr(self, method) # If this node is a statement, inform all child nodes what the active # variables in this statement are if anno.hasanno(node, 'active_in'): self.active_variables = anno.getanno(node, 'active_in') # depends on [control=['if'], data=[]] (pri, adj) = visitor(node) # Annotate primal and adjoint statements if isinstance(pri, gast.AST): anno.setdefaultanno(pri, 'adj', adj) # depends on [control=['if'], data=[]] else: for node in pri: anno.setdefaultanno(node, 'adj', adj) # depends on [control=['for'], data=['node']] if isinstance(adj, gast.AST): anno.setdefaultanno(adj, 'pri', pri) # depends on [control=['if'], data=[]] else: for node in adj: anno.setdefaultanno(node, 'pri', pri) # depends on [control=['for'], data=['node']] return (pri, adj)
def not_all(*validation_func,  # type: ValidationFuncs
            **kwargs
            ):
    # type: (...) -> Callable
    """
    An alias for not_(and_(validators)).

    :param validation_func: the base validation function or list of base validation functions to use. A callable, a
        tuple(callable, help_msg_str), a tuple(callable, failure_type), or a list of several such elements. Nested lists
        are supported and indicate an implicit `and_` (such as the main list). Tuples indicate an implicit
        `_failure_raiser`. [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can be used instead
        of callables, they will be transformed to functions automatically.
    :param catch_all: an optional boolean flag. By default, only Failure are silently caught and turned into
        a 'ok' result. Turning this flag to True will assume that all exceptions should be caught and turned to a
        'ok' result
    :return:
    """
    catch_all = pop_kwargs(kwargs, [('catch_all', False)])

    # and_ accepts a single function or a list; when given a single
    # callable it returns it unwrapped, so this covers both cases.
    return not_(and_(*validation_func), catch_all=catch_all)
def function[not_all, parameter[]]: constant[ An alias for not_(and_(validators)). :param validation_func: the base validation function or list of base validation functions to use. A callable, a tuple(callable, help_msg_str), a tuple(callable, failure_type), or a list of several such elements. Nested lists are supported and indicate an implicit `and_` (such as the main list). Tuples indicate an implicit `_failure_raiser`. [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can be used instead of callables, they will be transformed to functions automatically. :param catch_all: an optional boolean flag. By default, only Failure are silently caught and turned into a 'ok' result. Turning this flag to True will assume that all exceptions should be caught and turned to a 'ok' result :return: ] variable[catch_all] assign[=] call[name[pop_kwargs], parameter[name[kwargs], list[[<ast.Tuple object at 0x7da1b0f0d360>]]]] variable[main_validator] assign[=] call[name[and_], parameter[<ast.Starred object at 0x7da1b0f0dff0>]] return[call[name[not_], parameter[name[main_validator]]]]
keyword[def] identifier[not_all] (* identifier[validation_func] , ** identifier[kwargs] ): literal[string] identifier[catch_all] = identifier[pop_kwargs] ( identifier[kwargs] ,[( literal[string] , keyword[False] )]) identifier[main_validator] = identifier[and_] (* identifier[validation_func] ) keyword[return] identifier[not_] ( identifier[main_validator] , identifier[catch_all] = identifier[catch_all] )
def not_all(*validation_func, **kwargs): # type: ValidationFuncs # type: (...) -> Callable "\n An alias for not_(and_(validators)).\n\n :param validation_func: the base validation function or list of base validation functions to use. A callable, a\n tuple(callable, help_msg_str), a tuple(callable, failure_type), or a list of several such elements. Nested lists\n are supported and indicate an implicit `and_` (such as the main list). Tuples indicate an implicit\n `_failure_raiser`. [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can be used instead\n of callables, they will be transformed to functions automatically.\n :param catch_all: an optional boolean flag. By default, only Failure are silently caught and turned into\n a 'ok' result. Turning this flag to True will assume that all exceptions should be caught and turned to a\n 'ok' result\n :return:\n " catch_all = pop_kwargs(kwargs, [('catch_all', False)]) # in case this is a list, create a 'and_' around it (otherwise and_ will return the validation function without # wrapping it) main_validator = and_(*validation_func) return not_(main_validator, catch_all=catch_all)
def fnmatches(entry, *pattern_list):
    """
    Return True if ``entry`` matches any of the glob patterns,
    False otherwise.

    :param entry: the string (typically a file name) to test
    :param pattern_list: zero or more glob patterns; falsy patterns
        (``None``, ``""``) are ignored
    :return: bool
    """
    # any() short-circuits on the first match, exactly like the
    # original explicit loop; falsy patterns are skipped before
    # fnmatch is called.
    return any(pattern and fnmatch(entry, pattern)
               for pattern in pattern_list)
def function[fnmatches, parameter[entry]]: constant[ returns true if entry matches any of the glob patterns, false otherwise ] for taget[name[pattern]] in starred[name[pattern_list]] begin[:] if <ast.BoolOp object at 0x7da1b0b19150> begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[fnmatches] ( identifier[entry] ,* identifier[pattern_list] ): literal[string] keyword[for] identifier[pattern] keyword[in] identifier[pattern_list] : keyword[if] identifier[pattern] keyword[and] identifier[fnmatch] ( identifier[entry] , identifier[pattern] ): keyword[return] keyword[True] keyword[return] keyword[False]
def fnmatches(entry, *pattern_list): """ returns true if entry matches any of the glob patterns, false otherwise """ for pattern in pattern_list: if pattern and fnmatch(entry, pattern): return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pattern']] return False
def compute_probab_ratios(p_new, p_old, actions, reward_mask):
  """Computes the probability ratios for each time-step in a trajectory.

  Args:
    p_new: ndarray of shape [B, T+1, A] of the log-probabilities that the
      policy network assigns to all the actions at each time-step in each
      batch using the old parameters.
    p_old: ndarray of shape [B, T+1, A], same as above, but using old policy
      network parameters.
    actions: ndarray of shape [B, T] where each element is from [0, A).
    reward_mask: ndarray of shape [B, T] masking over probabilities.

  Returns:
    probab_ratios: ndarray of shape [B, T], where
    probab_ratios_{b,t} = p_new_{b,t,action_{b,t}} / p_old_{b,t,action_{b,t}}
  """
  batch, timesteps = actions.shape  # pylint: disable=invalid-name
  logits_prefix = (batch, timesteps + 1)
  assert logits_prefix == p_old.shape[:2]
  assert logits_prefix == p_new.shape[:2]

  logp_old = chosen_probabs(p_old, actions)
  logp_new = chosen_probabs(p_new, actions)
  assert logp_old.shape == (batch, timesteps)
  assert logp_new.shape == (batch, timesteps)

  # These are log-probabilities, so the ratio is exp of the difference;
  # masked-out steps are zeroed.
  probab_ratios = np.exp(logp_new - logp_old) * reward_mask
  assert probab_ratios.shape == (batch, timesteps)
  return probab_ratios
def function[compute_probab_ratios, parameter[p_new, p_old, actions, reward_mask]]: constant[Computes the probability ratios for each time-step in a trajectory. Args: p_new: ndarray of shape [B, T+1, A] of the log-probabilities that the policy network assigns to all the actions at each time-step in each batch using the old parameters. p_old: ndarray of shape [B, T+1, A], same as above, but using old policy network parameters. actions: ndarray of shape [B, T] where each element is from [0, A). reward_mask: ndarray of shape [B, T] masking over probabilities. Returns: probab_ratios: ndarray of shape [B, T], where probab_ratios_{b,t} = p_new_{b,t,action_{b,t}} / p_old_{b,t,action_{b,t}} ] <ast.Tuple object at 0x7da1b1e11030> assign[=] name[actions].shape assert[compare[tuple[[<ast.Name object at 0x7da1b1e12470>, <ast.BinOp object at 0x7da1b1e11000>]] equal[==] call[name[p_old].shape][<ast.Slice object at 0x7da1b1e101f0>]]] assert[compare[tuple[[<ast.Name object at 0x7da1b1e12bc0>, <ast.BinOp object at 0x7da1b1e13160>]] equal[==] call[name[p_new].shape][<ast.Slice object at 0x7da1b1e123b0>]]] variable[logp_old] assign[=] call[name[chosen_probabs], parameter[name[p_old], name[actions]]] variable[logp_new] assign[=] call[name[chosen_probabs], parameter[name[p_new], name[actions]]] assert[compare[tuple[[<ast.Name object at 0x7da1b2062560>, <ast.Name object at 0x7da1b2062050>]] equal[==] name[logp_old].shape]] assert[compare[tuple[[<ast.Name object at 0x7da1b2062620>, <ast.Name object at 0x7da1b2061f30>]] equal[==] name[logp_new].shape]] variable[probab_ratios] assign[=] binary_operation[call[name[np].exp, parameter[binary_operation[name[logp_new] - name[logp_old]]]] * name[reward_mask]] assert[compare[tuple[[<ast.Name object at 0x7da1b2061810>, <ast.Name object at 0x7da1b2061420>]] equal[==] name[probab_ratios].shape]] return[name[probab_ratios]]
keyword[def] identifier[compute_probab_ratios] ( identifier[p_new] , identifier[p_old] , identifier[actions] , identifier[reward_mask] ): literal[string] identifier[B] , identifier[T] = identifier[actions] . identifier[shape] keyword[assert] ( identifier[B] , identifier[T] + literal[int] )== identifier[p_old] . identifier[shape] [: literal[int] ] keyword[assert] ( identifier[B] , identifier[T] + literal[int] )== identifier[p_new] . identifier[shape] [: literal[int] ] identifier[logp_old] = identifier[chosen_probabs] ( identifier[p_old] , identifier[actions] ) identifier[logp_new] = identifier[chosen_probabs] ( identifier[p_new] , identifier[actions] ) keyword[assert] ( identifier[B] , identifier[T] )== identifier[logp_old] . identifier[shape] keyword[assert] ( identifier[B] , identifier[T] )== identifier[logp_new] . identifier[shape] identifier[probab_ratios] = identifier[np] . identifier[exp] ( identifier[logp_new] - identifier[logp_old] )* identifier[reward_mask] keyword[assert] ( identifier[B] , identifier[T] )== identifier[probab_ratios] . identifier[shape] keyword[return] identifier[probab_ratios]
def compute_probab_ratios(p_new, p_old, actions, reward_mask): """Computes the probability ratios for each time-step in a trajectory. Args: p_new: ndarray of shape [B, T+1, A] of the log-probabilities that the policy network assigns to all the actions at each time-step in each batch using the old parameters. p_old: ndarray of shape [B, T+1, A], same as above, but using old policy network parameters. actions: ndarray of shape [B, T] where each element is from [0, A). reward_mask: ndarray of shape [B, T] masking over probabilities. Returns: probab_ratios: ndarray of shape [B, T], where probab_ratios_{b,t} = p_new_{b,t,action_{b,t}} / p_old_{b,t,action_{b,t}} """ (B, T) = actions.shape # pylint: disable=invalid-name assert (B, T + 1) == p_old.shape[:2] assert (B, T + 1) == p_new.shape[:2] logp_old = chosen_probabs(p_old, actions) logp_new = chosen_probabs(p_new, actions) assert (B, T) == logp_old.shape assert (B, T) == logp_new.shape # Since these are log-probabilities, we just subtract them. probab_ratios = np.exp(logp_new - logp_old) * reward_mask assert (B, T) == probab_ratios.shape return probab_ratios
def export(self, cert, key, type=FILETYPE_PEM, days=100, digest=_UNSPECIFIED):
    """
    Export the CRL as a string.

    :param X509 cert: The certificate used to sign the CRL.
    :param PKey key: The key used to sign the CRL.
    :param int type: The export format, either :data:`FILETYPE_PEM`,
        :data:`FILETYPE_ASN1`, or :data:`FILETYPE_TEXT`.
    :param int days: The number of days until the next update of this CRL.
    :param bytes digest: The name of the message digest to use (eg
        ``b"sha256"``).
    :rtype: bytes
    :raises TypeError: if an argument has the wrong type or ``digest``
        is omitted.
    :raises ValueError: if ``digest`` does not name a known digest.
    """
    if not isinstance(cert, X509):
        raise TypeError("cert must be an X509 instance")
    if not isinstance(key, PKey):
        raise TypeError("key must be a PKey instance")
    if not isinstance(type, int):
        raise TypeError("type must be an integer")
    if digest is _UNSPECIFIED:
        raise TypeError("digest must be provided")

    digest_obj = _lib.EVP_get_digestbyname(digest)
    if digest_obj == _ffi.NULL:
        raise ValueError("No such digest method")

    # The original allocated a memory BIO here that was never used and
    # never freed (dump_crl creates its own); removed to stop the
    # per-call leak.

    # A scratch time object to give different values to different CRL
    # fields.
    # NOTE(review): `sometime` is not explicitly freed either; confirm
    # whether it should be wrapped with _ffi.gc(_lib.ASN1_TIME_free).
    sometime = _lib.ASN1_TIME_new()
    _openssl_assert(sometime != _ffi.NULL)

    # lastUpdate = now; nextUpdate = now + `days`.
    _lib.X509_gmtime_adj(sometime, 0)
    _lib.X509_CRL_set_lastUpdate(self._crl, sometime)

    _lib.X509_gmtime_adj(sometime, days * 24 * 60 * 60)
    _lib.X509_CRL_set_nextUpdate(self._crl, sometime)

    _lib.X509_CRL_set_issuer_name(
        self._crl, _lib.X509_get_subject_name(cert._x509)
    )

    sign_result = _lib.X509_CRL_sign(self._crl, key._pkey, digest_obj)
    if not sign_result:
        _raise_current_error()

    return dump_crl(type, self)
def function[export, parameter[self, cert, key, type, days, digest]]: constant[ Export the CRL as a string. :param X509 cert: The certificate used to sign the CRL. :param PKey key: The key used to sign the CRL. :param int type: The export format, either :data:`FILETYPE_PEM`, :data:`FILETYPE_ASN1`, or :data:`FILETYPE_TEXT`. :param int days: The number of days until the next update of this CRL. :param bytes digest: The name of the message digest to use (eg ``b"sha256"``). :rtype: bytes ] if <ast.UnaryOp object at 0x7da1b03156c0> begin[:] <ast.Raise object at 0x7da1b03156f0> if <ast.UnaryOp object at 0x7da1b0314ac0> begin[:] <ast.Raise object at 0x7da1b0316f20> if <ast.UnaryOp object at 0x7da1b0315c90> begin[:] <ast.Raise object at 0x7da1b03155d0> if compare[name[digest] is name[_UNSPECIFIED]] begin[:] <ast.Raise object at 0x7da1b028f310> variable[digest_obj] assign[=] call[name[_lib].EVP_get_digestbyname, parameter[name[digest]]] if compare[name[digest_obj] equal[==] name[_ffi].NULL] begin[:] <ast.Raise object at 0x7da1b028f9d0> variable[bio] assign[=] call[name[_lib].BIO_new, parameter[call[name[_lib].BIO_s_mem, parameter[]]]] call[name[_openssl_assert], parameter[compare[name[bio] not_equal[!=] name[_ffi].NULL]]] variable[sometime] assign[=] call[name[_lib].ASN1_TIME_new, parameter[]] call[name[_openssl_assert], parameter[compare[name[sometime] not_equal[!=] name[_ffi].NULL]]] call[name[_lib].X509_gmtime_adj, parameter[name[sometime], constant[0]]] call[name[_lib].X509_CRL_set_lastUpdate, parameter[name[self]._crl, name[sometime]]] call[name[_lib].X509_gmtime_adj, parameter[name[sometime], binary_operation[binary_operation[binary_operation[name[days] * constant[24]] * constant[60]] * constant[60]]]] call[name[_lib].X509_CRL_set_nextUpdate, parameter[name[self]._crl, name[sometime]]] call[name[_lib].X509_CRL_set_issuer_name, parameter[name[self]._crl, call[name[_lib].X509_get_subject_name, parameter[name[cert]._x509]]]] variable[sign_result] assign[=] 
call[name[_lib].X509_CRL_sign, parameter[name[self]._crl, name[key]._pkey, name[digest_obj]]] if <ast.UnaryOp object at 0x7da1b03160e0> begin[:] call[name[_raise_current_error], parameter[]] return[call[name[dump_crl], parameter[name[type], name[self]]]]
keyword[def] identifier[export] ( identifier[self] , identifier[cert] , identifier[key] , identifier[type] = identifier[FILETYPE_PEM] , identifier[days] = literal[int] , identifier[digest] = identifier[_UNSPECIFIED] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[cert] , identifier[X509] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[key] , identifier[PKey] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[type] , identifier[int] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] identifier[digest] keyword[is] identifier[_UNSPECIFIED] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[digest_obj] = identifier[_lib] . identifier[EVP_get_digestbyname] ( identifier[digest] ) keyword[if] identifier[digest_obj] == identifier[_ffi] . identifier[NULL] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[bio] = identifier[_lib] . identifier[BIO_new] ( identifier[_lib] . identifier[BIO_s_mem] ()) identifier[_openssl_assert] ( identifier[bio] != identifier[_ffi] . identifier[NULL] ) identifier[sometime] = identifier[_lib] . identifier[ASN1_TIME_new] () identifier[_openssl_assert] ( identifier[sometime] != identifier[_ffi] . identifier[NULL] ) identifier[_lib] . identifier[X509_gmtime_adj] ( identifier[sometime] , literal[int] ) identifier[_lib] . identifier[X509_CRL_set_lastUpdate] ( identifier[self] . identifier[_crl] , identifier[sometime] ) identifier[_lib] . identifier[X509_gmtime_adj] ( identifier[sometime] , identifier[days] * literal[int] * literal[int] * literal[int] ) identifier[_lib] . identifier[X509_CRL_set_nextUpdate] ( identifier[self] . identifier[_crl] , identifier[sometime] ) identifier[_lib] . identifier[X509_CRL_set_issuer_name] ( identifier[self] . identifier[_crl] , identifier[_lib] . 
identifier[X509_get_subject_name] ( identifier[cert] . identifier[_x509] ) ) identifier[sign_result] = identifier[_lib] . identifier[X509_CRL_sign] ( identifier[self] . identifier[_crl] , identifier[key] . identifier[_pkey] , identifier[digest_obj] ) keyword[if] keyword[not] identifier[sign_result] : identifier[_raise_current_error] () keyword[return] identifier[dump_crl] ( identifier[type] , identifier[self] )
def export(self, cert, key, type=FILETYPE_PEM, days=100, digest=_UNSPECIFIED): """ Export the CRL as a string. :param X509 cert: The certificate used to sign the CRL. :param PKey key: The key used to sign the CRL. :param int type: The export format, either :data:`FILETYPE_PEM`, :data:`FILETYPE_ASN1`, or :data:`FILETYPE_TEXT`. :param int days: The number of days until the next update of this CRL. :param bytes digest: The name of the message digest to use (eg ``b"sha256"``). :rtype: bytes """ if not isinstance(cert, X509): raise TypeError('cert must be an X509 instance') # depends on [control=['if'], data=[]] if not isinstance(key, PKey): raise TypeError('key must be a PKey instance') # depends on [control=['if'], data=[]] if not isinstance(type, int): raise TypeError('type must be an integer') # depends on [control=['if'], data=[]] if digest is _UNSPECIFIED: raise TypeError('digest must be provided') # depends on [control=['if'], data=[]] digest_obj = _lib.EVP_get_digestbyname(digest) if digest_obj == _ffi.NULL: raise ValueError('No such digest method') # depends on [control=['if'], data=[]] bio = _lib.BIO_new(_lib.BIO_s_mem()) _openssl_assert(bio != _ffi.NULL) # A scratch time object to give different values to different CRL # fields sometime = _lib.ASN1_TIME_new() _openssl_assert(sometime != _ffi.NULL) _lib.X509_gmtime_adj(sometime, 0) _lib.X509_CRL_set_lastUpdate(self._crl, sometime) _lib.X509_gmtime_adj(sometime, days * 24 * 60 * 60) _lib.X509_CRL_set_nextUpdate(self._crl, sometime) _lib.X509_CRL_set_issuer_name(self._crl, _lib.X509_get_subject_name(cert._x509)) sign_result = _lib.X509_CRL_sign(self._crl, key._pkey, digest_obj) if not sign_result: _raise_current_error() # depends on [control=['if'], data=[]] return dump_crl(type, self)
def _proxy(self):
    """
    Generate an instance context for the instance, the context is capable of
    performing various actions. All instance actions are proxied to the context

    :returns: CommandContext for this CommandInstance
    :rtype: twilio.rest.preview.wireless.command.CommandContext
    """
    # Lazily construct the context on first access and memoize it.
    if self._context is None:
        context = CommandContext(self._version, sid=self._solution['sid'])
        self._context = context
    return self._context
def function[_proxy, parameter[self]]: constant[ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: CommandContext for this CommandInstance :rtype: twilio.rest.preview.wireless.command.CommandContext ] if compare[name[self]._context is constant[None]] begin[:] name[self]._context assign[=] call[name[CommandContext], parameter[name[self]._version]] return[name[self]._context]
keyword[def] identifier[_proxy] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_context] keyword[is] keyword[None] : identifier[self] . identifier[_context] = identifier[CommandContext] ( identifier[self] . identifier[_version] , identifier[sid] = identifier[self] . identifier[_solution] [ literal[string] ],) keyword[return] identifier[self] . identifier[_context]
def _proxy(self): """ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: CommandContext for this CommandInstance :rtype: twilio.rest.preview.wireless.command.CommandContext """ if self._context is None: self._context = CommandContext(self._version, sid=self._solution['sid']) # depends on [control=['if'], data=[]] return self._context
def get_digraph(ont, relations=None, writecache=False):
    """
    Creates a basic graph object corresponding to a remote ontology.

    Edges point from object to subject; when `relations` is given, only
    edges whose (legacy-mapped) predicate is in it are kept.
    """
    graph = networkx.MultiDiGraph()

    logging.info("Getting edges (may be cached)")
    for subj, pred, obj in get_edges(ont):
        pred = map_legacy_pred(pred)
        if relations is not None and pred not in relations:
            continue
        graph.add_edge(obj, subj, pred=pred)

    logging.info("Getting labels (may be cached)")
    for node_id, label in fetchall_labels(ont):
        graph.add_node(node_id, label=label)

    return graph
def function[get_digraph, parameter[ont, relations, writecache]]: constant[ Creates a basic graph object corresponding to a remote ontology ] variable[digraph] assign[=] call[name[networkx].MultiDiGraph, parameter[]] call[name[logging].info, parameter[constant[Getting edges (may be cached)]]] for taget[tuple[[<ast.Name object at 0x7da1b083dea0>, <ast.Name object at 0x7da1b083c6d0>, <ast.Name object at 0x7da1b083dc30>]]] in starred[call[name[get_edges], parameter[name[ont]]]] begin[:] variable[p] assign[=] call[name[map_legacy_pred], parameter[name[p]]] if <ast.BoolOp object at 0x7da1b083dff0> begin[:] call[name[digraph].add_edge, parameter[name[o], name[s]]] call[name[logging].info, parameter[constant[Getting labels (may be cached)]]] for taget[tuple[[<ast.Name object at 0x7da1b083f190>, <ast.Name object at 0x7da1b083e020>]]] in starred[call[name[fetchall_labels], parameter[name[ont]]]] begin[:] call[name[digraph].add_node, parameter[name[n]]] return[name[digraph]]
keyword[def] identifier[get_digraph] ( identifier[ont] , identifier[relations] = keyword[None] , identifier[writecache] = keyword[False] ): literal[string] identifier[digraph] = identifier[networkx] . identifier[MultiDiGraph] () identifier[logging] . identifier[info] ( literal[string] ) keyword[for] ( identifier[s] , identifier[p] , identifier[o] ) keyword[in] identifier[get_edges] ( identifier[ont] ): identifier[p] = identifier[map_legacy_pred] ( identifier[p] ) keyword[if] identifier[relations] keyword[is] keyword[None] keyword[or] identifier[p] keyword[in] identifier[relations] : identifier[digraph] . identifier[add_edge] ( identifier[o] , identifier[s] , identifier[pred] = identifier[p] ) identifier[logging] . identifier[info] ( literal[string] ) keyword[for] ( identifier[n] , identifier[label] ) keyword[in] identifier[fetchall_labels] ( identifier[ont] ): identifier[digraph] . identifier[add_node] ( identifier[n] ,**{ literal[string] : identifier[label] }) keyword[return] identifier[digraph]
def get_digraph(ont, relations=None, writecache=False): """ Creates a basic graph object corresponding to a remote ontology """ digraph = networkx.MultiDiGraph() logging.info('Getting edges (may be cached)') for (s, p, o) in get_edges(ont): p = map_legacy_pred(p) if relations is None or p in relations: digraph.add_edge(o, s, pred=p) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] logging.info('Getting labels (may be cached)') for (n, label) in fetchall_labels(ont): digraph.add_node(n, **{'label': label}) # depends on [control=['for'], data=[]] return digraph
def make_processitem_handlelist_handle_name(handle_name, condition='contains', negate=False, preserve_case=False):
    """
    Create a node for ProcessItem/HandleList/Handle/Name

    :return: A IndicatorItem represented as an Element node
    """
    # All ProcessItem handle-name terms share these fixed fields.
    return ioc_api.make_indicatoritem_node(
        condition,
        'ProcessItem',
        'ProcessItem/HandleList/Handle/Name',
        'string',
        handle_name,
        negate=negate,
        preserve_case=preserve_case,
    )
def function[make_processitem_handlelist_handle_name, parameter[handle_name, condition, negate, preserve_case]]: constant[ Create a node for ProcessItem/HandleList/Handle/Name :return: A IndicatorItem represented as an Element node ] variable[document] assign[=] constant[ProcessItem] variable[search] assign[=] constant[ProcessItem/HandleList/Handle/Name] variable[content_type] assign[=] constant[string] variable[content] assign[=] name[handle_name] variable[ii_node] assign[=] call[name[ioc_api].make_indicatoritem_node, parameter[name[condition], name[document], name[search], name[content_type], name[content]]] return[name[ii_node]]
keyword[def] identifier[make_processitem_handlelist_handle_name] ( identifier[handle_name] , identifier[condition] = literal[string] , identifier[negate] = keyword[False] , identifier[preserve_case] = keyword[False] ): literal[string] identifier[document] = literal[string] identifier[search] = literal[string] identifier[content_type] = literal[string] identifier[content] = identifier[handle_name] identifier[ii_node] = identifier[ioc_api] . identifier[make_indicatoritem_node] ( identifier[condition] , identifier[document] , identifier[search] , identifier[content_type] , identifier[content] , identifier[negate] = identifier[negate] , identifier[preserve_case] = identifier[preserve_case] ) keyword[return] identifier[ii_node]
def make_processitem_handlelist_handle_name(handle_name, condition='contains', negate=False, preserve_case=False): """ Create a node for ProcessItem/HandleList/Handle/Name :return: A IndicatorItem represented as an Element node """ document = 'ProcessItem' search = 'ProcessItem/HandleList/Handle/Name' content_type = 'string' content = handle_name ii_node = ioc_api.make_indicatoritem_node(condition, document, search, content_type, content, negate=negate, preserve_case=preserve_case) return ii_node
def getBWTRange(self, start, end):
    '''
    This function masks the complexity of retrieving a chunk of the BWT from the compressed format
    @param start - the beginning of the range to retrieve
    @param end - the end of the range in normal python notation (bwt[end] is not part of the return)
    @return - a range of integers representing the characters in the bwt from start to end
    '''
    # identify the compressed blocks covering [start, end)
    startBlockIndex = start >> self.bitPower
    # integer floor division is exact; the original routed through
    # float(end), which loses precision for very large indices
    # (assumes self.binSize == 2**self.bitPower, as the shift implies)
    endBlockIndex = end // self.binSize
    trueStart = startBlockIndex * self.binSize

    # decompress the block range, then trim to the requested slice
    return self.decompressBlocks(startBlockIndex, endBlockIndex)[start - trueStart:end - trueStart]
def function[getBWTRange, parameter[self, start, end]]: constant[ This function masks the complexity of retrieving a chunk of the BWT from the compressed format @param start - the beginning of the range to retrieve @param end - the end of the range in normal python notation (bwt[end] is not part of the return) @return - a range of integers representing the characters in the bwt from start to end ] variable[startBlockIndex] assign[=] binary_operation[name[start] <ast.RShift object at 0x7da2590d6a40> name[self].bitPower] variable[endBlockIndex] assign[=] call[name[int], parameter[call[name[math].floor, parameter[binary_operation[call[name[float], parameter[name[end]]] / name[self].binSize]]]]] variable[trueStart] assign[=] binary_operation[name[startBlockIndex] * name[self].binSize] return[call[call[name[self].decompressBlocks, parameter[name[startBlockIndex], name[endBlockIndex]]]][<ast.Slice object at 0x7da18fe934c0>]]
keyword[def] identifier[getBWTRange] ( identifier[self] , identifier[start] , identifier[end] ): literal[string] identifier[startBlockIndex] = identifier[start] >> identifier[self] . identifier[bitPower] identifier[endBlockIndex] = identifier[int] ( identifier[math] . identifier[floor] ( identifier[float] ( identifier[end] )/ identifier[self] . identifier[binSize] )) identifier[trueStart] = identifier[startBlockIndex] * identifier[self] . identifier[binSize] keyword[return] identifier[self] . identifier[decompressBlocks] ( identifier[startBlockIndex] , identifier[endBlockIndex] )[ identifier[start] - identifier[trueStart] : identifier[end] - identifier[trueStart] ]
def getBWTRange(self, start, end): """ This function masks the complexity of retrieving a chunk of the BWT from the compressed format @param start - the beginning of the range to retrieve @param end - the end of the range in normal python notation (bwt[end] is not part of the return) @return - a range of integers representing the characters in the bwt from start to end """ #set aside an array block to fill startBlockIndex = start >> self.bitPower endBlockIndex = int(math.floor(float(end) / self.binSize)) trueStart = startBlockIndex * self.binSize #first we will extract the range of blocks return self.decompressBlocks(startBlockIndex, endBlockIndex)[start - trueStart:end - trueStart]
def bernoulli(x, layer_fn=tf.compat.v1.layers.dense, name=None): """Constructs a trainable `tfd.Bernoulli` distribution. This function creates a Bernoulli distribution parameterized by logits. Using default args, this function is mathematically equivalent to: ```none Y = Bernoulli(logits=matmul(W, x) + b) where, W in R^[d, n] b in R^d ``` #### Examples This function can be used as a [logistic regression]( https://en.wikipedia.org/wiki/Logistic_regression) loss. ```python # This example fits a logistic regression loss. import tensorflow as tf import tensorflow_probability as tfp # Create fictitious training data. dtype = np.float32 n = 3000 # number of samples x_size = 4 # size of single x def make_training_data(): np.random.seed(142) x = np.random.randn(n, x_size).astype(dtype) w = np.random.randn(x_size).astype(dtype) b = np.random.randn(1).astype(dtype) true_logits = np.tensordot(x, w, axes=[[-1], [-1]]) + b noise = np.random.logistic(size=n).astype(dtype) y = dtype(true_logits + noise > 0.) return y, x y, x = make_training_data() # Build TF graph for fitting Bernoulli maximum likelihood estimator. bernoulli = tfp.trainable_distributions.bernoulli(x) loss = -tf.reduce_mean(bernoulli.log_prob(y)) train_op = tf.train.AdamOptimizer(learning_rate=2.**-5).minimize(loss) mse = tf.reduce_mean(tf.squared_difference(y, bernoulli.mean())) init_op = tf.global_variables_initializer() # Run graph 1000 times. num_steps = 1000 loss_ = np.zeros(num_steps) # Style: `_` to indicate sess.run result. 
mse_ = np.zeros(num_steps) with tf.Session() as sess: sess.run(init_op) for it in xrange(loss_.size): _, loss_[it], mse_[it] = sess.run([train_op, loss, mse]) if it % 200 == 0 or it == loss_.size - 1: print("iteration:{} loss:{} mse:{}".format(it, loss_[it], mse_[it])) # ==> iteration:0 loss:0.635675370693 mse:0.222526371479 # iteration:200 loss:0.440077394247 mse:0.143687799573 # iteration:400 loss:0.440077394247 mse:0.143687844276 # iteration:600 loss:0.440077394247 mse:0.143687844276 # iteration:800 loss:0.440077424049 mse:0.143687844276 # iteration:999 loss:0.440077424049 mse:0.143687844276 ``` Args: x: `Tensor` with floating type. Must have statically defined rank and statically known right-most dimension. layer_fn: Python `callable` which takes input `x` and `int` scalar `d` and returns a transformation of `x` with shape `tf.concat([tf.shape(x)[:-1], [1]], axis=0)`. Default value: `tf.layers.dense`. name: A `name_scope` name for operations created by this function. Default value: `None` (i.e., "bernoulli"). Returns: bernoulli: An instance of `tfd.Bernoulli`. """ with tf.compat.v1.name_scope(name, 'bernoulli', [x]): x = tf.convert_to_tensor(value=x, name='x') logits = tf.squeeze(layer_fn(x, 1), axis=-1) return tfd.Bernoulli(logits=logits)
def function[bernoulli, parameter[x, layer_fn, name]]: constant[Constructs a trainable `tfd.Bernoulli` distribution. This function creates a Bernoulli distribution parameterized by logits. Using default args, this function is mathematically equivalent to: ```none Y = Bernoulli(logits=matmul(W, x) + b) where, W in R^[d, n] b in R^d ``` #### Examples This function can be used as a [logistic regression]( https://en.wikipedia.org/wiki/Logistic_regression) loss. ```python # This example fits a logistic regression loss. import tensorflow as tf import tensorflow_probability as tfp # Create fictitious training data. dtype = np.float32 n = 3000 # number of samples x_size = 4 # size of single x def make_training_data(): np.random.seed(142) x = np.random.randn(n, x_size).astype(dtype) w = np.random.randn(x_size).astype(dtype) b = np.random.randn(1).astype(dtype) true_logits = np.tensordot(x, w, axes=[[-1], [-1]]) + b noise = np.random.logistic(size=n).astype(dtype) y = dtype(true_logits + noise > 0.) return y, x y, x = make_training_data() # Build TF graph for fitting Bernoulli maximum likelihood estimator. bernoulli = tfp.trainable_distributions.bernoulli(x) loss = -tf.reduce_mean(bernoulli.log_prob(y)) train_op = tf.train.AdamOptimizer(learning_rate=2.**-5).minimize(loss) mse = tf.reduce_mean(tf.squared_difference(y, bernoulli.mean())) init_op = tf.global_variables_initializer() # Run graph 1000 times. num_steps = 1000 loss_ = np.zeros(num_steps) # Style: `_` to indicate sess.run result. 
mse_ = np.zeros(num_steps) with tf.Session() as sess: sess.run(init_op) for it in xrange(loss_.size): _, loss_[it], mse_[it] = sess.run([train_op, loss, mse]) if it % 200 == 0 or it == loss_.size - 1: print("iteration:{} loss:{} mse:{}".format(it, loss_[it], mse_[it])) # ==> iteration:0 loss:0.635675370693 mse:0.222526371479 # iteration:200 loss:0.440077394247 mse:0.143687799573 # iteration:400 loss:0.440077394247 mse:0.143687844276 # iteration:600 loss:0.440077394247 mse:0.143687844276 # iteration:800 loss:0.440077424049 mse:0.143687844276 # iteration:999 loss:0.440077424049 mse:0.143687844276 ``` Args: x: `Tensor` with floating type. Must have statically defined rank and statically known right-most dimension. layer_fn: Python `callable` which takes input `x` and `int` scalar `d` and returns a transformation of `x` with shape `tf.concat([tf.shape(x)[:-1], [1]], axis=0)`. Default value: `tf.layers.dense`. name: A `name_scope` name for operations created by this function. Default value: `None` (i.e., "bernoulli"). Returns: bernoulli: An instance of `tfd.Bernoulli`. ] with call[name[tf].compat.v1.name_scope, parameter[name[name], constant[bernoulli], list[[<ast.Name object at 0x7da1b02120b0>]]]] begin[:] variable[x] assign[=] call[name[tf].convert_to_tensor, parameter[]] variable[logits] assign[=] call[name[tf].squeeze, parameter[call[name[layer_fn], parameter[name[x], constant[1]]]]] return[call[name[tfd].Bernoulli, parameter[]]]
keyword[def] identifier[bernoulli] ( identifier[x] , identifier[layer_fn] = identifier[tf] . identifier[compat] . identifier[v1] . identifier[layers] . identifier[dense] , identifier[name] = keyword[None] ): literal[string] keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] ( identifier[name] , literal[string] ,[ identifier[x] ]): identifier[x] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[x] , identifier[name] = literal[string] ) identifier[logits] = identifier[tf] . identifier[squeeze] ( identifier[layer_fn] ( identifier[x] , literal[int] ), identifier[axis] =- literal[int] ) keyword[return] identifier[tfd] . identifier[Bernoulli] ( identifier[logits] = identifier[logits] )
def bernoulli(x, layer_fn=tf.compat.v1.layers.dense, name=None): """Constructs a trainable `tfd.Bernoulli` distribution. This function creates a Bernoulli distribution parameterized by logits. Using default args, this function is mathematically equivalent to: ```none Y = Bernoulli(logits=matmul(W, x) + b) where, W in R^[d, n] b in R^d ``` #### Examples This function can be used as a [logistic regression]( https://en.wikipedia.org/wiki/Logistic_regression) loss. ```python # This example fits a logistic regression loss. import tensorflow as tf import tensorflow_probability as tfp # Create fictitious training data. dtype = np.float32 n = 3000 # number of samples x_size = 4 # size of single x def make_training_data(): np.random.seed(142) x = np.random.randn(n, x_size).astype(dtype) w = np.random.randn(x_size).astype(dtype) b = np.random.randn(1).astype(dtype) true_logits = np.tensordot(x, w, axes=[[-1], [-1]]) + b noise = np.random.logistic(size=n).astype(dtype) y = dtype(true_logits + noise > 0.) return y, x y, x = make_training_data() # Build TF graph for fitting Bernoulli maximum likelihood estimator. bernoulli = tfp.trainable_distributions.bernoulli(x) loss = -tf.reduce_mean(bernoulli.log_prob(y)) train_op = tf.train.AdamOptimizer(learning_rate=2.**-5).minimize(loss) mse = tf.reduce_mean(tf.squared_difference(y, bernoulli.mean())) init_op = tf.global_variables_initializer() # Run graph 1000 times. num_steps = 1000 loss_ = np.zeros(num_steps) # Style: `_` to indicate sess.run result. 
mse_ = np.zeros(num_steps) with tf.Session() as sess: sess.run(init_op) for it in xrange(loss_.size): _, loss_[it], mse_[it] = sess.run([train_op, loss, mse]) if it % 200 == 0 or it == loss_.size - 1: print("iteration:{} loss:{} mse:{}".format(it, loss_[it], mse_[it])) # ==> iteration:0 loss:0.635675370693 mse:0.222526371479 # iteration:200 loss:0.440077394247 mse:0.143687799573 # iteration:400 loss:0.440077394247 mse:0.143687844276 # iteration:600 loss:0.440077394247 mse:0.143687844276 # iteration:800 loss:0.440077424049 mse:0.143687844276 # iteration:999 loss:0.440077424049 mse:0.143687844276 ``` Args: x: `Tensor` with floating type. Must have statically defined rank and statically known right-most dimension. layer_fn: Python `callable` which takes input `x` and `int` scalar `d` and returns a transformation of `x` with shape `tf.concat([tf.shape(x)[:-1], [1]], axis=0)`. Default value: `tf.layers.dense`. name: A `name_scope` name for operations created by this function. Default value: `None` (i.e., "bernoulli"). Returns: bernoulli: An instance of `tfd.Bernoulli`. """ with tf.compat.v1.name_scope(name, 'bernoulli', [x]): x = tf.convert_to_tensor(value=x, name='x') logits = tf.squeeze(layer_fn(x, 1), axis=-1) return tfd.Bernoulli(logits=logits) # depends on [control=['with'], data=[]]
def outgoing_manipulators(self): """**DEPRECATED**: All outgoing SON manipulators. .. versionchanged:: 3.5 Deprecated. .. versionadded:: 2.0 """ warnings.warn("Database.outgoing_manipulators() is deprecated", DeprecationWarning, stacklevel=2) return [manipulator.__class__.__name__ for manipulator in self.__outgoing_manipulators]
def function[outgoing_manipulators, parameter[self]]: constant[**DEPRECATED**: All outgoing SON manipulators. .. versionchanged:: 3.5 Deprecated. .. versionadded:: 2.0 ] call[name[warnings].warn, parameter[constant[Database.outgoing_manipulators() is deprecated], name[DeprecationWarning]]] return[<ast.ListComp object at 0x7da20c6e4430>]
keyword[def] identifier[outgoing_manipulators] ( identifier[self] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] , identifier[DeprecationWarning] , identifier[stacklevel] = literal[int] ) keyword[return] [ identifier[manipulator] . identifier[__class__] . identifier[__name__] keyword[for] identifier[manipulator] keyword[in] identifier[self] . identifier[__outgoing_manipulators] ]
def outgoing_manipulators(self): """**DEPRECATED**: All outgoing SON manipulators. .. versionchanged:: 3.5 Deprecated. .. versionadded:: 2.0 """ warnings.warn('Database.outgoing_manipulators() is deprecated', DeprecationWarning, stacklevel=2) return [manipulator.__class__.__name__ for manipulator in self.__outgoing_manipulators]
def get_tunnel(self, remote_port, remote_host="localhost", local_port=None): """ Creates a tunnel between two hosts. Like ssh -L <LOCAL_PORT>:host:<REMOTE_PORT>. :param remote_port: The remote port to create a tunnel to :type remote_port: int :param remote_host: The remote host to create a tunnel to (default localhost) :type remote_host: str :param local_port: The local port to attach the tunnel to :type local_port: int :return: sshtunnel.SSHTunnelForwarder object """ if local_port: local_bind_address = ('localhost', local_port) else: local_bind_address = ('localhost',) if self.password and self.password.strip(): client = SSHTunnelForwarder(self.remote_host, ssh_port=self.port, ssh_username=self.username, ssh_password=self.password, ssh_pkey=self.key_file, ssh_proxy=self.host_proxy, local_bind_address=local_bind_address, remote_bind_address=(remote_host, remote_port), logger=self.log) else: client = SSHTunnelForwarder(self.remote_host, ssh_port=self.port, ssh_username=self.username, ssh_pkey=self.key_file, ssh_proxy=self.host_proxy, local_bind_address=local_bind_address, remote_bind_address=(remote_host, remote_port), host_pkey_directories=[], logger=self.log) return client
def function[get_tunnel, parameter[self, remote_port, remote_host, local_port]]: constant[ Creates a tunnel between two hosts. Like ssh -L <LOCAL_PORT>:host:<REMOTE_PORT>. :param remote_port: The remote port to create a tunnel to :type remote_port: int :param remote_host: The remote host to create a tunnel to (default localhost) :type remote_host: str :param local_port: The local port to attach the tunnel to :type local_port: int :return: sshtunnel.SSHTunnelForwarder object ] if name[local_port] begin[:] variable[local_bind_address] assign[=] tuple[[<ast.Constant object at 0x7da1b0594790>, <ast.Name object at 0x7da1b0595480>]] if <ast.BoolOp object at 0x7da1b0595510> begin[:] variable[client] assign[=] call[name[SSHTunnelForwarder], parameter[name[self].remote_host]] return[name[client]]
keyword[def] identifier[get_tunnel] ( identifier[self] , identifier[remote_port] , identifier[remote_host] = literal[string] , identifier[local_port] = keyword[None] ): literal[string] keyword[if] identifier[local_port] : identifier[local_bind_address] =( literal[string] , identifier[local_port] ) keyword[else] : identifier[local_bind_address] =( literal[string] ,) keyword[if] identifier[self] . identifier[password] keyword[and] identifier[self] . identifier[password] . identifier[strip] (): identifier[client] = identifier[SSHTunnelForwarder] ( identifier[self] . identifier[remote_host] , identifier[ssh_port] = identifier[self] . identifier[port] , identifier[ssh_username] = identifier[self] . identifier[username] , identifier[ssh_password] = identifier[self] . identifier[password] , identifier[ssh_pkey] = identifier[self] . identifier[key_file] , identifier[ssh_proxy] = identifier[self] . identifier[host_proxy] , identifier[local_bind_address] = identifier[local_bind_address] , identifier[remote_bind_address] =( identifier[remote_host] , identifier[remote_port] ), identifier[logger] = identifier[self] . identifier[log] ) keyword[else] : identifier[client] = identifier[SSHTunnelForwarder] ( identifier[self] . identifier[remote_host] , identifier[ssh_port] = identifier[self] . identifier[port] , identifier[ssh_username] = identifier[self] . identifier[username] , identifier[ssh_pkey] = identifier[self] . identifier[key_file] , identifier[ssh_proxy] = identifier[self] . identifier[host_proxy] , identifier[local_bind_address] = identifier[local_bind_address] , identifier[remote_bind_address] =( identifier[remote_host] , identifier[remote_port] ), identifier[host_pkey_directories] =[], identifier[logger] = identifier[self] . identifier[log] ) keyword[return] identifier[client]
def get_tunnel(self, remote_port, remote_host='localhost', local_port=None): """ Creates a tunnel between two hosts. Like ssh -L <LOCAL_PORT>:host:<REMOTE_PORT>. :param remote_port: The remote port to create a tunnel to :type remote_port: int :param remote_host: The remote host to create a tunnel to (default localhost) :type remote_host: str :param local_port: The local port to attach the tunnel to :type local_port: int :return: sshtunnel.SSHTunnelForwarder object """ if local_port: local_bind_address = ('localhost', local_port) # depends on [control=['if'], data=[]] else: local_bind_address = ('localhost',) if self.password and self.password.strip(): client = SSHTunnelForwarder(self.remote_host, ssh_port=self.port, ssh_username=self.username, ssh_password=self.password, ssh_pkey=self.key_file, ssh_proxy=self.host_proxy, local_bind_address=local_bind_address, remote_bind_address=(remote_host, remote_port), logger=self.log) # depends on [control=['if'], data=[]] else: client = SSHTunnelForwarder(self.remote_host, ssh_port=self.port, ssh_username=self.username, ssh_pkey=self.key_file, ssh_proxy=self.host_proxy, local_bind_address=local_bind_address, remote_bind_address=(remote_host, remote_port), host_pkey_directories=[], logger=self.log) return client
def remove_object(self, obj): """Remove an object from the definition.""" self._objects.remove(obj) self._pairs.difference_update((obj, p) for p in self._properties)
def function[remove_object, parameter[self, obj]]: constant[Remove an object from the definition.] call[name[self]._objects.remove, parameter[name[obj]]] call[name[self]._pairs.difference_update, parameter[<ast.GeneratorExp object at 0x7da20c6e7070>]]
keyword[def] identifier[remove_object] ( identifier[self] , identifier[obj] ): literal[string] identifier[self] . identifier[_objects] . identifier[remove] ( identifier[obj] ) identifier[self] . identifier[_pairs] . identifier[difference_update] (( identifier[obj] , identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[self] . identifier[_properties] )
def remove_object(self, obj): """Remove an object from the definition.""" self._objects.remove(obj) self._pairs.difference_update(((obj, p) for p in self._properties))
def mkdtemp(hint=''): """Create a temporary directory, then clean it up. Use as a context manager: with mkdtemp('-purpose'): ... """ dirname = tempfile.mkdtemp(prefix='check-manifest-', suffix=hint) try: yield dirname finally: rmtree(dirname)
def function[mkdtemp, parameter[hint]]: constant[Create a temporary directory, then clean it up. Use as a context manager: with mkdtemp('-purpose'): ... ] variable[dirname] assign[=] call[name[tempfile].mkdtemp, parameter[]] <ast.Try object at 0x7da1b13076d0>
keyword[def] identifier[mkdtemp] ( identifier[hint] = literal[string] ): literal[string] identifier[dirname] = identifier[tempfile] . identifier[mkdtemp] ( identifier[prefix] = literal[string] , identifier[suffix] = identifier[hint] ) keyword[try] : keyword[yield] identifier[dirname] keyword[finally] : identifier[rmtree] ( identifier[dirname] )
def mkdtemp(hint=''): """Create a temporary directory, then clean it up. Use as a context manager: with mkdtemp('-purpose'): ... """ dirname = tempfile.mkdtemp(prefix='check-manifest-', suffix=hint) try: yield dirname # depends on [control=['try'], data=[]] finally: rmtree(dirname)
def set_state(block, state): """ Sets the user state, generally used for syntax highlighting. :param block: block to modify :param state: new state value. :return: """ if block is None: return user_state = block.userState() if user_state == -1: user_state = 0 higher_part = user_state & 0x7FFF0000 state &= 0x0000FFFF state |= higher_part block.setUserState(state)
def function[set_state, parameter[block, state]]: constant[ Sets the user state, generally used for syntax highlighting. :param block: block to modify :param state: new state value. :return: ] if compare[name[block] is constant[None]] begin[:] return[None] variable[user_state] assign[=] call[name[block].userState, parameter[]] if compare[name[user_state] equal[==] <ast.UnaryOp object at 0x7da2041d9d20>] begin[:] variable[user_state] assign[=] constant[0] variable[higher_part] assign[=] binary_operation[name[user_state] <ast.BitAnd object at 0x7da2590d6b60> constant[2147418112]] <ast.AugAssign object at 0x7da2041db7f0> <ast.AugAssign object at 0x7da2041d8cd0> call[name[block].setUserState, parameter[name[state]]]
keyword[def] identifier[set_state] ( identifier[block] , identifier[state] ): literal[string] keyword[if] identifier[block] keyword[is] keyword[None] : keyword[return] identifier[user_state] = identifier[block] . identifier[userState] () keyword[if] identifier[user_state] ==- literal[int] : identifier[user_state] = literal[int] identifier[higher_part] = identifier[user_state] & literal[int] identifier[state] &= literal[int] identifier[state] |= identifier[higher_part] identifier[block] . identifier[setUserState] ( identifier[state] )
def set_state(block, state): """ Sets the user state, generally used for syntax highlighting. :param block: block to modify :param state: new state value. :return: """ if block is None: return # depends on [control=['if'], data=[]] user_state = block.userState() if user_state == -1: user_state = 0 # depends on [control=['if'], data=['user_state']] higher_part = user_state & 2147418112 state &= 65535 state |= higher_part block.setUserState(state)
def do_shutdown(self, restart): """ Shut down the app gracefully, saving history. """ print("in shutdown function") if self.hist_file: with open(self.hist_file, 'wb') as fid: data = '\n'.join(self.hist_cache[-self.max_hist_cache:]) fid.write(data.encode('utf-8')) if self.mva: self.mva._endsas() self.mva = None if restart: self.Print("Restarting kernel...") self.reload_magics() self.restart_kernel() self.Print("Done!") return {'status': 'ok', 'restart': restart}
def function[do_shutdown, parameter[self, restart]]: constant[ Shut down the app gracefully, saving history. ] call[name[print], parameter[constant[in shutdown function]]] if name[self].hist_file begin[:] with call[name[open], parameter[name[self].hist_file, constant[wb]]] begin[:] variable[data] assign[=] call[constant[ ].join, parameter[call[name[self].hist_cache][<ast.Slice object at 0x7da2044c25c0>]]] call[name[fid].write, parameter[call[name[data].encode, parameter[constant[utf-8]]]]] if name[self].mva begin[:] call[name[self].mva._endsas, parameter[]] name[self].mva assign[=] constant[None] if name[restart] begin[:] call[name[self].Print, parameter[constant[Restarting kernel...]]] call[name[self].reload_magics, parameter[]] call[name[self].restart_kernel, parameter[]] call[name[self].Print, parameter[constant[Done!]]] return[dictionary[[<ast.Constant object at 0x7da2044c07c0>, <ast.Constant object at 0x7da20c7cada0>], [<ast.Constant object at 0x7da20c7c9900>, <ast.Name object at 0x7da20c7cbac0>]]]
keyword[def] identifier[do_shutdown] ( identifier[self] , identifier[restart] ): literal[string] identifier[print] ( literal[string] ) keyword[if] identifier[self] . identifier[hist_file] : keyword[with] identifier[open] ( identifier[self] . identifier[hist_file] , literal[string] ) keyword[as] identifier[fid] : identifier[data] = literal[string] . identifier[join] ( identifier[self] . identifier[hist_cache] [- identifier[self] . identifier[max_hist_cache] :]) identifier[fid] . identifier[write] ( identifier[data] . identifier[encode] ( literal[string] )) keyword[if] identifier[self] . identifier[mva] : identifier[self] . identifier[mva] . identifier[_endsas] () identifier[self] . identifier[mva] = keyword[None] keyword[if] identifier[restart] : identifier[self] . identifier[Print] ( literal[string] ) identifier[self] . identifier[reload_magics] () identifier[self] . identifier[restart_kernel] () identifier[self] . identifier[Print] ( literal[string] ) keyword[return] { literal[string] : literal[string] , literal[string] : identifier[restart] }
def do_shutdown(self, restart): """ Shut down the app gracefully, saving history. """ print('in shutdown function') if self.hist_file: with open(self.hist_file, 'wb') as fid: data = '\n'.join(self.hist_cache[-self.max_hist_cache:]) fid.write(data.encode('utf-8')) # depends on [control=['with'], data=['fid']] # depends on [control=['if'], data=[]] if self.mva: self.mva._endsas() self.mva = None # depends on [control=['if'], data=[]] if restart: self.Print('Restarting kernel...') self.reload_magics() self.restart_kernel() self.Print('Done!') # depends on [control=['if'], data=[]] return {'status': 'ok', 'restart': restart}
def add_camera_make_model(self, make, model): ''' Add camera make and model.''' self._ef['0th'][piexif.ImageIFD.Make] = make self._ef['0th'][piexif.ImageIFD.Model] = model
def function[add_camera_make_model, parameter[self, make, model]]: constant[ Add camera make and model.] call[call[name[self]._ef][constant[0th]]][name[piexif].ImageIFD.Make] assign[=] name[make] call[call[name[self]._ef][constant[0th]]][name[piexif].ImageIFD.Model] assign[=] name[model]
keyword[def] identifier[add_camera_make_model] ( identifier[self] , identifier[make] , identifier[model] ): literal[string] identifier[self] . identifier[_ef] [ literal[string] ][ identifier[piexif] . identifier[ImageIFD] . identifier[Make] ]= identifier[make] identifier[self] . identifier[_ef] [ literal[string] ][ identifier[piexif] . identifier[ImageIFD] . identifier[Model] ]= identifier[model]
def add_camera_make_model(self, make, model): """ Add camera make and model.""" self._ef['0th'][piexif.ImageIFD.Make] = make self._ef['0th'][piexif.ImageIFD.Model] = model
def jump_server(self, msg="Changing servers"): """Connect to a new server, possibly disconnecting from the current. The bot will skip to next server in the server_list each time jump_server is called. """ if self.connection.is_connected(): self.connection.disconnect(msg) next(self.servers) self._connect()
def function[jump_server, parameter[self, msg]]: constant[Connect to a new server, possibly disconnecting from the current. The bot will skip to next server in the server_list each time jump_server is called. ] if call[name[self].connection.is_connected, parameter[]] begin[:] call[name[self].connection.disconnect, parameter[name[msg]]] call[name[next], parameter[name[self].servers]] call[name[self]._connect, parameter[]]
keyword[def] identifier[jump_server] ( identifier[self] , identifier[msg] = literal[string] ): literal[string] keyword[if] identifier[self] . identifier[connection] . identifier[is_connected] (): identifier[self] . identifier[connection] . identifier[disconnect] ( identifier[msg] ) identifier[next] ( identifier[self] . identifier[servers] ) identifier[self] . identifier[_connect] ()
def jump_server(self, msg='Changing servers'): """Connect to a new server, possibly disconnecting from the current. The bot will skip to next server in the server_list each time jump_server is called. """ if self.connection.is_connected(): self.connection.disconnect(msg) # depends on [control=['if'], data=[]] next(self.servers) self._connect()
def _cast_dict(self, data_dict): """Internal method that makes sure any dictionary elements are properly cast into the correct types, instead of just treating everything like a string from the csv file. Args: data_dict: dictionary containing bro log data. Returns: Cleaned Data dict. """ for key, value in data_dict.iteritems(): data_dict[key] = self._cast_value(value) # Fixme: resp_body_data can be very large so removing it for now if 'resp_body_data' in data_dict: del data_dict['resp_body_data'] return data_dict
def function[_cast_dict, parameter[self, data_dict]]: constant[Internal method that makes sure any dictionary elements are properly cast into the correct types, instead of just treating everything like a string from the csv file. Args: data_dict: dictionary containing bro log data. Returns: Cleaned Data dict. ] for taget[tuple[[<ast.Name object at 0x7da18bccb850>, <ast.Name object at 0x7da18bcca9b0>]]] in starred[call[name[data_dict].iteritems, parameter[]]] begin[:] call[name[data_dict]][name[key]] assign[=] call[name[self]._cast_value, parameter[name[value]]] if compare[constant[resp_body_data] in name[data_dict]] begin[:] <ast.Delete object at 0x7da18bcc9db0> return[name[data_dict]]
keyword[def] identifier[_cast_dict] ( identifier[self] , identifier[data_dict] ): literal[string] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[data_dict] . identifier[iteritems] (): identifier[data_dict] [ identifier[key] ]= identifier[self] . identifier[_cast_value] ( identifier[value] ) keyword[if] literal[string] keyword[in] identifier[data_dict] : keyword[del] identifier[data_dict] [ literal[string] ] keyword[return] identifier[data_dict]
def _cast_dict(self, data_dict): """Internal method that makes sure any dictionary elements are properly cast into the correct types, instead of just treating everything like a string from the csv file. Args: data_dict: dictionary containing bro log data. Returns: Cleaned Data dict. """ for (key, value) in data_dict.iteritems(): data_dict[key] = self._cast_value(value) # depends on [control=['for'], data=[]] # Fixme: resp_body_data can be very large so removing it for now if 'resp_body_data' in data_dict: del data_dict['resp_body_data'] # depends on [control=['if'], data=['data_dict']] return data_dict
def compute_params_curve(points, centripetal=False): """ Computes :math:`\\overline{u}_{k}` for curves. Please refer to the Equations 9.4 and 9.5 for chord length parametrization, and Equation 9.6 for centripetal method on The NURBS Book (2nd Edition), pp.364-365. :param points: data points :type points: list, tuple :param centripetal: activates centripetal parametrization method :type centripetal: bool :return: parameter array, :math:`\\overline{u}_{k}` :rtype: list """ if not isinstance(points, (list, tuple)): raise TypeError("Data points must be a list or a tuple") # Length of the points array num_points = len(points) # Calculate chord lengths cds = [0.0 for _ in range(num_points + 1)] cds[-1] = 1.0 for i in range(1, num_points): distance = linalg.point_distance(points[i], points[i - 1]) cds[i] = math.sqrt(distance) if centripetal else distance # Find the total chord length d = sum(cds[1:-1]) # Divide individual chord lengths by the total chord length uk = [0.0 for _ in range(num_points)] for i in range(num_points): uk[i] = sum(cds[0:i + 1]) / d return uk
def function[compute_params_curve, parameter[points, centripetal]]: constant[ Computes :math:`\overline{u}_{k}` for curves. Please refer to the Equations 9.4 and 9.5 for chord length parametrization, and Equation 9.6 for centripetal method on The NURBS Book (2nd Edition), pp.364-365. :param points: data points :type points: list, tuple :param centripetal: activates centripetal parametrization method :type centripetal: bool :return: parameter array, :math:`\overline{u}_{k}` :rtype: list ] if <ast.UnaryOp object at 0x7da1b1645600> begin[:] <ast.Raise object at 0x7da1b1647b20> variable[num_points] assign[=] call[name[len], parameter[name[points]]] variable[cds] assign[=] <ast.ListComp object at 0x7da1b16478b0> call[name[cds]][<ast.UnaryOp object at 0x7da1b1645cf0>] assign[=] constant[1.0] for taget[name[i]] in starred[call[name[range], parameter[constant[1], name[num_points]]]] begin[:] variable[distance] assign[=] call[name[linalg].point_distance, parameter[call[name[points]][name[i]], call[name[points]][binary_operation[name[i] - constant[1]]]]] call[name[cds]][name[i]] assign[=] <ast.IfExp object at 0x7da1b1644ee0> variable[d] assign[=] call[name[sum], parameter[call[name[cds]][<ast.Slice object at 0x7da1b16abca0>]]] variable[uk] assign[=] <ast.ListComp object at 0x7da1b16ab940> for taget[name[i]] in starred[call[name[range], parameter[name[num_points]]]] begin[:] call[name[uk]][name[i]] assign[=] binary_operation[call[name[sum], parameter[call[name[cds]][<ast.Slice object at 0x7da1b16abdf0>]]] / name[d]] return[name[uk]]
keyword[def] identifier[compute_params_curve] ( identifier[points] , identifier[centripetal] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[points] ,( identifier[list] , identifier[tuple] )): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[num_points] = identifier[len] ( identifier[points] ) identifier[cds] =[ literal[int] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_points] + literal[int] )] identifier[cds] [- literal[int] ]= literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[num_points] ): identifier[distance] = identifier[linalg] . identifier[point_distance] ( identifier[points] [ identifier[i] ], identifier[points] [ identifier[i] - literal[int] ]) identifier[cds] [ identifier[i] ]= identifier[math] . identifier[sqrt] ( identifier[distance] ) keyword[if] identifier[centripetal] keyword[else] identifier[distance] identifier[d] = identifier[sum] ( identifier[cds] [ literal[int] :- literal[int] ]) identifier[uk] =[ literal[int] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_points] )] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_points] ): identifier[uk] [ identifier[i] ]= identifier[sum] ( identifier[cds] [ literal[int] : identifier[i] + literal[int] ])/ identifier[d] keyword[return] identifier[uk]
def compute_params_curve(points, centripetal=False): """ Computes :math:`\\overline{u}_{k}` for curves. Please refer to the Equations 9.4 and 9.5 for chord length parametrization, and Equation 9.6 for centripetal method on The NURBS Book (2nd Edition), pp.364-365. :param points: data points :type points: list, tuple :param centripetal: activates centripetal parametrization method :type centripetal: bool :return: parameter array, :math:`\\overline{u}_{k}` :rtype: list """ if not isinstance(points, (list, tuple)): raise TypeError('Data points must be a list or a tuple') # depends on [control=['if'], data=[]] # Length of the points array num_points = len(points) # Calculate chord lengths cds = [0.0 for _ in range(num_points + 1)] cds[-1] = 1.0 for i in range(1, num_points): distance = linalg.point_distance(points[i], points[i - 1]) cds[i] = math.sqrt(distance) if centripetal else distance # depends on [control=['for'], data=['i']] # Find the total chord length d = sum(cds[1:-1]) # Divide individual chord lengths by the total chord length uk = [0.0 for _ in range(num_points)] for i in range(num_points): uk[i] = sum(cds[0:i + 1]) / d # depends on [control=['for'], data=['i']] return uk
def add_vrf(self, auth, attr):
    """ Add a new VRF.

        * `auth` [BaseAuth]
            AAA options.
        * `attr` [vrf_attr]
            The new VRF's attributes.

        Add a VRF based on the values stored in the `attr` dict.

        Returns a dict describing the VRF which was added.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.add_vrf` for full
        understanding.
    """

    # NOTE(review): `unicode` is a Python 2 builtin, so this module
    # presumably targets Python 2 -- confirm before porting.
    self._logger.debug("add_vrf called; attr: %s" % unicode(attr))

    # sanity check - do we have all attributes?
    # 'rt' and 'name' are mandatory for a VRF; _check_attr validates
    # the supplied dict against the full _vrf_attrs list.
    req_attr = [ 'rt', 'name' ]
    self._check_attr(attr, req_attr, _vrf_attrs)

    # expand the attribute dict into an INSERT column/placeholder list
    insert, params = self._sql_expand_insert(attr)
    sql = "INSERT INTO ip_net_vrf " + insert

    self._execute(sql, params)
    vrf_id = self._lastrowid()
    # re-read the stored row so the returned dict reflects any
    # database-side defaults for the new VRF
    vrf = self.list_vrf(auth, { 'id': vrf_id })[0]

    # write to audit table
    audit_params = {
        'vrf_id': vrf['id'],
        'vrf_rt': vrf['rt'],
        'vrf_name': vrf['name'],
        'username': auth.username,
        'authenticated_as': auth.authenticated_as,
        'full_name': auth.full_name,
        'authoritative_source': auth.authoritative_source,
        'description': 'Added VRF %s with attr: %s' % (vrf['rt'], unicode(vrf))
    }

    sql, params = self._sql_expand_insert(audit_params)
    self._execute('INSERT INTO ip_net_log %s' % sql, params)

    return vrf
def function[add_vrf, parameter[self, auth, attr]]: constant[ Add a new VRF. * `auth` [BaseAuth] AAA options. * `attr` [vrf_attr] The news VRF's attributes. Add a VRF based on the values stored in the `attr` dict. Returns a dict describing the VRF which was added. This is the documentation of the internal backend function. It's exposed over XML-RPC, please also see the XML-RPC documentation for :py:func:`nipap.xmlrpc.NipapXMLRPC.add_vrf` for full understanding. ] call[name[self]._logger.debug, parameter[binary_operation[constant[add_vrf called; attr: %s] <ast.Mod object at 0x7da2590d6920> call[name[unicode], parameter[name[attr]]]]]] variable[req_attr] assign[=] list[[<ast.Constant object at 0x7da2044c0100>, <ast.Constant object at 0x7da2044c3940>]] call[name[self]._check_attr, parameter[name[attr], name[req_attr], name[_vrf_attrs]]] <ast.Tuple object at 0x7da2044c01c0> assign[=] call[name[self]._sql_expand_insert, parameter[name[attr]]] variable[sql] assign[=] binary_operation[constant[INSERT INTO ip_net_vrf ] + name[insert]] call[name[self]._execute, parameter[name[sql], name[params]]] variable[vrf_id] assign[=] call[name[self]._lastrowid, parameter[]] variable[vrf] assign[=] call[call[name[self].list_vrf, parameter[name[auth], dictionary[[<ast.Constant object at 0x7da2044c3820>], [<ast.Name object at 0x7da2044c2c80>]]]]][constant[0]] variable[audit_params] assign[=] dictionary[[<ast.Constant object at 0x7da2044c0ee0>, <ast.Constant object at 0x7da2044c06a0>, <ast.Constant object at 0x7da2044c3a90>, <ast.Constant object at 0x7da2044c2ef0>, <ast.Constant object at 0x7da2044c2bf0>, <ast.Constant object at 0x7da2044c0a90>, <ast.Constant object at 0x7da2044c3c40>, <ast.Constant object at 0x7da2044c25f0>], [<ast.Subscript object at 0x7da2044c3c70>, <ast.Subscript object at 0x7da2044c2980>, <ast.Subscript object at 0x7da2044c2b90>, <ast.Attribute object at 0x7da2044c1c00>, <ast.Attribute object at 0x7da2044c3190>, <ast.Attribute object at 0x7da2044c0790>, 
<ast.Attribute object at 0x7da2044c0eb0>, <ast.BinOp object at 0x7da2044c1a20>]] <ast.Tuple object at 0x7da20e9b1480> assign[=] call[name[self]._sql_expand_insert, parameter[name[audit_params]]] call[name[self]._execute, parameter[binary_operation[constant[INSERT INTO ip_net_log %s] <ast.Mod object at 0x7da2590d6920> name[sql]], name[params]]] return[name[vrf]]
keyword[def] identifier[add_vrf] ( identifier[self] , identifier[auth] , identifier[attr] ): literal[string] identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] % identifier[unicode] ( identifier[attr] )) identifier[req_attr] =[ literal[string] , literal[string] ] identifier[self] . identifier[_check_attr] ( identifier[attr] , identifier[req_attr] , identifier[_vrf_attrs] ) identifier[insert] , identifier[params] = identifier[self] . identifier[_sql_expand_insert] ( identifier[attr] ) identifier[sql] = literal[string] + identifier[insert] identifier[self] . identifier[_execute] ( identifier[sql] , identifier[params] ) identifier[vrf_id] = identifier[self] . identifier[_lastrowid] () identifier[vrf] = identifier[self] . identifier[list_vrf] ( identifier[auth] ,{ literal[string] : identifier[vrf_id] })[ literal[int] ] identifier[audit_params] ={ literal[string] : identifier[vrf] [ literal[string] ], literal[string] : identifier[vrf] [ literal[string] ], literal[string] : identifier[vrf] [ literal[string] ], literal[string] : identifier[auth] . identifier[username] , literal[string] : identifier[auth] . identifier[authenticated_as] , literal[string] : identifier[auth] . identifier[full_name] , literal[string] : identifier[auth] . identifier[authoritative_source] , literal[string] : literal[string] %( identifier[vrf] [ literal[string] ], identifier[unicode] ( identifier[vrf] )) } identifier[sql] , identifier[params] = identifier[self] . identifier[_sql_expand_insert] ( identifier[audit_params] ) identifier[self] . identifier[_execute] ( literal[string] % identifier[sql] , identifier[params] ) keyword[return] identifier[vrf]
def add_vrf(self, auth, attr): """ Add a new VRF. * `auth` [BaseAuth] AAA options. * `attr` [vrf_attr] The news VRF's attributes. Add a VRF based on the values stored in the `attr` dict. Returns a dict describing the VRF which was added. This is the documentation of the internal backend function. It's exposed over XML-RPC, please also see the XML-RPC documentation for :py:func:`nipap.xmlrpc.NipapXMLRPC.add_vrf` for full understanding. """ self._logger.debug('add_vrf called; attr: %s' % unicode(attr)) # sanity check - do we have all attributes? req_attr = ['rt', 'name'] self._check_attr(attr, req_attr, _vrf_attrs) (insert, params) = self._sql_expand_insert(attr) sql = 'INSERT INTO ip_net_vrf ' + insert self._execute(sql, params) vrf_id = self._lastrowid() vrf = self.list_vrf(auth, {'id': vrf_id})[0] # write to audit table audit_params = {'vrf_id': vrf['id'], 'vrf_rt': vrf['rt'], 'vrf_name': vrf['name'], 'username': auth.username, 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, 'authoritative_source': auth.authoritative_source, 'description': 'Added VRF %s with attr: %s' % (vrf['rt'], unicode(vrf))} (sql, params) = self._sql_expand_insert(audit_params) self._execute('INSERT INTO ip_net_log %s' % sql, params) return vrf
def And(*xs, simplify=True):
    """Expression conjunction (product, AND) operator

    If *simplify* is ``True``, return a simplified expression.
    """
    # Box each operand into an Expression and collect the raw nodes.
    nodes = [Expression.box(operand).node for operand in xs]
    result = exprnode.and_(*nodes)
    if simplify:
        result = result.simplify()
    return _expr(result)
def function[And, parameter[]]: constant[Expression conjunction (product, AND) operator If *simplify* is ``True``, return a simplified expression. ] variable[xs] assign[=] <ast.ListComp object at 0x7da1b0efb3d0> variable[y] assign[=] call[name[exprnode].and_, parameter[<ast.Starred object at 0x7da1b0efb8b0>]] if name[simplify] begin[:] variable[y] assign[=] call[name[y].simplify, parameter[]] return[call[name[_expr], parameter[name[y]]]]
keyword[def] identifier[And] (* identifier[xs] , identifier[simplify] = keyword[True] ): literal[string] identifier[xs] =[ identifier[Expression] . identifier[box] ( identifier[x] ). identifier[node] keyword[for] identifier[x] keyword[in] identifier[xs] ] identifier[y] = identifier[exprnode] . identifier[and_] (* identifier[xs] ) keyword[if] identifier[simplify] : identifier[y] = identifier[y] . identifier[simplify] () keyword[return] identifier[_expr] ( identifier[y] )
def And(*xs, simplify=True): """Expression conjunction (product, AND) operator If *simplify* is ``True``, return a simplified expression. """ xs = [Expression.box(x).node for x in xs] y = exprnode.and_(*xs) if simplify: y = y.simplify() # depends on [control=['if'], data=[]] return _expr(y)
def __destroyLockedView(self):
    """
    Destroys the locked view from this widget.
    """
    view = self._lockedView
    if not view:
        return

    # Close the Qt widget, schedule it for deletion, and drop our
    # reference so a new locked view can be created later.
    view.close()
    view.deleteLater()
    self._lockedView = None
def function[__destroyLockedView, parameter[self]]: constant[ Destroys the locked view from this widget. ] if name[self]._lockedView begin[:] call[name[self]._lockedView.close, parameter[]] call[name[self]._lockedView.deleteLater, parameter[]] name[self]._lockedView assign[=] constant[None]
keyword[def] identifier[__destroyLockedView] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_lockedView] : identifier[self] . identifier[_lockedView] . identifier[close] () identifier[self] . identifier[_lockedView] . identifier[deleteLater] () identifier[self] . identifier[_lockedView] = keyword[None]
def __destroyLockedView(self): """ Destroys the locked view from this widget. """ if self._lockedView: self._lockedView.close() self._lockedView.deleteLater() self._lockedView = None # depends on [control=['if'], data=[]]
def order_book(self, symbol, parameters=None):
    """
    curl "https://api.bitfinex.com/v1/book/btcusd"

    {"bids":[{"price":"561.1101","amount":"0.985","timestamp":"1395557729.0"}],"asks":[{"price":"562.9999","amount":"0.985","timestamp":"1395557711.0"}]}

    The 'bids' and 'asks' arrays will have multiple bid and ask dicts.

    Optional parameters

    limit_bids (int): Optional. Limit the number of bids returned.
    May be 0 in which case the array of bids is empty. Default is 50.

    limit_asks (int): Optional. Limit the number of asks returned.
    May be 0 in which case the array of asks is empty. Default is 50.

    eg.
    curl "https://api.bitfinex.com/v1/book/btcusd?limit_bids=1&limit_asks=0"

    {"bids":[{"price":"561.1101","amount":"0.985","timestamp":"1395557729.0"}],"asks":[]}

    :param symbol: currency pair symbol, e.g. 'btcusd'
    :param parameters: optional dict of query parameters
        (limit_bids / limit_asks, see above)
    :returns: dict with 'bids' and 'asks' lists; every value in each
        entry (price, amount, timestamp) converted from str to float
    """
    data = self._get(self.url_for(PATH_ORDERBOOK, path_arg=symbol,
                                  parameters=parameters))

    # The API returns all numbers as strings; convert every field of
    # every bid/ask entry to float, in place. Iterate .values()
    # directly rather than looking each key back up.
    for side in data.values():
        for entry in side:
            for key, value in entry.items():
                entry[key] = float(value)

    return data
def function[order_book, parameter[self, symbol, parameters]]: constant[ curl "https://api.bitfinex.com/v1/book/btcusd" {"bids":[{"price":"561.1101","amount":"0.985","timestamp":"1395557729.0"}],"asks":[{"price":"562.9999","amount":"0.985","timestamp":"1395557711.0"}]} The 'bids' and 'asks' arrays will have multiple bid and ask dicts. Optional parameters limit_bids (int): Optional. Limit the number of bids returned. May be 0 in which case the array of bids is empty. Default is 50. limit_asks (int): Optional. Limit the number of asks returned. May be 0 in which case the array of asks is empty. Default is 50. eg. curl "https://api.bitfinex.com/v1/book/btcusd?limit_bids=1&limit_asks=0" {"bids":[{"price":"561.1101","amount":"0.985","timestamp":"1395557729.0"}],"asks":[]} ] variable[data] assign[=] call[name[self]._get, parameter[call[name[self].url_for, parameter[name[PATH_ORDERBOOK]]]]] for taget[name[type_]] in starred[call[name[data].keys, parameter[]]] begin[:] for taget[name[list_]] in starred[call[name[data]][name[type_]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b0efac50>, <ast.Name object at 0x7da1b0efae90>]]] in starred[call[name[list_].items, parameter[]]] begin[:] call[name[list_]][name[key]] assign[=] call[name[float], parameter[name[value]]] return[name[data]]
keyword[def] identifier[order_book] ( identifier[self] , identifier[symbol] , identifier[parameters] = keyword[None] ): literal[string] identifier[data] = identifier[self] . identifier[_get] ( identifier[self] . identifier[url_for] ( identifier[PATH_ORDERBOOK] , identifier[path_arg] = identifier[symbol] , identifier[parameters] = identifier[parameters] )) keyword[for] identifier[type_] keyword[in] identifier[data] . identifier[keys] (): keyword[for] identifier[list_] keyword[in] identifier[data] [ identifier[type_] ]: keyword[for] identifier[key] , identifier[value] keyword[in] identifier[list_] . identifier[items] (): identifier[list_] [ identifier[key] ]= identifier[float] ( identifier[value] ) keyword[return] identifier[data]
def order_book(self, symbol, parameters=None): """ curl "https://api.bitfinex.com/v1/book/btcusd" {"bids":[{"price":"561.1101","amount":"0.985","timestamp":"1395557729.0"}],"asks":[{"price":"562.9999","amount":"0.985","timestamp":"1395557711.0"}]} The 'bids' and 'asks' arrays will have multiple bid and ask dicts. Optional parameters limit_bids (int): Optional. Limit the number of bids returned. May be 0 in which case the array of bids is empty. Default is 50. limit_asks (int): Optional. Limit the number of asks returned. May be 0 in which case the array of asks is empty. Default is 50. eg. curl "https://api.bitfinex.com/v1/book/btcusd?limit_bids=1&limit_asks=0" {"bids":[{"price":"561.1101","amount":"0.985","timestamp":"1395557729.0"}],"asks":[]} """ data = self._get(self.url_for(PATH_ORDERBOOK, path_arg=symbol, parameters=parameters)) for type_ in data.keys(): for list_ in data[type_]: for (key, value) in list_.items(): list_[key] = float(value) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['list_']] # depends on [control=['for'], data=['type_']] return data
def requires_user(fn):
    """
    Requires that the calling Subject be *either* authenticated *or* remembered
    via RememberMe services before allowing access.

    This method essentially ensures that subject.identifiers IS NOT None
    """
    @functools.wraps(fn)
    def wrap(*args, **kwargs):
        subject = WebYosai.get_current_subject()

        # Guard clause: a known user (authenticated or remembered) has
        # non-None identifiers; anyone else is rejected.
        if subject.identifiers is not None:
            return fn(*args, **kwargs)

        msg = ("Attempting to perform a user-only operation.  The "
               "current Subject is NOT a user (they haven't been "
               "authenticated or remembered from a previous login).  "
               "ACCESS DENIED.")
        raise WebYosai.get_current_webregistry().raise_unauthorized(msg)
    return wrap
def function[requires_user, parameter[fn]]: constant[ Requires that the calling Subject be *either* authenticated *or* remembered via RememberMe services before allowing access. This method essentially ensures that subject.identifiers IS NOT None ] def function[wrap, parameter[]]: variable[subject] assign[=] call[name[WebYosai].get_current_subject, parameter[]] if compare[name[subject].identifiers is constant[None]] begin[:] variable[msg] assign[=] constant[Attempting to perform a user-only operation. The current Subject is NOT a user (they haven't been authenticated or remembered from a previous login). ACCESS DENIED.] <ast.Raise object at 0x7da20c6e54e0> return[call[name[fn], parameter[<ast.Starred object at 0x7da20c6e79d0>]]] return[name[wrap]]
keyword[def] identifier[requires_user] ( identifier[fn] ): literal[string] @ identifier[functools] . identifier[wraps] ( identifier[fn] ) keyword[def] identifier[wrap] (* identifier[args] ,** identifier[kwargs] ): identifier[subject] = identifier[WebYosai] . identifier[get_current_subject] () keyword[if] identifier[subject] . identifier[identifiers] keyword[is] keyword[None] : identifier[msg] =( literal[string] literal[string] literal[string] literal[string] ) keyword[raise] identifier[WebYosai] . identifier[get_current_webregistry] (). identifier[raise_unauthorized] ( identifier[msg] ) keyword[return] identifier[fn] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[wrap]
def requires_user(fn): """ Requires that the calling Subject be *either* authenticated *or* remembered via RememberMe services before allowing access. This method essentially ensures that subject.identifiers IS NOT None """ @functools.wraps(fn) def wrap(*args, **kwargs): subject = WebYosai.get_current_subject() if subject.identifiers is None: msg = "Attempting to perform a user-only operation. The current Subject is NOT a user (they haven't been authenticated or remembered from a previous login). ACCESS DENIED." raise WebYosai.get_current_webregistry().raise_unauthorized(msg) # depends on [control=['if'], data=[]] return fn(*args, **kwargs) return wrap
def _format_operation_dict(operation, parameters):
    """Formats parameters in operation in the way BigQuery expects.

    The input operation will be a query like ``SELECT %(namedparam)s``
    and the output will be a query like ``SELECT @namedparam``.

    :type operation: str
    :param operation: A Google BigQuery query string.

    :type parameters: Mapping[str, Any]
    :param parameters: Dictionary of parameter values.

    :rtype: str
    :returns: A formatted query string.
    :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError`
        if a parameter used in the operation is not found in the
        ``parameters`` argument.
    """
    # Escape backticks inside parameter names so the generated
    # @`name` reference stays well-formed.
    formatted_params = {
        name: "@`{}`".format(name.replace("`", r"\`"))
        for name in parameters
    }

    try:
        return operation % formatted_params
    except KeyError as exc:
        # Chain the KeyError so the missing parameter name is kept
        # visible in the traceback.
        raise exceptions.ProgrammingError(exc) from exc
def function[_format_operation_dict, parameter[operation, parameters]]: constant[Formats parameters in operation in the way BigQuery expects. The input operation will be a query like ``SELECT %(namedparam)s`` and the output will be a query like ``SELECT @namedparam``. :type operation: str :param operation: A Google BigQuery query string. :type parameters: Mapping[str, Any] :param parameters: Dictionary of parameter values. :rtype: str :returns: A formatted query string. :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError` if a parameter used in the operation is not found in the ``parameters`` argument. ] variable[formatted_params] assign[=] dictionary[[], []] for taget[name[name]] in starred[name[parameters]] begin[:] variable[escaped_name] assign[=] call[name[name].replace, parameter[constant[`], constant[\`]]] call[name[formatted_params]][name[name]] assign[=] call[constant[@`{}`].format, parameter[name[escaped_name]]] <ast.Try object at 0x7da20e9568c0>
keyword[def] identifier[_format_operation_dict] ( identifier[operation] , identifier[parameters] ): literal[string] identifier[formatted_params] ={} keyword[for] identifier[name] keyword[in] identifier[parameters] : identifier[escaped_name] = identifier[name] . identifier[replace] ( literal[string] , literal[string] ) identifier[formatted_params] [ identifier[name] ]= literal[string] . identifier[format] ( identifier[escaped_name] ) keyword[try] : keyword[return] identifier[operation] % identifier[formatted_params] keyword[except] identifier[KeyError] keyword[as] identifier[exc] : keyword[raise] identifier[exceptions] . identifier[ProgrammingError] ( identifier[exc] )
def _format_operation_dict(operation, parameters): """Formats parameters in operation in the way BigQuery expects. The input operation will be a query like ``SELECT %(namedparam)s`` and the output will be a query like ``SELECT @namedparam``. :type operation: str :param operation: A Google BigQuery query string. :type parameters: Mapping[str, Any] :param parameters: Dictionary of parameter values. :rtype: str :returns: A formatted query string. :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError` if a parameter used in the operation is not found in the ``parameters`` argument. """ formatted_params = {} for name in parameters: escaped_name = name.replace('`', '\\`') formatted_params[name] = '@`{}`'.format(escaped_name) # depends on [control=['for'], data=['name']] try: return operation % formatted_params # depends on [control=['try'], data=[]] except KeyError as exc: raise exceptions.ProgrammingError(exc) # depends on [control=['except'], data=['exc']]
def get_dimension(dimension_id, do_accept_dimension_id_none=False, **kwargs):
    """
    Given a dimension id returns all its data.

    :param dimension_id: primary key of the dimension to fetch
    :param do_accept_dimension_id_none: when True, a None dimension_id
        returns the special empty dimension instead of raising
    :raises ResourceNotFoundError: if no dimension has the given id
    """
    if do_accept_dimension_id_none and dimension_id is None:
        # In this special case, the method returns a dimension with id None
        return get_empty_dimension()

    try:
        dimension = db.DBSession.query(Dimension).filter(Dimension.id == dimension_id).one()

        # touch the relationship so the units are lazily loaded before
        # the ORM object is serialized
        dimension.units

        return JSONObject(dimension)
    except NoResultFound:
        # The dimension does not exist
        raise ResourceNotFoundError("Dimension %s not found" % dimension_id)
def function[get_dimension, parameter[dimension_id, do_accept_dimension_id_none]]: constant[ Given a dimension id returns all its data ] if <ast.BoolOp object at 0x7da20e9b28f0> begin[:] return[call[name[get_empty_dimension], parameter[]]] <ast.Try object at 0x7da20e9b1a20>
keyword[def] identifier[get_dimension] ( identifier[dimension_id] , identifier[do_accept_dimension_id_none] = keyword[False] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[do_accept_dimension_id_none] == keyword[True] keyword[and] identifier[dimension_id] keyword[is] keyword[None] : keyword[return] identifier[get_empty_dimension] () keyword[try] : identifier[dimension] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Dimension] ). identifier[filter] ( identifier[Dimension] . identifier[id] == identifier[dimension_id] ). identifier[one] () identifier[dimension] . identifier[units] keyword[return] identifier[JSONObject] ( identifier[dimension] ) keyword[except] identifier[NoResultFound] : keyword[raise] identifier[ResourceNotFoundError] ( literal[string] %( identifier[dimension_id] ))
def get_dimension(dimension_id, do_accept_dimension_id_none=False, **kwargs): """ Given a dimension id returns all its data """ if do_accept_dimension_id_none == True and dimension_id is None: # In this special case, the method returns a dimension with id None return get_empty_dimension() # depends on [control=['if'], data=[]] try: dimension = db.DBSession.query(Dimension).filter(Dimension.id == dimension_id).one() #lazy load units dimension.units return JSONObject(dimension) # depends on [control=['try'], data=[]] except NoResultFound: # The dimension does not exist raise ResourceNotFoundError('Dimension %s not found' % dimension_id) # depends on [control=['except'], data=[]]
def _convert_ddb_list_to_list(conversion_list):
    """Given a dynamodb list, it will return a python list without the dynamodb datatypes

    Args:
        conversion_list (list): a dynamodb list which includes the datatypes

    Returns:
        list: Returns a sanitized list without the dynamodb datatypes
    """
    # Each element is a single-entry dict such as {'S': 'value'}; strip
    # the datatype descriptor key and keep only the raw value.
    return [item[datatype] for item in conversion_list for datatype in item]
def function[_convert_ddb_list_to_list, parameter[conversion_list]]: constant[Given a dynamodb list, it will return a python list without the dynamodb datatypes Args: conversion_list (dict): a dynamodb list which includes the datatypes Returns: list: Returns a sanitized list without the dynamodb datatypes ] variable[ret_list] assign[=] list[[]] for taget[name[v]] in starred[name[conversion_list]] begin[:] for taget[name[v1]] in starred[name[v]] begin[:] call[name[ret_list].append, parameter[call[name[v]][name[v1]]]] return[name[ret_list]]
keyword[def] identifier[_convert_ddb_list_to_list] ( identifier[conversion_list] ): literal[string] identifier[ret_list] =[] keyword[for] identifier[v] keyword[in] identifier[conversion_list] : keyword[for] identifier[v1] keyword[in] identifier[v] : identifier[ret_list] . identifier[append] ( identifier[v] [ identifier[v1] ]) keyword[return] identifier[ret_list]
def _convert_ddb_list_to_list(conversion_list): """Given a dynamodb list, it will return a python list without the dynamodb datatypes Args: conversion_list (dict): a dynamodb list which includes the datatypes Returns: list: Returns a sanitized list without the dynamodb datatypes """ ret_list = [] for v in conversion_list: for v1 in v: ret_list.append(v[v1]) # depends on [control=['for'], data=['v1']] # depends on [control=['for'], data=['v']] return ret_list
def get_gammadot(F, mc, q, e):
    """
    Compute gamma dot from Barack and Cutler (2004)

    :param F: Orbital frequency [Hz]
    :param mc: Chirp mass of binary [Solar Mass]
    :param q: Mass ratio of binary
    :param e: Eccentricity of binary

    :returns: dgamma/dt
    """
    # chirp mass in geometrized units [s]
    mc = mc * SOLAR2S

    # total mass from chirp mass and mass ratio
    m = (((1 + q) ** 2) / q) ** (3 / 5) * mc

    # recurring factors: (2*pi*F*m)^(2/3) and 1 - e^2
    pn_term = (2 * np.pi * F * m) ** (2 / 3)
    one_minus_e2 = 1 - e ** 2

    return 6 * np.pi * F * pn_term / one_minus_e2 * \
        (1 + 0.25 * pn_term / one_minus_e2 * (26 - 15 * e ** 2))
def function[get_gammadot, parameter[F, mc, q, e]]: constant[ Compute gamma dot from Barack and Cutler (2004) :param F: Orbital frequency [Hz] :param mc: Chirp mass of binary [Solar Mass] :param q: Mass ratio of binary :param e: Eccentricity of binary :returns: dgamma/dt ] <ast.AugAssign object at 0x7da2046215d0> variable[m] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[1] + name[q]] ** constant[2]] / name[q]] ** binary_operation[constant[3] / constant[5]]] * name[mc]] variable[dgdt] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[6] * name[np].pi] * name[F]] * binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * name[np].pi] * name[F]] * name[m]] ** binary_operation[constant[2] / constant[3]]]] / binary_operation[constant[1] - binary_operation[name[e] ** constant[2]]]] * binary_operation[constant[1] + binary_operation[binary_operation[binary_operation[constant[0.25] * binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * name[np].pi] * name[F]] * name[m]] ** binary_operation[constant[2] / constant[3]]]] / binary_operation[constant[1] - binary_operation[name[e] ** constant[2]]]] * binary_operation[constant[26] - binary_operation[constant[15] * binary_operation[name[e] ** constant[2]]]]]]] return[name[dgdt]]
keyword[def] identifier[get_gammadot] ( identifier[F] , identifier[mc] , identifier[q] , identifier[e] ): literal[string] identifier[mc] *= identifier[SOLAR2S] identifier[m] =((( literal[int] + identifier[q] )** literal[int] )/ identifier[q] )**( literal[int] / literal[int] )* identifier[mc] identifier[dgdt] = literal[int] * identifier[np] . identifier[pi] * identifier[F] *( literal[int] * identifier[np] . identifier[pi] * identifier[F] * identifier[m] )**( literal[int] / literal[int] )/( literal[int] - identifier[e] ** literal[int] )*( literal[int] + literal[int] *( literal[int] * identifier[np] . identifier[pi] * identifier[F] * identifier[m] )**( literal[int] / literal[int] )/( literal[int] - identifier[e] ** literal[int] )*( literal[int] - literal[int] * identifier[e] ** literal[int] )) keyword[return] identifier[dgdt]
def get_gammadot(F, mc, q, e): """ Compute gamma dot from Barack and Cutler (2004) :param F: Orbital frequency [Hz] :param mc: Chirp mass of binary [Solar Mass] :param q: Mass ratio of binary :param e: Eccentricity of binary :returns: dgamma/dt """ # chirp mass mc *= SOLAR2S #total mass m = ((1 + q) ** 2 / q) ** (3 / 5) * mc dgdt = 6 * np.pi * F * (2 * np.pi * F * m) ** (2 / 3) / (1 - e ** 2) * (1 + 0.25 * (2 * np.pi * F * m) ** (2 / 3) / (1 - e ** 2) * (26 - 15 * e ** 2)) return dgdt
def display_animation(anim, **kwargs):
    """Display the animation with an IPython HTML object"""
    from IPython.display import HTML
    # Render the animation to HTML markup, then hand it to IPython
    # for inline display (e.g. in a notebook).
    markup = anim_to_html(anim, **kwargs)
    return HTML(markup)
def function[display_animation, parameter[anim]]: constant[Display the animation with an IPython HTML object] from relative_module[IPython.display] import module[HTML] return[call[name[HTML], parameter[call[name[anim_to_html], parameter[name[anim]]]]]]
keyword[def] identifier[display_animation] ( identifier[anim] ,** identifier[kwargs] ): literal[string] keyword[from] identifier[IPython] . identifier[display] keyword[import] identifier[HTML] keyword[return] identifier[HTML] ( identifier[anim_to_html] ( identifier[anim] ,** identifier[kwargs] ))
def display_animation(anim, **kwargs): """Display the animation with an IPython HTML object""" from IPython.display import HTML return HTML(anim_to_html(anim, **kwargs))
def _prune_hit(hit, model):
    """
    Decide whether a search-index hit should be pruned.

    Uses the model manager's ``in_search_queryset`` to check whether the
    document still belongs in its index. If it does, returns None (keep
    it); otherwise returns a transient ``model(pk=...)`` instance that
    carries just the document id, which is all a bulk 'delete' action
    needs -- the real row may no longer exist in the database.

    Args:
        hit: dict representing a document as returned from scan_index
            (contains the object id and index).
        model: the Django model class the document was derived from.

    Returns:
        None to keep the document, or a ``model`` instance (id only)
        marking it for deletion.
    """
    doc_id = hit["_id"]
    doc_index = hit["_index"]

    if not model.objects.in_search_queryset(doc_id, index=doc_index):
        logger.debug(
            "%s with id=%s does not exist in the '%s' index queryset and will be pruned.",
            model,
            doc_id,
            doc_index,
        )
        # we don't need the full obj for a delete action, just the id.
        # (the object itself may not even exist.)
        return model(pk=doc_id)

    logger.debug(
        "%s with id=%s exists in the '%s' index queryset.", model, doc_id, doc_index
    )
    return None
def function[_prune_hit, parameter[hit, model]]: constant[ Check whether a document should be pruned. This method uses the SearchDocumentManagerMixin.in_search_queryset method to determine whether a 'hit' (search document) should be pruned from an index, and if so it returns the hit as a Django object(id=hit_id). Args: hit: dict object the represents a document as returned from the scan_index function. (Contains object id and index.) model: the Django model (not object) from which the document was derived. Used to get the correct model manager and bulk action. Returns: an object of type model, with id=hit_id. NB this is not the object itself, which by definition may not exist in the underlying database, but a temporary object with the document id - which is enough to create a 'delete' action. ] variable[hit_id] assign[=] call[name[hit]][constant[_id]] variable[hit_index] assign[=] call[name[hit]][constant[_index]] if call[name[model].objects.in_search_queryset, parameter[name[hit_id]]] begin[:] call[name[logger].debug, parameter[constant[%s with id=%s exists in the '%s' index queryset.], name[model], name[hit_id], name[hit_index]]] return[constant[None]]
keyword[def] identifier[_prune_hit] ( identifier[hit] , identifier[model] ): literal[string] identifier[hit_id] = identifier[hit] [ literal[string] ] identifier[hit_index] = identifier[hit] [ literal[string] ] keyword[if] identifier[model] . identifier[objects] . identifier[in_search_queryset] ( identifier[hit_id] , identifier[index] = identifier[hit_index] ): identifier[logger] . identifier[debug] ( literal[string] , identifier[model] , identifier[hit_id] , identifier[hit_index] ) keyword[return] keyword[None] keyword[else] : identifier[logger] . identifier[debug] ( literal[string] , identifier[model] , identifier[hit_id] , identifier[hit_index] , ) keyword[return] identifier[model] ( identifier[pk] = identifier[hit_id] )
def _prune_hit(hit, model): """ Check whether a document should be pruned. This method uses the SearchDocumentManagerMixin.in_search_queryset method to determine whether a 'hit' (search document) should be pruned from an index, and if so it returns the hit as a Django object(id=hit_id). Args: hit: dict object the represents a document as returned from the scan_index function. (Contains object id and index.) model: the Django model (not object) from which the document was derived. Used to get the correct model manager and bulk action. Returns: an object of type model, with id=hit_id. NB this is not the object itself, which by definition may not exist in the underlying database, but a temporary object with the document id - which is enough to create a 'delete' action. """ hit_id = hit['_id'] hit_index = hit['_index'] if model.objects.in_search_queryset(hit_id, index=hit_index): logger.debug("%s with id=%s exists in the '%s' index queryset.", model, hit_id, hit_index) return None # depends on [control=['if'], data=[]] else: logger.debug("%s with id=%s does not exist in the '%s' index queryset and will be pruned.", model, hit_id, hit_index) # we don't need the full obj for a delete action, just the id. # (the object itself may not even exist.) return model(pk=hit_id)
def get_uint8(self):
    """Read the next token and interpret it as an 8-bit unsigned integer.

    @raises dns.exception.SyntaxError: if the value is outside 0..255
    @rtype: int
    """
    value = self.get_int()
    # reject anything outside the unsigned 8-bit range
    if not 0 <= value <= 255:
        raise dns.exception.SyntaxError('%d is not an unsigned 8-bit integer' % value)
    return value
def function[get_uint8, parameter[self]]: constant[Read the next token and interpret it as an 8-bit unsigned integer. @raises dns.exception.SyntaxError: @rtype: int ] variable[value] assign[=] call[name[self].get_int, parameter[]] if <ast.BoolOp object at 0x7da1b0ab9540> begin[:] <ast.Raise object at 0x7da1b0aba110> return[name[value]]
keyword[def] identifier[get_uint8] ( identifier[self] ): literal[string] identifier[value] = identifier[self] . identifier[get_int] () keyword[if] identifier[value] < literal[int] keyword[or] identifier[value] > literal[int] : keyword[raise] identifier[dns] . identifier[exception] . identifier[SyntaxError] ( literal[string] % identifier[value] ) keyword[return] identifier[value]
def get_uint8(self): """Read the next token and interpret it as an 8-bit unsigned integer. @raises dns.exception.SyntaxError: @rtype: int """ value = self.get_int() if value < 0 or value > 255: raise dns.exception.SyntaxError('%d is not an unsigned 8-bit integer' % value) # depends on [control=['if'], data=[]] return value
def lock_key(key_name, stash, passphrase, backend):
    """Lock a key to prevent it from being deleted, purged or modified

    `KEY_NAME` is the name of the key to lock
    """
    stash = _get_stash(backend, stash, passphrase)
    try:
        click.echo('Locking key...')
        stash.lock(key_name=key_name)
    except GhostError as error:
        # Exit with the error as the process status message.
        sys.exit(error)
    else:
        click.echo('Key locked successfully')
def function[lock_key, parameter[key_name, stash, passphrase, backend]]: constant[Lock a key to prevent it from being deleted, purged or modified `KEY_NAME` is the name of the key to lock ] variable[stash] assign[=] call[name[_get_stash], parameter[name[backend], name[stash], name[passphrase]]] <ast.Try object at 0x7da20c992b30>
keyword[def] identifier[lock_key] ( identifier[key_name] , identifier[stash] , identifier[passphrase] , identifier[backend] ): literal[string] identifier[stash] = identifier[_get_stash] ( identifier[backend] , identifier[stash] , identifier[passphrase] ) keyword[try] : identifier[click] . identifier[echo] ( literal[string] ) identifier[stash] . identifier[lock] ( identifier[key_name] = identifier[key_name] ) identifier[click] . identifier[echo] ( literal[string] ) keyword[except] identifier[GhostError] keyword[as] identifier[ex] : identifier[sys] . identifier[exit] ( identifier[ex] )
def lock_key(key_name, stash, passphrase, backend): """Lock a key to prevent it from being deleted, purged or modified `KEY_NAME` is the name of the key to lock """ stash = _get_stash(backend, stash, passphrase) try: click.echo('Locking key...') stash.lock(key_name=key_name) click.echo('Key locked successfully') # depends on [control=['try'], data=[]] except GhostError as ex: sys.exit(ex) # depends on [control=['except'], data=['ex']]
def get_loco_name(self):
    """
    Returns the Provider, Product and Engine name.

    Returns None when the simulator reports an empty name.

    :return list
    """
    raw_name = self.dll.GetLocoName().decode()
    # The DLL encodes the three parts with a '.:.' separator.
    return raw_name.split('.:.') if raw_name else None
def function[get_loco_name, parameter[self]]: constant[ Returns the Provider, Product and Engine name. :return list ] variable[ret_str] assign[=] call[call[name[self].dll.GetLocoName, parameter[]].decode, parameter[]] if <ast.UnaryOp object at 0x7da1b255a890> begin[:] return[None] return[call[name[ret_str].split, parameter[constant[.:.]]]]
keyword[def] identifier[get_loco_name] ( identifier[self] ): literal[string] identifier[ret_str] = identifier[self] . identifier[dll] . identifier[GetLocoName] (). identifier[decode] () keyword[if] keyword[not] identifier[ret_str] : keyword[return] keyword[return] identifier[ret_str] . identifier[split] ( literal[string] )
def get_loco_name(self): """ Returns the Provider, Product and Engine name. :return list """ ret_str = self.dll.GetLocoName().decode() if not ret_str: return # depends on [control=['if'], data=[]] return ret_str.split('.:.')
def add_log_file(path):
    """Add log file.

    Args:
        path (:obj:`str`): Path to the log file.
    """
    # Rotate at ~50 kB, keeping two backup files.
    handler = RotatingFileHandler(path, maxBytes=50000, backupCount=2)
    handler.setFormatter(logging.Formatter(
        fmt='%(asctime)s %(levelname)s %(module)s - %(message)s',
        datefmt="%d-%b-%Y %H:%M:%S"))
    geoparse_logger.addHandler(handler)
def function[add_log_file, parameter[path]]: constant[Add log file. Args: path (:obj:`str`): Path to the log file. ] variable[logfile_handler] assign[=] call[name[RotatingFileHandler], parameter[name[path]]] variable[formatter] assign[=] call[name[logging].Formatter, parameter[]] call[name[logfile_handler].setFormatter, parameter[name[formatter]]] call[name[geoparse_logger].addHandler, parameter[name[logfile_handler]]]
keyword[def] identifier[add_log_file] ( identifier[path] ): literal[string] identifier[logfile_handler] = identifier[RotatingFileHandler] ( identifier[path] , identifier[maxBytes] = literal[int] , identifier[backupCount] = literal[int] ) identifier[formatter] = identifier[logging] . identifier[Formatter] ( identifier[fmt] = literal[string] , identifier[datefmt] = literal[string] ) identifier[logfile_handler] . identifier[setFormatter] ( identifier[formatter] ) identifier[geoparse_logger] . identifier[addHandler] ( identifier[logfile_handler] )
def add_log_file(path): """Add log file. Args: path (:obj:`str`): Path to the log file. """ logfile_handler = RotatingFileHandler(path, maxBytes=50000, backupCount=2) formatter = logging.Formatter(fmt='%(asctime)s %(levelname)s %(module)s - %(message)s', datefmt='%d-%b-%Y %H:%M:%S') logfile_handler.setFormatter(formatter) geoparse_logger.addHandler(logfile_handler)
def get(self, request):
    """
    Used to make get calls to mattermost api

    :param request: API path appended to the base url
    :return: decoded JSON response body
    """
    auth_headers = {"Authorization": "Bearer " + self.token}
    response = requests.get(self.url + request, headers=auth_headers)
    return json.loads(response.text)
def function[get, parameter[self, request]]: constant[ Used to make get calls to mattermost api :param request: :return: ] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b24b3ca0>], [<ast.BinOp object at 0x7da1b24b1330>]] variable[g] assign[=] call[name[requests].get, parameter[binary_operation[name[self].url + name[request]]]] return[call[name[json].loads, parameter[name[g].text]]]
keyword[def] identifier[get] ( identifier[self] , identifier[request] ): literal[string] identifier[headers] ={ literal[string] : literal[string] + identifier[self] . identifier[token] } identifier[g] = identifier[requests] . identifier[get] ( identifier[self] . identifier[url] + identifier[request] , identifier[headers] = identifier[headers] ) keyword[return] identifier[json] . identifier[loads] ( identifier[g] . identifier[text] )
def get(self, request): """ Used to make get calls to mattermost api :param request: :return: """ headers = {'Authorization': 'Bearer ' + self.token} g = requests.get(self.url + request, headers=headers) return json.loads(g.text)
def build_mappings(self):
    """
    Uses CSV files of field names and positions for different filing
    types to load mappings into memory, for use in parsing different
    types of rows.

    Populates ``self.mappings`` as
    ``{record_type: {position: (model_name, field_type)}}`` from
    ``<package root>/mappings/<record_type>.csv``.
    """
    # The mappings directory sits three levels above this module.
    base_dir = os.path.dirname(
        os.path.dirname(
            os.path.dirname(__file__)))
    self.mappings = {}
    for record_type in ('sa', 'sb', 'F8872'):
        csv_path = os.path.join(
            base_dir, 'mappings', '{}.csv'.format(record_type))
        with open(csv_path, 'r') as csvfile:
            self.mappings[record_type] = {
                row['position']: (row['model_name'], row['field_type'])
                for row in csv.DictReader(csvfile)
            }
def function[build_mappings, parameter[self]]: constant[ Uses CSV files of field names and positions for different filing types to load mappings into memory, for use in parsing different types of rows. ] name[self].mappings assign[=] dictionary[[], []] for taget[name[record_type]] in starred[tuple[[<ast.Constant object at 0x7da1b1504370>, <ast.Constant object at 0x7da1b15049d0>, <ast.Constant object at 0x7da1b1504550>]]] begin[:] variable[path] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[call[name[os].path.dirname, parameter[call[name[os].path.dirname, parameter[name[__file__]]]]]]], constant[mappings], call[constant[{}.csv].format, parameter[name[record_type]]]]] variable[mapping] assign[=] dictionary[[], []] with call[name[open], parameter[name[path], constant[r]]] begin[:] variable[reader] assign[=] call[name[csv].DictReader, parameter[name[csvfile]]] for taget[name[row]] in starred[name[reader]] begin[:] call[name[mapping]][call[name[row]][constant[position]]] assign[=] tuple[[<ast.Subscript object at 0x7da1b13018a0>, <ast.Subscript object at 0x7da1b13013f0>]] call[name[self].mappings][name[record_type]] assign[=] name[mapping]
keyword[def] identifier[build_mappings] ( identifier[self] ): literal[string] identifier[self] . identifier[mappings] ={} keyword[for] identifier[record_type] keyword[in] ( literal[string] , literal[string] , literal[string] ): identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ))), literal[string] , literal[string] . identifier[format] ( identifier[record_type] )) identifier[mapping] ={} keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[csvfile] : identifier[reader] = identifier[csv] . identifier[DictReader] ( identifier[csvfile] ) keyword[for] identifier[row] keyword[in] identifier[reader] : identifier[mapping] [ identifier[row] [ literal[string] ]]=( identifier[row] [ literal[string] ], identifier[row] [ literal[string] ]) identifier[self] . identifier[mappings] [ identifier[record_type] ]= identifier[mapping]
def build_mappings(self): """ Uses CSV files of field names and positions for different filing types to load mappings into memory, for use in parsing different types of rows. """ self.mappings = {} for record_type in ('sa', 'sb', 'F8872'): path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'mappings', '{}.csv'.format(record_type)) mapping = {} with open(path, 'r') as csvfile: reader = csv.DictReader(csvfile) for row in reader: mapping[row['position']] = (row['model_name'], row['field_type']) # depends on [control=['for'], data=['row']] # depends on [control=['with'], data=['csvfile']] self.mappings[record_type] = mapping # depends on [control=['for'], data=['record_type']]
def EvalGeneric(self, hashers=None):
    """Causes the entire file to be hashed by the given hash functions.

    This sets up a 'finger' for fingerprinting, where the entire file is
    passed through a pre-defined (or user defined) set of hash functions.

    Args:
      hashers: An iterable of hash classes (e.g. out of hashlib) which will
               be instantiated for use. If hashers is not provided, or is
               provided as 'None', the default hashers will get used. To
               invoke this without hashers, provide an empty list.

    Returns:
      Always True, as all files are 'generic' files.
    """
    if hashers is None:
        hashers = Fingerprinter.GENERIC_HASH_CLASSES
    # One range spanning the whole file, hashed by every requested function.
    hash_instances = [hash_cls() for hash_cls in hashers]
    whole_file = [Range(0, self.filelength)]
    self.fingers.append(Finger(hash_instances, whole_file, {'name': 'generic'}))
    return True
def function[EvalGeneric, parameter[self, hashers]]: constant[Causes the entire file to be hashed by the given hash functions. This sets up a 'finger' for fingerprinting, where the entire file is passed through a pre-defined (or user defined) set of hash functions. Args: hashers: An iterable of hash classes (e.g. out of hashlib) which will be instantiated for use. If hashers is not provided, or is provided as 'None', the default hashers will get used. To invoke this without hashers, provide an empty list. Returns: Always True, as all files are 'generic' files. ] if compare[name[hashers] is constant[None]] begin[:] variable[hashers] assign[=] name[Fingerprinter].GENERIC_HASH_CLASSES variable[hashfuncs] assign[=] <ast.ListComp object at 0x7da1b1b0eb90> variable[finger] assign[=] call[name[Finger], parameter[name[hashfuncs], list[[<ast.Call object at 0x7da1b1b49d20>]], dictionary[[<ast.Constant object at 0x7da1b1b4a320>], [<ast.Constant object at 0x7da1b1b4a230>]]]] call[name[self].fingers.append, parameter[name[finger]]] return[constant[True]]
keyword[def] identifier[EvalGeneric] ( identifier[self] , identifier[hashers] = keyword[None] ): literal[string] keyword[if] identifier[hashers] keyword[is] keyword[None] : identifier[hashers] = identifier[Fingerprinter] . identifier[GENERIC_HASH_CLASSES] identifier[hashfuncs] =[ identifier[x] () keyword[for] identifier[x] keyword[in] identifier[hashers] ] identifier[finger] = identifier[Finger] ( identifier[hashfuncs] ,[ identifier[Range] ( literal[int] , identifier[self] . identifier[filelength] )],{ literal[string] : literal[string] }) identifier[self] . identifier[fingers] . identifier[append] ( identifier[finger] ) keyword[return] keyword[True]
def EvalGeneric(self, hashers=None): """Causes the entire file to be hashed by the given hash functions. This sets up a 'finger' for fingerprinting, where the entire file is passed through a pre-defined (or user defined) set of hash functions. Args: hashers: An iterable of hash classes (e.g. out of hashlib) which will be instantiated for use. If hashers is not provided, or is provided as 'None', the default hashers will get used. To invoke this without hashers, provide an empty list. Returns: Always True, as all files are 'generic' files. """ if hashers is None: hashers = Fingerprinter.GENERIC_HASH_CLASSES # depends on [control=['if'], data=['hashers']] hashfuncs = [x() for x in hashers] finger = Finger(hashfuncs, [Range(0, self.filelength)], {'name': 'generic'}) self.fingers.append(finger) return True
def default_subreddits(self, *args, **kwargs):
    """Return a get_content generator for the default subreddits.

    The additional parameters are passed directly into
    :meth:`.get_content`. Note: the `url` parameter cannot be altered.
    """
    # The endpoint is fixed by the configuration; callers cannot override it.
    return self.get_content(self.config['default_subreddits'], *args, **kwargs)
def function[default_subreddits, parameter[self]]: constant[Return a get_content generator for the default subreddits. The additional parameters are passed directly into :meth:`.get_content`. Note: the `url` parameter cannot be altered. ] variable[url] assign[=] call[name[self].config][constant[default_subreddits]] return[call[name[self].get_content, parameter[name[url], <ast.Starred object at 0x7da2054a4610>]]]
keyword[def] identifier[default_subreddits] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[url] = identifier[self] . identifier[config] [ literal[string] ] keyword[return] identifier[self] . identifier[get_content] ( identifier[url] ,* identifier[args] ,** identifier[kwargs] )
def default_subreddits(self, *args, **kwargs): """Return a get_content generator for the default subreddits. The additional parameters are passed directly into :meth:`.get_content`. Note: the `url` parameter cannot be altered. """ url = self.config['default_subreddits'] return self.get_content(url, *args, **kwargs)
def set_operation_mode(self, mode):
    """
    :param mode: a string one of self.modes()
    :return: nothing
    """
    # "off" only toggles power; any other mode powers on and selects it.
    desired_state = (
        {"powered": False} if mode == "off"
        else {"powered": True, "mode": mode})
    response = self.api_interface.set_device_state(
        self, {"desired_state": desired_state})
    self._update_state_from_response(response)
def function[set_operation_mode, parameter[self, mode]]: constant[ :param mode: a string one of self.modes() :return: nothing ] if compare[name[mode] equal[==] constant[off]] begin[:] variable[desired_state] assign[=] dictionary[[<ast.Constant object at 0x7da1b2632590>], [<ast.Constant object at 0x7da1b2630a00>]] variable[response] assign[=] call[name[self].api_interface.set_device_state, parameter[name[self], dictionary[[<ast.Constant object at 0x7da1b2632e30>], [<ast.Name object at 0x7da1b2631870>]]]] call[name[self]._update_state_from_response, parameter[name[response]]]
keyword[def] identifier[set_operation_mode] ( identifier[self] , identifier[mode] ): literal[string] keyword[if] identifier[mode] == literal[string] : identifier[desired_state] ={ literal[string] : keyword[False] } keyword[else] : identifier[desired_state] ={ literal[string] : keyword[True] , literal[string] : identifier[mode] } identifier[response] = identifier[self] . identifier[api_interface] . identifier[set_device_state] ( identifier[self] ,{ literal[string] : identifier[desired_state] }) identifier[self] . identifier[_update_state_from_response] ( identifier[response] )
def set_operation_mode(self, mode): """ :param mode: a string one of self.modes() :return: nothing """ if mode == 'off': desired_state = {'powered': False} # depends on [control=['if'], data=[]] else: desired_state = {'powered': True, 'mode': mode} response = self.api_interface.set_device_state(self, {'desired_state': desired_state}) self._update_state_from_response(response)
def generate(env):
    """Add Builders and construction variables for compaq visual fortran to an Environment."""
    fortran.generate(env)

    # Construction variables for the DEC/Compaq f90 front end; assignment
    # order is irrelevant since every key is distinct.
    settings = {
        'FORTRAN': 'f90',
        'FORTRANCOM': '$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}',
        'FORTRANPPCOM': '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}',
        'SHFORTRANCOM': '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}',
        'SHFORTRANPPCOM': '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}',
        'OBJSUFFIX': '.obj',
        'FORTRANMODDIR': '${TARGET.dir}',
        'FORTRANMODDIRPREFIX': '/module:',
        'FORTRANMODDIRSUFFIX': '',
    }
    for key, value in settings.items():
        env[key] = value
def function[generate, parameter[env]]: constant[Add Builders and construction variables for compaq visual fortran to an Environment.] call[name[fortran].generate, parameter[name[env]]] call[name[env]][constant[FORTRAN]] assign[=] constant[f90] call[name[env]][constant[FORTRANCOM]] assign[=] constant[$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}] call[name[env]][constant[FORTRANPPCOM]] assign[=] constant[$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}] call[name[env]][constant[SHFORTRANCOM]] assign[=] constant[$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}] call[name[env]][constant[SHFORTRANPPCOM]] assign[=] constant[$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}] call[name[env]][constant[OBJSUFFIX]] assign[=] constant[.obj] call[name[env]][constant[FORTRANMODDIR]] assign[=] constant[${TARGET.dir}] call[name[env]][constant[FORTRANMODDIRPREFIX]] assign[=] constant[/module:] call[name[env]][constant[FORTRANMODDIRSUFFIX]] assign[=] constant[]
keyword[def] identifier[generate] ( identifier[env] ): literal[string] identifier[fortran] . identifier[generate] ( identifier[env] ) identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string]
def generate(env): """Add Builders and construction variables for compaq visual fortran to an Environment.""" fortran.generate(env) env['FORTRAN'] = 'f90' env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}' env['OBJSUFFIX'] = '.obj' env['FORTRANMODDIR'] = '${TARGET.dir}' env['FORTRANMODDIRPREFIX'] = '/module:' env['FORTRANMODDIRSUFFIX'] = ''
def _filter_hovered_items(self, items, event):
    """Filters out items that cannot be hovered

    :param list items: Sorted list of items beneath the cursor
    :param Gtk.Event event: Motion event
    :return: filtered items
    :rtype: list
    """
    items = self._filter_library_state(items)
    if not items:
        return items
    top_most_item = items[0]
    second_top_most_item = items[1] if len(items) > 1 else None
    # States/Names take precedence over connections if the connections are on the same hierarchy and if there is
    # a port beneath the cursor
    first_state_v = next(filter(lambda item: isinstance(item, (NameView, StateView)), items))
    # NameViews are resolved to their owning StateView before the hierarchy checks below.
    first_state_v = first_state_v.parent if isinstance(first_state_v, NameView) else first_state_v
    if first_state_v:
        # There can be several connections above the state/name skip those and find the first non-connection-item
        for item in items:
            if isinstance(item, ConnectionView):
                # connection is on the same hierarchy level as the state/name, thus we dismiss it
                if self.view.canvas.get_parent(top_most_item) is not first_state_v:
                    continue
            break
        # Connections are only dismissed, if there is a port beneath the cursor. Search for ports here:
        port_beneath_cursor = False
        state_ports = first_state_v.get_all_ports()
        # Cursor position is transformed into the state's item coordinates for the glue test.
        position = self.view.get_matrix_v2i(first_state_v).transform_point(event.x, event.y)
        i2v_matrix = self.view.get_matrix_i2v(first_state_v)
        for port_v in state_ports:
            item_distance = port_v.port.glue(position)[1]
            view_distance = i2v_matrix.transform_distance(item_distance, 0)[0]
            # A view-space distance of exactly 0 means the cursor sits on the port.
            if view_distance == 0:
                port_beneath_cursor = True
                break
        if port_beneath_cursor:
            # Drop everything above the first non-connection item found above and re-derive the top two.
            items = self.dismiss_upper_items(items, item)
            top_most_item = items[0]
            second_top_most_item = items[1] if len(items) > 1 else None
    # NameView can only be hovered if it or its parent state is selected
    if isinstance(top_most_item, NameView):
        state_v = second_top_most_item  # second item in the list must be the parent state of the NameView
        if state_v not in self.view.selected_items and top_most_item not in self.view.selected_items:
            items = items[1:]
    return items
def function[_filter_hovered_items, parameter[self, items, event]]: constant[Filters out items that cannot be hovered :param list items: Sorted list of items beneath the cursor :param Gtk.Event event: Motion event :return: filtered items :rtype: list ] variable[items] assign[=] call[name[self]._filter_library_state, parameter[name[items]]] if <ast.UnaryOp object at 0x7da1b1b59bd0> begin[:] return[name[items]] variable[top_most_item] assign[=] call[name[items]][constant[0]] variable[second_top_most_item] assign[=] <ast.IfExp object at 0x7da1b1b59780> variable[first_state_v] assign[=] call[name[next], parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da1b1b59e40>, name[items]]]]] variable[first_state_v] assign[=] <ast.IfExp object at 0x7da1b1b5a230> if name[first_state_v] begin[:] for taget[name[item]] in starred[name[items]] begin[:] if call[name[isinstance], parameter[name[item], name[ConnectionView]]] begin[:] if compare[call[name[self].view.canvas.get_parent, parameter[name[top_most_item]]] is_not name[first_state_v]] begin[:] continue break variable[port_beneath_cursor] assign[=] constant[False] variable[state_ports] assign[=] call[name[first_state_v].get_all_ports, parameter[]] variable[position] assign[=] call[call[name[self].view.get_matrix_v2i, parameter[name[first_state_v]]].transform_point, parameter[name[event].x, name[event].y]] variable[i2v_matrix] assign[=] call[name[self].view.get_matrix_i2v, parameter[name[first_state_v]]] for taget[name[port_v]] in starred[name[state_ports]] begin[:] variable[item_distance] assign[=] call[call[name[port_v].port.glue, parameter[name[position]]]][constant[1]] variable[view_distance] assign[=] call[call[name[i2v_matrix].transform_distance, parameter[name[item_distance], constant[0]]]][constant[0]] if compare[name[view_distance] equal[==] constant[0]] begin[:] variable[port_beneath_cursor] assign[=] constant[True] break if name[port_beneath_cursor] begin[:] variable[items] assign[=] 
call[name[self].dismiss_upper_items, parameter[name[items], name[item]]] variable[top_most_item] assign[=] call[name[items]][constant[0]] variable[second_top_most_item] assign[=] <ast.IfExp object at 0x7da1b1aa78b0> if call[name[isinstance], parameter[name[top_most_item], name[NameView]]] begin[:] variable[state_v] assign[=] name[second_top_most_item] if <ast.BoolOp object at 0x7da18bc713c0> begin[:] variable[items] assign[=] call[name[items]][<ast.Slice object at 0x7da18bc71240>] return[name[items]]
keyword[def] identifier[_filter_hovered_items] ( identifier[self] , identifier[items] , identifier[event] ): literal[string] identifier[items] = identifier[self] . identifier[_filter_library_state] ( identifier[items] ) keyword[if] keyword[not] identifier[items] : keyword[return] identifier[items] identifier[top_most_item] = identifier[items] [ literal[int] ] identifier[second_top_most_item] = identifier[items] [ literal[int] ] keyword[if] identifier[len] ( identifier[items] )> literal[int] keyword[else] keyword[None] identifier[first_state_v] = identifier[next] ( identifier[filter] ( keyword[lambda] identifier[item] : identifier[isinstance] ( identifier[item] ,( identifier[NameView] , identifier[StateView] )), identifier[items] )) identifier[first_state_v] = identifier[first_state_v] . identifier[parent] keyword[if] identifier[isinstance] ( identifier[first_state_v] , identifier[NameView] ) keyword[else] identifier[first_state_v] keyword[if] identifier[first_state_v] : keyword[for] identifier[item] keyword[in] identifier[items] : keyword[if] identifier[isinstance] ( identifier[item] , identifier[ConnectionView] ): keyword[if] identifier[self] . identifier[view] . identifier[canvas] . identifier[get_parent] ( identifier[top_most_item] ) keyword[is] keyword[not] identifier[first_state_v] : keyword[continue] keyword[break] identifier[port_beneath_cursor] = keyword[False] identifier[state_ports] = identifier[first_state_v] . identifier[get_all_ports] () identifier[position] = identifier[self] . identifier[view] . identifier[get_matrix_v2i] ( identifier[first_state_v] ). identifier[transform_point] ( identifier[event] . identifier[x] , identifier[event] . identifier[y] ) identifier[i2v_matrix] = identifier[self] . identifier[view] . identifier[get_matrix_i2v] ( identifier[first_state_v] ) keyword[for] identifier[port_v] keyword[in] identifier[state_ports] : identifier[item_distance] = identifier[port_v] . identifier[port] . 
identifier[glue] ( identifier[position] )[ literal[int] ] identifier[view_distance] = identifier[i2v_matrix] . identifier[transform_distance] ( identifier[item_distance] , literal[int] )[ literal[int] ] keyword[if] identifier[view_distance] == literal[int] : identifier[port_beneath_cursor] = keyword[True] keyword[break] keyword[if] identifier[port_beneath_cursor] : identifier[items] = identifier[self] . identifier[dismiss_upper_items] ( identifier[items] , identifier[item] ) identifier[top_most_item] = identifier[items] [ literal[int] ] identifier[second_top_most_item] = identifier[items] [ literal[int] ] keyword[if] identifier[len] ( identifier[items] )> literal[int] keyword[else] keyword[None] keyword[if] identifier[isinstance] ( identifier[top_most_item] , identifier[NameView] ): identifier[state_v] = identifier[second_top_most_item] keyword[if] identifier[state_v] keyword[not] keyword[in] identifier[self] . identifier[view] . identifier[selected_items] keyword[and] identifier[top_most_item] keyword[not] keyword[in] identifier[self] . identifier[view] . identifier[selected_items] : identifier[items] = identifier[items] [ literal[int] :] keyword[return] identifier[items]
def _filter_hovered_items(self, items, event): """Filters out items that cannot be hovered :param list items: Sorted list of items beneath the cursor :param Gtk.Event event: Motion event :return: filtered items :rtype: list """ items = self._filter_library_state(items) if not items: return items # depends on [control=['if'], data=[]] top_most_item = items[0] second_top_most_item = items[1] if len(items) > 1 else None # States/Names take precedence over connections if the connections are on the same hierarchy and if there is # a port beneath the cursor first_state_v = next(filter(lambda item: isinstance(item, (NameView, StateView)), items)) first_state_v = first_state_v.parent if isinstance(first_state_v, NameView) else first_state_v if first_state_v: # There can be several connections above the state/name skip those and find the first non-connection-item for item in items: if isinstance(item, ConnectionView): # connection is on the same hierarchy level as the state/name, thus we dismiss it if self.view.canvas.get_parent(top_most_item) is not first_state_v: continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] break # depends on [control=['for'], data=['item']] # Connections are only dismissed, if there is a port beneath the cursor. 
Search for ports here: port_beneath_cursor = False state_ports = first_state_v.get_all_ports() position = self.view.get_matrix_v2i(first_state_v).transform_point(event.x, event.y) i2v_matrix = self.view.get_matrix_i2v(first_state_v) for port_v in state_ports: item_distance = port_v.port.glue(position)[1] view_distance = i2v_matrix.transform_distance(item_distance, 0)[0] if view_distance == 0: port_beneath_cursor = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['port_v']] if port_beneath_cursor: items = self.dismiss_upper_items(items, item) top_most_item = items[0] second_top_most_item = items[1] if len(items) > 1 else None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # NameView can only be hovered if it or its parent state is selected if isinstance(top_most_item, NameView): state_v = second_top_most_item # second item in the list must be the parent state of the NameView if state_v not in self.view.selected_items and top_most_item not in self.view.selected_items: items = items[1:] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return items
def map_callback(self, obj):
    '''called when an event happens on the slipmap'''
    # Imported lazily so the map GUI module is only loaded when events arrive.
    from MAVProxy.modules.mavproxy_map import mp_slipmap
    # Menu events are delegated wholesale to the menu handler.
    if isinstance(obj, mp_slipmap.SlipMenuEvent):
        self.handle_menu_event(obj)
        return
    # Everything below handles mouse events only.
    if not isinstance(obj, mp_slipmap.SlipMouseEvent):
        return
    # Pending "rally move": left click commits the move at the clicked
    # position; right click (below) cancels it.
    if obj.event.leftIsDown and self.moving_rally is not None:
        self.click_position = obj.latlon
        self.click_time = time.time()
        self.mpstate.functions.process_stdin("rally move %u" % self.moving_rally)
        self.moving_rally = None
        return
    if obj.event.rightIsDown and self.moving_rally is not None:
        print("Cancelled rally move")
        self.moving_rally = None
        return
    # Pending waypoint move: same commit/cancel pattern as rally points.
    if obj.event.leftIsDown and self.moving_wp is not None:
        self.click_position = obj.latlon
        self.click_time = time.time()
        self.mpstate.functions.process_stdin("wp move %u" % self.moving_wp)
        self.moving_wp = None
        return
    # Pending fence-point move; fence indices are 1-based on the command line.
    if obj.event.leftIsDown and self.moving_fencepoint is not None:
        self.click_position = obj.latlon
        self.click_time = time.time()
        self.mpstate.functions.process_stdin("fence move %u" % (self.moving_fencepoint+1))
        self.moving_fencepoint = None
        return
    if obj.event.rightIsDown and self.moving_wp is not None:
        print("Cancelled wp move")
        self.moving_wp = None
        return
    if obj.event.rightIsDown and self.moving_fencepoint is not None:
        print("Cancelled fence move")
        self.moving_fencepoint = None
        return
    elif obj.event.leftIsDown:
        # Plain left click: record the position, with a 0.1s debounce —
        # presumably to suppress duplicate events from the toolkit (confirm).
        if time.time() - self.click_time > 0.1:
            self.click_position = obj.latlon
            self.click_time = time.time()
            self.drawing_update()
            # Keep the mission editor's notion of the click position in sync.
            if self.module('misseditor') is not None:
                self.module('misseditor').update_map_click_position(self.click_position)
    if obj.event.rightIsDown:
        # Right click finishes an in-progress drawing, if any ...
        if self.draw_callback is not None:
            self.drawing_end()
            return
        # ... otherwise it records the click position (same debounce as above).
        if time.time() - self.click_time > 0.1:
            self.click_position = obj.latlon
            self.click_time = time.time()
def function[map_callback, parameter[self, obj]]: constant[called when an event happens on the slipmap] from relative_module[MAVProxy.modules.mavproxy_map] import module[mp_slipmap] if call[name[isinstance], parameter[name[obj], name[mp_slipmap].SlipMenuEvent]] begin[:] call[name[self].handle_menu_event, parameter[name[obj]]] return[None] if <ast.UnaryOp object at 0x7da204345450> begin[:] return[None] if <ast.BoolOp object at 0x7da204347400> begin[:] name[self].click_position assign[=] name[obj].latlon name[self].click_time assign[=] call[name[time].time, parameter[]] call[name[self].mpstate.functions.process_stdin, parameter[binary_operation[constant[rally move %u] <ast.Mod object at 0x7da2590d6920> name[self].moving_rally]]] name[self].moving_rally assign[=] constant[None] return[None] if <ast.BoolOp object at 0x7da1b17df670> begin[:] call[name[print], parameter[constant[Cancelled rally move]]] name[self].moving_rally assign[=] constant[None] return[None] if <ast.BoolOp object at 0x7da1b17df490> begin[:] name[self].click_position assign[=] name[obj].latlon name[self].click_time assign[=] call[name[time].time, parameter[]] call[name[self].mpstate.functions.process_stdin, parameter[binary_operation[constant[wp move %u] <ast.Mod object at 0x7da2590d6920> name[self].moving_wp]]] name[self].moving_wp assign[=] constant[None] return[None] if <ast.BoolOp object at 0x7da1b17df2e0> begin[:] name[self].click_position assign[=] name[obj].latlon name[self].click_time assign[=] call[name[time].time, parameter[]] call[name[self].mpstate.functions.process_stdin, parameter[binary_operation[constant[fence move %u] <ast.Mod object at 0x7da2590d6920> binary_operation[name[self].moving_fencepoint + constant[1]]]]] name[self].moving_fencepoint assign[=] constant[None] return[None] if <ast.BoolOp object at 0x7da1b17dfd00> begin[:] call[name[print], parameter[constant[Cancelled wp move]]] name[self].moving_wp assign[=] constant[None] return[None] if <ast.BoolOp object at 
0x7da1b17ddd80> begin[:] call[name[print], parameter[constant[Cancelled fence move]]] name[self].moving_fencepoint assign[=] constant[None] return[None] if name[obj].event.rightIsDown begin[:] if compare[name[self].draw_callback is_not constant[None]] begin[:] call[name[self].drawing_end, parameter[]] return[None] if compare[binary_operation[call[name[time].time, parameter[]] - name[self].click_time] greater[>] constant[0.1]] begin[:] name[self].click_position assign[=] name[obj].latlon name[self].click_time assign[=] call[name[time].time, parameter[]]
keyword[def] identifier[map_callback] ( identifier[self] , identifier[obj] ): literal[string] keyword[from] identifier[MAVProxy] . identifier[modules] . identifier[mavproxy_map] keyword[import] identifier[mp_slipmap] keyword[if] identifier[isinstance] ( identifier[obj] , identifier[mp_slipmap] . identifier[SlipMenuEvent] ): identifier[self] . identifier[handle_menu_event] ( identifier[obj] ) keyword[return] keyword[if] keyword[not] identifier[isinstance] ( identifier[obj] , identifier[mp_slipmap] . identifier[SlipMouseEvent] ): keyword[return] keyword[if] identifier[obj] . identifier[event] . identifier[leftIsDown] keyword[and] identifier[self] . identifier[moving_rally] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[click_position] = identifier[obj] . identifier[latlon] identifier[self] . identifier[click_time] = identifier[time] . identifier[time] () identifier[self] . identifier[mpstate] . identifier[functions] . identifier[process_stdin] ( literal[string] % identifier[self] . identifier[moving_rally] ) identifier[self] . identifier[moving_rally] = keyword[None] keyword[return] keyword[if] identifier[obj] . identifier[event] . identifier[rightIsDown] keyword[and] identifier[self] . identifier[moving_rally] keyword[is] keyword[not] keyword[None] : identifier[print] ( literal[string] ) identifier[self] . identifier[moving_rally] = keyword[None] keyword[return] keyword[if] identifier[obj] . identifier[event] . identifier[leftIsDown] keyword[and] identifier[self] . identifier[moving_wp] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[click_position] = identifier[obj] . identifier[latlon] identifier[self] . identifier[click_time] = identifier[time] . identifier[time] () identifier[self] . identifier[mpstate] . identifier[functions] . identifier[process_stdin] ( literal[string] % identifier[self] . identifier[moving_wp] ) identifier[self] . 
identifier[moving_wp] = keyword[None] keyword[return] keyword[if] identifier[obj] . identifier[event] . identifier[leftIsDown] keyword[and] identifier[self] . identifier[moving_fencepoint] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[click_position] = identifier[obj] . identifier[latlon] identifier[self] . identifier[click_time] = identifier[time] . identifier[time] () identifier[self] . identifier[mpstate] . identifier[functions] . identifier[process_stdin] ( literal[string] %( identifier[self] . identifier[moving_fencepoint] + literal[int] )) identifier[self] . identifier[moving_fencepoint] = keyword[None] keyword[return] keyword[if] identifier[obj] . identifier[event] . identifier[rightIsDown] keyword[and] identifier[self] . identifier[moving_wp] keyword[is] keyword[not] keyword[None] : identifier[print] ( literal[string] ) identifier[self] . identifier[moving_wp] = keyword[None] keyword[return] keyword[if] identifier[obj] . identifier[event] . identifier[rightIsDown] keyword[and] identifier[self] . identifier[moving_fencepoint] keyword[is] keyword[not] keyword[None] : identifier[print] ( literal[string] ) identifier[self] . identifier[moving_fencepoint] = keyword[None] keyword[return] keyword[elif] identifier[obj] . identifier[event] . identifier[leftIsDown] : keyword[if] identifier[time] . identifier[time] ()- identifier[self] . identifier[click_time] > literal[int] : identifier[self] . identifier[click_position] = identifier[obj] . identifier[latlon] identifier[self] . identifier[click_time] = identifier[time] . identifier[time] () identifier[self] . identifier[drawing_update] () keyword[if] identifier[self] . identifier[module] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[module] ( literal[string] ). identifier[update_map_click_position] ( identifier[self] . identifier[click_position] ) keyword[if] identifier[obj] . identifier[event] . 
identifier[rightIsDown] : keyword[if] identifier[self] . identifier[draw_callback] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[drawing_end] () keyword[return] keyword[if] identifier[time] . identifier[time] ()- identifier[self] . identifier[click_time] > literal[int] : identifier[self] . identifier[click_position] = identifier[obj] . identifier[latlon] identifier[self] . identifier[click_time] = identifier[time] . identifier[time] ()
def map_callback(self, obj): """called when an event happens on the slipmap""" from MAVProxy.modules.mavproxy_map import mp_slipmap if isinstance(obj, mp_slipmap.SlipMenuEvent): self.handle_menu_event(obj) return # depends on [control=['if'], data=[]] if not isinstance(obj, mp_slipmap.SlipMouseEvent): return # depends on [control=['if'], data=[]] if obj.event.leftIsDown and self.moving_rally is not None: self.click_position = obj.latlon self.click_time = time.time() self.mpstate.functions.process_stdin('rally move %u' % self.moving_rally) self.moving_rally = None return # depends on [control=['if'], data=[]] if obj.event.rightIsDown and self.moving_rally is not None: print('Cancelled rally move') self.moving_rally = None return # depends on [control=['if'], data=[]] if obj.event.leftIsDown and self.moving_wp is not None: self.click_position = obj.latlon self.click_time = time.time() self.mpstate.functions.process_stdin('wp move %u' % self.moving_wp) self.moving_wp = None return # depends on [control=['if'], data=[]] if obj.event.leftIsDown and self.moving_fencepoint is not None: self.click_position = obj.latlon self.click_time = time.time() self.mpstate.functions.process_stdin('fence move %u' % (self.moving_fencepoint + 1)) self.moving_fencepoint = None return # depends on [control=['if'], data=[]] if obj.event.rightIsDown and self.moving_wp is not None: print('Cancelled wp move') self.moving_wp = None return # depends on [control=['if'], data=[]] if obj.event.rightIsDown and self.moving_fencepoint is not None: print('Cancelled fence move') self.moving_fencepoint = None return # depends on [control=['if'], data=[]] elif obj.event.leftIsDown: if time.time() - self.click_time > 0.1: self.click_position = obj.latlon self.click_time = time.time() self.drawing_update() # depends on [control=['if'], data=[]] if self.module('misseditor') is not None: self.module('misseditor').update_map_click_position(self.click_position) # depends on [control=['if'], data=[]] # depends 
on [control=['if'], data=[]] if obj.event.rightIsDown: if self.draw_callback is not None: self.drawing_end() return # depends on [control=['if'], data=[]] if time.time() - self.click_time > 0.1: self.click_position = obj.latlon self.click_time = time.time() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def present(name, **kwargs):
    '''
    Creates new user group.
    NOTE: This function accepts all standard user group properties: keyword argument names differ depending
    on your zabbix version, see:
    https://www.zabbix.com/documentation/2.0/manual/appendix/api/usergroup/definitions#user_group

    .. versionadded:: 2016.3.0

    :param name: name of the user group
    :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)
    :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)
    :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)

    .. code-block:: yaml

        make_new_thai_monks_usergroup:
            zabbix_usergroup.present:
                - name: 'Thai monks'
                - gui_access: 1
                - debug_mode: 0
                - users_status: 0
    '''
    # Extract only the connection-related kwargs so they are not passed
    # through to the Zabbix usergroup properties.
    connection_args = {}
    if '_connection_user' in kwargs:
        connection_args['_connection_user'] = kwargs['_connection_user']
    if '_connection_password' in kwargs:
        connection_args['_connection_password'] = kwargs['_connection_password']
    if '_connection_url' in kwargs:
        connection_args['_connection_url'] = kwargs['_connection_url']

    # Standard Salt state return structure.
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}

    # Comment and change messages
    comment_usergroup_created = 'User group {0} created.'.format(name)
    comment_usergroup_updated = 'User group {0} updated.'.format(name)
    comment_usergroup_notcreated = 'Unable to create user group: {0}. '.format(name)
    comment_usergroup_exists = 'User group {0} already exists.'.format(name)
    changes_usergroup_created = {name: {'old': 'User group {0} does not exist.'.format(name),
                                        'new': 'User group {0} created.'.format(name),
                                        }
                                 }

    usergroup_exists = __salt__['zabbix.usergroup_exists'](name, **connection_args)

    if usergroup_exists:
        # Fetch current group state and work out which properties differ
        # from the requested ones; each update_* flag drives one API call.
        usergroup = __salt__['zabbix.usergroup_get'](name, **connection_args)[0]
        usrgrpid = int(usergroup['usrgrpid'])
        update_debug_mode = False
        update_gui_access = False
        update_users_status = False
        update_rights = False

        if 'debug_mode' in kwargs:
            # Compare as ints: the API may return values as strings.
            if int(kwargs['debug_mode']) != int(usergroup['debug_mode']):
                update_debug_mode = True

        if 'gui_access' in kwargs:
            if int(kwargs['gui_access']) != int(usergroup['gui_access']):
                update_gui_access = True

        if 'rights' in kwargs:
            # Older versions of Zabbix do not return the list of rights for the user group, handle this gracefully
            try:
                if usergroup['rights']:
                    # Make sure right values are strings so we can compare them with the current user group rights
                    # NOTE: this mutates the dicts inside kwargs['rights'] in place.
                    for right in kwargs['rights']:
                        for key in right:
                            right[key] = six.text_type(right[key])
                    if sorted(kwargs['rights']) != sorted(usergroup['rights']):
                        update_rights = True
                else:
                    # No rights reported by the API: apply the requested ones.
                    update_rights = True
            except KeyError:
                # As we don't know the current permissions, overwrite them as provided in the state.
                update_rights = True

        if 'users_status' in kwargs:
            if int(kwargs['users_status']) != int(usergroup['users_status']):
                update_users_status = True

    # Dry run, test=true mode
    if __opts__['test']:
        if usergroup_exists:
            if update_debug_mode or update_gui_access or update_rights or update_users_status:
                # result=None signals "changes would be made" in test mode.
                ret['result'] = None
                ret['comment'] = comment_usergroup_updated
            else:
                ret['result'] = True
                ret['comment'] = comment_usergroup_exists
        else:
            ret['result'] = None
            ret['comment'] = comment_usergroup_created
        return ret

    # Accumulates error payloads from individual update calls.
    error = []

    if usergroup_exists:
        if update_debug_mode or update_gui_access or update_rights or update_users_status:
            ret['result'] = True
            ret['comment'] = comment_usergroup_updated

            # Each changed property is pushed with its own usergroup_update
            # call; failures are collected rather than aborting early.
            if update_debug_mode:
                updated_debug = __salt__['zabbix.usergroup_update'](usrgrpid, debug_mode=kwargs['debug_mode'], **connection_args)
                if 'error' in updated_debug:
                    error.append(updated_debug['error'])
                else:
                    ret['changes']['debug_mode'] = kwargs['debug_mode']

            if update_gui_access:
                updated_gui = __salt__['zabbix.usergroup_update'](usrgrpid, gui_access=kwargs['gui_access'], **connection_args)
                if 'error' in updated_gui:
                    error.append(updated_gui['error'])
                else:
                    ret['changes']['gui_access'] = kwargs['gui_access']

            if update_rights:
                updated_rights = __salt__['zabbix.usergroup_update'](usrgrpid, rights=kwargs['rights'], **connection_args)
                if 'error' in updated_rights:
                    error.append(updated_rights['error'])
                else:
                    ret['changes']['rights'] = kwargs['rights']

            if update_users_status:
                updated_status = __salt__['zabbix.usergroup_update'](usrgrpid, users_status=kwargs['users_status'], **connection_args)
                if 'error' in updated_status:
                    error.append(updated_status['error'])
                else:
                    ret['changes']['users_status'] = kwargs['users_status']
        else:
            # Group exists and already matches the requested state.
            ret['result'] = True
            ret['comment'] = comment_usergroup_exists
    else:
        # Group does not exist yet: create it with all requested properties
        # (connection kwargs are passed along inside kwargs here as well).
        usergroup_create = __salt__['zabbix.usergroup_create'](name, **kwargs)

        if 'error' not in usergroup_create:
            ret['result'] = True
            ret['comment'] = comment_usergroup_created
            ret['changes'] = changes_usergroup_created
        else:
            ret['result'] = False
            ret['comment'] = comment_usergroup_notcreated + six.text_type(usergroup_create['error'])

    # error detected
    if error:
        # Any failed update invalidates the whole state run: report the
        # collected errors and drop the partial change log.
        ret['changes'] = {}
        ret['result'] = False
        ret['comment'] = six.text_type(error)

    return ret
def function[present, parameter[name]]: constant[ Creates new user group. NOTE: This function accepts all standard user group properties: keyword argument names differ depending on your zabbix version, see: https://www.zabbix.com/documentation/2.0/manual/appendix/api/usergroup/definitions#user_group .. versionadded:: 2016.3.0 :param name: name of the user group :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring) :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) .. code-block:: yaml make_new_thai_monks_usergroup: zabbix_usergroup.present: - name: 'Thai monks' - gui_access: 1 - debug_mode: 0 - users_status: 0 ] variable[connection_args] assign[=] dictionary[[], []] if compare[constant[_connection_user] in name[kwargs]] begin[:] call[name[connection_args]][constant[_connection_user]] assign[=] call[name[kwargs]][constant[_connection_user]] if compare[constant[_connection_password] in name[kwargs]] begin[:] call[name[connection_args]][constant[_connection_password]] assign[=] call[name[kwargs]][constant[_connection_password]] if compare[constant[_connection_url] in name[kwargs]] begin[:] call[name[connection_args]][constant[_connection_url]] assign[=] call[name[kwargs]][constant[_connection_url]] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da18ede4580>, <ast.Constant object at 0x7da18ede6470>, <ast.Constant object at 0x7da18ede4700>, <ast.Constant object at 0x7da18ede4850>], [<ast.Name object at 0x7da18ede5d50>, <ast.Dict object at 0x7da18ede7760>, <ast.Constant object at 0x7da18ede7370>, <ast.Constant object at 0x7da18ede6920>]] variable[comment_usergroup_created] assign[=] call[constant[User group {0} created.].format, parameter[name[name]]] variable[comment_usergroup_updated] assign[=] call[constant[User group {0} 
updated.].format, parameter[name[name]]] variable[comment_usergroup_notcreated] assign[=] call[constant[Unable to create user group: {0}. ].format, parameter[name[name]]] variable[comment_usergroup_exists] assign[=] call[constant[User group {0} already exists.].format, parameter[name[name]]] variable[changes_usergroup_created] assign[=] dictionary[[<ast.Name object at 0x7da18ede6b30>], [<ast.Dict object at 0x7da18ede41c0>]] variable[usergroup_exists] assign[=] call[call[name[__salt__]][constant[zabbix.usergroup_exists]], parameter[name[name]]] if name[usergroup_exists] begin[:] variable[usergroup] assign[=] call[call[call[name[__salt__]][constant[zabbix.usergroup_get]], parameter[name[name]]]][constant[0]] variable[usrgrpid] assign[=] call[name[int], parameter[call[name[usergroup]][constant[usrgrpid]]]] variable[update_debug_mode] assign[=] constant[False] variable[update_gui_access] assign[=] constant[False] variable[update_users_status] assign[=] constant[False] variable[update_rights] assign[=] constant[False] if compare[constant[debug_mode] in name[kwargs]] begin[:] if compare[call[name[int], parameter[call[name[kwargs]][constant[debug_mode]]]] not_equal[!=] call[name[int], parameter[call[name[usergroup]][constant[debug_mode]]]]] begin[:] variable[update_debug_mode] assign[=] constant[True] if compare[constant[gui_access] in name[kwargs]] begin[:] if compare[call[name[int], parameter[call[name[kwargs]][constant[gui_access]]]] not_equal[!=] call[name[int], parameter[call[name[usergroup]][constant[gui_access]]]]] begin[:] variable[update_gui_access] assign[=] constant[True] if compare[constant[rights] in name[kwargs]] begin[:] <ast.Try object at 0x7da18ede4ee0> if compare[constant[users_status] in name[kwargs]] begin[:] if compare[call[name[int], parameter[call[name[kwargs]][constant[users_status]]]] not_equal[!=] call[name[int], parameter[call[name[usergroup]][constant[users_status]]]]] begin[:] variable[update_users_status] assign[=] constant[True] if 
call[name[__opts__]][constant[test]] begin[:] if name[usergroup_exists] begin[:] if <ast.BoolOp object at 0x7da18ede7970> begin[:] call[name[ret]][constant[result]] assign[=] constant[None] call[name[ret]][constant[comment]] assign[=] name[comment_usergroup_updated] return[name[ret]] variable[error] assign[=] list[[]] if name[usergroup_exists] begin[:] if <ast.BoolOp object at 0x7da1b2007a00> begin[:] call[name[ret]][constant[result]] assign[=] constant[True] call[name[ret]][constant[comment]] assign[=] name[comment_usergroup_updated] if name[update_debug_mode] begin[:] variable[updated_debug] assign[=] call[call[name[__salt__]][constant[zabbix.usergroup_update]], parameter[name[usrgrpid]]] if compare[constant[error] in name[updated_debug]] begin[:] call[name[error].append, parameter[call[name[updated_debug]][constant[error]]]] if name[update_gui_access] begin[:] variable[updated_gui] assign[=] call[call[name[__salt__]][constant[zabbix.usergroup_update]], parameter[name[usrgrpid]]] if compare[constant[error] in name[updated_gui]] begin[:] call[name[error].append, parameter[call[name[updated_gui]][constant[error]]]] if name[update_rights] begin[:] variable[updated_rights] assign[=] call[call[name[__salt__]][constant[zabbix.usergroup_update]], parameter[name[usrgrpid]]] if compare[constant[error] in name[updated_rights]] begin[:] call[name[error].append, parameter[call[name[updated_rights]][constant[error]]]] if name[update_users_status] begin[:] variable[updated_status] assign[=] call[call[name[__salt__]][constant[zabbix.usergroup_update]], parameter[name[usrgrpid]]] if compare[constant[error] in name[updated_status]] begin[:] call[name[error].append, parameter[call[name[updated_status]][constant[error]]]] if name[error] begin[:] call[name[ret]][constant[changes]] assign[=] dictionary[[], []] call[name[ret]][constant[result]] assign[=] constant[False] call[name[ret]][constant[comment]] assign[=] call[name[six].text_type, parameter[name[error]]] return[name[ret]]
keyword[def] identifier[present] ( identifier[name] ,** identifier[kwargs] ): literal[string] identifier[connection_args] ={} keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[connection_args] [ literal[string] ]= identifier[kwargs] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[connection_args] [ literal[string] ]= identifier[kwargs] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[connection_args] [ literal[string] ]= identifier[kwargs] [ literal[string] ] identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : keyword[False] , literal[string] : literal[string] } identifier[comment_usergroup_created] = literal[string] . identifier[format] ( identifier[name] ) identifier[comment_usergroup_updated] = literal[string] . identifier[format] ( identifier[name] ) identifier[comment_usergroup_notcreated] = literal[string] . identifier[format] ( identifier[name] ) identifier[comment_usergroup_exists] = literal[string] . identifier[format] ( identifier[name] ) identifier[changes_usergroup_created] ={ identifier[name] :{ literal[string] : literal[string] . identifier[format] ( identifier[name] ), literal[string] : literal[string] . 
identifier[format] ( identifier[name] ), } } identifier[usergroup_exists] = identifier[__salt__] [ literal[string] ]( identifier[name] ,** identifier[connection_args] ) keyword[if] identifier[usergroup_exists] : identifier[usergroup] = identifier[__salt__] [ literal[string] ]( identifier[name] ,** identifier[connection_args] )[ literal[int] ] identifier[usrgrpid] = identifier[int] ( identifier[usergroup] [ literal[string] ]) identifier[update_debug_mode] = keyword[False] identifier[update_gui_access] = keyword[False] identifier[update_users_status] = keyword[False] identifier[update_rights] = keyword[False] keyword[if] literal[string] keyword[in] identifier[kwargs] : keyword[if] identifier[int] ( identifier[kwargs] [ literal[string] ])!= identifier[int] ( identifier[usergroup] [ literal[string] ]): identifier[update_debug_mode] = keyword[True] keyword[if] literal[string] keyword[in] identifier[kwargs] : keyword[if] identifier[int] ( identifier[kwargs] [ literal[string] ])!= identifier[int] ( identifier[usergroup] [ literal[string] ]): identifier[update_gui_access] = keyword[True] keyword[if] literal[string] keyword[in] identifier[kwargs] : keyword[try] : keyword[if] identifier[usergroup] [ literal[string] ]: keyword[for] identifier[right] keyword[in] identifier[kwargs] [ literal[string] ]: keyword[for] identifier[key] keyword[in] identifier[right] : identifier[right] [ identifier[key] ]= identifier[six] . 
identifier[text_type] ( identifier[right] [ identifier[key] ]) keyword[if] identifier[sorted] ( identifier[kwargs] [ literal[string] ])!= identifier[sorted] ( identifier[usergroup] [ literal[string] ]): identifier[update_rights] = keyword[True] keyword[else] : identifier[update_rights] = keyword[True] keyword[except] identifier[KeyError] : identifier[update_rights] = keyword[True] keyword[if] literal[string] keyword[in] identifier[kwargs] : keyword[if] identifier[int] ( identifier[kwargs] [ literal[string] ])!= identifier[int] ( identifier[usergroup] [ literal[string] ]): identifier[update_users_status] = keyword[True] keyword[if] identifier[__opts__] [ literal[string] ]: keyword[if] identifier[usergroup_exists] : keyword[if] identifier[update_debug_mode] keyword[or] identifier[update_gui_access] keyword[or] identifier[update_rights] keyword[or] identifier[update_users_status] : identifier[ret] [ literal[string] ]= keyword[None] identifier[ret] [ literal[string] ]= identifier[comment_usergroup_updated] keyword[else] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= identifier[comment_usergroup_exists] keyword[else] : identifier[ret] [ literal[string] ]= keyword[None] identifier[ret] [ literal[string] ]= identifier[comment_usergroup_created] keyword[return] identifier[ret] identifier[error] =[] keyword[if] identifier[usergroup_exists] : keyword[if] identifier[update_debug_mode] keyword[or] identifier[update_gui_access] keyword[or] identifier[update_rights] keyword[or] identifier[update_users_status] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= identifier[comment_usergroup_updated] keyword[if] identifier[update_debug_mode] : identifier[updated_debug] = identifier[__salt__] [ literal[string] ]( identifier[usrgrpid] , identifier[debug_mode] = identifier[kwargs] [ literal[string] ], ** identifier[connection_args] ) keyword[if] literal[string] keyword[in] identifier[updated_debug] : 
identifier[error] . identifier[append] ( identifier[updated_debug] [ literal[string] ]) keyword[else] : identifier[ret] [ literal[string] ][ literal[string] ]= identifier[kwargs] [ literal[string] ] keyword[if] identifier[update_gui_access] : identifier[updated_gui] = identifier[__salt__] [ literal[string] ]( identifier[usrgrpid] , identifier[gui_access] = identifier[kwargs] [ literal[string] ], ** identifier[connection_args] ) keyword[if] literal[string] keyword[in] identifier[updated_gui] : identifier[error] . identifier[append] ( identifier[updated_gui] [ literal[string] ]) keyword[else] : identifier[ret] [ literal[string] ][ literal[string] ]= identifier[kwargs] [ literal[string] ] keyword[if] identifier[update_rights] : identifier[updated_rights] = identifier[__salt__] [ literal[string] ]( identifier[usrgrpid] , identifier[rights] = identifier[kwargs] [ literal[string] ], ** identifier[connection_args] ) keyword[if] literal[string] keyword[in] identifier[updated_rights] : identifier[error] . identifier[append] ( identifier[updated_rights] [ literal[string] ]) keyword[else] : identifier[ret] [ literal[string] ][ literal[string] ]= identifier[kwargs] [ literal[string] ] keyword[if] identifier[update_users_status] : identifier[updated_status] = identifier[__salt__] [ literal[string] ]( identifier[usrgrpid] , identifier[users_status] = identifier[kwargs] [ literal[string] ], ** identifier[connection_args] ) keyword[if] literal[string] keyword[in] identifier[updated_status] : identifier[error] . 
identifier[append] ( identifier[updated_status] [ literal[string] ]) keyword[else] : identifier[ret] [ literal[string] ][ literal[string] ]= identifier[kwargs] [ literal[string] ] keyword[else] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= identifier[comment_usergroup_exists] keyword[else] : identifier[usergroup_create] = identifier[__salt__] [ literal[string] ]( identifier[name] ,** identifier[kwargs] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[usergroup_create] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]= identifier[comment_usergroup_created] identifier[ret] [ literal[string] ]= identifier[changes_usergroup_created] keyword[else] : identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= identifier[comment_usergroup_notcreated] + identifier[six] . identifier[text_type] ( identifier[usergroup_create] [ literal[string] ]) keyword[if] identifier[error] : identifier[ret] [ literal[string] ]={} identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= identifier[six] . identifier[text_type] ( identifier[error] ) keyword[return] identifier[ret]
def present(name, **kwargs): """ Creates new user group. NOTE: This function accepts all standard user group properties: keyword argument names differ depending on your zabbix version, see: https://www.zabbix.com/documentation/2.0/manual/appendix/api/usergroup/definitions#user_group .. versionadded:: 2016.3.0 :param name: name of the user group :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring) :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring) :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring) .. code-block:: yaml make_new_thai_monks_usergroup: zabbix_usergroup.present: - name: 'Thai monks' - gui_access: 1 - debug_mode: 0 - users_status: 0 """ connection_args = {} if '_connection_user' in kwargs: connection_args['_connection_user'] = kwargs['_connection_user'] # depends on [control=['if'], data=['kwargs']] if '_connection_password' in kwargs: connection_args['_connection_password'] = kwargs['_connection_password'] # depends on [control=['if'], data=['kwargs']] if '_connection_url' in kwargs: connection_args['_connection_url'] = kwargs['_connection_url'] # depends on [control=['if'], data=['kwargs']] ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''} # Comment and change messages comment_usergroup_created = 'User group {0} created.'.format(name) comment_usergroup_updated = 'User group {0} updated.'.format(name) comment_usergroup_notcreated = 'Unable to create user group: {0}. 
'.format(name) comment_usergroup_exists = 'User group {0} already exists.'.format(name) changes_usergroup_created = {name: {'old': 'User group {0} does not exist.'.format(name), 'new': 'User group {0} created.'.format(name)}} usergroup_exists = __salt__['zabbix.usergroup_exists'](name, **connection_args) if usergroup_exists: usergroup = __salt__['zabbix.usergroup_get'](name, **connection_args)[0] usrgrpid = int(usergroup['usrgrpid']) update_debug_mode = False update_gui_access = False update_users_status = False update_rights = False if 'debug_mode' in kwargs: if int(kwargs['debug_mode']) != int(usergroup['debug_mode']): update_debug_mode = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['kwargs']] if 'gui_access' in kwargs: if int(kwargs['gui_access']) != int(usergroup['gui_access']): update_gui_access = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['kwargs']] if 'rights' in kwargs: # Older versions of Zabbix do not return the list of rights for the user group, handle this gracefully try: if usergroup['rights']: # Make sure right values are strings so we can compare them with the current user group rights for right in kwargs['rights']: for key in right: right[key] = six.text_type(right[key]) # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['right']] if sorted(kwargs['rights']) != sorted(usergroup['rights']): update_rights = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: update_rights = True # depends on [control=['try'], data=[]] except KeyError: # As we don't know the current permissions, overwrite them as provided in the state. 
update_rights = True # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['kwargs']] if 'users_status' in kwargs: if int(kwargs['users_status']) != int(usergroup['users_status']): update_users_status = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['kwargs']] # depends on [control=['if'], data=[]] # Dry run, test=true mode if __opts__['test']: if usergroup_exists: if update_debug_mode or update_gui_access or update_rights or update_users_status: ret['result'] = None ret['comment'] = comment_usergroup_updated # depends on [control=['if'], data=[]] else: ret['result'] = True ret['comment'] = comment_usergroup_exists # depends on [control=['if'], data=[]] else: ret['result'] = None ret['comment'] = comment_usergroup_created return ret # depends on [control=['if'], data=[]] error = [] if usergroup_exists: if update_debug_mode or update_gui_access or update_rights or update_users_status: ret['result'] = True ret['comment'] = comment_usergroup_updated if update_debug_mode: updated_debug = __salt__['zabbix.usergroup_update'](usrgrpid, debug_mode=kwargs['debug_mode'], **connection_args) if 'error' in updated_debug: error.append(updated_debug['error']) # depends on [control=['if'], data=['updated_debug']] else: ret['changes']['debug_mode'] = kwargs['debug_mode'] # depends on [control=['if'], data=[]] if update_gui_access: updated_gui = __salt__['zabbix.usergroup_update'](usrgrpid, gui_access=kwargs['gui_access'], **connection_args) if 'error' in updated_gui: error.append(updated_gui['error']) # depends on [control=['if'], data=['updated_gui']] else: ret['changes']['gui_access'] = kwargs['gui_access'] # depends on [control=['if'], data=[]] if update_rights: updated_rights = __salt__['zabbix.usergroup_update'](usrgrpid, rights=kwargs['rights'], **connection_args) if 'error' in updated_rights: error.append(updated_rights['error']) # depends on [control=['if'], data=['updated_rights']] else: ret['changes']['rights'] = 
kwargs['rights'] # depends on [control=['if'], data=[]] if update_users_status: updated_status = __salt__['zabbix.usergroup_update'](usrgrpid, users_status=kwargs['users_status'], **connection_args) if 'error' in updated_status: error.append(updated_status['error']) # depends on [control=['if'], data=['updated_status']] else: ret['changes']['users_status'] = kwargs['users_status'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: ret['result'] = True ret['comment'] = comment_usergroup_exists # depends on [control=['if'], data=[]] else: usergroup_create = __salt__['zabbix.usergroup_create'](name, **kwargs) if 'error' not in usergroup_create: ret['result'] = True ret['comment'] = comment_usergroup_created ret['changes'] = changes_usergroup_created # depends on [control=['if'], data=[]] else: ret['result'] = False ret['comment'] = comment_usergroup_notcreated + six.text_type(usergroup_create['error']) # error detected if error: ret['changes'] = {} ret['result'] = False ret['comment'] = six.text_type(error) # depends on [control=['if'], data=[]] return ret
def _main(self, client, copy_source, bucket, key, extra_args, callbacks, size): """ :param client: The client to use when calling PutObject :param copy_source: The CopySource parameter to use :param bucket: The name of the bucket to copy to :param key: The name of the key to copy to :param extra_args: A dictionary of any extra arguments that may be used in the upload. :param callbacks: List of callbacks to call after copy :param size: The size of the transfer. This value is passed into the callbacks """ client.copy_object( CopySource=copy_source, Bucket=bucket, Key=key, **extra_args) for callback in callbacks: callback(bytes_transferred=size)
def function[_main, parameter[self, client, copy_source, bucket, key, extra_args, callbacks, size]]: constant[ :param client: The client to use when calling PutObject :param copy_source: The CopySource parameter to use :param bucket: The name of the bucket to copy to :param key: The name of the key to copy to :param extra_args: A dictionary of any extra arguments that may be used in the upload. :param callbacks: List of callbacks to call after copy :param size: The size of the transfer. This value is passed into the callbacks ] call[name[client].copy_object, parameter[]] for taget[name[callback]] in starred[name[callbacks]] begin[:] call[name[callback], parameter[]]
keyword[def] identifier[_main] ( identifier[self] , identifier[client] , identifier[copy_source] , identifier[bucket] , identifier[key] , identifier[extra_args] , identifier[callbacks] , identifier[size] ): literal[string] identifier[client] . identifier[copy_object] ( identifier[CopySource] = identifier[copy_source] , identifier[Bucket] = identifier[bucket] , identifier[Key] = identifier[key] ,** identifier[extra_args] ) keyword[for] identifier[callback] keyword[in] identifier[callbacks] : identifier[callback] ( identifier[bytes_transferred] = identifier[size] )
def _main(self, client, copy_source, bucket, key, extra_args, callbacks, size): """ :param client: The client to use when calling PutObject :param copy_source: The CopySource parameter to use :param bucket: The name of the bucket to copy to :param key: The name of the key to copy to :param extra_args: A dictionary of any extra arguments that may be used in the upload. :param callbacks: List of callbacks to call after copy :param size: The size of the transfer. This value is passed into the callbacks """ client.copy_object(CopySource=copy_source, Bucket=bucket, Key=key, **extra_args) for callback in callbacks: callback(bytes_transferred=size) # depends on [control=['for'], data=['callback']]
def DragDrop(x1: int, y1: int, x2: int, y2: int, moveSpeed: float = 1, waitTime: float = OPERATION_WAIT_TIME) -> None:
    """Simulate a mouse left-button drag from (x1, y1) to (x2, y2).

    The button is pressed at the start point, the cursor is moved to the
    end point, and the button is released there.

    x1: int, start x coordinate.
    y1: int, start y coordinate.
    x2: int, end x coordinate.
    y2: int, end y coordinate.
    moveSpeed: float, 1 normal speed, < 1 move slower, > 1 move faster.
    waitTime: float, seconds to wait after releasing the button.
    """
    settleTime = 0.05  # brief pause after the press and after the move
    PressMouse(x1, y1, settleTime)
    MoveTo(x2, y2, moveSpeed, settleTime)
    ReleaseMouse(waitTime)
def function[DragDrop, parameter[x1, y1, x2, y2, moveSpeed, waitTime]]: constant[ Simulate mouse left button drag from point x1, y1 drop to point x2, y2. x1: int. y1: int. x2: int. y2: int. moveSpeed: float, 1 normal speed, < 1 move slower, > 1 move faster. waitTime: float. ] call[name[PressMouse], parameter[name[x1], name[y1], constant[0.05]]] call[name[MoveTo], parameter[name[x2], name[y2], name[moveSpeed], constant[0.05]]] call[name[ReleaseMouse], parameter[name[waitTime]]]
keyword[def] identifier[DragDrop] ( identifier[x1] : identifier[int] , identifier[y1] : identifier[int] , identifier[x2] : identifier[int] , identifier[y2] : identifier[int] , identifier[moveSpeed] : identifier[float] = literal[int] , identifier[waitTime] : identifier[float] = identifier[OPERATION_WAIT_TIME] )-> keyword[None] : literal[string] identifier[PressMouse] ( identifier[x1] , identifier[y1] , literal[int] ) identifier[MoveTo] ( identifier[x2] , identifier[y2] , identifier[moveSpeed] , literal[int] ) identifier[ReleaseMouse] ( identifier[waitTime] )
def DragDrop(x1: int, y1: int, x2: int, y2: int, moveSpeed: float=1, waitTime: float=OPERATION_WAIT_TIME) -> None: """ Simulate mouse left button drag from point x1, y1 drop to point x2, y2. x1: int. y1: int. x2: int. y2: int. moveSpeed: float, 1 normal speed, < 1 move slower, > 1 move faster. waitTime: float. """ PressMouse(x1, y1, 0.05) MoveTo(x2, y2, moveSpeed, 0.05) ReleaseMouse(waitTime)
def weekofyear(self, first_day_of_week=SATURDAY):
    """weekofyear(first_day_of_week=SATURDAY)

    :param first_day_of_week: One of the :py:data:`khayyam.SATURDAY`,
        :py:data:`khayyam.SUNDAY`, :py:data:`khayyam.MONDAY`,
        :py:data:`khayyam.TUESDAY`, :py:data:`khayyam.WEDNESDAY`,
        :py:data:`khayyam.THURSDAY` or :py:data:`khayyam.FRIDAY`
    :return: The week number of the year.
    :rtype: int
    """
    year_start = self.firstdayofyear()
    elapsed_days = (self - year_start).days
    # Days from New Year's Day until the first occurrence of
    # first_day_of_week, normalised into the range [0, 7).
    shift = (first_day_of_week - year_start.weekday()) % 7
    if elapsed_days < shift:
        # Still inside the partial week before the first full week.
        return 0
    return (elapsed_days - shift) // 7 + 1
def function[weekofyear, parameter[self, first_day_of_week]]: constant[weekofyear(first_day_of_week=SATURDAY) :param first_day_of_week: One of the :py:data:`khayyam.SATURDAY`, :py:data:`khayyam.SUNDAY`, :py:data:`khayyam.MONDAY`, :py:data:`khayyam.TUESDAY`, :py:data:`khayyam.WEDNESDAY`, :py:data:`khayyam.THURSDAY` or :py:data:`khayyam.FRIDAY` :return: The week number of the year. :rtype: int ] variable[first_day_of_year] assign[=] call[name[self].firstdayofyear, parameter[]] variable[days] assign[=] binary_operation[name[self] - name[first_day_of_year]].days variable[offset] assign[=] binary_operation[name[first_day_of_week] - call[name[first_day_of_year].weekday, parameter[]]] if compare[name[offset] less[<] constant[0]] begin[:] <ast.AugAssign object at 0x7da18ede56f0> if compare[name[days] less[<] name[offset]] begin[:] return[constant[0]] return[call[name[int], parameter[binary_operation[binary_operation[binary_operation[name[days] - name[offset]] / constant[7]] + constant[1]]]]]
keyword[def] identifier[weekofyear] ( identifier[self] , identifier[first_day_of_week] = identifier[SATURDAY] ): literal[string] identifier[first_day_of_year] = identifier[self] . identifier[firstdayofyear] () identifier[days] =( identifier[self] - identifier[first_day_of_year] ). identifier[days] identifier[offset] = identifier[first_day_of_week] - identifier[first_day_of_year] . identifier[weekday] () keyword[if] identifier[offset] < literal[int] : identifier[offset] += literal[int] keyword[if] identifier[days] < identifier[offset] : keyword[return] literal[int] keyword[return] identifier[int] (( identifier[days] - identifier[offset] )/ literal[int] + literal[int] )
def weekofyear(self, first_day_of_week=SATURDAY): """weekofyear(first_day_of_week=SATURDAY) :param first_day_of_week: One of the :py:data:`khayyam.SATURDAY`, :py:data:`khayyam.SUNDAY`, :py:data:`khayyam.MONDAY`, :py:data:`khayyam.TUESDAY`, :py:data:`khayyam.WEDNESDAY`, :py:data:`khayyam.THURSDAY` or :py:data:`khayyam.FRIDAY` :return: The week number of the year. :rtype: int """ first_day_of_year = self.firstdayofyear() days = (self - first_day_of_year).days offset = first_day_of_week - first_day_of_year.weekday() if offset < 0: offset += 7 # depends on [control=['if'], data=['offset']] if days < offset: return 0 # depends on [control=['if'], data=[]] return int((days - offset) / 7 + 1)
def create_indexed_document(index_instance, model_items, action):
    '''
    Build the payload handed to the bulk index function: for a delete it
    is a list of dicts naming the primary keys to remove, otherwise it is
    a list of serialized objects that pass the index's filtering
    condition.
    '''
    if action == 'delete':
        return [{'_id': pk, '_op_type': action} for pk in model_items]
    return [
        index_instance.serialize_object(item)
        for item in model_items
        if index_instance.matches_indexing_condition(item)
    ]
def function[create_indexed_document, parameter[index_instance, model_items, action]]: constant[ Creates the document that will be passed into the bulk index function. Either a list of serialized objects to index, or a a dictionary specifying the primary keys of items to be delete. ] variable[data] assign[=] list[[]] if compare[name[action] equal[==] constant[delete]] begin[:] for taget[name[pk]] in starred[name[model_items]] begin[:] call[name[data].append, parameter[dictionary[[<ast.Constant object at 0x7da2044c26b0>, <ast.Constant object at 0x7da18eb57580>], [<ast.Name object at 0x7da18eb55f00>, <ast.Name object at 0x7da18eb550f0>]]]] return[name[data]]
keyword[def] identifier[create_indexed_document] ( identifier[index_instance] , identifier[model_items] , identifier[action] ): literal[string] identifier[data] =[] keyword[if] identifier[action] == literal[string] : keyword[for] identifier[pk] keyword[in] identifier[model_items] : identifier[data] . identifier[append] ({ literal[string] : identifier[pk] , literal[string] : identifier[action] }) keyword[else] : keyword[for] identifier[doc] keyword[in] identifier[model_items] : keyword[if] identifier[index_instance] . identifier[matches_indexing_condition] ( identifier[doc] ): identifier[data] . identifier[append] ( identifier[index_instance] . identifier[serialize_object] ( identifier[doc] )) keyword[return] identifier[data]
def create_indexed_document(index_instance, model_items, action): """ Creates the document that will be passed into the bulk index function. Either a list of serialized objects to index, or a a dictionary specifying the primary keys of items to be delete. """ data = [] if action == 'delete': for pk in model_items: data.append({'_id': pk, '_op_type': action}) # depends on [control=['for'], data=['pk']] # depends on [control=['if'], data=['action']] else: for doc in model_items: if index_instance.matches_indexing_condition(doc): data.append(index_instance.serialize_object(doc)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['doc']] return data
def add_override(self, partname, content_type):
    """
    Append a new ``<Override>`` child element whose attributes are taken
    from *partname* and *content_type*.
    """
    self.append(CT_Override.new(partname, content_type))
def function[add_override, parameter[self, partname, content_type]]: constant[ Add a child ``<Override>`` element with attributes set to parameter values. ] variable[override] assign[=] call[name[CT_Override].new, parameter[name[partname], name[content_type]]] call[name[self].append, parameter[name[override]]]
keyword[def] identifier[add_override] ( identifier[self] , identifier[partname] , identifier[content_type] ): literal[string] identifier[override] = identifier[CT_Override] . identifier[new] ( identifier[partname] , identifier[content_type] ) identifier[self] . identifier[append] ( identifier[override] )
def add_override(self, partname, content_type): """ Add a child ``<Override>`` element with attributes set to parameter values. """ override = CT_Override.new(partname, content_type) self.append(override)
def generate_shared_public_key(my_private_key, their_public_pair, generator):
    """
    Two parties each generate a private key and share their public key
    with the other party over an insecure channel. The shared public key
    can be generated by either side, but not by eavesdroppers. The
    entropy of the shared key can then seed a common symmetric key for
    encryption (which is beyond the scope of pycoin).

    See also <https://en.wikipedia.org/wiki/Key_exchange>

    :param my_private_key: an integer private key
    :param their_public_pair: a pair ``(x, y)`` representing a public key
        for the ``generator``
    :param generator: a :class:`Generator <pycoin.ecdsa.Generator.Generator>`
    :returns: a :class:`Point <pycoin.ecdsa.Point.Point>`, which can be
        used as a shared public key.
    """
    x, y = their_public_pair
    their_point = generator.Point(x, y)
    # Scalar-multiply the counterparty's point by our private key.
    return my_private_key * their_point
def function[generate_shared_public_key, parameter[my_private_key, their_public_pair, generator]]: constant[ Two parties each generate a private key and share their public key with the other party over an insecure channel. The shared public key can be generated by either side, but not by eavesdroppers. You can then use the entropy from the shared public key to created a common symmetric key for encryption. (This is beyond of the scope of pycoin.) See also <https://en.wikipedia.org/wiki/Key_exchange> :param my_private_key: an integer private key :param their_public_pair: a pair ``(x, y)`` representing a public key for the ``generator`` :param generator: a :class:`Generator <pycoin.ecdsa.Generator.Generator>` :returns: a :class:`Point <pycoin.ecdsa.Point.Point>`, which can be used as a shared public key. ] variable[p] assign[=] call[name[generator].Point, parameter[<ast.Starred object at 0x7da1b1d8b580>]] return[binary_operation[name[my_private_key] * name[p]]]
keyword[def] identifier[generate_shared_public_key] ( identifier[my_private_key] , identifier[their_public_pair] , identifier[generator] ): literal[string] identifier[p] = identifier[generator] . identifier[Point] (* identifier[their_public_pair] ) keyword[return] identifier[my_private_key] * identifier[p]
def generate_shared_public_key(my_private_key, their_public_pair, generator): """ Two parties each generate a private key and share their public key with the other party over an insecure channel. The shared public key can be generated by either side, but not by eavesdroppers. You can then use the entropy from the shared public key to created a common symmetric key for encryption. (This is beyond of the scope of pycoin.) See also <https://en.wikipedia.org/wiki/Key_exchange> :param my_private_key: an integer private key :param their_public_pair: a pair ``(x, y)`` representing a public key for the ``generator`` :param generator: a :class:`Generator <pycoin.ecdsa.Generator.Generator>` :returns: a :class:`Point <pycoin.ecdsa.Point.Point>`, which can be used as a shared public key. """ p = generator.Point(*their_public_pair) return my_private_key * p
def ingest(self):
    """ingest the veron catalogue into the catalogues database

    See class docstring for usage.
    """
    self.log.debug('starting the ``get`` method')

    # Parse the raw Veron catalogue into a list of row dictionaries.
    dictList = self._create_dictionary_of_veron()

    tableName = self.dbTableName
    # NOTE: interpolate with an explicit mapping instead of ``% locals()``
    # so the template cannot silently break if a local variable is
    # renamed or an unrelated local is added.
    createStatement = """
CREATE TABLE `%(tableName)s` (
  `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
  `B_V` float DEFAULT NULL,
  `U_B` float DEFAULT NULL,
  `abs_magnitude` float DEFAULT NULL,
  `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
  `decDeg` double DEFAULT NULL,
  `magnitude` float DEFAULT NULL,
  `raDeg` double DEFAULT NULL,
  `class` varchar(10) COLLATE utf8_unicode_ci DEFAULT NULL,
  `name` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL,
  `redshift` float DEFAULT NULL,
  `not_radio` varchar(10) COLLATE utf8_unicode_ci DEFAULT NULL,
  `magnitude_filter` varchar(10) COLLATE utf8_unicode_ci DEFAULT 'V',
  `htm16ID` bigint(20) DEFAULT NULL,
  `redshift_flag` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL,
  `spectral_classification` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL,
  `dateLastModified` datetime DEFAULT CURRENT_TIMESTAMP,
  `updated` varchar(45) DEFAULT '0',
  `htm10ID` bigint(20) DEFAULT NULL,
  `htm13ID` bigint(20) DEFAULT NULL,
  PRIMARY KEY (`primaryId`),
  UNIQUE KEY `radeg_decdeg` (`raDeg`,`decDeg`),
  KEY `idx_htm16ID` (`htm16ID`),
  KEY `idx_htm10ID` (`htm10ID`),
  KEY `idx_htm13ID` (`htm13ID`)
) ENGINE=MyISAM AUTO_INCREMENT=168945 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
""" % {'tableName': tableName}

    self.add_data_to_database_table(
        dictList=dictList,
        createStatement=createStatement
    )

    self.log.debug('completed the ``get`` method')
    return None
def function[ingest, parameter[self]]: constant[ingest the veron catalogue into the catalogues database See class docstring for usage. ] call[name[self].log.debug, parameter[constant[starting the ``get`` method]]] variable[dictList] assign[=] call[name[self]._create_dictionary_of_veron, parameter[]] variable[tableName] assign[=] name[self].dbTableName variable[createStatement] assign[=] binary_operation[constant[ CREATE TABLE `%(tableName)s` ( `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter', `B_V` float DEFAULT NULL, `U_B` float DEFAULT NULL, `abs_magnitude` float DEFAULT NULL, `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP, `decDeg` double DEFAULT NULL, `magnitude` float DEFAULT NULL, `raDeg` double DEFAULT NULL, `class` varchar(10) COLLATE utf8_unicode_ci DEFAULT NULL, `name` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL, `redshift` float DEFAULT NULL, `not_radio` varchar(10) COLLATE utf8_unicode_ci DEFAULT NULL, `magnitude_filter` varchar(10) COLLATE utf8_unicode_ci DEFAULT 'V', `htm16ID` bigint(20) DEFAULT NULL, `redshift_flag` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL, `spectral_classification` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL, `dateLastModified` datetime DEFAULT CURRENT_TIMESTAMP, `updated` varchar(45) DEFAULT '0', `htm10ID` bigint(20) DEFAULT NULL, `htm13ID` bigint(20) DEFAULT NULL, PRIMARY KEY (`primaryId`), UNIQUE KEY `radeg_decdeg` (`raDeg`,`decDeg`), KEY `idx_htm16ID` (`htm16ID`), KEY `idx_htm10ID` (`htm10ID`), KEY `idx_htm13ID` (`htm13ID`) ) ENGINE=MyISAM AUTO_INCREMENT=168945 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; ] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]] call[name[self].add_data_to_database_table, parameter[]] call[name[self].log.debug, parameter[constant[completed the ``get`` method]]] return[constant[None]]
keyword[def] identifier[ingest] ( identifier[self] ): literal[string] identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) identifier[dictList] = identifier[self] . identifier[_create_dictionary_of_veron] () identifier[tableName] = identifier[self] . identifier[dbTableName] identifier[createStatement] = literal[string] % identifier[locals] () identifier[self] . identifier[add_data_to_database_table] ( identifier[dictList] = identifier[dictList] , identifier[createStatement] = identifier[createStatement] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) keyword[return] keyword[None]
def ingest(self): """ingest the veron catalogue into the catalogues database See class docstring for usage. """ self.log.debug('starting the ``get`` method') dictList = self._create_dictionary_of_veron() tableName = self.dbTableName createStatement = "\n CREATE TABLE `%(tableName)s` (\n `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',\n `B_V` float DEFAULT NULL,\n `U_B` float DEFAULT NULL,\n `abs_magnitude` float DEFAULT NULL,\n `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,\n `decDeg` double DEFAULT NULL,\n `magnitude` float DEFAULT NULL,\n `raDeg` double DEFAULT NULL,\n `class` varchar(10) COLLATE utf8_unicode_ci DEFAULT NULL,\n `name` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL,\n `redshift` float DEFAULT NULL,\n `not_radio` varchar(10) COLLATE utf8_unicode_ci DEFAULT NULL,\n `magnitude_filter` varchar(10) COLLATE utf8_unicode_ci DEFAULT 'V',\n `htm16ID` bigint(20) DEFAULT NULL,\n `redshift_flag` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL,\n `spectral_classification` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL,\n `dateLastModified` datetime DEFAULT CURRENT_TIMESTAMP,\n `updated` varchar(45) DEFAULT '0',\n `htm10ID` bigint(20) DEFAULT NULL,\n `htm13ID` bigint(20) DEFAULT NULL,\n PRIMARY KEY (`primaryId`),\n UNIQUE KEY `radeg_decdeg` (`raDeg`,`decDeg`),\n KEY `idx_htm16ID` (`htm16ID`),\n KEY `idx_htm10ID` (`htm10ID`),\n KEY `idx_htm13ID` (`htm13ID`)\n ) ENGINE=MyISAM AUTO_INCREMENT=168945 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;\n" % locals() self.add_data_to_database_table(dictList=dictList, createStatement=createStatement) self.log.debug('completed the ``get`` method') return None
def add_sldId(self, rId):
    """
    Create a new <p:sldId> child element whose r:id attribute is *rId*
    and return a reference to it.
    """
    new_id = self._next_id
    return self._add_sldId(id=new_id, rId=rId)
def function[add_sldId, parameter[self, rId]]: constant[ Return a reference to a newly created <p:sldId> child element having its r:id attribute set to *rId*. ] return[call[name[self]._add_sldId, parameter[]]]
keyword[def] identifier[add_sldId] ( identifier[self] , identifier[rId] ): literal[string] keyword[return] identifier[self] . identifier[_add_sldId] ( identifier[id] = identifier[self] . identifier[_next_id] , identifier[rId] = identifier[rId] )
def add_sldId(self, rId): """ Return a reference to a newly created <p:sldId> child element having its r:id attribute set to *rId*. """ return self._add_sldId(id=self._next_id, rId=rId)
def reindex(self, axis, labels, **kwargs):
    """Fits a new index for this Manager.

    Args:
        axis: The axis index object to target the reindex on.
        labels: New labels to conform 'axis' on to.

    Returns:
        A new QueryCompiler with updated data and new index.
    """

    # To reindex, we need a function that will be shipped to each of the
    # partitions.  (Renamed from the misspelled ``reindex_builer``.)
    def reindex_builder(df, axis, old_labels, new_labels, **kwargs):
        if axis:
            # Pad with NaN columns until the partition is wide enough to
            # carry every old label, then conform to the new labels.
            while len(df.columns) < len(old_labels):
                df[len(df.columns)] = np.nan
            df.columns = old_labels
            new_df = df.reindex(columns=new_labels, **kwargs)
            # reset the internal columns back to a RangeIndex
            new_df.columns = pandas.RangeIndex(len(new_df.columns))
            return new_df
        else:
            # Same padding trick, but along the row index.
            while len(df.index) < len(old_labels):
                df.loc[len(df.index)] = np.nan
            df.index = old_labels
            new_df = df.reindex(index=new_labels, **kwargs)
            # reset the internal index back to a RangeIndex
            new_df.reset_index(inplace=True, drop=True)
            return new_df

    old_labels = self.columns if axis else self.index
    new_index = self.index if axis else labels
    new_columns = labels if axis else self.columns
    func = self._prepare_method(
        lambda df: reindex_builder(df, axis, old_labels, labels, **kwargs)
    )
    # The reindex can just be mapped over the axis we are modifying. This
    # is for simplicity in implementation. We specify num_splits here
    # because if we are repartitioning we should (in the future).
    # Additionally this operation is often followed by an operation that
    # assumes identical partitioning. Internally, we *may* change the
    # partitioning during a map across a full axis.
    new_data = self._map_across_full_axis(axis, func)
    return self.__constructor__(new_data, new_index, new_columns)
def function[reindex, parameter[self, axis, labels]]: constant[Fits a new index for this Manger. Args: axis: The axis index object to target the reindex on. labels: New labels to conform 'axis' on to. Returns: A new QueryCompiler with updated data and new index. ] def function[reindex_builer, parameter[df, axis, old_labels, new_labels]]: if name[axis] begin[:] while compare[call[name[len], parameter[name[df].columns]] less[<] call[name[len], parameter[name[old_labels]]]] begin[:] call[name[df]][call[name[len], parameter[name[df].columns]]] assign[=] name[np].nan name[df].columns assign[=] name[old_labels] variable[new_df] assign[=] call[name[df].reindex, parameter[]] name[new_df].columns assign[=] call[name[pandas].RangeIndex, parameter[call[name[len], parameter[name[new_df].columns]]]] return[name[new_df]] variable[old_labels] assign[=] <ast.IfExp object at 0x7da2041d8100> variable[new_index] assign[=] <ast.IfExp object at 0x7da2041db3a0> variable[new_columns] assign[=] <ast.IfExp object at 0x7da2041d8670> variable[func] assign[=] call[name[self]._prepare_method, parameter[<ast.Lambda object at 0x7da2041d9b10>]] variable[new_data] assign[=] call[name[self]._map_across_full_axis, parameter[name[axis], name[func]]] return[call[name[self].__constructor__, parameter[name[new_data], name[new_index], name[new_columns]]]]
keyword[def] identifier[reindex] ( identifier[self] , identifier[axis] , identifier[labels] ,** identifier[kwargs] ): literal[string] keyword[def] identifier[reindex_builer] ( identifier[df] , identifier[axis] , identifier[old_labels] , identifier[new_labels] ,** identifier[kwargs] ): keyword[if] identifier[axis] : keyword[while] identifier[len] ( identifier[df] . identifier[columns] )< identifier[len] ( identifier[old_labels] ): identifier[df] [ identifier[len] ( identifier[df] . identifier[columns] )]= identifier[np] . identifier[nan] identifier[df] . identifier[columns] = identifier[old_labels] identifier[new_df] = identifier[df] . identifier[reindex] ( identifier[columns] = identifier[new_labels] ,** identifier[kwargs] ) identifier[new_df] . identifier[columns] = identifier[pandas] . identifier[RangeIndex] ( identifier[len] ( identifier[new_df] . identifier[columns] )) keyword[return] identifier[new_df] keyword[else] : keyword[while] identifier[len] ( identifier[df] . identifier[index] )< identifier[len] ( identifier[old_labels] ): identifier[df] . identifier[loc] [ identifier[len] ( identifier[df] . identifier[index] )]= identifier[np] . identifier[nan] identifier[df] . identifier[index] = identifier[old_labels] identifier[new_df] = identifier[df] . identifier[reindex] ( identifier[index] = identifier[new_labels] ,** identifier[kwargs] ) identifier[new_df] . identifier[reset_index] ( identifier[inplace] = keyword[True] , identifier[drop] = keyword[True] ) keyword[return] identifier[new_df] identifier[old_labels] = identifier[self] . identifier[columns] keyword[if] identifier[axis] keyword[else] identifier[self] . identifier[index] identifier[new_index] = identifier[self] . identifier[index] keyword[if] identifier[axis] keyword[else] identifier[labels] identifier[new_columns] = identifier[labels] keyword[if] identifier[axis] keyword[else] identifier[self] . identifier[columns] identifier[func] = identifier[self] . 
identifier[_prepare_method] ( keyword[lambda] identifier[df] : identifier[reindex_builer] ( identifier[df] , identifier[axis] , identifier[old_labels] , identifier[labels] ,** identifier[kwargs] ) ) identifier[new_data] = identifier[self] . identifier[_map_across_full_axis] ( identifier[axis] , identifier[func] ) keyword[return] identifier[self] . identifier[__constructor__] ( identifier[new_data] , identifier[new_index] , identifier[new_columns] )
def reindex(self, axis, labels, **kwargs): """Fits a new index for this Manger. Args: axis: The axis index object to target the reindex on. labels: New labels to conform 'axis' on to. Returns: A new QueryCompiler with updated data and new index. """ # To reindex, we need a function that will be shipped to each of the # partitions. def reindex_builer(df, axis, old_labels, new_labels, **kwargs): if axis: while len(df.columns) < len(old_labels): df[len(df.columns)] = np.nan # depends on [control=['while'], data=[]] df.columns = old_labels new_df = df.reindex(columns=new_labels, **kwargs) # reset the internal columns back to a RangeIndex new_df.columns = pandas.RangeIndex(len(new_df.columns)) return new_df # depends on [control=['if'], data=[]] else: while len(df.index) < len(old_labels): df.loc[len(df.index)] = np.nan # depends on [control=['while'], data=[]] df.index = old_labels new_df = df.reindex(index=new_labels, **kwargs) # reset the internal index back to a RangeIndex new_df.reset_index(inplace=True, drop=True) return new_df old_labels = self.columns if axis else self.index new_index = self.index if axis else labels new_columns = labels if axis else self.columns func = self._prepare_method(lambda df: reindex_builer(df, axis, old_labels, labels, **kwargs)) # The reindex can just be mapped over the axis we are modifying. This # is for simplicity in implementation. We specify num_splits here # because if we are repartitioning we should (in the future). # Additionally this operation is often followed by an operation that # assumes identical partitioning. Internally, we *may* change the # partitioning during a map across a full axis. new_data = self._map_across_full_axis(axis, func) return self.__constructor__(new_data, new_index, new_columns)
def do_march_all(self):
    """
    Recursive march in the case that we have a fragmented shape.

    Returns
    -------
    perimeters : [perimeter1, ...]
        The perimeters of all the regions in the image.

    See Also
    --------
    :func:`AegeanTools.msq2.MarchingSquares.do_march`
    """
    # Walking each island blanks it out, so keep a copy of the pixel
    # data and restore it once every island has been traced.
    saved_data = copy(self.data)
    perimeters = []
    while True:
        start = self.find_start_point()
        if start is None:
            break
        perimeter = self.walk_perimeter(start[0], start[1])
        perimeters.append(perimeter)
        # Blank the island so the next search finds a different region.
        self._blank_within(perimeter)
    self.data = saved_data
    return perimeters
def function[do_march_all, parameter[self]]: constant[ Recursive march in the case that we have a fragmented shape. Returns ------- perimeters : [perimeter1, ...] The perimeters of all the regions in the image. See Also -------- :func:`AegeanTools.msq2.MarchingSquares.do_march` ] variable[data_copy] assign[=] call[name[copy], parameter[name[self].data]] variable[perimeters] assign[=] list[[]] variable[p] assign[=] call[name[self].find_start_point, parameter[]] while compare[name[p] is_not constant[None]] begin[:] <ast.Tuple object at 0x7da20c991b70> assign[=] name[p] variable[perim] assign[=] call[name[self].walk_perimeter, parameter[name[x], name[y]]] call[name[perimeters].append, parameter[name[perim]]] call[name[self]._blank_within, parameter[name[perim]]] variable[p] assign[=] call[name[self].find_start_point, parameter[]] name[self].data assign[=] name[data_copy] return[name[perimeters]]
keyword[def] identifier[do_march_all] ( identifier[self] ): literal[string] identifier[data_copy] = identifier[copy] ( identifier[self] . identifier[data] ) identifier[perimeters] =[] identifier[p] = identifier[self] . identifier[find_start_point] () keyword[while] identifier[p] keyword[is] keyword[not] keyword[None] : identifier[x] , identifier[y] = identifier[p] identifier[perim] = identifier[self] . identifier[walk_perimeter] ( identifier[x] , identifier[y] ) identifier[perimeters] . identifier[append] ( identifier[perim] ) identifier[self] . identifier[_blank_within] ( identifier[perim] ) identifier[p] = identifier[self] . identifier[find_start_point] () identifier[self] . identifier[data] = identifier[data_copy] keyword[return] identifier[perimeters]
def do_march_all(self): """ Recursive march in the case that we have a fragmented shape. Returns ------- perimeters : [perimeter1, ...] The perimeters of all the regions in the image. See Also -------- :func:`AegeanTools.msq2.MarchingSquares.do_march` """ # copy the data since we are going to be modifying it data_copy = copy(self.data) # iterate through finding an island, creating a perimeter, # and then blanking the island perimeters = [] p = self.find_start_point() while p is not None: (x, y) = p perim = self.walk_perimeter(x, y) perimeters.append(perim) self._blank_within(perim) p = self.find_start_point() # depends on [control=['while'], data=['p']] # restore the data self.data = data_copy return perimeters
def member_status():
    '''
    Get cluster member status

    .. versionchanged:: 2015.8.0

    CLI Example:

    .. code-block:: bash

        salt '*' riak.member_status
    '''
    ret = {'membership': {},
           'summary': {'Valid': 0,
                       'Leaving': 0,
                       'Exiting': 0,
                       'Joining': 0,
                       'Down': 0,
                       }}

    out = __execute_cmd('riak-admin', 'member-status')['stdout'].splitlines()
    for line in out:
        # Skip decorative separators and the header row.
        if line.startswith(('=', '-', 'Status')):
            continue
        if '/' in line:
            # Summary line, e.g. "Valid:5 / Leaving:0 / ..."
            for chunk in line.split('/'):
                label, _, count = chunk.partition(':')
                ret['summary'][label.strip()] = count.strip()
        fields = line.split()
        if len(fields) == 4:
            # Per-node status row: status, ring share, pending, node name.
            status, ring, pending, node = fields
            ret['membership'][node] = {
                'Status': status,
                'Ring': ring,
                'Pending': pending
            }
    return ret
def function[member_status, parameter[]]: constant[ Get cluster member status .. versionchanged:: 2015.8.0 CLI Example: .. code-block:: bash salt '*' riak.member_status ] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b2113490>, <ast.Constant object at 0x7da1b2112c50>], [<ast.Dict object at 0x7da1b2112c80>, <ast.Dict object at 0x7da1b2112c20>]] variable[out] assign[=] call[call[call[name[__execute_cmd], parameter[constant[riak-admin], constant[member-status]]]][constant[stdout]].splitlines, parameter[]] for taget[name[line]] in starred[name[out]] begin[:] if call[name[line].startswith, parameter[tuple[[<ast.Constant object at 0x7da1b2111180>, <ast.Constant object at 0x7da1b2110a60>, <ast.Constant object at 0x7da1b2110a30>]]]] begin[:] continue if compare[constant[/] in name[line]] begin[:] for taget[name[item]] in starred[call[name[line].split, parameter[constant[/]]]] begin[:] <ast.Tuple object at 0x7da1b2110b80> assign[=] call[name[item].split, parameter[constant[:]]] call[call[name[ret]][constant[summary]]][call[name[key].strip, parameter[]]] assign[=] call[name[val].strip, parameter[]] if compare[call[name[len], parameter[call[name[line].split, parameter[]]]] equal[==] constant[4]] begin[:] <ast.Tuple object at 0x7da1b2111d50> assign[=] call[name[line].split, parameter[]] call[call[name[ret]][constant[membership]]][name[node]] assign[=] dictionary[[<ast.Constant object at 0x7da1b2113af0>, <ast.Constant object at 0x7da1b2113b50>, <ast.Constant object at 0x7da1b2113070>], [<ast.Name object at 0x7da1b2112f20>, <ast.Name object at 0x7da1b2113a90>, <ast.Name object at 0x7da1b21121a0>]] return[name[ret]]
keyword[def] identifier[member_status] (): literal[string] identifier[ret] ={ literal[string] :{}, literal[string] :{ literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , }} identifier[out] = identifier[__execute_cmd] ( literal[string] , literal[string] )[ literal[string] ]. identifier[splitlines] () keyword[for] identifier[line] keyword[in] identifier[out] : keyword[if] identifier[line] . identifier[startswith] (( literal[string] , literal[string] , literal[string] )): keyword[continue] keyword[if] literal[string] keyword[in] identifier[line] : keyword[for] identifier[item] keyword[in] identifier[line] . identifier[split] ( literal[string] ): identifier[key] , identifier[val] = identifier[item] . identifier[split] ( literal[string] ) identifier[ret] [ literal[string] ][ identifier[key] . identifier[strip] ()]= identifier[val] . identifier[strip] () keyword[if] identifier[len] ( identifier[line] . identifier[split] ())== literal[int] : ( identifier[status] , identifier[ring] , identifier[pending] , identifier[node] )= identifier[line] . identifier[split] () identifier[ret] [ literal[string] ][ identifier[node] ]={ literal[string] : identifier[status] , literal[string] : identifier[ring] , literal[string] : identifier[pending] } keyword[return] identifier[ret]
def member_status(): """ Get cluster member status .. versionchanged:: 2015.8.0 CLI Example: .. code-block:: bash salt '*' riak.member_status """ ret = {'membership': {}, 'summary': {'Valid': 0, 'Leaving': 0, 'Exiting': 0, 'Joining': 0, 'Down': 0}} out = __execute_cmd('riak-admin', 'member-status')['stdout'].splitlines() for line in out: if line.startswith(('=', '-', 'Status')): continue # depends on [control=['if'], data=[]] if '/' in line: # We're in the summary line for item in line.split('/'): (key, val) = item.split(':') ret['summary'][key.strip()] = val.strip() # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=['line']] if len(line.split()) == 4: # We're on a node status line (status, ring, pending, node) = line.split() ret['membership'][node] = {'Status': status, 'Ring': ring, 'Pending': pending} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] return ret
def blend_value(data, i, j, k, keys=None):
    """Computes the average value of the three vertices of a triangle in the
    simplex triangulation, where two of the vertices are on the lower
    horizontal."""
    # Dimensionality of the keys used by ``data`` (2- or 3-tuples).
    key_size = len(list(data.keys())[0])
    if not keys:
        keys = triangle_coordinates(i, j, k)
    # Truncate each vertex coordinate to match the key size of ``data``.
    vertex_keys = [tuple(key[:key_size]) for key in keys]
    try:
        total = sum(data[key] for key in vertex_keys)
    except KeyError:
        # A vertex is missing from the data, so the blend is undefined.
        return None
    return total / 3.
def function[blend_value, parameter[data, i, j, k, keys]]: constant[Computes the average value of the three vertices of a triangle in the simplex triangulation, where two of the vertices are on the lower horizontal.] variable[key_size] assign[=] call[name[len], parameter[call[call[name[list], parameter[call[name[data].keys, parameter[]]]]][constant[0]]]] if <ast.UnaryOp object at 0x7da207f012d0> begin[:] variable[keys] assign[=] call[name[triangle_coordinates], parameter[name[i], name[j], name[k]]] variable[keys] assign[=] <ast.ListComp object at 0x7da207f03b80> <ast.Try object at 0x7da207f03a60> return[name[value]]
keyword[def] identifier[blend_value] ( identifier[data] , identifier[i] , identifier[j] , identifier[k] , identifier[keys] = keyword[None] ): literal[string] identifier[key_size] = identifier[len] ( identifier[list] ( identifier[data] . identifier[keys] ())[ literal[int] ]) keyword[if] keyword[not] identifier[keys] : identifier[keys] = identifier[triangle_coordinates] ( identifier[i] , identifier[j] , identifier[k] ) identifier[keys] =[ identifier[tuple] ( identifier[key] [: identifier[key_size] ]) keyword[for] identifier[key] keyword[in] identifier[keys] ] keyword[try] : identifier[s] = identifier[sum] ( identifier[data] [ identifier[key] ] keyword[for] identifier[key] keyword[in] identifier[keys] ) identifier[value] = identifier[s] / literal[int] keyword[except] identifier[KeyError] : identifier[value] = keyword[None] keyword[return] identifier[value]
def blend_value(data, i, j, k, keys=None): """Computes the average value of the three vertices of a triangle in the simplex triangulation, where two of the vertices are on the lower horizontal.""" key_size = len(list(data.keys())[0]) if not keys: keys = triangle_coordinates(i, j, k) # depends on [control=['if'], data=[]] # Reduce key from (i, j, k) to (i, j) if necessary keys = [tuple(key[:key_size]) for key in keys] # Sum over the values of the points to blend try: s = sum((data[key] for key in keys)) value = s / 3.0 # depends on [control=['try'], data=[]] except KeyError: value = None # depends on [control=['except'], data=[]] return value
def fetchThreadMessages(self, thread_id=None, limit=20, before=None):
    """
    Get the last messages in a thread

    :param thread_id: User/Group ID to get messages from. See :ref:`intro_threads`
    :param limit: Max. number of messages to retrieve
    :param before: A timestamp, indicating from which point to retrieve messages

    :type limit: int
    :type before: int

    :return: :class:`models.Message` objects
    :rtype: list
    :raises: FBchatException if request failed
    """
    thread_id, _ = self._getThread(thread_id, None)

    j = self.graphql_request(GraphQL(
        doc_id="1860982147341344",
        params={
            "id": thread_id,
            "message_limit": limit,
            "load_messages": True,
            "load_read_receipts": True,
            "before": before,
        },
    ))

    thread = j.get("message_thread")
    if thread is None:
        raise FBchatException("Could not fetch thread {}: {}".format(thread_id, j))

    # The response lists messages newest-first; flip to chronological order.
    messages = [
        Message._from_graphql(node)
        for node in reversed(thread["messages"]["nodes"])
    ]

    # Mark each message as read by every actor whose read watermark
    # is at or past the message's timestamp.
    receipts = thread["read_receipts"]["nodes"]
    for msg in messages:
        ts = int(msg.timestamp)
        for receipt in receipts:
            if int(receipt["watermark"]) >= ts:
                msg.read_by.append(receipt["actor"]["id"])

    return messages
def function[fetchThreadMessages, parameter[self, thread_id, limit, before]]: constant[ Get the last messages in a thread :param thread_id: User/Group ID to get messages from. See :ref:`intro_threads` :param limit: Max. number of messages to retrieve :param before: A timestamp, indicating from which point to retrieve messages :type limit: int :type before: int :return: :class:`models.Message` objects :rtype: list :raises: FBchatException if request failed ] <ast.Tuple object at 0x7da1b18bd7b0> assign[=] call[name[self]._getThread, parameter[name[thread_id], constant[None]]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b18bc9a0>, <ast.Constant object at 0x7da1b18be9e0>, <ast.Constant object at 0x7da1b18bee30>, <ast.Constant object at 0x7da1b18bdba0>, <ast.Constant object at 0x7da1b18bd720>], [<ast.Name object at 0x7da1b18be860>, <ast.Name object at 0x7da1b18bed10>, <ast.Constant object at 0x7da1b18befb0>, <ast.Constant object at 0x7da1b18bd7e0>, <ast.Name object at 0x7da1b18bc0a0>]] variable[j] assign[=] call[name[self].graphql_request, parameter[call[name[GraphQL], parameter[]]]] if compare[call[name[j].get, parameter[constant[message_thread]]] is constant[None]] begin[:] <ast.Raise object at 0x7da1b18bef80> variable[messages] assign[=] <ast.ListComp object at 0x7da1b18be710> call[name[messages].reverse, parameter[]] variable[read_receipts] assign[=] call[call[call[name[j]][constant[message_thread]]][constant[read_receipts]]][constant[nodes]] for taget[name[message]] in starred[name[messages]] begin[:] for taget[name[receipt]] in starred[name[read_receipts]] begin[:] if compare[call[name[int], parameter[call[name[receipt]][constant[watermark]]]] greater_or_equal[>=] call[name[int], parameter[name[message].timestamp]]] begin[:] call[name[message].read_by.append, parameter[call[call[name[receipt]][constant[actor]]][constant[id]]]] return[name[messages]]
keyword[def] identifier[fetchThreadMessages] ( identifier[self] , identifier[thread_id] = keyword[None] , identifier[limit] = literal[int] , identifier[before] = keyword[None] ): literal[string] identifier[thread_id] , identifier[thread_type] = identifier[self] . identifier[_getThread] ( identifier[thread_id] , keyword[None] ) identifier[params] ={ literal[string] : identifier[thread_id] , literal[string] : identifier[limit] , literal[string] : keyword[True] , literal[string] : keyword[True] , literal[string] : identifier[before] , } identifier[j] = identifier[self] . identifier[graphql_request] ( identifier[GraphQL] ( identifier[doc_id] = literal[string] , identifier[params] = identifier[params] )) keyword[if] identifier[j] . identifier[get] ( literal[string] ) keyword[is] keyword[None] : keyword[raise] identifier[FBchatException] ( literal[string] . identifier[format] ( identifier[thread_id] , identifier[j] )) identifier[messages] =[ identifier[Message] . identifier[_from_graphql] ( identifier[message] ) keyword[for] identifier[message] keyword[in] identifier[j] [ literal[string] ][ literal[string] ][ literal[string] ] ] identifier[messages] . identifier[reverse] () identifier[read_receipts] = identifier[j] [ literal[string] ][ literal[string] ][ literal[string] ] keyword[for] identifier[message] keyword[in] identifier[messages] : keyword[for] identifier[receipt] keyword[in] identifier[read_receipts] : keyword[if] identifier[int] ( identifier[receipt] [ literal[string] ])>= identifier[int] ( identifier[message] . identifier[timestamp] ): identifier[message] . identifier[read_by] . identifier[append] ( identifier[receipt] [ literal[string] ][ literal[string] ]) keyword[return] identifier[messages]
def fetchThreadMessages(self, thread_id=None, limit=20, before=None): """ Get the last messages in a thread :param thread_id: User/Group ID to get messages from. See :ref:`intro_threads` :param limit: Max. number of messages to retrieve :param before: A timestamp, indicating from which point to retrieve messages :type limit: int :type before: int :return: :class:`models.Message` objects :rtype: list :raises: FBchatException if request failed """ (thread_id, thread_type) = self._getThread(thread_id, None) params = {'id': thread_id, 'message_limit': limit, 'load_messages': True, 'load_read_receipts': True, 'before': before} j = self.graphql_request(GraphQL(doc_id='1860982147341344', params=params)) if j.get('message_thread') is None: raise FBchatException('Could not fetch thread {}: {}'.format(thread_id, j)) # depends on [control=['if'], data=[]] messages = [Message._from_graphql(message) for message in j['message_thread']['messages']['nodes']] messages.reverse() read_receipts = j['message_thread']['read_receipts']['nodes'] for message in messages: for receipt in read_receipts: if int(receipt['watermark']) >= int(message.timestamp): message.read_by.append(receipt['actor']['id']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['receipt']] # depends on [control=['for'], data=['message']] return messages
def error(self, message):
    """Overrides error to control printing output"""
    if self._debug:
        import pdb
        # Debug mode: drop into the debugger before reporting the error.
        # Post-mortem on the active traceback when one exists; otherwise
        # break at the current frame.
        exc_tb = sys.exc_info()[2]
        if exc_tb:
            pdb.post_mortem(exc_tb)
        else:
            pdb.set_trace()
    self.print_usage(sys.stderr)
    self.exit(2, '\nERROR: {}\n'.format(message))
def function[error, parameter[self, message]]: constant[Overrides error to control printing output] if name[self]._debug begin[:] import module[pdb] <ast.Tuple object at 0x7da1b135f670> assign[=] call[name[sys].exc_info, parameter[]] if name[tb] begin[:] call[name[pdb].post_mortem, parameter[name[tb]]] call[name[self].print_usage, parameter[name[sys].stderr]] call[name[self].exit, parameter[constant[2], call[constant[ ERROR: {} ].format, parameter[name[message]]]]]
keyword[def] identifier[error] ( identifier[self] , identifier[message] ): literal[string] keyword[if] identifier[self] . identifier[_debug] : keyword[import] identifier[pdb] identifier[_] , identifier[_] , identifier[tb] = identifier[sys] . identifier[exc_info] () keyword[if] identifier[tb] : identifier[pdb] . identifier[post_mortem] ( identifier[tb] ) keyword[else] : identifier[pdb] . identifier[set_trace] () identifier[self] . identifier[print_usage] ( identifier[sys] . identifier[stderr] ) identifier[self] . identifier[exit] ( literal[int] ,( literal[string] ). identifier[format] ( identifier[message] ))
def error(self, message): """Overrides error to control printing output""" if self._debug: import pdb (_, _, tb) = sys.exc_info() if tb: pdb.post_mortem(tb) # depends on [control=['if'], data=[]] else: pdb.set_trace() # depends on [control=['if'], data=[]] self.print_usage(sys.stderr) self.exit(2, '\nERROR: {}\n'.format(message))
def in_resource(self, field, resource):
    """
    Return True if resource contains a valid value for the field
    (not an empty or None value)
    """
    value = resource.get(field)
    # Valid means present, non-None, and not the empty string.
    return not (value is None or value == '')
def function[in_resource, parameter[self, field, resource]]: constant[ Return True if resource contains a valid value for the field (not an empty or None value) ] variable[resource_field] assign[=] call[name[resource].get, parameter[name[field], constant[None]]] return[<ast.BoolOp object at 0x7da2046202e0>]
keyword[def] identifier[in_resource] ( identifier[self] , identifier[field] , identifier[resource] ): literal[string] identifier[resource_field] = identifier[resource] . identifier[get] ( identifier[field] , keyword[None] ) keyword[return] identifier[resource_field] keyword[is] keyword[not] keyword[None] keyword[and] identifier[resource_field] != literal[string]
def in_resource(self, field, resource): """ Return True if resource contains a valid value for the field (not an empty or None value) """ resource_field = resource.get(field, None) return resource_field is not None and resource_field != ''
def recordings(self):
    """
    Access the recordings

    :returns: twilio.rest.video.v1.room.recording.RoomRecordingList
    :rtype: twilio.rest.video.v1.room.recording.RoomRecordingList
    """
    # Build the dependent list resource lazily and memoize it so repeated
    # accesses return the same instance.
    if self._recordings is None:
        recording_list = RoomRecordingList(
            self._version,
            room_sid=self._solution['sid'],
        )
        self._recordings = recording_list
    return self._recordings
def function[recordings, parameter[self]]: constant[ Access the recordings :returns: twilio.rest.video.v1.room.recording.RoomRecordingList :rtype: twilio.rest.video.v1.room.recording.RoomRecordingList ] if compare[name[self]._recordings is constant[None]] begin[:] name[self]._recordings assign[=] call[name[RoomRecordingList], parameter[name[self]._version]] return[name[self]._recordings]
keyword[def] identifier[recordings] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_recordings] keyword[is] keyword[None] : identifier[self] . identifier[_recordings] = identifier[RoomRecordingList] ( identifier[self] . identifier[_version] , identifier[room_sid] = identifier[self] . identifier[_solution] [ literal[string] ],) keyword[return] identifier[self] . identifier[_recordings]
def recordings(self): """ Access the recordings :returns: twilio.rest.video.v1.room.recording.RoomRecordingList :rtype: twilio.rest.video.v1.room.recording.RoomRecordingList """ if self._recordings is None: self._recordings = RoomRecordingList(self._version, room_sid=self._solution['sid']) # depends on [control=['if'], data=[]] return self._recordings
def dataset_create_version_cli(self,
                               folder,
                               version_notes,
                               quiet=False,
                               convert_to_csv=True,
                               delete_old_versions=False,
                               dir_mode='skip'):
    """ client wrapper for creating a version of a dataset

         Parameters
        ==========
        folder: the folder with the dataset configuration / data files
        version_notes: notes to add for the version
        quiet: suppress verbose output (default is False)
        convert_to_csv: on upload, if data should be converted to csv
        delete_old_versions: if True, do that (default False)
        dir_mode: What to do with directories: "skip" - ignore; "zip" -
                  compress and upload
    """
    folder = folder or os.getcwd()
    result = self.dataset_create_version(
        folder,
        version_notes,
        quiet=quiet,
        convert_to_csv=convert_to_csv,
        delete_old_versions=delete_old_versions,
        dir_mode=dir_mode)

    if result is None:
        # Bug fix: check for a missing result *before* touching its
        # attributes.  The previous code read result.invalidTags first,
        # which raised AttributeError here and made this branch unreachable.
        print('Dataset version creation error: See previous output')
        return

    if result.invalidTags:
        print(
            ('The following are not valid tags and could not be added to '
             'the dataset: ') + str(result.invalidTags))
    if result.status.lower() == 'ok':
        print('Dataset version is being created. Please check progress at ' +
              result.url)
    else:
        print('Dataset version creation error: ' + result.error)
def function[dataset_create_version_cli, parameter[self, folder, version_notes, quiet, convert_to_csv, delete_old_versions, dir_mode]]: constant[ client wrapper for creating a version of a dataset Parameters ========== folder: the folder with the dataset configuration / data files version_notes: notes to add for the version quiet: suppress verbose output (default is False) convert_to_csv: on upload, if data should be converted to csv delete_old_versions: if True, do that (default False) dir_mode: What to do with directories: "skip" - ignore; "zip" - compress and upload ] variable[folder] assign[=] <ast.BoolOp object at 0x7da1b21a5a50> variable[result] assign[=] call[name[self].dataset_create_version, parameter[name[folder], name[version_notes]]] if name[result].invalidTags begin[:] call[name[print], parameter[binary_operation[constant[The following are not valid tags and could not be added to the dataset: ] + call[name[str], parameter[name[result].invalidTags]]]]] if compare[name[result] is constant[None]] begin[:] call[name[print], parameter[constant[Dataset version creation error: See previous output]]]
keyword[def] identifier[dataset_create_version_cli] ( identifier[self] , identifier[folder] , identifier[version_notes] , identifier[quiet] = keyword[False] , identifier[convert_to_csv] = keyword[True] , identifier[delete_old_versions] = keyword[False] , identifier[dir_mode] = literal[string] ): literal[string] identifier[folder] = identifier[folder] keyword[or] identifier[os] . identifier[getcwd] () identifier[result] = identifier[self] . identifier[dataset_create_version] ( identifier[folder] , identifier[version_notes] , identifier[quiet] = identifier[quiet] , identifier[convert_to_csv] = identifier[convert_to_csv] , identifier[delete_old_versions] = identifier[delete_old_versions] , identifier[dir_mode] = identifier[dir_mode] ) keyword[if] identifier[result] . identifier[invalidTags] : identifier[print] ( ( literal[string] literal[string] )+ identifier[str] ( identifier[result] . identifier[invalidTags] )) keyword[if] identifier[result] keyword[is] keyword[None] : identifier[print] ( literal[string] ) keyword[elif] identifier[result] . identifier[status] . identifier[lower] ()== literal[string] : identifier[print] ( literal[string] + identifier[result] . identifier[url] ) keyword[else] : identifier[print] ( literal[string] + identifier[result] . identifier[error] )
def dataset_create_version_cli(self, folder, version_notes, quiet=False, convert_to_csv=True, delete_old_versions=False, dir_mode='skip'): """ client wrapper for creating a version of a dataset Parameters ========== folder: the folder with the dataset configuration / data files version_notes: notes to add for the version quiet: suppress verbose output (default is False) convert_to_csv: on upload, if data should be converted to csv delete_old_versions: if True, do that (default False) dir_mode: What to do with directories: "skip" - ignore; "zip" - compress and upload """ folder = folder or os.getcwd() result = self.dataset_create_version(folder, version_notes, quiet=quiet, convert_to_csv=convert_to_csv, delete_old_versions=delete_old_versions, dir_mode=dir_mode) if result.invalidTags: print('The following are not valid tags and could not be added to the dataset: ' + str(result.invalidTags)) # depends on [control=['if'], data=[]] if result is None: print('Dataset version creation error: See previous output') # depends on [control=['if'], data=[]] elif result.status.lower() == 'ok': print('Dataset version is being created. Please check progress at ' + result.url) # depends on [control=['if'], data=[]] else: print('Dataset version creation error: ' + result.error)
def free(self):
    """Free the memory referred to by the file-like, any subsequent
    operations on this file-like or slices of it will fail.
    """
    # Hand the underlying SDRAM block back to the machine, then flag this
    # object so later operations can detect the stale reference.
    self._machine_controller.sdram_free(
        self._start_address, self._x, self._y)
    self._freed = True
def function[free, parameter[self]]: constant[Free the memory referred to by the file-like, any subsequent operations on this file-like or slices of it will fail. ] call[name[self]._machine_controller.sdram_free, parameter[name[self]._start_address, name[self]._x, name[self]._y]] name[self]._freed assign[=] constant[True]
keyword[def] identifier[free] ( identifier[self] ): literal[string] identifier[self] . identifier[_machine_controller] . identifier[sdram_free] ( identifier[self] . identifier[_start_address] , identifier[self] . identifier[_x] , identifier[self] . identifier[_y] ) identifier[self] . identifier[_freed] = keyword[True]
def free(self): """Free the memory referred to by the file-like, any subsequent operations on this file-like or slices of it will fail. """ # Free the memory self._machine_controller.sdram_free(self._start_address, self._x, self._y) # Mark as freed self._freed = True
def train():
    """Training function.

    Runs the full NMT training loop over the module-level ``model`` /
    ``args`` / data globals: per-epoch mini-batch updates with gradient
    clipping, periodic throughput logging, per-epoch validation/test
    BLEU evaluation, best-checkpoint saving, a late learning-rate decay
    schedule, and a final evaluation of the best saved parameters.

    NOTE(review): relies on many module globals (``model``, ``args``,
    ``ctx``, ``loss_function``, ``evaluate``, ``val_tgt_sentences``,
    ``test_tgt_sentences``, ``data_train``/``data_val``/``data_test``) —
    assumed to be initialized by the surrounding script before this runs.
    """
    trainer = gluon.Trainer(model.collect_params(), args.optimizer,
                            {'learning_rate': args.lr})

    train_data_loader, val_data_loader, test_data_loader \
        = dataprocessor.make_dataloader(data_train, data_val, data_test, args)

    # Best validation BLEU seen so far; gates checkpointing below.
    best_valid_bleu = 0.0
    for epoch_id in range(args.epochs):
        # Running accumulators for the logging window.
        log_avg_loss = 0
        log_avg_gnorm = 0
        log_wc = 0
        log_start_time = time.time()
        for batch_id, (src_seq, tgt_seq, src_valid_length, tgt_valid_length)\
                in enumerate(train_data_loader):
            # logging.info(src_seq.context) Context suddenly becomes GPU.
            src_seq = src_seq.as_in_context(ctx)
            tgt_seq = tgt_seq.as_in_context(ctx)
            src_valid_length = src_valid_length.as_in_context(ctx)
            tgt_valid_length = tgt_valid_length.as_in_context(ctx)
            with mx.autograd.record():
                # Teacher forcing: feed target prefix, score target suffix.
                out, _ = model(src_seq, tgt_seq[:, :-1], src_valid_length,
                               tgt_valid_length - 1)
                loss = loss_function(out, tgt_seq[:, 1:],
                                     tgt_valid_length - 1).mean()
                # Rescale so the gradient matches a per-token average over
                # the batch's actual target lengths.
                loss = loss * (tgt_seq.shape[1] - 1) \
                    / (tgt_valid_length - 1).mean()
            loss.backward()
            grads = [p.grad(ctx) for p in model.collect_params().values()]
            # Clip the global gradient norm before the update.
            gnorm = gluon.utils.clip_global_norm(grads, args.clip)
            trainer.step(1)
            # Word counts for throughput reporting.
            src_wc = src_valid_length.sum().asscalar()
            tgt_wc = (tgt_valid_length - 1).sum().asscalar()
            step_loss = loss.asscalar()
            log_avg_loss += step_loss
            log_avg_gnorm += gnorm
            log_wc += src_wc + tgt_wc
            if (batch_id + 1) % args.log_interval == 0:
                # Words-per-second over the current logging window.
                wps = log_wc / (time.time() - log_start_time)
                logging.info('[Epoch {} Batch {}/{}] loss={:.4f}, '
                             'ppl={:.4f}, gnorm={:.4f}, '
                             'throughput={:.2f}K wps, wc={:.2f}K'
                             .format(epoch_id, batch_id + 1,
                                     len(train_data_loader),
                                     log_avg_loss / args.log_interval,
                                     np.exp(log_avg_loss / args.log_interval),
                                     log_avg_gnorm / args.log_interval,
                                     wps / 1000, log_wc / 1000))
                # Reset the window accumulators.
                log_start_time = time.time()
                log_avg_loss = 0
                log_avg_gnorm = 0
                log_wc = 0
        # End-of-epoch evaluation on validation and test sets.
        valid_loss, valid_translation_out = evaluate(val_data_loader)
        valid_bleu_score, _, _, _, _ = compute_bleu([val_tgt_sentences],
                                                    valid_translation_out)
        logging.info('[Epoch {}] valid Loss={:.4f}, '
                     'valid ppl={:.4f}, valid bleu={:.2f}'
                     .format(epoch_id, valid_loss, np.exp(valid_loss),
                             valid_bleu_score * 100))
        test_loss, test_translation_out = evaluate(test_data_loader)
        test_bleu_score, _, _, _, _ = compute_bleu([test_tgt_sentences],
                                                   test_translation_out)
        logging.info('[Epoch {}] test Loss={:.4f}, test ppl={:.4f}, '
                     'test bleu={:.2f}'
                     .format(epoch_id, test_loss, np.exp(test_loss),
                             test_bleu_score * 100))
        # Dump the translations produced this epoch.
        dataprocessor.write_sentences(
            valid_translation_out,
            os.path.join(args.save_dir,
                         'epoch{:d}_valid_out.txt').format(epoch_id))
        dataprocessor.write_sentences(
            test_translation_out,
            os.path.join(args.save_dir,
                         'epoch{:d}_test_out.txt').format(epoch_id))
        # Checkpoint only when validation BLEU improves.
        if valid_bleu_score > best_valid_bleu:
            best_valid_bleu = valid_bleu_score
            save_path = os.path.join(args.save_dir, 'valid_best.params')
            logging.info('Save best parameters to {}'.format(save_path))
            model.save_parameters(save_path)
        # Decay the learning rate every epoch in the last third of training.
        if epoch_id + 1 >= (args.epochs * 2) // 3:
            new_lr = trainer.learning_rate * args.lr_update_factor
            logging.info('Learning rate change to {}'.format(new_lr))
            trainer.set_learning_rate(new_lr)
    # Reload the best checkpoint (if any epoch improved) and report its
    # final validation/test scores.
    if os.path.exists(os.path.join(args.save_dir, 'valid_best.params')):
        model.load_parameters(os.path.join(args.save_dir, 'valid_best.params'))
    valid_loss, valid_translation_out = evaluate(val_data_loader)
    valid_bleu_score, _, _, _, _ = compute_bleu([val_tgt_sentences],
                                                valid_translation_out)
    logging.info('Best model valid Loss={:.4f}, valid ppl={:.4f}, '
                 'valid bleu={:.2f}'
                 .format(valid_loss, np.exp(valid_loss),
                         valid_bleu_score * 100))
    test_loss, test_translation_out = evaluate(test_data_loader)
    test_bleu_score, _, _, _, _ = compute_bleu([test_tgt_sentences],
                                               test_translation_out)
    logging.info('Best model test Loss={:.4f}, test ppl={:.4f}, '
                 'test bleu={:.2f}'
                 .format(test_loss, np.exp(test_loss), test_bleu_score * 100))
    dataprocessor.write_sentences(valid_translation_out,
                                  os.path.join(args.save_dir,
                                               'best_valid_out.txt'))
    dataprocessor.write_sentences(test_translation_out,
                                  os.path.join(args.save_dir,
                                               'best_test_out.txt'))
def function[train, parameter[]]: constant[Training function.] variable[trainer] assign[=] call[name[gluon].Trainer, parameter[call[name[model].collect_params, parameter[]], name[args].optimizer, dictionary[[<ast.Constant object at 0x7da1b2179270>], [<ast.Attribute object at 0x7da1b2178c10>]]]] <ast.Tuple object at 0x7da1b217b4c0> assign[=] call[name[dataprocessor].make_dataloader, parameter[name[data_train], name[data_val], name[data_test], name[args]]] variable[best_valid_bleu] assign[=] constant[0.0] for taget[name[epoch_id]] in starred[call[name[range], parameter[name[args].epochs]]] begin[:] variable[log_avg_loss] assign[=] constant[0] variable[log_avg_gnorm] assign[=] constant[0] variable[log_wc] assign[=] constant[0] variable[log_start_time] assign[=] call[name[time].time, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b217ad70>, <ast.Tuple object at 0x7da1b211c580>]]] in starred[call[name[enumerate], parameter[name[train_data_loader]]]] begin[:] variable[src_seq] assign[=] call[name[src_seq].as_in_context, parameter[name[ctx]]] variable[tgt_seq] assign[=] call[name[tgt_seq].as_in_context, parameter[name[ctx]]] variable[src_valid_length] assign[=] call[name[src_valid_length].as_in_context, parameter[name[ctx]]] variable[tgt_valid_length] assign[=] call[name[tgt_valid_length].as_in_context, parameter[name[ctx]]] with call[name[mx].autograd.record, parameter[]] begin[:] <ast.Tuple object at 0x7da1b211d9c0> assign[=] call[name[model], parameter[name[src_seq], call[name[tgt_seq]][tuple[[<ast.Slice object at 0x7da1b211c9d0>, <ast.Slice object at 0x7da1b211df00>]]], name[src_valid_length], binary_operation[name[tgt_valid_length] - constant[1]]]] variable[loss] assign[=] call[call[name[loss_function], parameter[name[out], call[name[tgt_seq]][tuple[[<ast.Slice object at 0x7da1b211c880>, <ast.Slice object at 0x7da1b211d090>]]], binary_operation[name[tgt_valid_length] - constant[1]]]].mean, parameter[]] variable[loss] assign[=] 
binary_operation[binary_operation[name[loss] * binary_operation[call[name[tgt_seq].shape][constant[1]] - constant[1]]] / call[binary_operation[name[tgt_valid_length] - constant[1]].mean, parameter[]]] call[name[loss].backward, parameter[]] variable[grads] assign[=] <ast.ListComp object at 0x7da1b211d360> variable[gnorm] assign[=] call[name[gluon].utils.clip_global_norm, parameter[name[grads], name[args].clip]] call[name[trainer].step, parameter[constant[1]]] variable[src_wc] assign[=] call[call[name[src_valid_length].sum, parameter[]].asscalar, parameter[]] variable[tgt_wc] assign[=] call[call[binary_operation[name[tgt_valid_length] - constant[1]].sum, parameter[]].asscalar, parameter[]] variable[step_loss] assign[=] call[name[loss].asscalar, parameter[]] <ast.AugAssign object at 0x7da1b21ebfa0> <ast.AugAssign object at 0x7da1b21e9450> <ast.AugAssign object at 0x7da1b21ebe20> if compare[binary_operation[binary_operation[name[batch_id] + constant[1]] <ast.Mod object at 0x7da2590d6920> name[args].log_interval] equal[==] constant[0]] begin[:] variable[wps] assign[=] binary_operation[name[log_wc] / binary_operation[call[name[time].time, parameter[]] - name[log_start_time]]] call[name[logging].info, parameter[call[constant[[Epoch {} Batch {}/{}] loss={:.4f}, ppl={:.4f}, gnorm={:.4f}, throughput={:.2f}K wps, wc={:.2f}K].format, parameter[name[epoch_id], binary_operation[name[batch_id] + constant[1]], call[name[len], parameter[name[train_data_loader]]], binary_operation[name[log_avg_loss] / name[args].log_interval], call[name[np].exp, parameter[binary_operation[name[log_avg_loss] / name[args].log_interval]]], binary_operation[name[log_avg_gnorm] / name[args].log_interval], binary_operation[name[wps] / constant[1000]], binary_operation[name[log_wc] / constant[1000]]]]]] variable[log_start_time] assign[=] call[name[time].time, parameter[]] variable[log_avg_loss] assign[=] constant[0] variable[log_avg_gnorm] assign[=] constant[0] variable[log_wc] assign[=] constant[0] 
<ast.Tuple object at 0x7da1b1c2c760> assign[=] call[name[evaluate], parameter[name[val_data_loader]]] <ast.Tuple object at 0x7da1b1c2c8b0> assign[=] call[name[compute_bleu], parameter[list[[<ast.Name object at 0x7da1b1c2ca90>]], name[valid_translation_out]]] call[name[logging].info, parameter[call[constant[[Epoch {}] valid Loss={:.4f}, valid ppl={:.4f}, valid bleu={:.2f}].format, parameter[name[epoch_id], name[valid_loss], call[name[np].exp, parameter[name[valid_loss]]], binary_operation[name[valid_bleu_score] * constant[100]]]]]] <ast.Tuple object at 0x7da1b1c2ce50> assign[=] call[name[evaluate], parameter[name[test_data_loader]]] <ast.Tuple object at 0x7da1b1c2cfa0> assign[=] call[name[compute_bleu], parameter[list[[<ast.Name object at 0x7da1b1c2d180>]], name[test_translation_out]]] call[name[logging].info, parameter[call[constant[[Epoch {}] test Loss={:.4f}, test ppl={:.4f}, test bleu={:.2f}].format, parameter[name[epoch_id], name[test_loss], call[name[np].exp, parameter[name[test_loss]]], binary_operation[name[test_bleu_score] * constant[100]]]]]] call[name[dataprocessor].write_sentences, parameter[name[valid_translation_out], call[call[name[os].path.join, parameter[name[args].save_dir, constant[epoch{:d}_valid_out.txt]]].format, parameter[name[epoch_id]]]]] call[name[dataprocessor].write_sentences, parameter[name[test_translation_out], call[call[name[os].path.join, parameter[name[args].save_dir, constant[epoch{:d}_test_out.txt]]].format, parameter[name[epoch_id]]]]] if compare[name[valid_bleu_score] greater[>] name[best_valid_bleu]] begin[:] variable[best_valid_bleu] assign[=] name[valid_bleu_score] variable[save_path] assign[=] call[name[os].path.join, parameter[name[args].save_dir, constant[valid_best.params]]] call[name[logging].info, parameter[call[constant[Save best parameters to {}].format, parameter[name[save_path]]]]] call[name[model].save_parameters, parameter[name[save_path]]] if compare[binary_operation[name[epoch_id] + constant[1]] 
greater_or_equal[>=] binary_operation[binary_operation[name[args].epochs * constant[2]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[3]]] begin[:] variable[new_lr] assign[=] binary_operation[name[trainer].learning_rate * name[args].lr_update_factor] call[name[logging].info, parameter[call[constant[Learning rate change to {}].format, parameter[name[new_lr]]]]] call[name[trainer].set_learning_rate, parameter[name[new_lr]]] if call[name[os].path.exists, parameter[call[name[os].path.join, parameter[name[args].save_dir, constant[valid_best.params]]]]] begin[:] call[name[model].load_parameters, parameter[call[name[os].path.join, parameter[name[args].save_dir, constant[valid_best.params]]]]] <ast.Tuple object at 0x7da1b219ffa0> assign[=] call[name[evaluate], parameter[name[val_data_loader]]] <ast.Tuple object at 0x7da1b219fe50> assign[=] call[name[compute_bleu], parameter[list[[<ast.Name object at 0x7da1b219fc70>]], name[valid_translation_out]]] call[name[logging].info, parameter[call[constant[Best model valid Loss={:.4f}, valid ppl={:.4f}, valid bleu={:.2f}].format, parameter[name[valid_loss], call[name[np].exp, parameter[name[valid_loss]]], binary_operation[name[valid_bleu_score] * constant[100]]]]]] <ast.Tuple object at 0x7da1b219f8e0> assign[=] call[name[evaluate], parameter[name[test_data_loader]]] <ast.Tuple object at 0x7da1b219f790> assign[=] call[name[compute_bleu], parameter[list[[<ast.Name object at 0x7da1b219f5b0>]], name[test_translation_out]]] call[name[logging].info, parameter[call[constant[Best model test Loss={:.4f}, test ppl={:.4f}, test bleu={:.2f}].format, parameter[name[test_loss], call[name[np].exp, parameter[name[test_loss]]], binary_operation[name[test_bleu_score] * constant[100]]]]]] call[name[dataprocessor].write_sentences, parameter[name[valid_translation_out], call[name[os].path.join, parameter[name[args].save_dir, constant[best_valid_out.txt]]]]] call[name[dataprocessor].write_sentences, parameter[name[test_translation_out], 
call[name[os].path.join, parameter[name[args].save_dir, constant[best_test_out.txt]]]]]
keyword[def] identifier[train] (): literal[string] identifier[trainer] = identifier[gluon] . identifier[Trainer] ( identifier[model] . identifier[collect_params] (), identifier[args] . identifier[optimizer] ,{ literal[string] : identifier[args] . identifier[lr] }) identifier[train_data_loader] , identifier[val_data_loader] , identifier[test_data_loader] = identifier[dataprocessor] . identifier[make_dataloader] ( identifier[data_train] , identifier[data_val] , identifier[data_test] , identifier[args] ) identifier[best_valid_bleu] = literal[int] keyword[for] identifier[epoch_id] keyword[in] identifier[range] ( identifier[args] . identifier[epochs] ): identifier[log_avg_loss] = literal[int] identifier[log_avg_gnorm] = literal[int] identifier[log_wc] = literal[int] identifier[log_start_time] = identifier[time] . identifier[time] () keyword[for] identifier[batch_id] ,( identifier[src_seq] , identifier[tgt_seq] , identifier[src_valid_length] , identifier[tgt_valid_length] ) keyword[in] identifier[enumerate] ( identifier[train_data_loader] ): identifier[src_seq] = identifier[src_seq] . identifier[as_in_context] ( identifier[ctx] ) identifier[tgt_seq] = identifier[tgt_seq] . identifier[as_in_context] ( identifier[ctx] ) identifier[src_valid_length] = identifier[src_valid_length] . identifier[as_in_context] ( identifier[ctx] ) identifier[tgt_valid_length] = identifier[tgt_valid_length] . identifier[as_in_context] ( identifier[ctx] ) keyword[with] identifier[mx] . identifier[autograd] . identifier[record] (): identifier[out] , identifier[_] = identifier[model] ( identifier[src_seq] , identifier[tgt_seq] [:,:- literal[int] ], identifier[src_valid_length] , identifier[tgt_valid_length] - literal[int] ) identifier[loss] = identifier[loss_function] ( identifier[out] , identifier[tgt_seq] [:, literal[int] :], identifier[tgt_valid_length] - literal[int] ). identifier[mean] () identifier[loss] = identifier[loss] *( identifier[tgt_seq] . 
identifier[shape] [ literal[int] ]- literal[int] )/( identifier[tgt_valid_length] - literal[int] ). identifier[mean] () identifier[loss] . identifier[backward] () identifier[grads] =[ identifier[p] . identifier[grad] ( identifier[ctx] ) keyword[for] identifier[p] keyword[in] identifier[model] . identifier[collect_params] (). identifier[values] ()] identifier[gnorm] = identifier[gluon] . identifier[utils] . identifier[clip_global_norm] ( identifier[grads] , identifier[args] . identifier[clip] ) identifier[trainer] . identifier[step] ( literal[int] ) identifier[src_wc] = identifier[src_valid_length] . identifier[sum] (). identifier[asscalar] () identifier[tgt_wc] =( identifier[tgt_valid_length] - literal[int] ). identifier[sum] (). identifier[asscalar] () identifier[step_loss] = identifier[loss] . identifier[asscalar] () identifier[log_avg_loss] += identifier[step_loss] identifier[log_avg_gnorm] += identifier[gnorm] identifier[log_wc] += identifier[src_wc] + identifier[tgt_wc] keyword[if] ( identifier[batch_id] + literal[int] )% identifier[args] . identifier[log_interval] == literal[int] : identifier[wps] = identifier[log_wc] /( identifier[time] . identifier[time] ()- identifier[log_start_time] ) identifier[logging] . identifier[info] ( literal[string] literal[string] . identifier[format] ( identifier[epoch_id] , identifier[batch_id] + literal[int] , identifier[len] ( identifier[train_data_loader] ), identifier[log_avg_loss] / identifier[args] . identifier[log_interval] , identifier[np] . identifier[exp] ( identifier[log_avg_loss] / identifier[args] . identifier[log_interval] ), identifier[log_avg_gnorm] / identifier[args] . identifier[log_interval] , identifier[wps] / literal[int] , identifier[log_wc] / literal[int] )) identifier[log_start_time] = identifier[time] . 
identifier[time] () identifier[log_avg_loss] = literal[int] identifier[log_avg_gnorm] = literal[int] identifier[log_wc] = literal[int] identifier[valid_loss] , identifier[valid_translation_out] = identifier[evaluate] ( identifier[val_data_loader] ) identifier[valid_bleu_score] , identifier[_] , identifier[_] , identifier[_] , identifier[_] = identifier[compute_bleu] ([ identifier[val_tgt_sentences] ], identifier[valid_translation_out] ) identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[epoch_id] , identifier[valid_loss] , identifier[np] . identifier[exp] ( identifier[valid_loss] ), identifier[valid_bleu_score] * literal[int] )) identifier[test_loss] , identifier[test_translation_out] = identifier[evaluate] ( identifier[test_data_loader] ) identifier[test_bleu_score] , identifier[_] , identifier[_] , identifier[_] , identifier[_] = identifier[compute_bleu] ([ identifier[test_tgt_sentences] ], identifier[test_translation_out] ) identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[epoch_id] , identifier[test_loss] , identifier[np] . identifier[exp] ( identifier[test_loss] ), identifier[test_bleu_score] * literal[int] )) identifier[dataprocessor] . identifier[write_sentences] ( identifier[valid_translation_out] , identifier[os] . identifier[path] . identifier[join] ( identifier[args] . identifier[save_dir] , literal[string] ). identifier[format] ( identifier[epoch_id] )) identifier[dataprocessor] . identifier[write_sentences] ( identifier[test_translation_out] , identifier[os] . identifier[path] . identifier[join] ( identifier[args] . identifier[save_dir] , literal[string] ). identifier[format] ( identifier[epoch_id] )) keyword[if] identifier[valid_bleu_score] > identifier[best_valid_bleu] : identifier[best_valid_bleu] = identifier[valid_bleu_score] identifier[save_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[args] . 
identifier[save_dir] , literal[string] ) identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[save_path] )) identifier[model] . identifier[save_parameters] ( identifier[save_path] ) keyword[if] identifier[epoch_id] + literal[int] >=( identifier[args] . identifier[epochs] * literal[int] )// literal[int] : identifier[new_lr] = identifier[trainer] . identifier[learning_rate] * identifier[args] . identifier[lr_update_factor] identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[new_lr] )) identifier[trainer] . identifier[set_learning_rate] ( identifier[new_lr] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[args] . identifier[save_dir] , literal[string] )): identifier[model] . identifier[load_parameters] ( identifier[os] . identifier[path] . identifier[join] ( identifier[args] . identifier[save_dir] , literal[string] )) identifier[valid_loss] , identifier[valid_translation_out] = identifier[evaluate] ( identifier[val_data_loader] ) identifier[valid_bleu_score] , identifier[_] , identifier[_] , identifier[_] , identifier[_] = identifier[compute_bleu] ([ identifier[val_tgt_sentences] ], identifier[valid_translation_out] ) identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[valid_loss] , identifier[np] . identifier[exp] ( identifier[valid_loss] ), identifier[valid_bleu_score] * literal[int] )) identifier[test_loss] , identifier[test_translation_out] = identifier[evaluate] ( identifier[test_data_loader] ) identifier[test_bleu_score] , identifier[_] , identifier[_] , identifier[_] , identifier[_] = identifier[compute_bleu] ([ identifier[test_tgt_sentences] ], identifier[test_translation_out] ) identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[test_loss] , identifier[np] . 
identifier[exp] ( identifier[test_loss] ), identifier[test_bleu_score] * literal[int] )) identifier[dataprocessor] . identifier[write_sentences] ( identifier[valid_translation_out] , identifier[os] . identifier[path] . identifier[join] ( identifier[args] . identifier[save_dir] , literal[string] )) identifier[dataprocessor] . identifier[write_sentences] ( identifier[test_translation_out] , identifier[os] . identifier[path] . identifier[join] ( identifier[args] . identifier[save_dir] , literal[string] ))
def train(): """Training function.""" trainer = gluon.Trainer(model.collect_params(), args.optimizer, {'learning_rate': args.lr}) (train_data_loader, val_data_loader, test_data_loader) = dataprocessor.make_dataloader(data_train, data_val, data_test, args) best_valid_bleu = 0.0 for epoch_id in range(args.epochs): log_avg_loss = 0 log_avg_gnorm = 0 log_wc = 0 log_start_time = time.time() for (batch_id, (src_seq, tgt_seq, src_valid_length, tgt_valid_length)) in enumerate(train_data_loader): # logging.info(src_seq.context) Context suddenly becomes GPU. src_seq = src_seq.as_in_context(ctx) tgt_seq = tgt_seq.as_in_context(ctx) src_valid_length = src_valid_length.as_in_context(ctx) tgt_valid_length = tgt_valid_length.as_in_context(ctx) with mx.autograd.record(): (out, _) = model(src_seq, tgt_seq[:, :-1], src_valid_length, tgt_valid_length - 1) loss = loss_function(out, tgt_seq[:, 1:], tgt_valid_length - 1).mean() loss = loss * (tgt_seq.shape[1] - 1) / (tgt_valid_length - 1).mean() loss.backward() # depends on [control=['with'], data=[]] grads = [p.grad(ctx) for p in model.collect_params().values()] gnorm = gluon.utils.clip_global_norm(grads, args.clip) trainer.step(1) src_wc = src_valid_length.sum().asscalar() tgt_wc = (tgt_valid_length - 1).sum().asscalar() step_loss = loss.asscalar() log_avg_loss += step_loss log_avg_gnorm += gnorm log_wc += src_wc + tgt_wc if (batch_id + 1) % args.log_interval == 0: wps = log_wc / (time.time() - log_start_time) logging.info('[Epoch {} Batch {}/{}] loss={:.4f}, ppl={:.4f}, gnorm={:.4f}, throughput={:.2f}K wps, wc={:.2f}K'.format(epoch_id, batch_id + 1, len(train_data_loader), log_avg_loss / args.log_interval, np.exp(log_avg_loss / args.log_interval), log_avg_gnorm / args.log_interval, wps / 1000, log_wc / 1000)) log_start_time = time.time() log_avg_loss = 0 log_avg_gnorm = 0 log_wc = 0 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] (valid_loss, valid_translation_out) = evaluate(val_data_loader) 
(valid_bleu_score, _, _, _, _) = compute_bleu([val_tgt_sentences], valid_translation_out) logging.info('[Epoch {}] valid Loss={:.4f}, valid ppl={:.4f}, valid bleu={:.2f}'.format(epoch_id, valid_loss, np.exp(valid_loss), valid_bleu_score * 100)) (test_loss, test_translation_out) = evaluate(test_data_loader) (test_bleu_score, _, _, _, _) = compute_bleu([test_tgt_sentences], test_translation_out) logging.info('[Epoch {}] test Loss={:.4f}, test ppl={:.4f}, test bleu={:.2f}'.format(epoch_id, test_loss, np.exp(test_loss), test_bleu_score * 100)) dataprocessor.write_sentences(valid_translation_out, os.path.join(args.save_dir, 'epoch{:d}_valid_out.txt').format(epoch_id)) dataprocessor.write_sentences(test_translation_out, os.path.join(args.save_dir, 'epoch{:d}_test_out.txt').format(epoch_id)) if valid_bleu_score > best_valid_bleu: best_valid_bleu = valid_bleu_score save_path = os.path.join(args.save_dir, 'valid_best.params') logging.info('Save best parameters to {}'.format(save_path)) model.save_parameters(save_path) # depends on [control=['if'], data=['valid_bleu_score', 'best_valid_bleu']] if epoch_id + 1 >= args.epochs * 2 // 3: new_lr = trainer.learning_rate * args.lr_update_factor logging.info('Learning rate change to {}'.format(new_lr)) trainer.set_learning_rate(new_lr) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['epoch_id']] if os.path.exists(os.path.join(args.save_dir, 'valid_best.params')): model.load_parameters(os.path.join(args.save_dir, 'valid_best.params')) # depends on [control=['if'], data=[]] (valid_loss, valid_translation_out) = evaluate(val_data_loader) (valid_bleu_score, _, _, _, _) = compute_bleu([val_tgt_sentences], valid_translation_out) logging.info('Best model valid Loss={:.4f}, valid ppl={:.4f}, valid bleu={:.2f}'.format(valid_loss, np.exp(valid_loss), valid_bleu_score * 100)) (test_loss, test_translation_out) = evaluate(test_data_loader) (test_bleu_score, _, _, _, _) = compute_bleu([test_tgt_sentences], 
test_translation_out) logging.info('Best model test Loss={:.4f}, test ppl={:.4f}, test bleu={:.2f}'.format(test_loss, np.exp(test_loss), test_bleu_score * 100)) dataprocessor.write_sentences(valid_translation_out, os.path.join(args.save_dir, 'best_valid_out.txt')) dataprocessor.write_sentences(test_translation_out, os.path.join(args.save_dir, 'best_test_out.txt'))
def append(self, point): """ appends a copy of the given point to this sequence """ point = Point(point) self._elements.append(point)
def function[append, parameter[self, point]]: constant[ appends a copy of the given point to this sequence ] variable[point] assign[=] call[name[Point], parameter[name[point]]] call[name[self]._elements.append, parameter[name[point]]]
keyword[def] identifier[append] ( identifier[self] , identifier[point] ): literal[string] identifier[point] = identifier[Point] ( identifier[point] ) identifier[self] . identifier[_elements] . identifier[append] ( identifier[point] )
def append(self, point): """ appends a copy of the given point to this sequence """ point = Point(point) self._elements.append(point)
def get_from_clause(self): """ Return the FROM clause, converted the SOQL dialect. It should be only the name of base object, even in parent-to-child and child-to-parent relationships queries. """ self.query_topology() root_table = self.soql_trans[self.root_alias] return [root_table], []
def function[get_from_clause, parameter[self]]: constant[ Return the FROM clause, converted the SOQL dialect. It should be only the name of base object, even in parent-to-child and child-to-parent relationships queries. ] call[name[self].query_topology, parameter[]] variable[root_table] assign[=] call[name[self].soql_trans][name[self].root_alias] return[tuple[[<ast.List object at 0x7da1b1263640>, <ast.List object at 0x7da1b1263670>]]]
keyword[def] identifier[get_from_clause] ( identifier[self] ): literal[string] identifier[self] . identifier[query_topology] () identifier[root_table] = identifier[self] . identifier[soql_trans] [ identifier[self] . identifier[root_alias] ] keyword[return] [ identifier[root_table] ],[]
def get_from_clause(self): """ Return the FROM clause, converted the SOQL dialect. It should be only the name of base object, even in parent-to-child and child-to-parent relationships queries. """ self.query_topology() root_table = self.soql_trans[self.root_alias] return ([root_table], [])