code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def getSize(string):
    """Return the total size, in bytes, of the page at URL *string*.

    Fetches the URL with a one-second timeout and sums the length of
    every line read from the response. This exists purely as an example;
    summing line lengths is not a sensible way to measure a real page.

    :param string: the URL to open
    :returns: byte count of the fetched page, or 0 when the fetch fails
        with a URL error or a socket timeout
    """
    try:
        response = urllib.request.urlopen(string, None, 1)
        try:
            # Accumulate the length of each line as it streams in.
            total = 0
            for chunk in response:
                total += len(chunk)
            return total
        finally:
            response.close()
    except (urllib.error.URLError, socket.timeout):
        # Unreachable/slow hosts are reported as a zero-byte page.
        return 0
def function[getSize, parameter[string]]: constant[ This functions opens a web sites and then calculate the total size of the page in bytes. This is for the sake of the example. Do not use this technique in real code as it is not a very bright way to do this.] <ast.Try object at 0x7da18c4cead0>
keyword[def] identifier[getSize] ( identifier[string] ): literal[string] keyword[try] : keyword[with] identifier[urllib] . identifier[request] . identifier[urlopen] ( identifier[string] , keyword[None] , literal[int] ) keyword[as] identifier[f] : keyword[return] identifier[sum] ( identifier[len] ( identifier[line] ) keyword[for] identifier[line] keyword[in] identifier[f] ) keyword[except] ( identifier[urllib] . identifier[error] . identifier[URLError] , identifier[socket] . identifier[timeout] ) keyword[as] identifier[e] : keyword[return] literal[int]
def getSize(string): """ This functions opens a web sites and then calculate the total size of the page in bytes. This is for the sake of the example. Do not use this technique in real code as it is not a very bright way to do this.""" try: # We open the web page with urllib.request.urlopen(string, None, 1) as f: return sum((len(line) for line in f)) # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]] except (urllib.error.URLError, socket.timeout) as e: return 0 # depends on [control=['except'], data=[]]
def get_page(self, page_id):
    """Get short page info and body html code.

    :param page_id: identifier of the page to fetch
    :returns: a ``TildaPage`` built from the API response, or an empty
        list when the request fails with a ``NetworkError``
    """
    try:
        # Ask the API for the page payload and wrap it in a model object.
        response = self._request('/getpage/', {'pageid': page_id})
        return TildaPage(**response)
    except NetworkError:
        # Network problems degrade to an empty result rather than raising.
        return []
def function[get_page, parameter[self, page_id]]: constant[ Get short page info and body html code ] <ast.Try object at 0x7da20c794f70>
keyword[def] identifier[get_page] ( identifier[self] , identifier[page_id] ): literal[string] keyword[try] : identifier[result] = identifier[self] . identifier[_request] ( literal[string] , { literal[string] : identifier[page_id] }) keyword[return] identifier[TildaPage] (** identifier[result] ) keyword[except] identifier[NetworkError] : keyword[return] []
def get_page(self, page_id): """ Get short page info and body html code """ try: result = self._request('/getpage/', {'pageid': page_id}) return TildaPage(**result) # depends on [control=['try'], data=[]] except NetworkError: return [] # depends on [control=['except'], data=[]]
def update_thing_shadow(self, **kwargs):
    r"""
    Updates the thing shadow for the specified thing.

    :Keyword Arguments:
        * *thingName* (``string``) --
          [REQUIRED]
          The name of the thing.
        * *payload* (``bytes or seekable file-like object``) --
          [REQUIRED]
          The state information, in JSON format.

    :returns: (``dict``) --
        The output from the UpdateThingShadow operation
        * *payload* (``bytes``) --
          The state information, in JSON format.
    """
    # Both parameters are mandatory; _get_required_parameter raises if
    # the caller omitted either of them.
    thing_name, payload = (
        self._get_required_parameter(param, **kwargs)
        for param in ('thingName', 'payload')
    )
    return self._shadow_op('update', thing_name, payload)
def function[update_thing_shadow, parameter[self]]: constant[ Updates the thing shadow for the specified thing. :Keyword Arguments: * *thingName* (``string``) -- [REQUIRED] The name of the thing. * *payload* (``bytes or seekable file-like object``) -- [REQUIRED] The state information, in JSON format. :returns: (``dict``) -- The output from the UpdateThingShadow operation * *payload* (``bytes``) -- The state information, in JSON format. ] variable[thing_name] assign[=] call[name[self]._get_required_parameter, parameter[constant[thingName]]] variable[payload] assign[=] call[name[self]._get_required_parameter, parameter[constant[payload]]] return[call[name[self]._shadow_op, parameter[constant[update], name[thing_name], name[payload]]]]
keyword[def] identifier[update_thing_shadow] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[thing_name] = identifier[self] . identifier[_get_required_parameter] ( literal[string] ,** identifier[kwargs] ) identifier[payload] = identifier[self] . identifier[_get_required_parameter] ( literal[string] ,** identifier[kwargs] ) keyword[return] identifier[self] . identifier[_shadow_op] ( literal[string] , identifier[thing_name] , identifier[payload] )
def update_thing_shadow(self, **kwargs): """ Updates the thing shadow for the specified thing. :Keyword Arguments: * *thingName* (``string``) -- [REQUIRED] The name of the thing. * *payload* (``bytes or seekable file-like object``) -- [REQUIRED] The state information, in JSON format. :returns: (``dict``) -- The output from the UpdateThingShadow operation * *payload* (``bytes``) -- The state information, in JSON format. """ thing_name = self._get_required_parameter('thingName', **kwargs) payload = self._get_required_parameter('payload', **kwargs) return self._shadow_op('update', thing_name, payload)
def generate_reset_password_token(user):
    """Generates a unique reset password token for the specified user.

    :param user: The user to work with
    :returns: the serialized token produced by the security extension's
        reset serializer
    """
    # Hash the current password so the token is invalidated as soon as
    # the password changes; users without a password get ``None``.
    if user.password:
        password_hash = hash_data(user.password)
    else:
        password_hash = None
    return _security.reset_serializer.dumps([str(user.id), password_hash])
def function[generate_reset_password_token, parameter[user]]: constant[Generates a unique reset password token for the specified user. :param user: The user to work with ] variable[password_hash] assign[=] <ast.IfExp object at 0x7da18dc9b010> variable[data] assign[=] list[[<ast.Call object at 0x7da18dc99b70>, <ast.Name object at 0x7da18dc9baf0>]] return[call[name[_security].reset_serializer.dumps, parameter[name[data]]]]
keyword[def] identifier[generate_reset_password_token] ( identifier[user] ): literal[string] identifier[password_hash] = identifier[hash_data] ( identifier[user] . identifier[password] ) keyword[if] identifier[user] . identifier[password] keyword[else] keyword[None] identifier[data] =[ identifier[str] ( identifier[user] . identifier[id] ), identifier[password_hash] ] keyword[return] identifier[_security] . identifier[reset_serializer] . identifier[dumps] ( identifier[data] )
def generate_reset_password_token(user): """Generates a unique reset password token for the specified user. :param user: The user to work with """ password_hash = hash_data(user.password) if user.password else None data = [str(user.id), password_hash] return _security.reset_serializer.dumps(data)
def _get_distance_term(self, C, rjb, mag):
    """
    Returns the general distance scaling term - equation 2

    :param C: coefficient table for the current period
    :param rjb: Joyner-Boore distance(s)
    :param mag: earthquake magnitude
    """
    m_ref = self.CONSTS["Mref"]
    r_ref = self.CONSTS["Rref"]
    c_3 = self._get_anelastic_coeff(C)
    # Effective distance including the depth-like term h.
    rval = np.sqrt(rjb ** 2. + C["h"] ** 2.)
    # Geometric spreading scaled by magnitude, plus anelastic attenuation.
    geometric = (C["c1"] + C["c2"] * (mag - m_ref)) * np.log(rval / r_ref)
    anelastic = c_3 * (rval - r_ref)
    return geometric + anelastic
def function[_get_distance_term, parameter[self, C, rjb, mag]]: constant[ Returns the general distance scaling term - equation 2 ] variable[c_3] assign[=] call[name[self]._get_anelastic_coeff, parameter[name[C]]] variable[rval] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[name[rjb] ** constant[2.0]] + binary_operation[call[name[C]][constant[h]] ** constant[2.0]]]]] return[binary_operation[binary_operation[binary_operation[call[name[C]][constant[c1]] + binary_operation[call[name[C]][constant[c2]] * binary_operation[name[mag] - call[name[self].CONSTS][constant[Mref]]]]] * call[name[np].log, parameter[binary_operation[name[rval] / call[name[self].CONSTS][constant[Rref]]]]]] + binary_operation[name[c_3] * binary_operation[name[rval] - call[name[self].CONSTS][constant[Rref]]]]]]
keyword[def] identifier[_get_distance_term] ( identifier[self] , identifier[C] , identifier[rjb] , identifier[mag] ): literal[string] identifier[c_3] = identifier[self] . identifier[_get_anelastic_coeff] ( identifier[C] ) identifier[rval] = identifier[np] . identifier[sqrt] ( identifier[rjb] ** literal[int] + identifier[C] [ literal[string] ]** literal[int] ) keyword[return] ( identifier[C] [ literal[string] ]+ identifier[C] [ literal[string] ]*( identifier[mag] - identifier[self] . identifier[CONSTS] [ literal[string] ]))* identifier[np] . identifier[log] ( identifier[rval] / identifier[self] . identifier[CONSTS] [ literal[string] ])+ identifier[c_3] *( identifier[rval] - identifier[self] . identifier[CONSTS] [ literal[string] ])
def _get_distance_term(self, C, rjb, mag): """ Returns the general distance scaling term - equation 2 """ c_3 = self._get_anelastic_coeff(C) rval = np.sqrt(rjb ** 2.0 + C['h'] ** 2.0) return (C['c1'] + C['c2'] * (mag - self.CONSTS['Mref'])) * np.log(rval / self.CONSTS['Rref']) + c_3 * (rval - self.CONSTS['Rref'])
def _parseLine(cls, line):
    """Parses a single line of text and returns an AudioClipSpec.

    Line format: <number> <number> [<text>]

    :param line: raw text line to parse
    :returns: AudioClipSpec built from the matched ``begin``, ``end``
        and ``text`` groups
    :raises ValueError: when the line does not match the expected format
        or when either number is empty
    """
    # Build the (identical) error message once instead of twice.
    error = "Error: parsing '%s'. Correct: \"<number> <number> [<text>]\"" % line
    r = cls._PROG.match(line)
    if not r:
        raise ValueError(error)
    d = r.groupdict()
    if len(d['begin']) == 0 or len(d['end']) == 0:
        raise ValueError(error)
    # <text> is optional in the line format, so the named group may be
    # None when absent -- normalize to '' before stripping to avoid an
    # AttributeError. (Assumes _PROG declares the group optional --
    # TODO confirm against the pattern definition.)
    text = d['text'] or ''
    return AudioClipSpec(d['begin'], d['end'], text.strip())
def function[_parseLine, parameter[cls, line]]: constant[Parsers a single line of text and returns an AudioClipSpec Line format: <number> <number> [<text>] Returns: list(AudioClipSpec) or None ] variable[r] assign[=] call[name[cls]._PROG.match, parameter[name[line]]] if <ast.UnaryOp object at 0x7da18dc06410> begin[:] <ast.Raise object at 0x7da18dc06890> variable[d] assign[=] call[name[r].groupdict, parameter[]] if <ast.BoolOp object at 0x7da18dc06740> begin[:] <ast.Raise object at 0x7da18dc04eb0> return[call[name[AudioClipSpec], parameter[call[name[d]][constant[begin]], call[name[d]][constant[end]], call[call[name[d]][constant[text]].strip, parameter[]]]]]
keyword[def] identifier[_parseLine] ( identifier[cls] , identifier[line] ): literal[string] identifier[r] = identifier[cls] . identifier[_PROG] . identifier[match] ( identifier[line] ) keyword[if] keyword[not] identifier[r] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[line] ) identifier[d] = identifier[r] . identifier[groupdict] () keyword[if] identifier[len] ( identifier[d] [ literal[string] ])== literal[int] keyword[or] identifier[len] ( identifier[d] [ literal[string] ])== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[line] ) keyword[return] identifier[AudioClipSpec] ( identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ]. identifier[strip] ())
def _parseLine(cls, line): """Parsers a single line of text and returns an AudioClipSpec Line format: <number> <number> [<text>] Returns: list(AudioClipSpec) or None """ r = cls._PROG.match(line) if not r: raise ValueError('Error: parsing \'%s\'. Correct: "<number> <number> [<text>]"' % line) # depends on [control=['if'], data=[]] d = r.groupdict() if len(d['begin']) == 0 or len(d['end']) == 0: raise ValueError('Error: parsing \'%s\'. Correct: "<number> <number> [<text>]"' % line) # depends on [control=['if'], data=[]] return AudioClipSpec(d['begin'], d['end'], d['text'].strip())
def create(self, name, plugin_name, plugin_version,
           cluster_template_id=None, default_image_id=None,
           is_transient=None, description=None, cluster_configs=None,
           node_groups=None, user_keypair_id=None, anti_affinity=None,
           net_id=None, count=None, use_autoconfig=None, shares=None,
           is_public=None, is_protected=None):
    """Launch a Cluster."""
    # Only the identifying fields go into the base request body; every
    # optional knob is forwarded positionally to the shared creator.
    body = dict(name=name,
                plugin_name=plugin_name,
                plugin_version=plugin_version)
    return self._do_create(body, cluster_template_id, default_image_id,
                           is_transient, description, cluster_configs,
                           node_groups, user_keypair_id, anti_affinity,
                           net_id, count, use_autoconfig, shares,
                           is_public, is_protected, api_ver=2)
def function[create, parameter[self, name, plugin_name, plugin_version, cluster_template_id, default_image_id, is_transient, description, cluster_configs, node_groups, user_keypair_id, anti_affinity, net_id, count, use_autoconfig, shares, is_public, is_protected]]: constant[Launch a Cluster.] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c62770>, <ast.Constant object at 0x7da1b1c60070>, <ast.Constant object at 0x7da18c4ce9b0>], [<ast.Name object at 0x7da18c4cc850>, <ast.Name object at 0x7da18c4cf3a0>, <ast.Name object at 0x7da18c4cc3a0>]] return[call[name[self]._do_create, parameter[name[data], name[cluster_template_id], name[default_image_id], name[is_transient], name[description], name[cluster_configs], name[node_groups], name[user_keypair_id], name[anti_affinity], name[net_id], name[count], name[use_autoconfig], name[shares], name[is_public], name[is_protected]]]]
keyword[def] identifier[create] ( identifier[self] , identifier[name] , identifier[plugin_name] , identifier[plugin_version] , identifier[cluster_template_id] = keyword[None] , identifier[default_image_id] = keyword[None] , identifier[is_transient] = keyword[None] , identifier[description] = keyword[None] , identifier[cluster_configs] = keyword[None] , identifier[node_groups] = keyword[None] , identifier[user_keypair_id] = keyword[None] , identifier[anti_affinity] = keyword[None] , identifier[net_id] = keyword[None] , identifier[count] = keyword[None] , identifier[use_autoconfig] = keyword[None] , identifier[shares] = keyword[None] , identifier[is_public] = keyword[None] , identifier[is_protected] = keyword[None] ): literal[string] identifier[data] ={ literal[string] : identifier[name] , literal[string] : identifier[plugin_name] , literal[string] : identifier[plugin_version] , } keyword[return] identifier[self] . identifier[_do_create] ( identifier[data] , identifier[cluster_template_id] , identifier[default_image_id] , identifier[is_transient] , identifier[description] , identifier[cluster_configs] , identifier[node_groups] , identifier[user_keypair_id] , identifier[anti_affinity] , identifier[net_id] , identifier[count] , identifier[use_autoconfig] , identifier[shares] , identifier[is_public] , identifier[is_protected] , identifier[api_ver] = literal[int] )
def create(self, name, plugin_name, plugin_version, cluster_template_id=None, default_image_id=None, is_transient=None, description=None, cluster_configs=None, node_groups=None, user_keypair_id=None, anti_affinity=None, net_id=None, count=None, use_autoconfig=None, shares=None, is_public=None, is_protected=None): """Launch a Cluster.""" data = {'name': name, 'plugin_name': plugin_name, 'plugin_version': plugin_version} return self._do_create(data, cluster_template_id, default_image_id, is_transient, description, cluster_configs, node_groups, user_keypair_id, anti_affinity, net_id, count, use_autoconfig, shares, is_public, is_protected, api_ver=2)
def update_inspection(self):
    """Wrapper method that calls the appropriate main updating methods of
    the inspection.

    It is meant to be used inside a loop (like while), so that it can
    continuously update the class attributes from the trace and log files.
    It already implements checks to parse these files only when they change,
    and they ignore entries that have been previously processes.
    """
    self._parse_with_retry(self.log_parser, 'log_retry')
    self._parse_with_retry(self.trace_parser, 'trace_retry')

def _parse_with_retry(self, parser, counter_name):
    """Run *parser*, tolerating up to MAX_RETRIES consecutive failures.

    Failures bump the retry counter stored in the attribute named
    *counter_name*; the original exception is re-raised once the counter
    reaches ``self.MAX_RETRIES``.
    """
    try:
        parser()
    except (FileNotFoundError, StopIteration) as e:
        logger.debug("ERROR: " + str(sys.exc_info()[0]))
        setattr(self, counter_name, getattr(self, counter_name) + 1)
        if getattr(self, counter_name) == self.MAX_RETRIES:
            raise e
def function[update_inspection, parameter[self]]: constant[Wrapper method that calls the appropriate main updating methods of the inspection. It is meant to be used inside a loop (like while), so that it can continuously update the class attributes from the trace and log files. It already implements checks to parse these files only when they change, and they ignore entries that have been previously processes. ] <ast.Try object at 0x7da1b02840a0> <ast.Try object at 0x7da1b0219e70>
keyword[def] identifier[update_inspection] ( identifier[self] ): literal[string] keyword[try] : identifier[self] . identifier[log_parser] () keyword[except] ( identifier[FileNotFoundError] , identifier[StopIteration] ) keyword[as] identifier[e] : identifier[logger] . identifier[debug] ( literal[string] + identifier[str] ( identifier[sys] . identifier[exc_info] ()[ literal[int] ])) identifier[self] . identifier[log_retry] += literal[int] keyword[if] identifier[self] . identifier[log_retry] == identifier[self] . identifier[MAX_RETRIES] : keyword[raise] identifier[e] keyword[try] : identifier[self] . identifier[trace_parser] () keyword[except] ( identifier[FileNotFoundError] , identifier[StopIteration] ) keyword[as] identifier[e] : identifier[logger] . identifier[debug] ( literal[string] + identifier[str] ( identifier[sys] . identifier[exc_info] ()[ literal[int] ])) identifier[self] . identifier[trace_retry] += literal[int] keyword[if] identifier[self] . identifier[trace_retry] == identifier[self] . identifier[MAX_RETRIES] : keyword[raise] identifier[e]
def update_inspection(self): """Wrapper method that calls the appropriate main updating methods of the inspection. It is meant to be used inside a loop (like while), so that it can continuously update the class attributes from the trace and log files. It already implements checks to parse these files only when they change, and they ignore entries that have been previously processes. """ try: self.log_parser() # depends on [control=['try'], data=[]] except (FileNotFoundError, StopIteration) as e: logger.debug('ERROR: ' + str(sys.exc_info()[0])) self.log_retry += 1 if self.log_retry == self.MAX_RETRIES: raise e # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] try: self.trace_parser() # depends on [control=['try'], data=[]] except (FileNotFoundError, StopIteration) as e: logger.debug('ERROR: ' + str(sys.exc_info()[0])) self.trace_retry += 1 if self.trace_retry == self.MAX_RETRIES: raise e # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']]
def wheel(self, load):
    '''
    Send a master control function back to the wheel system

    :param dict load: the request payload; must contain ``fun`` and
        ``kwarg`` plus the eauth credentials consumed by
        ``_prep_auth_info`` / ``check_authentication``
    :returns: ``{'tag': ..., 'data': ...}`` for both successful and
        failed wheel calls, or ``{'error': ...}`` when authentication
        or authorization fails
    '''
    # All wheel ops pass through eauth
    auth_type, err_name, key = self._prep_auth_info(load)

    # Authenticate
    auth_check = self.loadauth.check_authentication(
        load,
        auth_type,
        key=key,
        show_username=True
    )
    error = auth_check.get('error')

    if error:
        # Authentication error occurred: do not continue.
        return {'error': error}

    # Authorize -- token/eauth users are checked against their auth list;
    # the 'user' auth type skips the wheel_check entirely.
    username = auth_check.get('username')
    if auth_type != 'user':
        wheel_check = self.ckminions.wheel_check(
            auth_check.get('auth_list', []),
            load['fun'],
            load['kwarg']
        )
        if not wheel_check:
            return {'error': {'name': err_name,
                              'message': 'Authentication failure of type "{0}" occurred for '
                                         'user {1}.'.format(auth_type, username)}}
        elif isinstance(wheel_check, dict) and 'error' in wheel_check:
            # A dictionary with an error name/message was handled by ckminions.wheel_check
            return wheel_check

    # Authenticated. Do the job.
    jid = salt.utils.jid.gen_jid(self.opts)
    fun = load.pop('fun')
    tag = salt.utils.event.tagify(jid, prefix='wheel')
    data = {'fun': "wheel.{0}".format(fun),
            'jid': jid,
            'tag': tag,
            'user': username}
    try:
        # Announce the new job on the event bus, run it, then publish
        # the return event with the result attached.
        self.event.fire_event(data, salt.utils.event.tagify([jid, 'new'], 'wheel'))
        ret = self.wheel_.call_func(fun, **load)
        data['return'] = ret
        data['success'] = True
        self.event.fire_event(data, salt.utils.event.tagify([jid, 'ret'], 'wheel'))
        return {'tag': tag,
                'data': data}
    except Exception as exc:
        # The wheel call failed: log it and report the exception through
        # the same return-event path instead of raising to the caller.
        log.exception('Exception occurred while introspecting %s', fun)
        data['return'] = 'Exception occurred in wheel {0}: {1}: {2}'.format(
                         fun,
                         exc.__class__.__name__,
                         exc,
        )
        data['success'] = False
        self.event.fire_event(data, salt.utils.event.tagify([jid, 'ret'], 'wheel'))
        return {'tag': tag,
                'data': data}
def static2dplot_timeaveraged(var, time):
    """ If the static_taverage option is set in tplot, and is supplied with a
    time range, then the spectrogram plot(s) for which it is set will have
    another window pop up, where the displayed y and z values are averaged
    by the number of seconds between the specified time range.

    :param var: name of the tplot variable to plot
    :param time: two time strings (%Y-%m-%d %H:%M:%S) bounding the average
    """
    # Grab names of data loaded in as tplot variables.
    names = list(pytplot.data_quants.keys())
    # Get data we'll actually work with here.
    valid_variables = tplot_utilities.get_data(names)

    # Don't plot anything unless we have spectrograms with which to work.
    if valid_variables:
        # Get z label
        labels = tplot_utilities.get_labels_axis_types(names)

        # Put together data in easy-to-access format for plots.
        data = {}
        for name in valid_variables:
            bins = tplot_utilities.get_bins(name)
            time_values, z_values = tplot_utilities.get_z_t_values(name)
            data[name] = [bins, z_values, time_values]

        # Set up the 2D static plot
        pytplot.static_tavg_window = pg.GraphicsWindow()
        pytplot.static_tavg_window.resize(1000, 600)
        pytplot.static_tavg_window.setWindowTitle('Time-Averaged Values Static Window')
        plot = pytplot.static_tavg_window.addPlot(title='2D Static Plot for Time-Averaged Values', row=0, col=0)
        # Make it so that whenever this first starts up, you just have an empty plot
        plot_data = plot.plot([], [])

        if var in valid_variables:
            # Get min/max values of data's time range (in both datetime and
            # seconds since epoch). NOTE(review): time_values here is left
            # over from the *last* iteration of the loop above, not
            # necessarily var's own time axis -- confirm intent.
            t_min = np.nanmin(time_values)
            t_min_str = tplot_utilities.int_to_str(np.nanmin(time_values))
            t_min_conv_back = tplot_utilities.str_to_int(t_min_str)
            t_max = np.nanmax(time_values)
            t_max_str = tplot_utilities.int_to_str(np.nanmax(time_values))
            t_max_conv_back = tplot_utilities.str_to_int(t_max_str)

            # Convert user input to seconds since epoch
            user_time = [tplot_utilities.str_to_int(i) for i in time]

            # Covering situation where user entered a time not in the dataset!
            # As long as they used a time in the dataset, this will not trigger.
            # Re-prompts interactively (blocking input()) until a valid time
            # inside [t_min, t_max] is given.
            for t, datetime in enumerate(user_time):
                if datetime not in range(t_min_conv_back, t_max_conv_back+1):
                    while True:
                        try:
                            if t == 0:
                                time_bound = 'left bound'
                            else:
                                time_bound = 'right bound'
                            user_time[t] = tplot_utilities.str_to_int(input(
                                'Chosen {} time [{}] not in range of data [{} to {}]. Input new time (%Y-%m-%d %H:%M:%S).'.format(
                                    time_bound, tplot_utilities.int_to_str(datetime), t_min_str, t_max_str)))
                        except ValueError:
                            continue
                        else:
                            if user_time[t] not in range(int(t_min), int(t_max)):
                                continue
                            else:
                                break

            # Get index of the time closest to the user's time choice
            time_array = np.array(data[var][2])
            array = np.asarray(time_array)
            idx = [(np.abs(array - i)).argmin() for i in user_time]

            # Average values based on the chosen time range's indices.
            # NOTE(review): if both bounds resolve to the same index,
            # time_diff is 0 and the division below divides by zero.
            time_diff = abs(idx[0]-idx[1])
            # Make sure to account for edge problem
            if idx[1] != -1:
                y_values_slice = data[name][1][idx[0]:idx[1]+1]
            else:
                y_values_slice = data[name][1][idx[0]:]
            # NOTE(review): uses data[name] (last loop variable) rather than
            # data[var]; also np.float is deprecated/removed in modern NumPy
            # -- builtin float would be the drop-in replacement.
            y_values_avgd = np.nansum(y_values_slice, axis=0)/np.float(time_diff)

            # If user indicated they wanted the interactive plot's axes to be
            # logged, log 'em. But first make sure that values in x and y are
            # loggable!
            x_axis = False
            y_axis = False
            # Checking x axis
            if np.nanmin(data[name][0][:]) < 0:
                print('Negative data is incompatible with log plotting.')
            elif np.nanmin(data[name][0][:]) >= 0 and labels[name][2] == 'log':
                x_axis = True
            # Checking y axis
            if np.nanmin(list(data[name][1][idx[0]])) < 0 or np.nanmin(list(data[name][1][idx[1]])) < 0:
                print('Negative data is incompatible with log plotting')
            elif np.nanmin(list(data[name][1][idx[0]])) >= 0 and np.nanmin(list(data[name][1][idx[1]])) >= 0 and \
                    labels[name][3] == 'log':
                y_axis = True

            # Set plot labels
            plot.setLabel('bottom', '{}'.format(labels[name][0]))
            plot.setLabel('left', '{}'.format(labels[name][1]))
            plot.setLogMode(x=x_axis, y=y_axis)
            # Update x and y range if user modified it
            tplot_utilities.set_x_range(name, x_axis, plot)
            tplot_utilities.set_y_range(name, y_axis, plot)
            # Plot data based on time we're hovering over
            plot_data.setData(data[var][0][:], y_values_avgd)
def function[static2dplot_timeaveraged, parameter[var, time]]: constant[ If the static_taverage option is set in tplot, and is supplied with a time range, then the spectrogram plot(s) for which it is set will have another window pop up, where the displayed y and z values are averaged by the number of seconds between the specified time range. ] variable[names] assign[=] call[name[list], parameter[call[name[pytplot].data_quants.keys, parameter[]]]] variable[valid_variables] assign[=] call[name[tplot_utilities].get_data, parameter[name[names]]] if name[valid_variables] begin[:] variable[labels] assign[=] call[name[tplot_utilities].get_labels_axis_types, parameter[name[names]]] variable[data] assign[=] dictionary[[], []] for taget[name[name]] in starred[name[valid_variables]] begin[:] variable[bins] assign[=] call[name[tplot_utilities].get_bins, parameter[name[name]]] <ast.Tuple object at 0x7da1b06528c0> assign[=] call[name[tplot_utilities].get_z_t_values, parameter[name[name]]] call[name[data]][name[name]] assign[=] list[[<ast.Name object at 0x7da1b0653940>, <ast.Name object at 0x7da1b0651e10>, <ast.Name object at 0x7da1b0653670>]] name[pytplot].static_tavg_window assign[=] call[name[pg].GraphicsWindow, parameter[]] call[name[pytplot].static_tavg_window.resize, parameter[constant[1000], constant[600]]] call[name[pytplot].static_tavg_window.setWindowTitle, parameter[constant[Time-Averaged Values Static Window]]] variable[plot] assign[=] call[name[pytplot].static_tavg_window.addPlot, parameter[]] variable[plot_data] assign[=] call[name[plot].plot, parameter[list[[]], list[[]]]] if compare[name[var] in name[valid_variables]] begin[:] variable[t_min] assign[=] call[name[np].nanmin, parameter[name[time_values]]] variable[t_min_str] assign[=] call[name[tplot_utilities].int_to_str, parameter[call[name[np].nanmin, parameter[name[time_values]]]]] variable[t_min_conv_back] assign[=] call[name[tplot_utilities].str_to_int, parameter[name[t_min_str]]] variable[t_max] assign[=] 
call[name[np].nanmax, parameter[name[time_values]]] variable[t_max_str] assign[=] call[name[tplot_utilities].int_to_str, parameter[call[name[np].nanmax, parameter[name[time_values]]]]] variable[t_max_conv_back] assign[=] call[name[tplot_utilities].str_to_int, parameter[name[t_max_str]]] variable[user_time] assign[=] <ast.ListComp object at 0x7da18bccb070> for taget[tuple[[<ast.Name object at 0x7da18bccafe0>, <ast.Name object at 0x7da18bcca740>]]] in starred[call[name[enumerate], parameter[name[user_time]]]] begin[:] if compare[name[datetime] <ast.NotIn object at 0x7da2590d7190> call[name[range], parameter[name[t_min_conv_back], binary_operation[name[t_max_conv_back] + constant[1]]]]] begin[:] while constant[True] begin[:] <ast.Try object at 0x7da18bcc8100> variable[time_array] assign[=] call[name[np].array, parameter[call[call[name[data]][name[var]]][constant[2]]]] variable[array] assign[=] call[name[np].asarray, parameter[name[time_array]]] variable[idx] assign[=] <ast.ListComp object at 0x7da1b05060b0> variable[time_diff] assign[=] call[name[abs], parameter[binary_operation[call[name[idx]][constant[0]] - call[name[idx]][constant[1]]]]] if compare[call[name[idx]][constant[1]] not_equal[!=] <ast.UnaryOp object at 0x7da1b0505150>] begin[:] variable[y_values_slice] assign[=] call[call[call[name[data]][name[name]]][constant[1]]][<ast.Slice object at 0x7da1b0504eb0>] variable[y_values_avgd] assign[=] binary_operation[call[name[np].nansum, parameter[name[y_values_slice]]] / call[name[np].float, parameter[name[time_diff]]]] variable[x_axis] assign[=] constant[False] variable[y_axis] assign[=] constant[False] if compare[call[name[np].nanmin, parameter[call[call[call[name[data]][name[name]]][constant[0]]][<ast.Slice object at 0x7da1b0507430>]]] less[<] constant[0]] begin[:] call[name[print], parameter[constant[Negative data is incompatible with log plotting.]]] if <ast.BoolOp object at 0x7da1b06ff250> begin[:] call[name[print], parameter[constant[Negative data is 
incompatible with log plotting]]] call[name[plot].setLabel, parameter[constant[bottom], call[constant[{}].format, parameter[call[call[name[labels]][name[name]]][constant[0]]]]]] call[name[plot].setLabel, parameter[constant[left], call[constant[{}].format, parameter[call[call[name[labels]][name[name]]][constant[1]]]]]] call[name[plot].setLogMode, parameter[]] call[name[tplot_utilities].set_x_range, parameter[name[name], name[x_axis], name[plot]]] call[name[tplot_utilities].set_y_range, parameter[name[name], name[y_axis], name[plot]]] call[name[plot_data].setData, parameter[call[call[call[name[data]][name[var]]][constant[0]]][<ast.Slice object at 0x7da1b06907c0>], name[y_values_avgd]]]
keyword[def] identifier[static2dplot_timeaveraged] ( identifier[var] , identifier[time] ): literal[string] identifier[names] = identifier[list] ( identifier[pytplot] . identifier[data_quants] . identifier[keys] ()) identifier[valid_variables] = identifier[tplot_utilities] . identifier[get_data] ( identifier[names] ) keyword[if] identifier[valid_variables] : identifier[labels] = identifier[tplot_utilities] . identifier[get_labels_axis_types] ( identifier[names] ) identifier[data] ={} keyword[for] identifier[name] keyword[in] identifier[valid_variables] : identifier[bins] = identifier[tplot_utilities] . identifier[get_bins] ( identifier[name] ) identifier[time_values] , identifier[z_values] = identifier[tplot_utilities] . identifier[get_z_t_values] ( identifier[name] ) identifier[data] [ identifier[name] ]=[ identifier[bins] , identifier[z_values] , identifier[time_values] ] identifier[pytplot] . identifier[static_tavg_window] = identifier[pg] . identifier[GraphicsWindow] () identifier[pytplot] . identifier[static_tavg_window] . identifier[resize] ( literal[int] , literal[int] ) identifier[pytplot] . identifier[static_tavg_window] . identifier[setWindowTitle] ( literal[string] ) identifier[plot] = identifier[pytplot] . identifier[static_tavg_window] . identifier[addPlot] ( identifier[title] = literal[string] , identifier[row] = literal[int] , identifier[col] = literal[int] ) identifier[plot_data] = identifier[plot] . identifier[plot] ([],[]) keyword[if] identifier[var] keyword[in] identifier[valid_variables] : identifier[t_min] = identifier[np] . identifier[nanmin] ( identifier[time_values] ) identifier[t_min_str] = identifier[tplot_utilities] . identifier[int_to_str] ( identifier[np] . identifier[nanmin] ( identifier[time_values] )) identifier[t_min_conv_back] = identifier[tplot_utilities] . identifier[str_to_int] ( identifier[t_min_str] ) identifier[t_max] = identifier[np] . 
identifier[nanmax] ( identifier[time_values] ) identifier[t_max_str] = identifier[tplot_utilities] . identifier[int_to_str] ( identifier[np] . identifier[nanmax] ( identifier[time_values] )) identifier[t_max_conv_back] = identifier[tplot_utilities] . identifier[str_to_int] ( identifier[t_max_str] ) identifier[user_time] =[ identifier[tplot_utilities] . identifier[str_to_int] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[time] ] keyword[for] identifier[t] , identifier[datetime] keyword[in] identifier[enumerate] ( identifier[user_time] ): keyword[if] identifier[datetime] keyword[not] keyword[in] identifier[range] ( identifier[t_min_conv_back] , identifier[t_max_conv_back] + literal[int] ): keyword[while] keyword[True] : keyword[try] : keyword[if] identifier[t] == literal[int] : identifier[time_bound] = literal[string] keyword[else] : identifier[time_bound] = literal[string] identifier[user_time] [ identifier[t] ]= identifier[tplot_utilities] . identifier[str_to_int] ( identifier[input] ( literal[string] . identifier[format] ( identifier[time_bound] , identifier[tplot_utilities] . identifier[int_to_str] ( identifier[datetime] ), identifier[t_min_str] , identifier[t_max_str] ))) keyword[except] identifier[ValueError] : keyword[continue] keyword[else] : keyword[if] identifier[user_time] [ identifier[t] ] keyword[not] keyword[in] identifier[range] ( identifier[int] ( identifier[t_min] ), identifier[int] ( identifier[t_max] )): keyword[continue] keyword[else] : keyword[break] identifier[time_array] = identifier[np] . identifier[array] ( identifier[data] [ identifier[var] ][ literal[int] ]) identifier[array] = identifier[np] . identifier[asarray] ( identifier[time_array] ) identifier[idx] =[( identifier[np] . identifier[abs] ( identifier[array] - identifier[i] )). 
identifier[argmin] () keyword[for] identifier[i] keyword[in] identifier[user_time] ] identifier[time_diff] = identifier[abs] ( identifier[idx] [ literal[int] ]- identifier[idx] [ literal[int] ]) keyword[if] identifier[idx] [ literal[int] ]!=- literal[int] : identifier[y_values_slice] = identifier[data] [ identifier[name] ][ literal[int] ][ identifier[idx] [ literal[int] ]: identifier[idx] [ literal[int] ]+ literal[int] ] keyword[else] : identifier[y_values_slice] = identifier[data] [ identifier[name] ][ literal[int] ][ identifier[idx] [ literal[int] ]:] identifier[y_values_avgd] = identifier[np] . identifier[nansum] ( identifier[y_values_slice] , identifier[axis] = literal[int] )/ identifier[np] . identifier[float] ( identifier[time_diff] ) identifier[x_axis] = keyword[False] identifier[y_axis] = keyword[False] keyword[if] identifier[np] . identifier[nanmin] ( identifier[data] [ identifier[name] ][ literal[int] ][:])< literal[int] : identifier[print] ( literal[string] ) keyword[elif] identifier[np] . identifier[nanmin] ( identifier[data] [ identifier[name] ][ literal[int] ][:])>= literal[int] keyword[and] identifier[labels] [ identifier[name] ][ literal[int] ]== literal[string] : identifier[x_axis] = keyword[True] keyword[if] identifier[np] . identifier[nanmin] ( identifier[list] ( identifier[data] [ identifier[name] ][ literal[int] ][ identifier[idx] [ literal[int] ]]))< literal[int] keyword[or] identifier[np] . identifier[nanmin] ( identifier[list] ( identifier[data] [ identifier[name] ][ literal[int] ][ identifier[idx] [ literal[int] ]]))< literal[int] : identifier[print] ( literal[string] ) keyword[elif] identifier[np] . identifier[nanmin] ( identifier[list] ( identifier[data] [ identifier[name] ][ literal[int] ][ identifier[idx] [ literal[int] ]]))>= literal[int] keyword[and] identifier[np] . 
identifier[nanmin] ( identifier[list] ( identifier[data] [ identifier[name] ][ literal[int] ][ identifier[idx] [ literal[int] ]]))>= literal[int] keyword[and] identifier[labels] [ identifier[name] ][ literal[int] ]== literal[string] : identifier[y_axis] = keyword[True] identifier[plot] . identifier[setLabel] ( literal[string] , literal[string] . identifier[format] ( identifier[labels] [ identifier[name] ][ literal[int] ])) identifier[plot] . identifier[setLabel] ( literal[string] , literal[string] . identifier[format] ( identifier[labels] [ identifier[name] ][ literal[int] ])) identifier[plot] . identifier[setLogMode] ( identifier[x] = identifier[x_axis] , identifier[y] = identifier[y_axis] ) identifier[tplot_utilities] . identifier[set_x_range] ( identifier[name] , identifier[x_axis] , identifier[plot] ) identifier[tplot_utilities] . identifier[set_y_range] ( identifier[name] , identifier[y_axis] , identifier[plot] ) identifier[plot_data] . identifier[setData] ( identifier[data] [ identifier[var] ][ literal[int] ][:], identifier[y_values_avgd] )
def static2dplot_timeaveraged(var, time): """ If the static_taverage option is set in tplot, and is supplied with a time range, then the spectrogram plot(s) for which it is set will have another window pop up, where the displayed y and z values are averaged by the number of seconds between the specified time range. """ # Grab names of data loaded in as tplot variables. names = list(pytplot.data_quants.keys()) # Get data we'll actually work with here. valid_variables = tplot_utilities.get_data(names) # Don't plot anything unless we have spectrograms with which to work. if valid_variables: # Get z label labels = tplot_utilities.get_labels_axis_types(names) # Put together data in easy-to-access format for plots. data = {} for name in valid_variables: bins = tplot_utilities.get_bins(name) (time_values, z_values) = tplot_utilities.get_z_t_values(name) data[name] = [bins, z_values, time_values] # depends on [control=['for'], data=['name']] # Set up the 2D static plot pytplot.static_tavg_window = pg.GraphicsWindow() pytplot.static_tavg_window.resize(1000, 600) pytplot.static_tavg_window.setWindowTitle('Time-Averaged Values Static Window') plot = pytplot.static_tavg_window.addPlot(title='2D Static Plot for Time-Averaged Values', row=0, col=0) # Make it so that whenever this first starts up, you just have an empty plot plot_data = plot.plot([], []) if var in valid_variables: # Get min/max values of data's time range (in both datetime and seconds since epoch) t_min = np.nanmin(time_values) t_min_str = tplot_utilities.int_to_str(np.nanmin(time_values)) t_min_conv_back = tplot_utilities.str_to_int(t_min_str) t_max = np.nanmax(time_values) t_max_str = tplot_utilities.int_to_str(np.nanmax(time_values)) t_max_conv_back = tplot_utilities.str_to_int(t_max_str) # Convert user input to seconds since epoch user_time = [tplot_utilities.str_to_int(i) for i in time] # Covering situation where user entered a time not in the dataset! 
# As long as they used a time in the dataset, this will not trigger. for (t, datetime) in enumerate(user_time): if datetime not in range(t_min_conv_back, t_max_conv_back + 1): while True: try: if t == 0: time_bound = 'left bound' # depends on [control=['if'], data=[]] else: time_bound = 'right bound' user_time[t] = tplot_utilities.str_to_int(input('Chosen {} time [{}] not in range of data [{} to {}]. Input new time (%Y-%m-%d %H:%M:%S).'.format(time_bound, tplot_utilities.int_to_str(datetime), t_min_str, t_max_str))) # depends on [control=['try'], data=[]] except ValueError: continue # depends on [control=['except'], data=[]] else: if user_time[t] not in range(int(t_min), int(t_max)): continue # depends on [control=['if'], data=[]] else: break # depends on [control=['while'], data=[]] # depends on [control=['if'], data=['datetime']] # depends on [control=['for'], data=[]] # Get index of the time closest to the user's time choice time_array = np.array(data[var][2]) array = np.asarray(time_array) idx = [np.abs(array - i).argmin() for i in user_time] # Average values based on the chosen time range's indices time_diff = abs(idx[0] - idx[1]) # Make sure to account for edge problem if idx[1] != -1: y_values_slice = data[name][1][idx[0]:idx[1] + 1] # depends on [control=['if'], data=[]] else: y_values_slice = data[name][1][idx[0]:] y_values_avgd = np.nansum(y_values_slice, axis=0) / np.float(time_diff) # If user indicated they wanted the interactive plot's axes to be logged, log 'em. # But first make sure that values in x and y are loggable! 
x_axis = False y_axis = False # Checking x axis if np.nanmin(data[name][0][:]) < 0: print('Negative data is incompatible with log plotting.') # depends on [control=['if'], data=[]] elif np.nanmin(data[name][0][:]) >= 0 and labels[name][2] == 'log': x_axis = True # depends on [control=['if'], data=[]] # Checking y axis if np.nanmin(list(data[name][1][idx[0]])) < 0 or np.nanmin(list(data[name][1][idx[1]])) < 0: print('Negative data is incompatible with log plotting') # depends on [control=['if'], data=[]] elif np.nanmin(list(data[name][1][idx[0]])) >= 0 and np.nanmin(list(data[name][1][idx[1]])) >= 0 and (labels[name][3] == 'log'): y_axis = True # depends on [control=['if'], data=[]] # Set plot labels plot.setLabel('bottom', '{}'.format(labels[name][0])) plot.setLabel('left', '{}'.format(labels[name][1])) plot.setLogMode(x=x_axis, y=y_axis) # Update x and y range if user modified it tplot_utilities.set_x_range(name, x_axis, plot) tplot_utilities.set_y_range(name, y_axis, plot) # Plot data based on time we're hovering over plot_data.setData(data[var][0][:], y_values_avgd) # depends on [control=['if'], data=['var']] # depends on [control=['if'], data=[]]
def _sanitize_url_components(comp_list, field): ''' Recursive function to sanitize each component of the url. ''' if not comp_list: return '' elif comp_list[0].startswith('{0}='.format(field)): ret = '{0}=XXXXXXXXXX&'.format(field) comp_list.remove(comp_list[0]) return ret + _sanitize_url_components(comp_list, field) else: ret = '{0}&'.format(comp_list[0]) comp_list.remove(comp_list[0]) return ret + _sanitize_url_components(comp_list, field)
def function[_sanitize_url_components, parameter[comp_list, field]]: constant[ Recursive function to sanitize each component of the url. ] if <ast.UnaryOp object at 0x7da18f00c880> begin[:] return[constant[]]
keyword[def] identifier[_sanitize_url_components] ( identifier[comp_list] , identifier[field] ): literal[string] keyword[if] keyword[not] identifier[comp_list] : keyword[return] literal[string] keyword[elif] identifier[comp_list] [ literal[int] ]. identifier[startswith] ( literal[string] . identifier[format] ( identifier[field] )): identifier[ret] = literal[string] . identifier[format] ( identifier[field] ) identifier[comp_list] . identifier[remove] ( identifier[comp_list] [ literal[int] ]) keyword[return] identifier[ret] + identifier[_sanitize_url_components] ( identifier[comp_list] , identifier[field] ) keyword[else] : identifier[ret] = literal[string] . identifier[format] ( identifier[comp_list] [ literal[int] ]) identifier[comp_list] . identifier[remove] ( identifier[comp_list] [ literal[int] ]) keyword[return] identifier[ret] + identifier[_sanitize_url_components] ( identifier[comp_list] , identifier[field] )
def _sanitize_url_components(comp_list, field): """ Recursive function to sanitize each component of the url. """ if not comp_list: return '' # depends on [control=['if'], data=[]] elif comp_list[0].startswith('{0}='.format(field)): ret = '{0}=XXXXXXXXXX&'.format(field) comp_list.remove(comp_list[0]) return ret + _sanitize_url_components(comp_list, field) # depends on [control=['if'], data=[]] else: ret = '{0}&'.format(comp_list[0]) comp_list.remove(comp_list[0]) return ret + _sanitize_url_components(comp_list, field)
def add_sources_from_roi(self, names, roi, free=False, **kwargs): """Add multiple sources to the current ROI model copied from another ROI model. Parameters ---------- names : list List of str source names to add. roi : `~fermipy.roi_model.ROIModel` object The roi model from which to add sources. free : bool Initialize the source with a free normalization paramter. """ for name in names: self.add_source(name, roi[name].data, free=free, **kwargs)
def function[add_sources_from_roi, parameter[self, names, roi, free]]: constant[Add multiple sources to the current ROI model copied from another ROI model. Parameters ---------- names : list List of str source names to add. roi : `~fermipy.roi_model.ROIModel` object The roi model from which to add sources. free : bool Initialize the source with a free normalization paramter. ] for taget[name[name]] in starred[name[names]] begin[:] call[name[self].add_source, parameter[name[name], call[name[roi]][name[name]].data]]
keyword[def] identifier[add_sources_from_roi] ( identifier[self] , identifier[names] , identifier[roi] , identifier[free] = keyword[False] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[name] keyword[in] identifier[names] : identifier[self] . identifier[add_source] ( identifier[name] , identifier[roi] [ identifier[name] ]. identifier[data] , identifier[free] = identifier[free] ,** identifier[kwargs] )
def add_sources_from_roi(self, names, roi, free=False, **kwargs): """Add multiple sources to the current ROI model copied from another ROI model. Parameters ---------- names : list List of str source names to add. roi : `~fermipy.roi_model.ROIModel` object The roi model from which to add sources. free : bool Initialize the source with a free normalization paramter. """ for name in names: self.add_source(name, roi[name].data, free=free, **kwargs) # depends on [control=['for'], data=['name']]
def deploy(target): """Deploys the package and documentation. Proceeds in the following steps: 1. Ensures proper environment variables are set and checks that we are on Circle CI 2. Tags the repository with the new version 3. Creates a standard distribution and a wheel 4. Updates version.py to have the proper version 5. Commits the ChangeLog, AUTHORS, and version.py file 6. Pushes to PyPI 7. Pushes the tags and newly committed files Raises: `EnvironmentError`: - Not running on CircleCI - `*_PYPI_USERNAME` and/or `*_PYPI_PASSWORD` environment variables are missing - Attempting to deploy to production from a branch that isn't master """ # Ensure proper environment if not os.getenv(CIRCLECI_ENV_VAR): # pragma: no cover raise EnvironmentError('Must be on CircleCI to run this script') current_branch = os.getenv('CIRCLE_BRANCH') if (target == 'PROD') and (current_branch != 'master'): raise EnvironmentError(( 'Refusing to deploy to production from branch {current_branch!r}. ' 'Production deploys can only be made from master.' ).format(current_branch=current_branch)) if target in ('PROD', 'TEST'): pypi_username = os.getenv('{target}_PYPI_USERNAME'.format(target=target)) pypi_password = os.getenv('{target}_PYPI_PASSWORD'.format(target=target)) else: raise ValueError( "Deploy target must be 'PROD' or 'TEST', got {target!r}.".format(target=target)) if not (pypi_username and pypi_password): # pragma: no cover raise EnvironmentError(( "Missing '{target}_PYPI_USERNAME' and/or '{target}_PYPI_PASSWORD' " "environment variables. These are required to push to PyPI." ).format(target=target)) # Twine requires these environment variables to be set. Subprocesses will # inherit these when we invoke them, so no need to pass them on the command # line. We want to avoid that in case something's logging each command run. 
os.environ['TWINE_USERNAME'] = pypi_username os.environ['TWINE_PASSWORD'] = pypi_password # Set up git on circle to push to the current branch _shell('git config --global user.email "oss@cloverhealth.com"') _shell('git config --global user.name "Circle CI"') _shell('git config push.default current') # Obtain the version to deploy ret = _shell('make version', stdout=subprocess.PIPE) version = ret.stdout.decode('utf-8').strip() print('Deploying version {version!r}...'.format(version=version)) # Tag the version _shell('git tag -f -a {version} -m "Version {version}"'.format(version=version)) # Update the version _shell( 'sed -i.bak "s/^__version__ = .*/__version__ = {version!r}/" */version.py'.format( version=version)) # Create a standard distribution and a wheel _shell('python setup.py sdist bdist_wheel') # Add the updated ChangeLog and AUTHORS _shell('git add ChangeLog AUTHORS */version.py') # Start the commit message with "Merge" so that PBR will ignore it in the # ChangeLog. Use [skip ci] to ensure CircleCI doesn't recursively deploy. _shell('git commit --no-verify -m "Merge autogenerated files [skip ci]"') # Push the distributions to PyPI. _pypi_push('dist') # Push the tag and AUTHORS / ChangeLog after successful PyPI deploy _shell('git push --follow-tags') print('Deployment complete. Latest version is {version}.'.format(version=version))
def function[deploy, parameter[target]]: constant[Deploys the package and documentation. Proceeds in the following steps: 1. Ensures proper environment variables are set and checks that we are on Circle CI 2. Tags the repository with the new version 3. Creates a standard distribution and a wheel 4. Updates version.py to have the proper version 5. Commits the ChangeLog, AUTHORS, and version.py file 6. Pushes to PyPI 7. Pushes the tags and newly committed files Raises: `EnvironmentError`: - Not running on CircleCI - `*_PYPI_USERNAME` and/or `*_PYPI_PASSWORD` environment variables are missing - Attempting to deploy to production from a branch that isn't master ] if <ast.UnaryOp object at 0x7da204962920> begin[:] <ast.Raise object at 0x7da204961de0> variable[current_branch] assign[=] call[name[os].getenv, parameter[constant[CIRCLE_BRANCH]]] if <ast.BoolOp object at 0x7da1b26acee0> begin[:] <ast.Raise object at 0x7da1b26af580> if compare[name[target] in tuple[[<ast.Constant object at 0x7da1b26af850>, <ast.Constant object at 0x7da1b26ac910>]]] begin[:] variable[pypi_username] assign[=] call[name[os].getenv, parameter[call[constant[{target}_PYPI_USERNAME].format, parameter[]]]] variable[pypi_password] assign[=] call[name[os].getenv, parameter[call[constant[{target}_PYPI_PASSWORD].format, parameter[]]]] if <ast.UnaryOp object at 0x7da1b26af5e0> begin[:] <ast.Raise object at 0x7da1b26ad960> call[name[os].environ][constant[TWINE_USERNAME]] assign[=] name[pypi_username] call[name[os].environ][constant[TWINE_PASSWORD]] assign[=] name[pypi_password] call[name[_shell], parameter[constant[git config --global user.email "oss@cloverhealth.com"]]] call[name[_shell], parameter[constant[git config --global user.name "Circle CI"]]] call[name[_shell], parameter[constant[git config push.default current]]] variable[ret] assign[=] call[name[_shell], parameter[constant[make version]]] variable[version] assign[=] call[call[name[ret].stdout.decode, parameter[constant[utf-8]]].strip, 
parameter[]] call[name[print], parameter[call[constant[Deploying version {version!r}...].format, parameter[]]]] call[name[_shell], parameter[call[constant[git tag -f -a {version} -m "Version {version}"].format, parameter[]]]] call[name[_shell], parameter[call[constant[sed -i.bak "s/^__version__ = .*/__version__ = {version!r}/" */version.py].format, parameter[]]]] call[name[_shell], parameter[constant[python setup.py sdist bdist_wheel]]] call[name[_shell], parameter[constant[git add ChangeLog AUTHORS */version.py]]] call[name[_shell], parameter[constant[git commit --no-verify -m "Merge autogenerated files [skip ci]"]]] call[name[_pypi_push], parameter[constant[dist]]] call[name[_shell], parameter[constant[git push --follow-tags]]] call[name[print], parameter[call[constant[Deployment complete. Latest version is {version}.].format, parameter[]]]]
keyword[def] identifier[deploy] ( identifier[target] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[getenv] ( identifier[CIRCLECI_ENV_VAR] ): keyword[raise] identifier[EnvironmentError] ( literal[string] ) identifier[current_branch] = identifier[os] . identifier[getenv] ( literal[string] ) keyword[if] ( identifier[target] == literal[string] ) keyword[and] ( identifier[current_branch] != literal[string] ): keyword[raise] identifier[EnvironmentError] (( literal[string] literal[string] ). identifier[format] ( identifier[current_branch] = identifier[current_branch] )) keyword[if] identifier[target] keyword[in] ( literal[string] , literal[string] ): identifier[pypi_username] = identifier[os] . identifier[getenv] ( literal[string] . identifier[format] ( identifier[target] = identifier[target] )) identifier[pypi_password] = identifier[os] . identifier[getenv] ( literal[string] . identifier[format] ( identifier[target] = identifier[target] )) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[target] = identifier[target] )) keyword[if] keyword[not] ( identifier[pypi_username] keyword[and] identifier[pypi_password] ): keyword[raise] identifier[EnvironmentError] (( literal[string] literal[string] ). identifier[format] ( identifier[target] = identifier[target] )) identifier[os] . identifier[environ] [ literal[string] ]= identifier[pypi_username] identifier[os] . identifier[environ] [ literal[string] ]= identifier[pypi_password] identifier[_shell] ( literal[string] ) identifier[_shell] ( literal[string] ) identifier[_shell] ( literal[string] ) identifier[ret] = identifier[_shell] ( literal[string] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] ) identifier[version] = identifier[ret] . identifier[stdout] . identifier[decode] ( literal[string] ). identifier[strip] () identifier[print] ( literal[string] . 
identifier[format] ( identifier[version] = identifier[version] )) identifier[_shell] ( literal[string] . identifier[format] ( identifier[version] = identifier[version] )) identifier[_shell] ( literal[string] . identifier[format] ( identifier[version] = identifier[version] )) identifier[_shell] ( literal[string] ) identifier[_shell] ( literal[string] ) identifier[_shell] ( literal[string] ) identifier[_pypi_push] ( literal[string] ) identifier[_shell] ( literal[string] ) identifier[print] ( literal[string] . identifier[format] ( identifier[version] = identifier[version] ))
def deploy(target): """Deploys the package and documentation. Proceeds in the following steps: 1. Ensures proper environment variables are set and checks that we are on Circle CI 2. Tags the repository with the new version 3. Creates a standard distribution and a wheel 4. Updates version.py to have the proper version 5. Commits the ChangeLog, AUTHORS, and version.py file 6. Pushes to PyPI 7. Pushes the tags and newly committed files Raises: `EnvironmentError`: - Not running on CircleCI - `*_PYPI_USERNAME` and/or `*_PYPI_PASSWORD` environment variables are missing - Attempting to deploy to production from a branch that isn't master """ # Ensure proper environment if not os.getenv(CIRCLECI_ENV_VAR): # pragma: no cover raise EnvironmentError('Must be on CircleCI to run this script') # depends on [control=['if'], data=[]] current_branch = os.getenv('CIRCLE_BRANCH') if target == 'PROD' and current_branch != 'master': raise EnvironmentError('Refusing to deploy to production from branch {current_branch!r}. Production deploys can only be made from master.'.format(current_branch=current_branch)) # depends on [control=['if'], data=[]] if target in ('PROD', 'TEST'): pypi_username = os.getenv('{target}_PYPI_USERNAME'.format(target=target)) pypi_password = os.getenv('{target}_PYPI_PASSWORD'.format(target=target)) # depends on [control=['if'], data=['target']] else: raise ValueError("Deploy target must be 'PROD' or 'TEST', got {target!r}.".format(target=target)) if not (pypi_username and pypi_password): # pragma: no cover raise EnvironmentError("Missing '{target}_PYPI_USERNAME' and/or '{target}_PYPI_PASSWORD' environment variables. These are required to push to PyPI.".format(target=target)) # depends on [control=['if'], data=[]] # Twine requires these environment variables to be set. Subprocesses will # inherit these when we invoke them, so no need to pass them on the command # line. We want to avoid that in case something's logging each command run. 
os.environ['TWINE_USERNAME'] = pypi_username os.environ['TWINE_PASSWORD'] = pypi_password # Set up git on circle to push to the current branch _shell('git config --global user.email "oss@cloverhealth.com"') _shell('git config --global user.name "Circle CI"') _shell('git config push.default current') # Obtain the version to deploy ret = _shell('make version', stdout=subprocess.PIPE) version = ret.stdout.decode('utf-8').strip() print('Deploying version {version!r}...'.format(version=version)) # Tag the version _shell('git tag -f -a {version} -m "Version {version}"'.format(version=version)) # Update the version _shell('sed -i.bak "s/^__version__ = .*/__version__ = {version!r}/" */version.py'.format(version=version)) # Create a standard distribution and a wheel _shell('python setup.py sdist bdist_wheel') # Add the updated ChangeLog and AUTHORS _shell('git add ChangeLog AUTHORS */version.py') # Start the commit message with "Merge" so that PBR will ignore it in the # ChangeLog. Use [skip ci] to ensure CircleCI doesn't recursively deploy. _shell('git commit --no-verify -m "Merge autogenerated files [skip ci]"') # Push the distributions to PyPI. _pypi_push('dist') # Push the tag and AUTHORS / ChangeLog after successful PyPI deploy _shell('git push --follow-tags') print('Deployment complete. Latest version is {version}.'.format(version=version))
def register_helper(self, func): """ A helper is a task that is not directly exposed to the command line :param func: registers func as a helper :return: invalid accessor """ self._helper_names.add(func.__name__) return self.register(func)
def function[register_helper, parameter[self, func]]: constant[ A helper is a task that is not directly exposed to the command line :param func: registers func as a helper :return: invalid accessor ] call[name[self]._helper_names.add, parameter[name[func].__name__]] return[call[name[self].register, parameter[name[func]]]]
keyword[def] identifier[register_helper] ( identifier[self] , identifier[func] ): literal[string] identifier[self] . identifier[_helper_names] . identifier[add] ( identifier[func] . identifier[__name__] ) keyword[return] identifier[self] . identifier[register] ( identifier[func] )
def register_helper(self, func): """ A helper is a task that is not directly exposed to the command line :param func: registers func as a helper :return: invalid accessor """ self._helper_names.add(func.__name__) return self.register(func)
def update_cursor_position(self, line, index):
    """Update cursor position."""
    # Convert 0-based editor coordinates to 1-based display values.
    row, col = line + 1, index + 1
    self.set_value('Line {}, Col {}'.format(row, col))
def function[update_cursor_position, parameter[self, line, index]]: constant[Update cursor position.] variable[value] assign[=] call[constant[Line {}, Col {}].format, parameter[binary_operation[name[line] + constant[1]], binary_operation[name[index] + constant[1]]]] call[name[self].set_value, parameter[name[value]]]
keyword[def] identifier[update_cursor_position] ( identifier[self] , identifier[line] , identifier[index] ): literal[string] identifier[value] = literal[string] . identifier[format] ( identifier[line] + literal[int] , identifier[index] + literal[int] ) identifier[self] . identifier[set_value] ( identifier[value] )
def update_cursor_position(self, line, index): """Update cursor position.""" value = 'Line {}, Col {}'.format(line + 1, index + 1) self.set_value(value)
def save_load(jid, load, minions=None):
    '''
    Save the load to the specified jid

    The load dict is serialized to JSON and written to etcd under
    ``<path>/jobs/<jid>/.load.p``, with a TTL taken from the write
    profile's ``etcd.ttl`` option when a profile is configured, or the
    global ``etcd.ttl`` option otherwise.  ``minions`` is accepted for
    returner-interface compatibility but is not used.
    '''
    log.debug('sdstack_etcd returner <save_load> called jid: %s', jid)
    profile = __opts__.get('etcd.returner_write_profile')
    conn, base = _get_conn(__opts__, profile)
    ttl = (__opts__.get(profile, {}).get('etcd.ttl')
           if profile else __opts__.get('etcd.ttl'))
    dest = '/'.join((base, 'jobs', jid, '.load.p'))
    conn.set(dest, salt.utils.json.dumps(load), ttl=ttl)
def function[save_load, parameter[jid, load, minions]]: constant[ Save the load to the specified jid ] call[name[log].debug, parameter[constant[sdstack_etcd returner <save_load> called jid: %s], name[jid]]] variable[write_profile] assign[=] call[name[__opts__].get, parameter[constant[etcd.returner_write_profile]]] <ast.Tuple object at 0x7da1b1c227a0> assign[=] call[name[_get_conn], parameter[name[__opts__], name[write_profile]]] if name[write_profile] begin[:] variable[ttl] assign[=] call[call[name[__opts__].get, parameter[name[write_profile], dictionary[[], []]]].get, parameter[constant[etcd.ttl]]] call[name[client].set, parameter[call[constant[/].join, parameter[tuple[[<ast.Name object at 0x7da1b1c23b50>, <ast.Constant object at 0x7da1b1c21660>, <ast.Name object at 0x7da1b1c228c0>, <ast.Constant object at 0x7da1b1c22b00>]]]], call[name[salt].utils.json.dumps, parameter[name[load]]]]]
keyword[def] identifier[save_load] ( identifier[jid] , identifier[load] , identifier[minions] = keyword[None] ): literal[string] identifier[log] . identifier[debug] ( literal[string] , identifier[jid] ) identifier[write_profile] = identifier[__opts__] . identifier[get] ( literal[string] ) identifier[client] , identifier[path] = identifier[_get_conn] ( identifier[__opts__] , identifier[write_profile] ) keyword[if] identifier[write_profile] : identifier[ttl] = identifier[__opts__] . identifier[get] ( identifier[write_profile] ,{}). identifier[get] ( literal[string] ) keyword[else] : identifier[ttl] = identifier[__opts__] . identifier[get] ( literal[string] ) identifier[client] . identifier[set] ( literal[string] . identifier[join] (( identifier[path] , literal[string] , identifier[jid] , literal[string] )), identifier[salt] . identifier[utils] . identifier[json] . identifier[dumps] ( identifier[load] ), identifier[ttl] = identifier[ttl] , )
def save_load(jid, load, minions=None): """ Save the load to the specified jid """ log.debug('sdstack_etcd returner <save_load> called jid: %s', jid) write_profile = __opts__.get('etcd.returner_write_profile') (client, path) = _get_conn(__opts__, write_profile) if write_profile: ttl = __opts__.get(write_profile, {}).get('etcd.ttl') # depends on [control=['if'], data=[]] else: ttl = __opts__.get('etcd.ttl') client.set('/'.join((path, 'jobs', jid, '.load.p')), salt.utils.json.dumps(load), ttl=ttl)
def voxelized(self, pitch, **kwargs):
    """
    Return a Voxel object representing the current mesh
    discretized into voxels at the specified pitch

    Parameters
    ----------
    pitch : float
        The edge length of a single voxel

    Returns
    ----------
    voxelized : Voxel object
        Representing the current mesh
    """
    # Extra keyword arguments are forwarded unchanged to VoxelMesh.
    return voxel.VoxelMesh(self, pitch=pitch, **kwargs)
def function[voxelized, parameter[self, pitch]]: constant[ Return a Voxel object representing the current mesh discretized into voxels at the specified pitch Parameters ---------- pitch : float The edge length of a single voxel Returns ---------- voxelized : Voxel object Representing the current mesh ] variable[voxelized] assign[=] call[name[voxel].VoxelMesh, parameter[name[self]]] return[name[voxelized]]
keyword[def] identifier[voxelized] ( identifier[self] , identifier[pitch] ,** identifier[kwargs] ): literal[string] identifier[voxelized] = identifier[voxel] . identifier[VoxelMesh] ( identifier[self] , identifier[pitch] = identifier[pitch] , ** identifier[kwargs] ) keyword[return] identifier[voxelized]
def voxelized(self, pitch, **kwargs): """ Return a Voxel object representing the current mesh discretized into voxels at the specified pitch Parameters ---------- pitch : float The edge length of a single voxel Returns ---------- voxelized : Voxel object Representing the current mesh """ voxelized = voxel.VoxelMesh(self, pitch=pitch, **kwargs) return voxelized
def traceback_plot(self, fsize=(6, 4)):
    """
    Plots a path of the possible last 4 states.

    Parameters
    ----------
    fsize : Plot size for matplotlib.

    Examples
    --------
    >>> import matplotlib.pyplot as plt
    >>> from sk_dsp_comm.fec_conv import fec_conv
    >>> from sk_dsp_comm import digitalcom as dc
    >>> import numpy as np
    >>> cc = fec_conv()
    >>> x = np.random.randint(0,2,100)
    >>> state = '00'
    >>> y,state = cc.conv_encoder(x,state)
    >>> # Add channel noise to bits translated to +1/-1
    >>> yn = dc.cpx_AWGN(2*y-1,5,1) # SNR = 5 dB
    >>> # Translate noisy +1/-1 bits to soft values on [0,7]
    >>> yn = (yn.real+1)/2*7
    >>> z = cc.viterbi_decoder(yn)
    >>> cc.traceback_plot()
    >>> plt.show()
    """
    traceback_states = self.paths.traceback_states
    plt.figure(figsize=fsize)
    # x axis spans the traceback window ending at the current symbol (x=0);
    # y axis covers all state indices, negated so state 0 plots at the top.
    plt.axis([-self.decision_depth + 1, 0,
              -(self.Nstates - 1) - 0.5, 0.5])
    # Only the number of traceback columns is needed (row count unused).
    N = traceback_states.shape[1]
    # Reverse time so the most recent symbol period lands at x = 0,
    # and negate indices to match the flipped y axis above.
    traceback_states = -traceback_states[:, ::-1]
    plt.plot(range(-(N - 1), 0 + 1), traceback_states.T)
    plt.xlabel('Traceback Symbol Periods')
    plt.ylabel('State Index $0$ to -$2^{(K-1)}$')
    plt.title('Survivor Paths Traced Back From All %d States' % self.Nstates)
    plt.grid()
def function[traceback_plot, parameter[self, fsize]]: constant[ Plots a path of the possible last 4 states. Parameters ---------- fsize : Plot size for matplotlib. Examples -------- >>> import matplotlib.pyplot as plt >>> from sk_dsp_comm.fec_conv import fec_conv >>> from sk_dsp_comm import digitalcom as dc >>> import numpy as np >>> cc = fec_conv() >>> x = np.random.randint(0,2,100) >>> state = '00' >>> y,state = cc.conv_encoder(x,state) >>> # Add channel noise to bits translated to +1/-1 >>> yn = dc.cpx_AWGN(2*y-1,5,1) # SNR = 5 dB >>> # Translate noisy +1/-1 bits to soft values on [0,7] >>> yn = (yn.real+1)/2*7 >>> z = cc.viterbi_decoder(yn) >>> cc.traceback_plot() >>> plt.show() ] variable[traceback_states] assign[=] name[self].paths.traceback_states call[name[plt].figure, parameter[]] call[name[plt].axis, parameter[list[[<ast.BinOp object at 0x7da18f00c070>, <ast.Constant object at 0x7da18f00c460>, <ast.BinOp object at 0x7da18f00e440>, <ast.Constant object at 0x7da18f00c280>]]]] <ast.Tuple object at 0x7da18f00d6f0> assign[=] name[traceback_states].shape variable[traceback_states] assign[=] <ast.UnaryOp object at 0x7da18f00cdf0> call[name[plt].plot, parameter[call[name[range], parameter[<ast.UnaryOp object at 0x7da18f00ee90>, binary_operation[constant[0] + constant[1]]]], name[traceback_states].T]] call[name[plt].xlabel, parameter[constant[Traceback Symbol Periods]]] call[name[plt].ylabel, parameter[constant[State Index $0$ to -$2^{(K-1)}$]]] call[name[plt].title, parameter[binary_operation[constant[Survivor Paths Traced Back From All %d States] <ast.Mod object at 0x7da2590d6920> name[self].Nstates]]] call[name[plt].grid, parameter[]]
keyword[def] identifier[traceback_plot] ( identifier[self] , identifier[fsize] =( literal[int] , literal[int] )): literal[string] identifier[traceback_states] = identifier[self] . identifier[paths] . identifier[traceback_states] identifier[plt] . identifier[figure] ( identifier[figsize] = identifier[fsize] ) identifier[plt] . identifier[axis] ([- identifier[self] . identifier[decision_depth] + literal[int] , literal[int] , -( identifier[self] . identifier[Nstates] - literal[int] )- literal[int] , literal[int] ]) identifier[M] , identifier[N] = identifier[traceback_states] . identifier[shape] identifier[traceback_states] =- identifier[traceback_states] [:,::- literal[int] ] identifier[plt] . identifier[plot] ( identifier[range] (-( identifier[N] - literal[int] ), literal[int] + literal[int] ), identifier[traceback_states] . identifier[T] ) identifier[plt] . identifier[xlabel] ( literal[string] ) identifier[plt] . identifier[ylabel] ( literal[string] ) identifier[plt] . identifier[title] ( literal[string] % identifier[self] . identifier[Nstates] ) identifier[plt] . identifier[grid] ()
def traceback_plot(self, fsize=(6, 4)): """ Plots a path of the possible last 4 states. Parameters ---------- fsize : Plot size for matplotlib. Examples -------- >>> import matplotlib.pyplot as plt >>> from sk_dsp_comm.fec_conv import fec_conv >>> from sk_dsp_comm import digitalcom as dc >>> import numpy as np >>> cc = fec_conv() >>> x = np.random.randint(0,2,100) >>> state = '00' >>> y,state = cc.conv_encoder(x,state) >>> # Add channel noise to bits translated to +1/-1 >>> yn = dc.cpx_AWGN(2*y-1,5,1) # SNR = 5 dB >>> # Translate noisy +1/-1 bits to soft values on [0,7] >>> yn = (yn.real+1)/2*7 >>> z = cc.viterbi_decoder(yn) >>> cc.traceback_plot() >>> plt.show() """ traceback_states = self.paths.traceback_states plt.figure(figsize=fsize) plt.axis([-self.decision_depth + 1, 0, -(self.Nstates - 1) - 0.5, 0.5]) (M, N) = traceback_states.shape traceback_states = -traceback_states[:, ::-1] plt.plot(range(-(N - 1), 0 + 1), traceback_states.T) plt.xlabel('Traceback Symbol Periods') plt.ylabel('State Index $0$ to -$2^{(K-1)}$') plt.title('Survivor Paths Traced Back From All %d States' % self.Nstates) plt.grid()
def validate_events(events, max_time=30000.):
    """Checks that a 1-d event location ndarray is well-formed, and raises
    errors if not.

    Parameters
    ----------
    events : np.ndarray, shape=(n,)
        Array of event times
    max_time : float
        If an event is found above this time, a ValueError will be raised.
        (Default value = 30000.)
    """
    # Reject implausibly large times (likely ms supplied instead of s).
    if np.any(events > max_time):
        raise ValueError(
            'An event at time {} was found which is greater than '
            'the maximum allowable time of max_time = {} (did you'
            ' supply event times in '
            'seconds?)'.format(events.max(), max_time))
    # Only flat (1-d) arrays of event times are supported.
    if events.ndim != 1:
        raise ValueError('Event times should be 1-d numpy ndarray, '
                         'but shape={}'.format(events.shape))
    # Event times must be sorted in ascending order.
    if np.any(np.diff(events) < 0):
        raise ValueError('Events should be in increasing order.')
def function[validate_events, parameter[events, max_time]]: constant[Checks that a 1-d event location ndarray is well-formed, and raises errors if not. Parameters ---------- events : np.ndarray, shape=(n,) Array of event times max_time : float If an event is found above this time, a ValueError will be raised. (Default value = 30000.) ] if call[compare[name[events] greater[>] name[max_time]].any, parameter[]] begin[:] <ast.Raise object at 0x7da1b0ff15d0> if compare[name[events].ndim not_equal[!=] constant[1]] begin[:] <ast.Raise object at 0x7da1b0ff0760> if call[compare[call[name[np].diff, parameter[name[events]]] less[<] constant[0]].any, parameter[]] begin[:] <ast.Raise object at 0x7da1b0ff28f0>
keyword[def] identifier[validate_events] ( identifier[events] , identifier[max_time] = literal[int] ): literal[string] keyword[if] ( identifier[events] > identifier[max_time] ). identifier[any] (): keyword[raise] identifier[ValueError] ( literal[string] literal[string] literal[string] literal[string] . identifier[format] ( identifier[events] . identifier[max] (), identifier[max_time] )) keyword[if] identifier[events] . identifier[ndim] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[events] . identifier[shape] )) keyword[if] ( identifier[np] . identifier[diff] ( identifier[events] )< literal[int] ). identifier[any] (): keyword[raise] identifier[ValueError] ( literal[string] )
def validate_events(events, max_time=30000.0): """Checks that a 1-d event location ndarray is well-formed, and raises errors if not. Parameters ---------- events : np.ndarray, shape=(n,) Array of event times max_time : float If an event is found above this time, a ValueError will be raised. (Default value = 30000.) """ # Make sure no event times are huge if (events > max_time).any(): raise ValueError('An event at time {} was found which is greater than the maximum allowable time of max_time = {} (did you supply event times in seconds?)'.format(events.max(), max_time)) # depends on [control=['if'], data=[]] # Make sure event locations are 1-d np ndarrays if events.ndim != 1: raise ValueError('Event times should be 1-d numpy ndarray, but shape={}'.format(events.shape)) # depends on [control=['if'], data=[]] # Make sure event times are increasing if (np.diff(events) < 0).any(): raise ValueError('Events should be in increasing order.') # depends on [control=['if'], data=[]]
def load(self):
    """
    Extract tabular data as |TableData| instances from a JSON file.

    |load_source_desc_file|

    Six JSON layouts are accepted.  Single-table files may be (1) an
    array of row objects, (2) an object mapping column names to value
    arrays, or (3) a flat object of scalar values (loaded as key/value
    rows).  Multiple-table files wrap any of those three layouts in an
    outer object whose keys become the table names: (4) table name to
    array of row objects, (5) table name to column/value-array object,
    or (6) table name to flat scalar object.

    :return:
        Loaded table data iterator.
        |load_table_name_desc|

        ===================  ==============================================
        Format specifier     Value after the replacement
        ===================  ==============================================
        ``%(filename)s``     |filename_desc|
        ``%(key)s``          | This replaced the different value
                             | for each single/multiple JSON tables:
                             | [single JSON table]
                             | ``%(format_name)s%(format_id)s``
                             | [multiple JSON table] Table data key.
        ``%(format_name)s``  ``"json"``
        ``%(format_id)s``    |format_id_desc|
        ``%(global_id)s``    |global_id|
        ===================  ==============================================
    :rtype: |TableData| iterator
    :raises pytablereader.DataError:
        If the data is invalid JSON.
    :raises pytablereader.error.ValidationError:
        If the data is not acceptable JSON format.
    """
    # Parse the source into a dict and let the formatter normalize the
    # accepted JSON layouts into TableData instances.
    table_formatter = JsonTableFormatter(self.load_dict())
    table_formatter.accept(self)
    return table_formatter.to_table_data()
def function[load, parameter[self]]: constant[ Extract tabular data as |TableData| instances from a JSON file. |load_source_desc_file| This method can be loading four types of JSON formats: **(1)** Single table data in a file: .. code-block:: json :caption: Acceptable JSON Schema (1): single table { "type": "array", "items": { "type": "object", "additionalProperties": { "anyOf": [ {"type": "string"}, {"type": "number"}, {"type": "boolean"}, {"type": "null"} ] } } } .. code-block:: json :caption: Acceptable JSON example for the JSON schema (1) [ {"attr_b": 4, "attr_c": "a", "attr_a": 1}, {"attr_b": 2.1, "attr_c": "bb", "attr_a": 2}, {"attr_b": 120.9, "attr_c": "ccc", "attr_a": 3} ] The example data will be loaded as the following tabular data: .. table:: +------+------+------+ |attr_a|attr_b|attr_c| +======+======+======+ | 1| 4.0|a | +------+------+------+ | 2| 2.1|bb | +------+------+------+ | 3| 120.9|ccc | +------+------+------+ **(2)** Single table data in a file: .. code-block:: json :caption: Acceptable JSON Schema (2): single table { "type": "object", "additionalProperties": { "type": "array", "items": { "anyOf": [ {"type": "string"}, {"type": "number"}, {"type": "boolean"}, {"type": "null"} ] } } } .. code-block:: json :caption: Acceptable JSON example for the JSON schema (2) { "attr_a": [1, 2, 3], "attr_b": [4, 2.1, 120.9], "attr_c": ["a", "bb", "ccc"] } The example data will be loaded as the following tabular data: .. table:: +------+------+------+ |attr_a|attr_b|attr_c| +======+======+======+ | 1| 4.0|a | +------+------+------+ | 2| 2.1|bb | +------+------+------+ | 3| 120.9|ccc | +------+------+------+ **(3)** Single table data in a file: .. code-block:: json :caption: Acceptable JSON Schema (3): single table { "type": "object", "additionalProperties": { "anyOf": [ {"type": "string"}, {"type": "number"}, {"type": "boolean"}, {"type": "null"} ] } } .. 
code-block:: json :caption: Acceptable JSON example for the JSON schema (3) { "num_ratings": 27, "support_threads": 1, "downloaded": 925716, "last_updated":"2017-12-01 6:22am GMT", "added":"2010-01-20", "num": 1.1, "hoge": null } The example data will be loaded as the following tabular data: .. table:: +---------------+---------------------+ | key | value | +===============+=====================+ |num_ratings | 27| +---------------+---------------------+ |support_threads| 1| +---------------+---------------------+ |downloaded | 925716| +---------------+---------------------+ |last_updated |2017-12-01 6:22am GMT| +---------------+---------------------+ |added |2010-01-20 | +---------------+---------------------+ |num | 1.1| +---------------+---------------------+ |hoge |None | +---------------+---------------------+ **(4)** Multiple table data in a file: .. code-block:: json :caption: Acceptable JSON Schema (4): multiple tables { "type": "object", "additionalProperties": { "type": "array", "items": { "type": "object", "additionalProperties": { "anyOf": [ {"type": "string"}, {"type": "number"}, {"type": "boolean"}, {"type": "null"} ] } } } } .. code-block:: json :caption: Acceptable JSON example for the JSON schema (4) { "table_a" : [ {"attr_b": 4, "attr_c": "a", "attr_a": 1}, {"attr_b": 2.1, "attr_c": "bb", "attr_a": 2}, {"attr_b": 120.9, "attr_c": "ccc", "attr_a": 3} ], "table_b" : [ {"a": 1, "b": 4}, {"a": 2 }, {"a": 3, "b": 120.9} ] } The example data will be loaded as the following tabular data: .. table:: table_a +------+------+------+ |attr_a|attr_b|attr_c| +======+======+======+ | 1| 4.0|a | +------+------+------+ | 2| 2.1|bb | +------+------+------+ | 3| 120.9|ccc | +------+------+------+ .. table:: table_b +-+-----+ |a| b | +=+=====+ |1| 4.0| +-+-----+ |2| None| +-+-----+ |3|120.9| +-+-----+ **(5)** Multiple table data in a file: .. 
code-block:: json :caption: Acceptable JSON Schema (5): multiple tables { "type": "object", "additionalProperties": { "type": "object", "additionalProperties": { "type": "array", "items": { "anyOf": [ {"type": "string"}, {"type": "number"}, {"type": "boolean"}, {"type": "null"} ] } } } } .. code-block:: json :caption: Acceptable JSON example for the JSON schema (5) { "table_a" : { "attr_a": [1, 2, 3], "attr_b": [4, 2.1, 120.9], "attr_c": ["a", "bb", "ccc"] }, "table_b" : { "a": [1, 3], "b": [4, 120.9] } } The example data will be loaded as the following tabular data: .. table:: table_a +------+------+------+ |attr_a|attr_b|attr_c| +======+======+======+ | 1| 4.0|a | +------+------+------+ | 2| 2.1|bb | +------+------+------+ | 3| 120.9|ccc | +------+------+------+ .. table:: table_b +-+-----+ |a| b | +=+=====+ |1| 4.0| +-+-----+ |3|120.9| +-+-----+ **(6)** Multiple table data in a file: .. code-block:: json :caption: Acceptable JSON Schema (6): multiple tables { "type": "object", "additionalProperties": { "type": "object", "additionalProperties": { "anyOf": [ {"type": "string"}, {"type": "number"}, {"type": "boolean"}, {"type": "null"} ] } } } .. code-block:: json :caption: Acceptable JSON example for the JSON schema (6) { "table_a": { "num_ratings": 27, "support_threads": 1, "downloaded": 925716, "last_updated":"2017-12-01 6:22am GMT", "added":"2010-01-20", "num": 1.1, "hoge": null }, "table_b": { "a": 4, "b": 120.9 } } The example data will be loaded as the following tabular data: .. 
table:: table_a +---------------+---------------------+ | key | value | +===============+=====================+ |num_ratings | 27| +---------------+---------------------+ |support_threads| 1| +---------------+---------------------+ |downloaded | 925716| +---------------+---------------------+ |last_updated |2017-12-01 6:22am GMT| +---------------+---------------------+ |added |2010-01-20 | +---------------+---------------------+ |num | 1.1| +---------------+---------------------+ |hoge |None | +---------------+---------------------+ .. table:: table_b +---+-----+ |key|value| +===+=====+ |a | 4.0| +---+-----+ |b |120.9| +---+-----+ :return: Loaded table data iterator. |load_table_name_desc| =================== ============================================== Format specifier Value after the replacement =================== ============================================== ``%(filename)s`` |filename_desc| ``%(key)s`` | This replaced the different value | for each single/multiple JSON tables: | [single JSON table] | ``%(format_name)s%(format_id)s`` | [multiple JSON table] Table data key. ``%(format_name)s`` ``"json"`` ``%(format_id)s`` |format_id_desc| ``%(global_id)s`` |global_id| =================== ============================================== :rtype: |TableData| iterator :raises pytablereader.DataError: If the data is invalid JSON. :raises pytablereader.error.ValidationError: If the data is not acceptable JSON format. ] variable[formatter] assign[=] call[name[JsonTableFormatter], parameter[call[name[self].load_dict, parameter[]]]] call[name[formatter].accept, parameter[name[self]]] return[call[name[formatter].to_table_data, parameter[]]]
keyword[def] identifier[load] ( identifier[self] ): literal[string] identifier[formatter] = identifier[JsonTableFormatter] ( identifier[self] . identifier[load_dict] ()) identifier[formatter] . identifier[accept] ( identifier[self] ) keyword[return] identifier[formatter] . identifier[to_table_data] ()
def load(self):
    """
    Extract tabular data as |TableData| instances from a JSON file.

    |load_source_desc_file|

    Six JSON layouts are accepted, falling into two groups:

    * single table per file:
      (1) an array of flat objects (one object per row),
      (2) an object mapping attribute names to value arrays, or
      (3) a flat key/value object (loaded as a two-column
      ``key``/``value`` table);
    * multiple tables per file:
      (4)-(6) a top-level object mapping each table name to any of the
      single-table forms above.

    :return:
        Loaded table data iterator.
        |load_table_name_desc|

        ===================  ==============================================
        Format specifier     Value after the replacement
        ===================  ==============================================
        ``%(filename)s``     |filename_desc|
        ``%(key)s``          | Replaced with a different value for
                             | single/multiple JSON tables:
                             | [single table] ``%(format_name)s%(format_id)s``
                             | [multiple tables] the table data key.
        ``%(format_name)s``  ``"json"``
        ``%(format_id)s``    |format_id_desc|
        ``%(global_id)s``    |global_id|
        ===================  ==============================================
    :rtype: |TableData| iterator
    :raises pytablereader.DataError:
        If the data is invalid JSON.
    :raises pytablereader.error.ValidationError:
        If the data is not acceptable JSON format.
    """

    # Parsing/validation happens in the formatter; this method only wires
    # the loaded dict into it and hands back the table iterator.
    table_formatter = JsonTableFormatter(self.load_dict())
    table_formatter.accept(self)

    return table_formatter.to_table_data()
def _nutation(date, eop_correction=True, terms=106):
    """Model 1980 of nutation as described in Vallado p. 224

    Args:
        date (beyond.utils.date.Date)
        eop_correction (bool): set to ``True`` to include model correction
            from 'finals' files.
        terms (int): number of series terms to evaluate (106 = full model)
    Return:
        tuple : 3-elements, all floats in degrees
            1. mean obliquity of the ecliptic (epsilon bar)
            2. nutation in longitude (delta psi)
            3. nutation in obliquity (delta epsilon)
    Warning:
        The good version of the nutation model can be found in the **errata**
        of the 4th edition of *Fundamentals of Astrodynamics and
        Applications* by Vallado.
    """

    ttt = date.change_scale('TT').julian_century

    # One full revolution, in degrees
    r = 360.

    # Mean obliquity of the ecliptic, polynomial in arcseconds
    epsilon_bar = (
        84381.448
        - 46.8150 * ttt
        - 5.9e-4 * ttt ** 2
        + 1.813e-3 * ttt ** 3
    )
    # Conversion to degrees
    epsilon_bar /= 3600.

    # Delaunay arguments, all in degrees:
    # mean anomaly of the moon
    m_m = (134.96298139 + (1325 * r + 198.8673981) * ttt
           + 0.0086972 * ttt ** 2 + 1.78e-5 * ttt ** 3)

    # mean anomaly of the sun
    m_s = (357.52772333 + (99 * r + 359.0503400) * ttt
           - 0.0001603 * ttt ** 2 - 3.3e-6 * ttt ** 3)

    # argument of latitude of the moon (L - Omega)
    u_m_m = (93.27191028 + (1342 * r + 82.0175381) * ttt
             - 0.0036825 * ttt ** 2 + 3.1e-6 * ttt ** 3)

    # Mean elongation of the moon from the sun
    d_s = (297.85036306 + (1236 * r + 307.11148) * ttt
           - 0.0019142 * ttt ** 2 + 5.3e-6 * ttt ** 3)

    # Mean longitude of the ascending node of the moon
    om_m = (125.04452222 - (5 * r + 134.1362608) * ttt
            + 0.0020708 * ttt ** 2 + 2.2e-6 * ttt ** 3)

    delta_psi = 0.
    delta_eps = 0.
    for ints, coeffs in _tab(terms):
        a1, a2, a3, a4, a5 = ints

        # The tabulated coefficients appear to be in units of 1e-4
        # arcsecond; dividing by 36,000,000 (= 1e4 * 3600) converts
        # them directly to degrees. TODO confirm against the table data.
        A, B, C, D = np.array(list(coeffs)) / 36000000.

        # Argument of the term: linear combination of the five Delaunay
        # arguments, in degrees.
        a_p = a1 * m_m + a2 * m_s + a3 * u_m_m + a4 * d_s + a5 * om_m

        delta_psi += (A + B * ttt) * np.sin(np.deg2rad(a_p))
        delta_eps += (C + D * ttt) * np.cos(np.deg2rad(a_p))

    if eop_correction:
        # EOP corrections are published in milliarcseconds;
        # /3,600,000 converts mas to degrees.
        delta_eps += date.eop.deps / 3600000.
        delta_psi += date.eop.dpsi / 3600000.

    return epsilon_bar, delta_psi, delta_eps
def function[_nutation, parameter[date, eop_correction, terms]]: constant[Model 1980 of nutation as described in Vallado p. 224 Args: date (beyond.utils.date.Date) eop_correction (bool): set to ``True`` to include model correction from 'finals' files. terms (int) Return: tuple : 3-elements, all floats in degrees 1. ̄ε 2. Δψ 3. Δε Warning: The good version of the nutation model can be found in the **errata** of the 4th edition of *Fundamentals of Astrodynamics and Applications* by Vallado. ] variable[ttt] assign[=] call[name[date].change_scale, parameter[constant[TT]]].julian_century variable[r] assign[=] constant[360.0] variable[epsilon_bar] assign[=] binary_operation[binary_operation[binary_operation[constant[84381.448] - binary_operation[constant[46.815] * name[ttt]]] - binary_operation[constant[0.00059] * binary_operation[name[ttt] ** constant[2]]]] + binary_operation[constant[0.001813] * binary_operation[name[ttt] ** constant[3]]]] <ast.AugAssign object at 0x7da1b0ca54b0> variable[m_m] assign[=] binary_operation[binary_operation[binary_operation[constant[134.96298139] + binary_operation[binary_operation[binary_operation[constant[1325] * name[r]] + constant[198.8673981]] * name[ttt]]] + binary_operation[constant[0.0086972] * binary_operation[name[ttt] ** constant[2]]]] + binary_operation[constant[1.78e-05] * binary_operation[name[ttt] ** constant[3]]]] variable[m_s] assign[=] binary_operation[binary_operation[binary_operation[constant[357.52772333] + binary_operation[binary_operation[binary_operation[constant[99] * name[r]] + constant[359.05034]] * name[ttt]]] - binary_operation[constant[0.0001603] * binary_operation[name[ttt] ** constant[2]]]] - binary_operation[constant[3.3e-06] * binary_operation[name[ttt] ** constant[3]]]] variable[u_m_m] assign[=] binary_operation[binary_operation[binary_operation[constant[93.27191028] + binary_operation[binary_operation[binary_operation[constant[1342] * name[r]] + constant[82.0175381]] * name[ttt]]] - 
binary_operation[constant[0.0036825] * binary_operation[name[ttt] ** constant[2]]]] + binary_operation[constant[3.1e-06] * binary_operation[name[ttt] ** constant[3]]]] variable[d_s] assign[=] binary_operation[binary_operation[binary_operation[constant[297.85036306] + binary_operation[binary_operation[binary_operation[constant[1236] * name[r]] + constant[307.11148]] * name[ttt]]] - binary_operation[constant[0.0019142] * binary_operation[name[ttt] ** constant[2]]]] + binary_operation[constant[5.3e-06] * binary_operation[name[ttt] ** constant[3]]]] variable[om_m] assign[=] binary_operation[binary_operation[binary_operation[constant[125.04452222] - binary_operation[binary_operation[binary_operation[constant[5] * name[r]] + constant[134.1362608]] * name[ttt]]] + binary_operation[constant[0.0020708] * binary_operation[name[ttt] ** constant[2]]]] + binary_operation[constant[2.2e-06] * binary_operation[name[ttt] ** constant[3]]]] variable[delta_psi] assign[=] constant[0.0] variable[delta_eps] assign[=] constant[0.0] for taget[tuple[[<ast.Name object at 0x7da20c6ab2e0>, <ast.Name object at 0x7da20c6a9ae0>]]] in starred[call[name[_tab], parameter[name[terms]]]] begin[:] <ast.Tuple object at 0x7da20c6aa350> assign[=] name[integers] <ast.Tuple object at 0x7da20c6abd90> assign[=] binary_operation[call[name[np].array, parameter[call[name[list], parameter[name[reals]]]]] / constant[36000000.0]] variable[a_p] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[a1] * name[m_m]] + binary_operation[name[a2] * name[m_s]]] + binary_operation[name[a3] * name[u_m_m]]] + binary_operation[name[a4] * name[d_s]]] + binary_operation[name[a5] * name[om_m]]] <ast.AugAssign object at 0x7da20c6a9f30> <ast.AugAssign object at 0x7da20c6ab280> if name[eop_correction] begin[:] <ast.AugAssign object at 0x7da20c6ab310> <ast.AugAssign object at 0x7da20c6a9450> return[tuple[[<ast.Name object at 0x7da20c6ab730>, <ast.Name object at 0x7da20c6ab5b0>, <ast.Name 
object at 0x7da20c6abca0>]]]
keyword[def] identifier[_nutation] ( identifier[date] , identifier[eop_correction] = keyword[True] , identifier[terms] = literal[int] ): literal[string] identifier[ttt] = identifier[date] . identifier[change_scale] ( literal[string] ). identifier[julian_century] identifier[r] = literal[int] identifier[epsilon_bar] = literal[int] - literal[int] * identifier[ttt] - literal[int] * identifier[ttt] ** literal[int] + literal[int] * identifier[ttt] ** literal[int] identifier[epsilon_bar] /= literal[int] identifier[m_m] = literal[int] +( literal[int] * identifier[r] + literal[int] )* identifier[ttt] + literal[int] * identifier[ttt] ** literal[int] + literal[int] * identifier[ttt] ** literal[int] identifier[m_s] = literal[int] +( literal[int] * identifier[r] + literal[int] )* identifier[ttt] - literal[int] * identifier[ttt] ** literal[int] - literal[int] * identifier[ttt] ** literal[int] identifier[u_m_m] = literal[int] +( literal[int] * identifier[r] + literal[int] )* identifier[ttt] - literal[int] * identifier[ttt] ** literal[int] + literal[int] * identifier[ttt] ** literal[int] identifier[d_s] = literal[int] +( literal[int] * identifier[r] + literal[int] )* identifier[ttt] - literal[int] * identifier[ttt] ** literal[int] + literal[int] * identifier[ttt] ** literal[int] identifier[om_m] = literal[int] -( literal[int] * identifier[r] + literal[int] )* identifier[ttt] + literal[int] * identifier[ttt] ** literal[int] + literal[int] * identifier[ttt] ** literal[int] identifier[delta_psi] = literal[int] identifier[delta_eps] = literal[int] keyword[for] identifier[integers] , identifier[reals] keyword[in] identifier[_tab] ( identifier[terms] ): identifier[a1] , identifier[a2] , identifier[a3] , identifier[a4] , identifier[a5] = identifier[integers] identifier[A] , identifier[B] , identifier[C] , identifier[D] = identifier[np] . 
identifier[array] ( identifier[list] ( identifier[reals] ))/ literal[int] identifier[a_p] = identifier[a1] * identifier[m_m] + identifier[a2] * identifier[m_s] + identifier[a3] * identifier[u_m_m] + identifier[a4] * identifier[d_s] + identifier[a5] * identifier[om_m] identifier[delta_psi] +=( identifier[A] + identifier[B] * identifier[ttt] )* identifier[np] . identifier[sin] ( identifier[np] . identifier[deg2rad] ( identifier[a_p] )) identifier[delta_eps] +=( identifier[C] + identifier[D] * identifier[ttt] )* identifier[np] . identifier[cos] ( identifier[np] . identifier[deg2rad] ( identifier[a_p] )) keyword[if] identifier[eop_correction] : identifier[delta_eps] += identifier[date] . identifier[eop] . identifier[deps] / literal[int] identifier[delta_psi] += identifier[date] . identifier[eop] . identifier[dpsi] / literal[int] keyword[return] identifier[epsilon_bar] , identifier[delta_psi] , identifier[delta_eps]
def _nutation(date, eop_correction=True, terms=106): """Model 1980 of nutation as described in Vallado p. 224 Args: date (beyond.utils.date.Date) eop_correction (bool): set to ``True`` to include model correction from 'finals' files. terms (int) Return: tuple : 3-elements, all floats in degrees 1. ̄ε 2. Δψ 3. Δε Warning: The good version of the nutation model can be found in the **errata** of the 4th edition of *Fundamentals of Astrodynamics and Applications* by Vallado. """ ttt = date.change_scale('TT').julian_century r = 360.0 # in arcsecond epsilon_bar = 84381.448 - 46.815 * ttt - 0.00059 * ttt ** 2 + 0.001813 * ttt ** 3 # Conversion to degrees epsilon_bar /= 3600.0 # mean anomaly of the moon m_m = 134.96298139 + (1325 * r + 198.8673981) * ttt + 0.0086972 * ttt ** 2 + 1.78e-05 * ttt ** 3 # mean anomaly of the sun m_s = 357.52772333 + (99 * r + 359.05034) * ttt - 0.0001603 * ttt ** 2 - 3.3e-06 * ttt ** 3 # L - Omega u_m_m = 93.27191028 + (1342 * r + 82.0175381) * ttt - 0.0036825 * ttt ** 2 + 3.1e-06 * ttt ** 3 # Mean elongation of the moon from the sun d_s = 297.85036306 + (1236 * r + 307.11148) * ttt - 0.0019142 * ttt ** 2 + 5.3e-06 * ttt ** 3 # Mean longitude of the ascending node of the moon om_m = 125.04452222 - (5 * r + 134.1362608) * ttt + 0.0020708 * ttt ** 2 + 2.2e-06 * ttt ** 3 delta_psi = 0.0 delta_eps = 0.0 for (integers, reals) in _tab(terms): (a1, a2, a3, a4, a5) = integers # Conversion from 0.1 mas to mas (A, B, C, D) = np.array(list(reals)) / 36000000.0 a_p = a1 * m_m + a2 * m_s + a3 * u_m_m + a4 * d_s + a5 * om_m # a_p %= 360. delta_psi += (A + B * ttt) * np.sin(np.deg2rad(a_p)) delta_eps += (C + D * ttt) * np.cos(np.deg2rad(a_p)) # depends on [control=['for'], data=[]] if eop_correction: delta_eps += date.eop.deps / 3600000.0 delta_psi += date.eop.dpsi / 3600000.0 # depends on [control=['if'], data=[]] return (epsilon_bar, delta_psi, delta_eps)
def _clean_value(key, val): ''' Clean out well-known bogus values. If it isn't clean (for example has value 'None'), return None. Otherwise, return the original value. NOTE: This logic also exists in the smbios module. This function is for use when not using smbios to retrieve the value. ''' if (val is None or not val or re.match('none', val, flags=re.IGNORECASE)): return None elif 'uuid' in key: # Try each version (1-5) of RFC4122 to check if it's actually a UUID for uuidver in range(1, 5): try: uuid.UUID(val, version=uuidver) return val except ValueError: continue log.trace('HW %s value %s is an invalid UUID', key, val.replace('\n', ' ')) return None elif re.search('serial|part|version', key): # 'To be filled by O.E.M. # 'Not applicable' etc. # 'Not specified' etc. # 0000000, 1234567 etc. # begone! if (re.match(r'^[0]+$', val) or re.match(r'[0]?1234567[8]?[9]?[0]?', val) or re.search(r'sernum|part[_-]?number|specified|filled|applicable', val, flags=re.IGNORECASE)): return None elif re.search('asset|manufacturer', key): # AssetTag0. Manufacturer04. Begone. if re.search(r'manufacturer|to be filled|available|asset|^no(ne|t)', val, flags=re.IGNORECASE): return None else: # map unspecified, undefined, unknown & whatever to None if (re.search(r'to be filled', val, flags=re.IGNORECASE) or re.search(r'un(known|specified)|no(t|ne)? (asset|provided|defined|available|present|specified)', val, flags=re.IGNORECASE)): return None return val
def function[_clean_value, parameter[key, val]]: constant[ Clean out well-known bogus values. If it isn't clean (for example has value 'None'), return None. Otherwise, return the original value. NOTE: This logic also exists in the smbios module. This function is for use when not using smbios to retrieve the value. ] if <ast.BoolOp object at 0x7da1b21697e0> begin[:] return[constant[None]] return[name[val]]
keyword[def] identifier[_clean_value] ( identifier[key] , identifier[val] ): literal[string] keyword[if] ( identifier[val] keyword[is] keyword[None] keyword[or] keyword[not] identifier[val] keyword[or] identifier[re] . identifier[match] ( literal[string] , identifier[val] , identifier[flags] = identifier[re] . identifier[IGNORECASE] )): keyword[return] keyword[None] keyword[elif] literal[string] keyword[in] identifier[key] : keyword[for] identifier[uuidver] keyword[in] identifier[range] ( literal[int] , literal[int] ): keyword[try] : identifier[uuid] . identifier[UUID] ( identifier[val] , identifier[version] = identifier[uuidver] ) keyword[return] identifier[val] keyword[except] identifier[ValueError] : keyword[continue] identifier[log] . identifier[trace] ( literal[string] , identifier[key] , identifier[val] . identifier[replace] ( literal[string] , literal[string] )) keyword[return] keyword[None] keyword[elif] identifier[re] . identifier[search] ( literal[string] , identifier[key] ): keyword[if] ( identifier[re] . identifier[match] ( literal[string] , identifier[val] ) keyword[or] identifier[re] . identifier[match] ( literal[string] , identifier[val] ) keyword[or] identifier[re] . identifier[search] ( literal[string] , identifier[val] , identifier[flags] = identifier[re] . identifier[IGNORECASE] )): keyword[return] keyword[None] keyword[elif] identifier[re] . identifier[search] ( literal[string] , identifier[key] ): keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[val] , identifier[flags] = identifier[re] . identifier[IGNORECASE] ): keyword[return] keyword[None] keyword[else] : keyword[if] ( identifier[re] . identifier[search] ( literal[string] , identifier[val] , identifier[flags] = identifier[re] . identifier[IGNORECASE] ) keyword[or] identifier[re] . identifier[search] ( literal[string] , identifier[val] , identifier[flags] = identifier[re] . identifier[IGNORECASE] )): keyword[return] keyword[None] keyword[return] identifier[val]
def _clean_value(key, val): """ Clean out well-known bogus values. If it isn't clean (for example has value 'None'), return None. Otherwise, return the original value. NOTE: This logic also exists in the smbios module. This function is for use when not using smbios to retrieve the value. """ if val is None or not val or re.match('none', val, flags=re.IGNORECASE): return None # depends on [control=['if'], data=[]] elif 'uuid' in key: # Try each version (1-5) of RFC4122 to check if it's actually a UUID for uuidver in range(1, 5): try: uuid.UUID(val, version=uuidver) return val # depends on [control=['try'], data=[]] except ValueError: continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['uuidver']] log.trace('HW %s value %s is an invalid UUID', key, val.replace('\n', ' ')) return None # depends on [control=['if'], data=['key']] elif re.search('serial|part|version', key): # 'To be filled by O.E.M. # 'Not applicable' etc. # 'Not specified' etc. # 0000000, 1234567 etc. # begone! if re.match('^[0]+$', val) or re.match('[0]?1234567[8]?[9]?[0]?', val) or re.search('sernum|part[_-]?number|specified|filled|applicable', val, flags=re.IGNORECASE): return None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif re.search('asset|manufacturer', key): # AssetTag0. Manufacturer04. Begone. if re.search('manufacturer|to be filled|available|asset|^no(ne|t)', val, flags=re.IGNORECASE): return None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # map unspecified, undefined, unknown & whatever to None elif re.search('to be filled', val, flags=re.IGNORECASE) or re.search('un(known|specified)|no(t|ne)? (asset|provided|defined|available|present|specified)', val, flags=re.IGNORECASE): return None # depends on [control=['if'], data=[]] return val
def convert(self, convert_to=None, runscript="/bin/bash", force=False):
    '''Convert the recipe to the "opposite" container format.

    Convenience entry point: the default target is derived from the recipe
    type (a DockerRecipe converts to Singularity, a SingularityRecipe to
    Docker), and the user may override that choice via ``convert_to``.

    Parameters
    ==========
    convert_to: can be manually forced (docker or singularity)
    runscript: default runscript (entrypoint) to use
    force: if True, override discovery from Dockerfile
    '''
    # _get_converter resolves the target format to the matching
    # conversion callable; we simply invoke it.
    return self._get_converter(convert_to)(runscript=runscript, force=force)
def function[convert, parameter[self, convert_to, runscript, force]]: constant[This is a convenience function for the user to easily call to get the most likely desired result, conversion to the opposite format. We choose the selection based on the recipe name - meaning that we perform conversion with default based on recipe name. If the recipe object is DockerRecipe, we convert to Singularity. If the recipe object is SingularityRecipe, we convert to Docker. The user can override this by setting the variable convert_to Parameters ========== convert_to: can be manually forced (docker or singularity) runscript: default runscript (entrypoint) to use force: if True, override discovery from Dockerfile ] variable[converter] assign[=] call[name[self]._get_converter, parameter[name[convert_to]]] return[call[name[converter], parameter[]]]
keyword[def] identifier[convert] ( identifier[self] , identifier[convert_to] = keyword[None] , identifier[runscript] = literal[string] , identifier[force] = keyword[False] ): literal[string] identifier[converter] = identifier[self] . identifier[_get_converter] ( identifier[convert_to] ) keyword[return] identifier[converter] ( identifier[runscript] = identifier[runscript] , identifier[force] = identifier[force] )
def convert(self, convert_to=None, runscript='/bin/bash', force=False): """This is a convenience function for the user to easily call to get the most likely desired result, conversion to the opposite format. We choose the selection based on the recipe name - meaning that we perform conversion with default based on recipe name. If the recipe object is DockerRecipe, we convert to Singularity. If the recipe object is SingularityRecipe, we convert to Docker. The user can override this by setting the variable convert_to Parameters ========== convert_to: can be manually forced (docker or singularity) runscript: default runscript (entrypoint) to use force: if True, override discovery from Dockerfile """ converter = self._get_converter(convert_to) return converter(runscript=runscript, force=force)
def enforce_bounds(self, v):
    """Set `enforce_bounds` for both of the kernels to a new value.

    Stores the flag on this object and mirrors it onto both child
    kernels (``k1`` and ``k2``) so the three stay in sync.
    """
    self._enforce_bounds = v
    for kernel in (self.k1, self.k2):
        kernel.enforce_bounds = v
def function[enforce_bounds, parameter[self, v]]: constant[Set `enforce_bounds` for both of the kernels to a new value. ] name[self]._enforce_bounds assign[=] name[v] name[self].k1.enforce_bounds assign[=] name[v] name[self].k2.enforce_bounds assign[=] name[v]
keyword[def] identifier[enforce_bounds] ( identifier[self] , identifier[v] ): literal[string] identifier[self] . identifier[_enforce_bounds] = identifier[v] identifier[self] . identifier[k1] . identifier[enforce_bounds] = identifier[v] identifier[self] . identifier[k2] . identifier[enforce_bounds] = identifier[v]
def enforce_bounds(self, v): """Set `enforce_bounds` for both of the kernels to a new value. """ self._enforce_bounds = v self.k1.enforce_bounds = v self.k2.enforce_bounds = v
def signatures(ambiguous_word: str, pos: str = None, hyperhypo=True,
               adapted=False, remove_stopwords=True, to_lemmatize=True,
               remove_numbers=True, lowercase=True, to_stem=False,
               original_lesk=False, from_cache=True) -> dict:
    """
    Takes an ambiguous word and optionally its Part-Of-Speech and returns
    a dictionary where keys are the synsets and values are sets of
    signatures.

    :param ambiguous_word: String, a single word.
    :param pos: String, one of 'a', 'r', 's', 'n', 'v', or None.
    :return: dict(synset:{signatures}).
    """
    # Discard any unsupported POS tag.
    if pos not in ('a', 'r', 's', 'n', 'v', None):
        pos = None

    # If the word has no synsets under the requested POS but does exist in
    # WordNet under some other POS, fall back to searching all POS.
    if not wn.synsets(ambiguous_word, pos) and wn.synsets(ambiguous_word):
        pos = None

    # Build the synset -> signature mapping.
    ss_sign = {
        ss: synset_signatures(ss, hyperhypo=hyperhypo, adapted=adapted,
                              remove_stopwords=remove_stopwords,
                              to_lemmatize=to_lemmatize,
                              remove_numbers=remove_numbers,
                              lowercase=lowercase,
                              original_lesk=original_lesk,
                              from_cache=from_cache)
        for ss in wn.synsets(ambiguous_word, pos)
    }

    # Matching exact words may cause sparsity, so optional matching for
    # stems. Not advisable to use, thus left out of synset_signatures().
    # (Deliberate ``== True`` comparison preserved: only an exact boolean
    # True triggers stemming.)
    if to_stem == True:
        ss_sign = {ss: [porter.stem(term) for term in signature]
                   for ss, signature in ss_sign.items()}

    return ss_sign
def function[signatures, parameter[ambiguous_word, pos, hyperhypo, adapted, remove_stopwords, to_lemmatize, remove_numbers, lowercase, to_stem, original_lesk, from_cache]]: constant[ Takes an ambiguous word and optionally its Part-Of-Speech and returns a dictionary where keys are the synsets and values are sets of signatures. :param ambiguous_word: String, a single word. :param pos: String, one of 'a', 'r', 's', 'n', 'v', or None. :return: dict(synset:{signatures}). ] variable[pos] assign[=] <ast.IfExp object at 0x7da1b224a050> if <ast.BoolOp object at 0x7da1b2249270> begin[:] variable[pos] assign[=] constant[None] variable[ss_sign] assign[=] dictionary[[], []] for taget[name[ss]] in starred[call[name[wn].synsets, parameter[name[ambiguous_word], name[pos]]]] begin[:] call[name[ss_sign]][name[ss]] assign[=] call[name[synset_signatures], parameter[name[ss]]] if compare[name[to_stem] equal[==] constant[True]] begin[:] variable[ss_sign] assign[=] <ast.DictComp object at 0x7da1b1d6ccd0> return[name[ss_sign]]
keyword[def] identifier[signatures] ( identifier[ambiguous_word] : identifier[str] , identifier[pos] : identifier[str] = keyword[None] , identifier[hyperhypo] = keyword[True] , identifier[adapted] = keyword[False] , identifier[remove_stopwords] = keyword[True] , identifier[to_lemmatize] = keyword[True] , identifier[remove_numbers] = keyword[True] , identifier[lowercase] = keyword[True] , identifier[to_stem] = keyword[False] , identifier[original_lesk] = keyword[False] , identifier[from_cache] = keyword[True] )-> identifier[dict] : literal[string] identifier[pos] = identifier[pos] keyword[if] identifier[pos] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , keyword[None] ] keyword[else] keyword[None] keyword[if] keyword[not] identifier[wn] . identifier[synsets] ( identifier[ambiguous_word] , identifier[pos] ) keyword[and] identifier[wn] . identifier[synsets] ( identifier[ambiguous_word] ): identifier[pos] = keyword[None] identifier[ss_sign] ={} keyword[for] identifier[ss] keyword[in] identifier[wn] . identifier[synsets] ( identifier[ambiguous_word] , identifier[pos] ): identifier[ss_sign] [ identifier[ss] ]= identifier[synset_signatures] ( identifier[ss] , identifier[hyperhypo] = identifier[hyperhypo] , identifier[adapted] = identifier[adapted] , identifier[remove_stopwords] = identifier[remove_stopwords] , identifier[to_lemmatize] = identifier[to_lemmatize] , identifier[remove_numbers] = identifier[remove_numbers] , identifier[lowercase] = identifier[lowercase] , identifier[original_lesk] = identifier[original_lesk] , identifier[from_cache] = identifier[from_cache] ) keyword[if] identifier[to_stem] == keyword[True] : identifier[ss_sign] ={ identifier[ss] :[ identifier[porter] . identifier[stem] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[signature] ] keyword[for] identifier[ss] , identifier[signature] keyword[in] identifier[ss_sign] . 
identifier[items] ()} keyword[return] identifier[ss_sign]
def signatures(ambiguous_word: str, pos: str=None, hyperhypo=True, adapted=False, remove_stopwords=True, to_lemmatize=True, remove_numbers=True, lowercase=True, to_stem=False, original_lesk=False, from_cache=True) -> dict: """ Takes an ambiguous word and optionally its Part-Of-Speech and returns a dictionary where keys are the synsets and values are sets of signatures. :param ambiguous_word: String, a single word. :param pos: String, one of 'a', 'r', 's', 'n', 'v', or None. :return: dict(synset:{signatures}). """ # Ensure that the POS is supported. pos = pos if pos in ['a', 'r', 's', 'n', 'v', None] else None # If the POS specified isn't found but other POS is in wordnet. if not wn.synsets(ambiguous_word, pos) and wn.synsets(ambiguous_word): pos = None # depends on [control=['if'], data=[]] # Holds the synset->signature dictionary. ss_sign = {} for ss in wn.synsets(ambiguous_word, pos): ss_sign[ss] = synset_signatures(ss, hyperhypo=hyperhypo, adapted=adapted, remove_stopwords=remove_stopwords, to_lemmatize=to_lemmatize, remove_numbers=remove_numbers, lowercase=lowercase, original_lesk=original_lesk, from_cache=from_cache) # depends on [control=['for'], data=['ss']] # Matching exact words may cause sparsity, so optional matching for stems. # Not advisible to use thus left out of the synsets_signatures() if to_stem == True: ss_sign = {ss: [porter.stem(s) for s in signature] for (ss, signature) in ss_sign.items()} # depends on [control=['if'], data=[]] return ss_sign
def create_view(self, name, expr, database=None): """ Create an MapD view from a table expression Parameters ---------- name : string expr : ibis TableExpr database : string, default None """ ast = self._build_ast(expr, MapDDialect.make_context()) select = ast.queries[0] statement = ddl.CreateView(name, select, database=database) self._execute(statement)
def function[create_view, parameter[self, name, expr, database]]: constant[ Create an MapD view from a table expression Parameters ---------- name : string expr : ibis TableExpr database : string, default None ] variable[ast] assign[=] call[name[self]._build_ast, parameter[name[expr], call[name[MapDDialect].make_context, parameter[]]]] variable[select] assign[=] call[name[ast].queries][constant[0]] variable[statement] assign[=] call[name[ddl].CreateView, parameter[name[name], name[select]]] call[name[self]._execute, parameter[name[statement]]]
keyword[def] identifier[create_view] ( identifier[self] , identifier[name] , identifier[expr] , identifier[database] = keyword[None] ): literal[string] identifier[ast] = identifier[self] . identifier[_build_ast] ( identifier[expr] , identifier[MapDDialect] . identifier[make_context] ()) identifier[select] = identifier[ast] . identifier[queries] [ literal[int] ] identifier[statement] = identifier[ddl] . identifier[CreateView] ( identifier[name] , identifier[select] , identifier[database] = identifier[database] ) identifier[self] . identifier[_execute] ( identifier[statement] )
def create_view(self, name, expr, database=None): """ Create an MapD view from a table expression Parameters ---------- name : string expr : ibis TableExpr database : string, default None """ ast = self._build_ast(expr, MapDDialect.make_context()) select = ast.queries[0] statement = ddl.CreateView(name, select, database=database) self._execute(statement)
def _create_grid(iterable, sizing_mode, layer=0): """Recursively create grid from input lists.""" return_list = [] for item in iterable: if isinstance(item, list): return_list.append(_create_grid(item, sizing_mode, layer+1)) elif isinstance(item, LayoutDOM): if sizing_mode is not None and _has_auto_sizing(item): item.sizing_mode = sizing_mode return_list.append(item) else: raise ValueError( """Only LayoutDOM items can be inserted into a layout. Tried to insert: %s of type %s""" % (item, type(item)) ) if layer % 2 == 0: return column(children=return_list, sizing_mode=sizing_mode) return row(children=return_list, sizing_mode=sizing_mode)
def function[_create_grid, parameter[iterable, sizing_mode, layer]]: constant[Recursively create grid from input lists.] variable[return_list] assign[=] list[[]] for taget[name[item]] in starred[name[iterable]] begin[:] if call[name[isinstance], parameter[name[item], name[list]]] begin[:] call[name[return_list].append, parameter[call[name[_create_grid], parameter[name[item], name[sizing_mode], binary_operation[name[layer] + constant[1]]]]]] if compare[binary_operation[name[layer] <ast.Mod object at 0x7da2590d6920> constant[2]] equal[==] constant[0]] begin[:] return[call[name[column], parameter[]]] return[call[name[row], parameter[]]]
keyword[def] identifier[_create_grid] ( identifier[iterable] , identifier[sizing_mode] , identifier[layer] = literal[int] ): literal[string] identifier[return_list] =[] keyword[for] identifier[item] keyword[in] identifier[iterable] : keyword[if] identifier[isinstance] ( identifier[item] , identifier[list] ): identifier[return_list] . identifier[append] ( identifier[_create_grid] ( identifier[item] , identifier[sizing_mode] , identifier[layer] + literal[int] )) keyword[elif] identifier[isinstance] ( identifier[item] , identifier[LayoutDOM] ): keyword[if] identifier[sizing_mode] keyword[is] keyword[not] keyword[None] keyword[and] identifier[_has_auto_sizing] ( identifier[item] ): identifier[item] . identifier[sizing_mode] = identifier[sizing_mode] identifier[return_list] . identifier[append] ( identifier[item] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[item] , identifier[type] ( identifier[item] )) ) keyword[if] identifier[layer] % literal[int] == literal[int] : keyword[return] identifier[column] ( identifier[children] = identifier[return_list] , identifier[sizing_mode] = identifier[sizing_mode] ) keyword[return] identifier[row] ( identifier[children] = identifier[return_list] , identifier[sizing_mode] = identifier[sizing_mode] )
def _create_grid(iterable, sizing_mode, layer=0): """Recursively create grid from input lists.""" return_list = [] for item in iterable: if isinstance(item, list): return_list.append(_create_grid(item, sizing_mode, layer + 1)) # depends on [control=['if'], data=[]] elif isinstance(item, LayoutDOM): if sizing_mode is not None and _has_auto_sizing(item): item.sizing_mode = sizing_mode # depends on [control=['if'], data=[]] return_list.append(item) # depends on [control=['if'], data=[]] else: raise ValueError('Only LayoutDOM items can be inserted into a layout.\n Tried to insert: %s of type %s' % (item, type(item))) # depends on [control=['for'], data=['item']] if layer % 2 == 0: return column(children=return_list, sizing_mode=sizing_mode) # depends on [control=['if'], data=[]] return row(children=return_list, sizing_mode=sizing_mode)
def guest_reset(self, userid): """reset a virtual machine :param str userid: the id of the virtual machine to be reset :returns: None """ action = "reset guest '%s'" % userid with zvmutils.log_and_reraise_sdkbase_error(action): self._vmops.guest_reset(userid)
def function[guest_reset, parameter[self, userid]]: constant[reset a virtual machine :param str userid: the id of the virtual machine to be reset :returns: None ] variable[action] assign[=] binary_operation[constant[reset guest '%s'] <ast.Mod object at 0x7da2590d6920> name[userid]] with call[name[zvmutils].log_and_reraise_sdkbase_error, parameter[name[action]]] begin[:] call[name[self]._vmops.guest_reset, parameter[name[userid]]]
keyword[def] identifier[guest_reset] ( identifier[self] , identifier[userid] ): literal[string] identifier[action] = literal[string] % identifier[userid] keyword[with] identifier[zvmutils] . identifier[log_and_reraise_sdkbase_error] ( identifier[action] ): identifier[self] . identifier[_vmops] . identifier[guest_reset] ( identifier[userid] )
def guest_reset(self, userid): """reset a virtual machine :param str userid: the id of the virtual machine to be reset :returns: None """ action = "reset guest '%s'" % userid with zvmutils.log_and_reraise_sdkbase_error(action): self._vmops.guest_reset(userid) # depends on [control=['with'], data=[]]
def put_overlay(self, overlay_name, overlay): """Store the overlay.""" logger.debug("Putting overlay: {}".format(overlay_name)) key = self.get_overlay_key(overlay_name) text = json.dumps(overlay, indent=2) self.put_text(key, text)
def function[put_overlay, parameter[self, overlay_name, overlay]]: constant[Store the overlay.] call[name[logger].debug, parameter[call[constant[Putting overlay: {}].format, parameter[name[overlay_name]]]]] variable[key] assign[=] call[name[self].get_overlay_key, parameter[name[overlay_name]]] variable[text] assign[=] call[name[json].dumps, parameter[name[overlay]]] call[name[self].put_text, parameter[name[key], name[text]]]
keyword[def] identifier[put_overlay] ( identifier[self] , identifier[overlay_name] , identifier[overlay] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[overlay_name] )) identifier[key] = identifier[self] . identifier[get_overlay_key] ( identifier[overlay_name] ) identifier[text] = identifier[json] . identifier[dumps] ( identifier[overlay] , identifier[indent] = literal[int] ) identifier[self] . identifier[put_text] ( identifier[key] , identifier[text] )
def put_overlay(self, overlay_name, overlay): """Store the overlay.""" logger.debug('Putting overlay: {}'.format(overlay_name)) key = self.get_overlay_key(overlay_name) text = json.dumps(overlay, indent=2) self.put_text(key, text)
def scope_required(*scopes): """ Test for specific scopes that the access token has been authenticated for before processing the request and eventual response. The scopes that are passed in determine how the decorator will respond to incoming requests: - If no scopes are passed in the arguments, the decorator will test for any available scopes and determine the response based on that. - If specific scopes are passed, the access token will be checked to make sure it has all of the scopes that were requested. This decorator will change the response if the access toke does not have the scope: - If an invalid scope is requested (one that does not exist), all requests will be denied, as no access tokens will be able to fulfill the scope request and the request will be denied. - If the access token does not have one of the requested scopes, the request will be denied and the user will be returned one of two responses: - A 400 response (Bad Request) will be returned if an unauthenticated user tries to access the resource. - A 403 response (Forbidden) will be returned if an authenticated user ties to access the resource but does not have the correct scope. 
""" def decorator(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): from django.http import HttpResponseBadRequest, HttpResponseForbidden from .exceptions.base import InvalidRequest, InsufficientScope from .models import Scope from .utils import request_error_header try: if not hasattr(request, "access_token"): raise CredentialsNotProvided() access_token = request.access_token for scope_name in scopes: try: scope = access_token.scope.for_short_name(scope_name) except Scope.DoesNotExist: raise ScopeNotEnough() except InvalidRequest as e: response = HttpResponseBadRequest() response["WWW-Authenticate"] = request_error_header(e) return response except InsufficientScope as e: response = HttpResponseForbidden() response["WWW-Authenticate"] = request_error_header(e) return response return view_func(request, *args, **kwargs) return _wrapped_view if scopes and hasattr(scopes[0], "__call__"): func = scopes[0] scopes = scopes[1:] return decorator(func) return decorator
def function[scope_required, parameter[]]: constant[ Test for specific scopes that the access token has been authenticated for before processing the request and eventual response. The scopes that are passed in determine how the decorator will respond to incoming requests: - If no scopes are passed in the arguments, the decorator will test for any available scopes and determine the response based on that. - If specific scopes are passed, the access token will be checked to make sure it has all of the scopes that were requested. This decorator will change the response if the access toke does not have the scope: - If an invalid scope is requested (one that does not exist), all requests will be denied, as no access tokens will be able to fulfill the scope request and the request will be denied. - If the access token does not have one of the requested scopes, the request will be denied and the user will be returned one of two responses: - A 400 response (Bad Request) will be returned if an unauthenticated user tries to access the resource. - A 403 response (Forbidden) will be returned if an authenticated user ties to access the resource but does not have the correct scope. 
] def function[decorator, parameter[view_func]]: def function[_wrapped_view, parameter[request]]: from relative_module[django.http] import module[HttpResponseBadRequest], module[HttpResponseForbidden] from relative_module[exceptions.base] import module[InvalidRequest], module[InsufficientScope] from relative_module[models] import module[Scope] from relative_module[utils] import module[request_error_header] <ast.Try object at 0x7da18fe928c0> return[call[name[view_func], parameter[name[request], <ast.Starred object at 0x7da18fe93970>]]] return[name[_wrapped_view]] if <ast.BoolOp object at 0x7da18fe92230> begin[:] variable[func] assign[=] call[name[scopes]][constant[0]] variable[scopes] assign[=] call[name[scopes]][<ast.Slice object at 0x7da18f720a00>] return[call[name[decorator], parameter[name[func]]]] return[name[decorator]]
keyword[def] identifier[scope_required] (* identifier[scopes] ): literal[string] keyword[def] identifier[decorator] ( identifier[view_func] ): @ identifier[wraps] ( identifier[view_func] , identifier[assigned] = identifier[available_attrs] ( identifier[view_func] )) keyword[def] identifier[_wrapped_view] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ): keyword[from] identifier[django] . identifier[http] keyword[import] identifier[HttpResponseBadRequest] , identifier[HttpResponseForbidden] keyword[from] . identifier[exceptions] . identifier[base] keyword[import] identifier[InvalidRequest] , identifier[InsufficientScope] keyword[from] . identifier[models] keyword[import] identifier[Scope] keyword[from] . identifier[utils] keyword[import] identifier[request_error_header] keyword[try] : keyword[if] keyword[not] identifier[hasattr] ( identifier[request] , literal[string] ): keyword[raise] identifier[CredentialsNotProvided] () identifier[access_token] = identifier[request] . identifier[access_token] keyword[for] identifier[scope_name] keyword[in] identifier[scopes] : keyword[try] : identifier[scope] = identifier[access_token] . identifier[scope] . identifier[for_short_name] ( identifier[scope_name] ) keyword[except] identifier[Scope] . 
identifier[DoesNotExist] : keyword[raise] identifier[ScopeNotEnough] () keyword[except] identifier[InvalidRequest] keyword[as] identifier[e] : identifier[response] = identifier[HttpResponseBadRequest] () identifier[response] [ literal[string] ]= identifier[request_error_header] ( identifier[e] ) keyword[return] identifier[response] keyword[except] identifier[InsufficientScope] keyword[as] identifier[e] : identifier[response] = identifier[HttpResponseForbidden] () identifier[response] [ literal[string] ]= identifier[request_error_header] ( identifier[e] ) keyword[return] identifier[response] keyword[return] identifier[view_func] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[_wrapped_view] keyword[if] identifier[scopes] keyword[and] identifier[hasattr] ( identifier[scopes] [ literal[int] ], literal[string] ): identifier[func] = identifier[scopes] [ literal[int] ] identifier[scopes] = identifier[scopes] [ literal[int] :] keyword[return] identifier[decorator] ( identifier[func] ) keyword[return] identifier[decorator]
def scope_required(*scopes): """ Test for specific scopes that the access token has been authenticated for before processing the request and eventual response. The scopes that are passed in determine how the decorator will respond to incoming requests: - If no scopes are passed in the arguments, the decorator will test for any available scopes and determine the response based on that. - If specific scopes are passed, the access token will be checked to make sure it has all of the scopes that were requested. This decorator will change the response if the access toke does not have the scope: - If an invalid scope is requested (one that does not exist), all requests will be denied, as no access tokens will be able to fulfill the scope request and the request will be denied. - If the access token does not have one of the requested scopes, the request will be denied and the user will be returned one of two responses: - A 400 response (Bad Request) will be returned if an unauthenticated user tries to access the resource. - A 403 response (Forbidden) will be returned if an authenticated user ties to access the resource but does not have the correct scope. 
""" def decorator(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): from django.http import HttpResponseBadRequest, HttpResponseForbidden from .exceptions.base import InvalidRequest, InsufficientScope from .models import Scope from .utils import request_error_header try: if not hasattr(request, 'access_token'): raise CredentialsNotProvided() # depends on [control=['if'], data=[]] access_token = request.access_token for scope_name in scopes: try: scope = access_token.scope.for_short_name(scope_name) # depends on [control=['try'], data=[]] except Scope.DoesNotExist: raise ScopeNotEnough() # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['scope_name']] # depends on [control=['try'], data=[]] except InvalidRequest as e: response = HttpResponseBadRequest() response['WWW-Authenticate'] = request_error_header(e) return response # depends on [control=['except'], data=['e']] except InsufficientScope as e: response = HttpResponseForbidden() response['WWW-Authenticate'] = request_error_header(e) return response # depends on [control=['except'], data=['e']] return view_func(request, *args, **kwargs) return _wrapped_view if scopes and hasattr(scopes[0], '__call__'): func = scopes[0] scopes = scopes[1:] return decorator(func) # depends on [control=['if'], data=[]] return decorator
def copy_analysis_dict( analysis ): ''' Creates a copy from given analysis dict. ''' assert isinstance(analysis, dict), "(!) Input 'analysis' should be a dict!" new_dict = { POSTAG: analysis[POSTAG],\ ROOT: analysis[ROOT],\ FORM: analysis[FORM],\ CLITIC: analysis[CLITIC],\ ENDING: analysis[ENDING] } if LEMMA in analysis: new_dict[LEMMA] = analysis[LEMMA] if ROOT_TOKENS in analysis: new_dict[ROOT_TOKENS] = analysis[ROOT_TOKENS] return new_dict
def function[copy_analysis_dict, parameter[analysis]]: constant[ Creates a copy from given analysis dict. ] assert[call[name[isinstance], parameter[name[analysis], name[dict]]]] variable[new_dict] assign[=] dictionary[[<ast.Name object at 0x7da18dc04be0>, <ast.Name object at 0x7da18dc05960>, <ast.Name object at 0x7da18dc06200>, <ast.Name object at 0x7da18dc06920>, <ast.Name object at 0x7da18dc049a0>], [<ast.Subscript object at 0x7da18dc044f0>, <ast.Subscript object at 0x7da18dc05750>, <ast.Subscript object at 0x7da18dc06350>, <ast.Subscript object at 0x7da18dc050c0>, <ast.Subscript object at 0x7da18dc05270>]] if compare[name[LEMMA] in name[analysis]] begin[:] call[name[new_dict]][name[LEMMA]] assign[=] call[name[analysis]][name[LEMMA]] if compare[name[ROOT_TOKENS] in name[analysis]] begin[:] call[name[new_dict]][name[ROOT_TOKENS]] assign[=] call[name[analysis]][name[ROOT_TOKENS]] return[name[new_dict]]
keyword[def] identifier[copy_analysis_dict] ( identifier[analysis] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[analysis] , identifier[dict] ), literal[string] identifier[new_dict] ={ identifier[POSTAG] : identifier[analysis] [ identifier[POSTAG] ], identifier[ROOT] : identifier[analysis] [ identifier[ROOT] ], identifier[FORM] : identifier[analysis] [ identifier[FORM] ], identifier[CLITIC] : identifier[analysis] [ identifier[CLITIC] ], identifier[ENDING] : identifier[analysis] [ identifier[ENDING] ]} keyword[if] identifier[LEMMA] keyword[in] identifier[analysis] : identifier[new_dict] [ identifier[LEMMA] ]= identifier[analysis] [ identifier[LEMMA] ] keyword[if] identifier[ROOT_TOKENS] keyword[in] identifier[analysis] : identifier[new_dict] [ identifier[ROOT_TOKENS] ]= identifier[analysis] [ identifier[ROOT_TOKENS] ] keyword[return] identifier[new_dict]
def copy_analysis_dict(analysis): """ Creates a copy from given analysis dict. """ assert isinstance(analysis, dict), "(!) Input 'analysis' should be a dict!" new_dict = {POSTAG: analysis[POSTAG], ROOT: analysis[ROOT], FORM: analysis[FORM], CLITIC: analysis[CLITIC], ENDING: analysis[ENDING]} if LEMMA in analysis: new_dict[LEMMA] = analysis[LEMMA] # depends on [control=['if'], data=['LEMMA', 'analysis']] if ROOT_TOKENS in analysis: new_dict[ROOT_TOKENS] = analysis[ROOT_TOKENS] # depends on [control=['if'], data=['ROOT_TOKENS', 'analysis']] return new_dict
def non_structured_query(table, query=None, **kwargs): ''' Run a non-structed (not a dict) query on a servicenow table. See http://wiki.servicenow.com/index.php?title=Encoded_Query_Strings#gsc.tab=0 for help on constructing a non-structured query string. :param table: The table name, e.g. sys_user :type table: ``str`` :param query: The query to run (or use keyword arguments to filter data) :type query: ``str`` CLI Example: .. code-block:: bash salt myminion servicenow.non_structured_query sys_computer 'role=web' salt myminion servicenow.non_structured_query sys_computer role=web type=computer ''' client = _get_client() client.table = table # underlying lib doesn't use six or past.basestring, # does isinstance(x, str) # http://bit.ly/1VkMmpE if query is None: # try and assemble a query by keyword query_parts = [] for key, value in kwargs.items(): query_parts.append('{0}={1}'.format(key, value)) query = '^'.join(query_parts) query = six.text_type(query) response = client.get(query) return response
def function[non_structured_query, parameter[table, query]]: constant[ Run a non-structed (not a dict) query on a servicenow table. See http://wiki.servicenow.com/index.php?title=Encoded_Query_Strings#gsc.tab=0 for help on constructing a non-structured query string. :param table: The table name, e.g. sys_user :type table: ``str`` :param query: The query to run (or use keyword arguments to filter data) :type query: ``str`` CLI Example: .. code-block:: bash salt myminion servicenow.non_structured_query sys_computer 'role=web' salt myminion servicenow.non_structured_query sys_computer role=web type=computer ] variable[client] assign[=] call[name[_get_client], parameter[]] name[client].table assign[=] name[table] if compare[name[query] is constant[None]] begin[:] variable[query_parts] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18bc730a0>, <ast.Name object at 0x7da18bc72bc0>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:] call[name[query_parts].append, parameter[call[constant[{0}={1}].format, parameter[name[key], name[value]]]]] variable[query] assign[=] call[constant[^].join, parameter[name[query_parts]]] variable[query] assign[=] call[name[six].text_type, parameter[name[query]]] variable[response] assign[=] call[name[client].get, parameter[name[query]]] return[name[response]]
keyword[def] identifier[non_structured_query] ( identifier[table] , identifier[query] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[client] = identifier[_get_client] () identifier[client] . identifier[table] = identifier[table] keyword[if] identifier[query] keyword[is] keyword[None] : identifier[query_parts] =[] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[kwargs] . identifier[items] (): identifier[query_parts] . identifier[append] ( literal[string] . identifier[format] ( identifier[key] , identifier[value] )) identifier[query] = literal[string] . identifier[join] ( identifier[query_parts] ) identifier[query] = identifier[six] . identifier[text_type] ( identifier[query] ) identifier[response] = identifier[client] . identifier[get] ( identifier[query] ) keyword[return] identifier[response]
def non_structured_query(table, query=None, **kwargs): """ Run a non-structed (not a dict) query on a servicenow table. See http://wiki.servicenow.com/index.php?title=Encoded_Query_Strings#gsc.tab=0 for help on constructing a non-structured query string. :param table: The table name, e.g. sys_user :type table: ``str`` :param query: The query to run (or use keyword arguments to filter data) :type query: ``str`` CLI Example: .. code-block:: bash salt myminion servicenow.non_structured_query sys_computer 'role=web' salt myminion servicenow.non_structured_query sys_computer role=web type=computer """ client = _get_client() client.table = table # underlying lib doesn't use six or past.basestring, # does isinstance(x, str) # http://bit.ly/1VkMmpE if query is None: # try and assemble a query by keyword query_parts = [] for (key, value) in kwargs.items(): query_parts.append('{0}={1}'.format(key, value)) # depends on [control=['for'], data=[]] query = '^'.join(query_parts) # depends on [control=['if'], data=['query']] query = six.text_type(query) response = client.get(query) return response
def plot_separate(self): """ plot the different model components separately :return: """ f, axes = plt.subplots(2, 3, figsize=(16, 8)) self.decomposition_plot(ax=axes[0, 0], text='Lens light', lens_light_add=True, unconvolved=True) self.decomposition_plot(ax=axes[1, 0], text='Lens light convolved', lens_light_add=True) self.decomposition_plot(ax=axes[0, 1], text='Source light', source_add=True, unconvolved=True) self.decomposition_plot(ax=axes[1, 1], text='Source light convolved', source_add=True) self.decomposition_plot(ax=axes[0, 2], text='All components', source_add=True, lens_light_add=True, unconvolved=True) self.decomposition_plot(ax=axes[1, 2], text='All components convolved', source_add=True, lens_light_add=True, point_source_add=True) f.tight_layout() f.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0., hspace=0.05) return f, axes
def function[plot_separate, parameter[self]]: constant[ plot the different model components separately :return: ] <ast.Tuple object at 0x7da20c6e6380> assign[=] call[name[plt].subplots, parameter[constant[2], constant[3]]] call[name[self].decomposition_plot, parameter[]] call[name[self].decomposition_plot, parameter[]] call[name[self].decomposition_plot, parameter[]] call[name[self].decomposition_plot, parameter[]] call[name[self].decomposition_plot, parameter[]] call[name[self].decomposition_plot, parameter[]] call[name[f].tight_layout, parameter[]] call[name[f].subplots_adjust, parameter[]] return[tuple[[<ast.Name object at 0x7da1b04a7910>, <ast.Name object at 0x7da1b04a7820>]]]
keyword[def] identifier[plot_separate] ( identifier[self] ): literal[string] identifier[f] , identifier[axes] = identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[figsize] =( literal[int] , literal[int] )) identifier[self] . identifier[decomposition_plot] ( identifier[ax] = identifier[axes] [ literal[int] , literal[int] ], identifier[text] = literal[string] , identifier[lens_light_add] = keyword[True] , identifier[unconvolved] = keyword[True] ) identifier[self] . identifier[decomposition_plot] ( identifier[ax] = identifier[axes] [ literal[int] , literal[int] ], identifier[text] = literal[string] , identifier[lens_light_add] = keyword[True] ) identifier[self] . identifier[decomposition_plot] ( identifier[ax] = identifier[axes] [ literal[int] , literal[int] ], identifier[text] = literal[string] , identifier[source_add] = keyword[True] , identifier[unconvolved] = keyword[True] ) identifier[self] . identifier[decomposition_plot] ( identifier[ax] = identifier[axes] [ literal[int] , literal[int] ], identifier[text] = literal[string] , identifier[source_add] = keyword[True] ) identifier[self] . identifier[decomposition_plot] ( identifier[ax] = identifier[axes] [ literal[int] , literal[int] ], identifier[text] = literal[string] , identifier[source_add] = keyword[True] , identifier[lens_light_add] = keyword[True] , identifier[unconvolved] = keyword[True] ) identifier[self] . identifier[decomposition_plot] ( identifier[ax] = identifier[axes] [ literal[int] , literal[int] ], identifier[text] = literal[string] , identifier[source_add] = keyword[True] , identifier[lens_light_add] = keyword[True] , identifier[point_source_add] = keyword[True] ) identifier[f] . identifier[tight_layout] () identifier[f] . 
identifier[subplots_adjust] ( identifier[left] = keyword[None] , identifier[bottom] = keyword[None] , identifier[right] = keyword[None] , identifier[top] = keyword[None] , identifier[wspace] = literal[int] , identifier[hspace] = literal[int] ) keyword[return] identifier[f] , identifier[axes]
def plot_separate(self): """ plot the different model components separately :return: """ (f, axes) = plt.subplots(2, 3, figsize=(16, 8)) self.decomposition_plot(ax=axes[0, 0], text='Lens light', lens_light_add=True, unconvolved=True) self.decomposition_plot(ax=axes[1, 0], text='Lens light convolved', lens_light_add=True) self.decomposition_plot(ax=axes[0, 1], text='Source light', source_add=True, unconvolved=True) self.decomposition_plot(ax=axes[1, 1], text='Source light convolved', source_add=True) self.decomposition_plot(ax=axes[0, 2], text='All components', source_add=True, lens_light_add=True, unconvolved=True) self.decomposition_plot(ax=axes[1, 2], text='All components convolved', source_add=True, lens_light_add=True, point_source_add=True) f.tight_layout() f.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.0, hspace=0.05) return (f, axes)
def _querystring(self): """Get additional keyword arguments""" kw = {} for key in self.KNOWN_QUERY_OPTIONS: val = getattr(self, key) if val is not None: kw[key] = val return kw
def function[_querystring, parameter[self]]: constant[Get additional keyword arguments] variable[kw] assign[=] dictionary[[], []] for taget[name[key]] in starred[name[self].KNOWN_QUERY_OPTIONS] begin[:] variable[val] assign[=] call[name[getattr], parameter[name[self], name[key]]] if compare[name[val] is_not constant[None]] begin[:] call[name[kw]][name[key]] assign[=] name[val] return[name[kw]]
keyword[def] identifier[_querystring] ( identifier[self] ): literal[string] identifier[kw] ={} keyword[for] identifier[key] keyword[in] identifier[self] . identifier[KNOWN_QUERY_OPTIONS] : identifier[val] = identifier[getattr] ( identifier[self] , identifier[key] ) keyword[if] identifier[val] keyword[is] keyword[not] keyword[None] : identifier[kw] [ identifier[key] ]= identifier[val] keyword[return] identifier[kw]
def _querystring(self): """Get additional keyword arguments""" kw = {} for key in self.KNOWN_QUERY_OPTIONS: val = getattr(self, key) if val is not None: kw[key] = val # depends on [control=['if'], data=['val']] # depends on [control=['for'], data=['key']] return kw
def parse_ini(self, paths=None, namespace=None, permissive=False): """Parse config files and return configuration options. Expects array of files that are in ini format. :param paths: List of paths to files to parse (uses ConfigParse logic). If not supplied, uses the ini_paths value supplied on initialization. """ namespace = namespace or self.prog results = {} # DeprecationWarning: SafeConfigParser has been renamed to ConfigParser # in Python 3.2. This alias will be removed in future versions. Use # ConfigParser directly instead. if sys.version_info < (3, 2): self.ini_config = configparser.SafeConfigParser() else: self.ini_config = configparser.ConfigParser() parser_errors = (configparser.NoOptionError, configparser.NoSectionError) inipaths = list(paths or reversed(self._ini_paths)) # check that explicitly defined ini paths exist for pth in inipaths: if not os.path.isfile(pth): raise OSError(errno.ENOENT, 'No such file or directory', pth) read_ok = self.ini_config.read(inipaths) assert read_ok == inipaths dicts = (list(self.ini_config._sections.values()) + [self.ini_config.defaults()]) ini_options = {k for d in dicts for k in d.keys() if k != '__name__'} if not ini_options: return results for option in self._options: ini_section = option.kwargs.get('ini_section') value = None if ini_section: try: value = self.ini_config.get(ini_section, option.name) results[option.dest] = option.type(value) except parser_errors as err: # this is an ERROR and the next one is a DEBUG b/c # this code is executed only if the Option is defined # with the ini_section keyword argument LOG.error('Error parsing ini file: %r -- Continuing.', err) if not value: try: value = self.ini_config.get(namespace, option.name) results[option.dest] = option.type(value) except parser_errors as err: LOG.debug('Error parsing ini file: %r -- Continuing.', err) if option.dest in results: ini_options.remove(option.dest) if ini_options and not permissive: raise simpl.exceptions.SimplConfigUnknownOption( 'No 
corresponding Option was found for the following ' 'values in the ini file: %s' % ', '.join(["'%s'" % o for o in ini_options])) return results
def function[parse_ini, parameter[self, paths, namespace, permissive]]: constant[Parse config files and return configuration options. Expects array of files that are in ini format. :param paths: List of paths to files to parse (uses ConfigParse logic). If not supplied, uses the ini_paths value supplied on initialization. ] variable[namespace] assign[=] <ast.BoolOp object at 0x7da1b09b8d90> variable[results] assign[=] dictionary[[], []] if compare[name[sys].version_info less[<] tuple[[<ast.Constant object at 0x7da1b09bbb50>, <ast.Constant object at 0x7da1b09bafe0>]]] begin[:] name[self].ini_config assign[=] call[name[configparser].SafeConfigParser, parameter[]] variable[parser_errors] assign[=] tuple[[<ast.Attribute object at 0x7da1b09ba320>, <ast.Attribute object at 0x7da1b09ba8c0>]] variable[inipaths] assign[=] call[name[list], parameter[<ast.BoolOp object at 0x7da1b09b9f30>]] for taget[name[pth]] in starred[name[inipaths]] begin[:] if <ast.UnaryOp object at 0x7da1b09bae00> begin[:] <ast.Raise object at 0x7da1b09b9ea0> variable[read_ok] assign[=] call[name[self].ini_config.read, parameter[name[inipaths]]] assert[compare[name[read_ok] equal[==] name[inipaths]]] variable[dicts] assign[=] binary_operation[call[name[list], parameter[call[name[self].ini_config._sections.values, parameter[]]]] + list[[<ast.Call object at 0x7da1b09bb9a0>]]] variable[ini_options] assign[=] <ast.SetComp object at 0x7da1b0a49e40> if <ast.UnaryOp object at 0x7da1b0a49330> begin[:] return[name[results]] for taget[name[option]] in starred[name[self]._options] begin[:] variable[ini_section] assign[=] call[name[option].kwargs.get, parameter[constant[ini_section]]] variable[value] assign[=] constant[None] if name[ini_section] begin[:] <ast.Try object at 0x7da1b0966680> if <ast.UnaryOp object at 0x7da18bcc8250> begin[:] <ast.Try object at 0x7da18bcc81f0> if compare[name[option].dest in name[results]] begin[:] call[name[ini_options].remove, parameter[name[option].dest]] if <ast.BoolOp object at 
0x7da1b09d34c0> begin[:] <ast.Raise object at 0x7da1b09d28f0> return[name[results]]
keyword[def] identifier[parse_ini] ( identifier[self] , identifier[paths] = keyword[None] , identifier[namespace] = keyword[None] , identifier[permissive] = keyword[False] ): literal[string] identifier[namespace] = identifier[namespace] keyword[or] identifier[self] . identifier[prog] identifier[results] ={} keyword[if] identifier[sys] . identifier[version_info] <( literal[int] , literal[int] ): identifier[self] . identifier[ini_config] = identifier[configparser] . identifier[SafeConfigParser] () keyword[else] : identifier[self] . identifier[ini_config] = identifier[configparser] . identifier[ConfigParser] () identifier[parser_errors] =( identifier[configparser] . identifier[NoOptionError] , identifier[configparser] . identifier[NoSectionError] ) identifier[inipaths] = identifier[list] ( identifier[paths] keyword[or] identifier[reversed] ( identifier[self] . identifier[_ini_paths] )) keyword[for] identifier[pth] keyword[in] identifier[inipaths] : keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[pth] ): keyword[raise] identifier[OSError] ( identifier[errno] . identifier[ENOENT] , literal[string] , identifier[pth] ) identifier[read_ok] = identifier[self] . identifier[ini_config] . identifier[read] ( identifier[inipaths] ) keyword[assert] identifier[read_ok] == identifier[inipaths] identifier[dicts] =( identifier[list] ( identifier[self] . identifier[ini_config] . identifier[_sections] . identifier[values] ())+ [ identifier[self] . identifier[ini_config] . identifier[defaults] ()]) identifier[ini_options] ={ identifier[k] keyword[for] identifier[d] keyword[in] identifier[dicts] keyword[for] identifier[k] keyword[in] identifier[d] . identifier[keys] () keyword[if] identifier[k] != literal[string] } keyword[if] keyword[not] identifier[ini_options] : keyword[return] identifier[results] keyword[for] identifier[option] keyword[in] identifier[self] . identifier[_options] : identifier[ini_section] = identifier[option] . 
identifier[kwargs] . identifier[get] ( literal[string] ) identifier[value] = keyword[None] keyword[if] identifier[ini_section] : keyword[try] : identifier[value] = identifier[self] . identifier[ini_config] . identifier[get] ( identifier[ini_section] , identifier[option] . identifier[name] ) identifier[results] [ identifier[option] . identifier[dest] ]= identifier[option] . identifier[type] ( identifier[value] ) keyword[except] identifier[parser_errors] keyword[as] identifier[err] : identifier[LOG] . identifier[error] ( literal[string] , identifier[err] ) keyword[if] keyword[not] identifier[value] : keyword[try] : identifier[value] = identifier[self] . identifier[ini_config] . identifier[get] ( identifier[namespace] , identifier[option] . identifier[name] ) identifier[results] [ identifier[option] . identifier[dest] ]= identifier[option] . identifier[type] ( identifier[value] ) keyword[except] identifier[parser_errors] keyword[as] identifier[err] : identifier[LOG] . identifier[debug] ( literal[string] , identifier[err] ) keyword[if] identifier[option] . identifier[dest] keyword[in] identifier[results] : identifier[ini_options] . identifier[remove] ( identifier[option] . identifier[dest] ) keyword[if] identifier[ini_options] keyword[and] keyword[not] identifier[permissive] : keyword[raise] identifier[simpl] . identifier[exceptions] . identifier[SimplConfigUnknownOption] ( literal[string] literal[string] % literal[string] . identifier[join] ([ literal[string] % identifier[o] keyword[for] identifier[o] keyword[in] identifier[ini_options] ])) keyword[return] identifier[results]
def parse_ini(self, paths=None, namespace=None, permissive=False): """Parse config files and return configuration options. Expects array of files that are in ini format. :param paths: List of paths to files to parse (uses ConfigParse logic). If not supplied, uses the ini_paths value supplied on initialization. """ namespace = namespace or self.prog results = {} # DeprecationWarning: SafeConfigParser has been renamed to ConfigParser # in Python 3.2. This alias will be removed in future versions. Use # ConfigParser directly instead. if sys.version_info < (3, 2): self.ini_config = configparser.SafeConfigParser() # depends on [control=['if'], data=[]] else: self.ini_config = configparser.ConfigParser() parser_errors = (configparser.NoOptionError, configparser.NoSectionError) inipaths = list(paths or reversed(self._ini_paths)) # check that explicitly defined ini paths exist for pth in inipaths: if not os.path.isfile(pth): raise OSError(errno.ENOENT, 'No such file or directory', pth) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pth']] read_ok = self.ini_config.read(inipaths) assert read_ok == inipaths dicts = list(self.ini_config._sections.values()) + [self.ini_config.defaults()] ini_options = {k for d in dicts for k in d.keys() if k != '__name__'} if not ini_options: return results # depends on [control=['if'], data=[]] for option in self._options: ini_section = option.kwargs.get('ini_section') value = None if ini_section: try: value = self.ini_config.get(ini_section, option.name) results[option.dest] = option.type(value) # depends on [control=['try'], data=[]] except parser_errors as err: # this is an ERROR and the next one is a DEBUG b/c # this code is executed only if the Option is defined # with the ini_section keyword argument LOG.error('Error parsing ini file: %r -- Continuing.', err) # depends on [control=['except'], data=['err']] # depends on [control=['if'], data=[]] if not value: try: value = self.ini_config.get(namespace, 
option.name) results[option.dest] = option.type(value) # depends on [control=['try'], data=[]] except parser_errors as err: LOG.debug('Error parsing ini file: %r -- Continuing.', err) # depends on [control=['except'], data=['err']] # depends on [control=['if'], data=[]] if option.dest in results: ini_options.remove(option.dest) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['option']] if ini_options and (not permissive): raise simpl.exceptions.SimplConfigUnknownOption('No corresponding Option was found for the following values in the ini file: %s' % ', '.join(["'%s'" % o for o in ini_options])) # depends on [control=['if'], data=[]] return results
def write_byte(self, addr, val): """Write a single byte to the specified device.""" assert self._device is not None, 'Bus must be opened before operations are made against it!' self._select_device(addr) data = bytearray(1) data[0] = val & 0xFF self._device.write(data)
def function[write_byte, parameter[self, addr, val]]: constant[Write a single byte to the specified device.] assert[compare[name[self]._device is_not constant[None]]] call[name[self]._select_device, parameter[name[addr]]] variable[data] assign[=] call[name[bytearray], parameter[constant[1]]] call[name[data]][constant[0]] assign[=] binary_operation[name[val] <ast.BitAnd object at 0x7da2590d6b60> constant[255]] call[name[self]._device.write, parameter[name[data]]]
keyword[def] identifier[write_byte] ( identifier[self] , identifier[addr] , identifier[val] ): literal[string] keyword[assert] identifier[self] . identifier[_device] keyword[is] keyword[not] keyword[None] , literal[string] identifier[self] . identifier[_select_device] ( identifier[addr] ) identifier[data] = identifier[bytearray] ( literal[int] ) identifier[data] [ literal[int] ]= identifier[val] & literal[int] identifier[self] . identifier[_device] . identifier[write] ( identifier[data] )
def write_byte(self, addr, val): """Write a single byte to the specified device.""" assert self._device is not None, 'Bus must be opened before operations are made against it!' self._select_device(addr) data = bytearray(1) data[0] = val & 255 self._device.write(data)
def disable_scanners_by_group(self, group): """ Disables the scanners in the group if it matches one in the scanner_group_map. """ if group == 'all': self.logger.debug('Disabling all scanners') return self.zap.ascan.disable_all_scanners() try: scanner_list = self.scanner_group_map[group] except KeyError: raise ZAPError( 'Invalid group "{0}" provided. Valid groups are: {1}'.format( group, ', '.join(self.scanner_groups) ) ) self.logger.debug('Disabling scanner group {0}'.format(group)) return self.disable_scanners_by_ids(scanner_list)
def function[disable_scanners_by_group, parameter[self, group]]: constant[ Disables the scanners in the group if it matches one in the scanner_group_map. ] if compare[name[group] equal[==] constant[all]] begin[:] call[name[self].logger.debug, parameter[constant[Disabling all scanners]]] return[call[name[self].zap.ascan.disable_all_scanners, parameter[]]] <ast.Try object at 0x7da2041d89a0> call[name[self].logger.debug, parameter[call[constant[Disabling scanner group {0}].format, parameter[name[group]]]]] return[call[name[self].disable_scanners_by_ids, parameter[name[scanner_list]]]]
keyword[def] identifier[disable_scanners_by_group] ( identifier[self] , identifier[group] ): literal[string] keyword[if] identifier[group] == literal[string] : identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) keyword[return] identifier[self] . identifier[zap] . identifier[ascan] . identifier[disable_all_scanners] () keyword[try] : identifier[scanner_list] = identifier[self] . identifier[scanner_group_map] [ identifier[group] ] keyword[except] identifier[KeyError] : keyword[raise] identifier[ZAPError] ( literal[string] . identifier[format] ( identifier[group] , literal[string] . identifier[join] ( identifier[self] . identifier[scanner_groups] ) ) ) identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[group] )) keyword[return] identifier[self] . identifier[disable_scanners_by_ids] ( identifier[scanner_list] )
def disable_scanners_by_group(self, group): """ Disables the scanners in the group if it matches one in the scanner_group_map. """ if group == 'all': self.logger.debug('Disabling all scanners') return self.zap.ascan.disable_all_scanners() # depends on [control=['if'], data=[]] try: scanner_list = self.scanner_group_map[group] # depends on [control=['try'], data=[]] except KeyError: raise ZAPError('Invalid group "{0}" provided. Valid groups are: {1}'.format(group, ', '.join(self.scanner_groups))) # depends on [control=['except'], data=[]] self.logger.debug('Disabling scanner group {0}'.format(group)) return self.disable_scanners_by_ids(scanner_list)
def comparison(self): """ comparison: expr (('==' | '!=' | '<=' | '>=' | '<' | '>') expr)* """ node = self.expr() while self.token.nature in ( Nature.EQ, Nature.NE, Nature.LE, Nature.GE, Nature.LT, Nature.GT, ): token = self.token if token.nature == Nature.EQ: self._process(Nature.EQ) elif token.nature == Nature.NE: self._process(Nature.NE) elif token.nature == Nature.LE: self._process(Nature.LE) elif token.nature == Nature.GE: self._process(Nature.GE) elif token.nature == Nature.LT: self._process(Nature.LT) elif token.nature == Nature.GT: self._process(Nature.GT) else: self.error() node = BinaryOperation(left=node, op=token, right=self.expr()) return node
def function[comparison, parameter[self]]: constant[ comparison: expr (('==' | '!=' | '<=' | '>=' | '<' | '>') expr)* ] variable[node] assign[=] call[name[self].expr, parameter[]] while compare[name[self].token.nature in tuple[[<ast.Attribute object at 0x7da18f722f80>, <ast.Attribute object at 0x7da18f723550>, <ast.Attribute object at 0x7da18f7234f0>, <ast.Attribute object at 0x7da18f723bb0>, <ast.Attribute object at 0x7da18f723700>, <ast.Attribute object at 0x7da18f7225f0>]]] begin[:] variable[token] assign[=] name[self].token if compare[name[token].nature equal[==] name[Nature].EQ] begin[:] call[name[self]._process, parameter[name[Nature].EQ]] variable[node] assign[=] call[name[BinaryOperation], parameter[]] return[name[node]]
keyword[def] identifier[comparison] ( identifier[self] ): literal[string] identifier[node] = identifier[self] . identifier[expr] () keyword[while] identifier[self] . identifier[token] . identifier[nature] keyword[in] ( identifier[Nature] . identifier[EQ] , identifier[Nature] . identifier[NE] , identifier[Nature] . identifier[LE] , identifier[Nature] . identifier[GE] , identifier[Nature] . identifier[LT] , identifier[Nature] . identifier[GT] , ): identifier[token] = identifier[self] . identifier[token] keyword[if] identifier[token] . identifier[nature] == identifier[Nature] . identifier[EQ] : identifier[self] . identifier[_process] ( identifier[Nature] . identifier[EQ] ) keyword[elif] identifier[token] . identifier[nature] == identifier[Nature] . identifier[NE] : identifier[self] . identifier[_process] ( identifier[Nature] . identifier[NE] ) keyword[elif] identifier[token] . identifier[nature] == identifier[Nature] . identifier[LE] : identifier[self] . identifier[_process] ( identifier[Nature] . identifier[LE] ) keyword[elif] identifier[token] . identifier[nature] == identifier[Nature] . identifier[GE] : identifier[self] . identifier[_process] ( identifier[Nature] . identifier[GE] ) keyword[elif] identifier[token] . identifier[nature] == identifier[Nature] . identifier[LT] : identifier[self] . identifier[_process] ( identifier[Nature] . identifier[LT] ) keyword[elif] identifier[token] . identifier[nature] == identifier[Nature] . identifier[GT] : identifier[self] . identifier[_process] ( identifier[Nature] . identifier[GT] ) keyword[else] : identifier[self] . identifier[error] () identifier[node] = identifier[BinaryOperation] ( identifier[left] = identifier[node] , identifier[op] = identifier[token] , identifier[right] = identifier[self] . identifier[expr] ()) keyword[return] identifier[node]
def comparison(self): """ comparison: expr (('==' | '!=' | '<=' | '>=' | '<' | '>') expr)* """ node = self.expr() while self.token.nature in (Nature.EQ, Nature.NE, Nature.LE, Nature.GE, Nature.LT, Nature.GT): token = self.token if token.nature == Nature.EQ: self._process(Nature.EQ) # depends on [control=['if'], data=[]] elif token.nature == Nature.NE: self._process(Nature.NE) # depends on [control=['if'], data=[]] elif token.nature == Nature.LE: self._process(Nature.LE) # depends on [control=['if'], data=[]] elif token.nature == Nature.GE: self._process(Nature.GE) # depends on [control=['if'], data=[]] elif token.nature == Nature.LT: self._process(Nature.LT) # depends on [control=['if'], data=[]] elif token.nature == Nature.GT: self._process(Nature.GT) # depends on [control=['if'], data=[]] else: self.error() node = BinaryOperation(left=node, op=token, right=self.expr()) # depends on [control=['while'], data=[]] return node
def get_encoding_from_reponse(r): """获取requests库get或post返回的对象编码 Args: r: requests库get或post返回的对象 Returns: 对象编码 """ encoding = requests.utils.get_encodings_from_content(r.text) return encoding[0] if encoding else requests.utils.get_encoding_from_headers(r.headers)
def function[get_encoding_from_reponse, parameter[r]]: constant[获取requests库get或post返回的对象编码 Args: r: requests库get或post返回的对象 Returns: 对象编码 ] variable[encoding] assign[=] call[name[requests].utils.get_encodings_from_content, parameter[name[r].text]] return[<ast.IfExp object at 0x7da1b2043f40>]
keyword[def] identifier[get_encoding_from_reponse] ( identifier[r] ): literal[string] identifier[encoding] = identifier[requests] . identifier[utils] . identifier[get_encodings_from_content] ( identifier[r] . identifier[text] ) keyword[return] identifier[encoding] [ literal[int] ] keyword[if] identifier[encoding] keyword[else] identifier[requests] . identifier[utils] . identifier[get_encoding_from_headers] ( identifier[r] . identifier[headers] )
def get_encoding_from_reponse(r): """获取requests库get或post返回的对象编码 Args: r: requests库get或post返回的对象 Returns: 对象编码 """ encoding = requests.utils.get_encodings_from_content(r.text) return encoding[0] if encoding else requests.utils.get_encoding_from_headers(r.headers)
def query_tag_values(self, metric_type=None, **tags): """ Query for possible tag values. :param metric_type: A MetricType to be queried. If left to None, matches all the MetricTypes :param tags: A dict of tag key/value pairs. Uses Hawkular-Metrics tag query language for syntax """ tagql = self._transform_tags(**tags) return self._get(self._get_metrics_tags_url(self._get_url(metric_type)) + '/{}'.format(tagql))
def function[query_tag_values, parameter[self, metric_type]]: constant[ Query for possible tag values. :param metric_type: A MetricType to be queried. If left to None, matches all the MetricTypes :param tags: A dict of tag key/value pairs. Uses Hawkular-Metrics tag query language for syntax ] variable[tagql] assign[=] call[name[self]._transform_tags, parameter[]] return[call[name[self]._get, parameter[binary_operation[call[name[self]._get_metrics_tags_url, parameter[call[name[self]._get_url, parameter[name[metric_type]]]]] + call[constant[/{}].format, parameter[name[tagql]]]]]]]
keyword[def] identifier[query_tag_values] ( identifier[self] , identifier[metric_type] = keyword[None] ,** identifier[tags] ): literal[string] identifier[tagql] = identifier[self] . identifier[_transform_tags] (** identifier[tags] ) keyword[return] identifier[self] . identifier[_get] ( identifier[self] . identifier[_get_metrics_tags_url] ( identifier[self] . identifier[_get_url] ( identifier[metric_type] ))+ literal[string] . identifier[format] ( identifier[tagql] ))
def query_tag_values(self, metric_type=None, **tags): """ Query for possible tag values. :param metric_type: A MetricType to be queried. If left to None, matches all the MetricTypes :param tags: A dict of tag key/value pairs. Uses Hawkular-Metrics tag query language for syntax """ tagql = self._transform_tags(**tags) return self._get(self._get_metrics_tags_url(self._get_url(metric_type)) + '/{}'.format(tagql))
def get_metrics(self, name=None): """Get metrics for this operator. Args: name(str, optional): Only return metrics matching `name`, where `name` can be a regular expression. If `name` is not supplied, then all metrics for this operator are returned. Returns: list(Metric): List of matching metrics. Retrieving a list of metrics whose name contains the string "temperatureSensor" could be performed as followed Example: >>> from streamsx import rest >>> sc = rest.StreamingAnalyticsConnection() >>> instances = sc.get_instances() >>> operator = instances[0].get_operators()[0] >>> metrics = op.get_metrics(name='*temperatureSensor*') """ return self._get_elements(self.metrics, 'metrics', Metric, name=name)
def function[get_metrics, parameter[self, name]]: constant[Get metrics for this operator. Args: name(str, optional): Only return metrics matching `name`, where `name` can be a regular expression. If `name` is not supplied, then all metrics for this operator are returned. Returns: list(Metric): List of matching metrics. Retrieving a list of metrics whose name contains the string "temperatureSensor" could be performed as followed Example: >>> from streamsx import rest >>> sc = rest.StreamingAnalyticsConnection() >>> instances = sc.get_instances() >>> operator = instances[0].get_operators()[0] >>> metrics = op.get_metrics(name='*temperatureSensor*') ] return[call[name[self]._get_elements, parameter[name[self].metrics, constant[metrics], name[Metric]]]]
keyword[def] identifier[get_metrics] ( identifier[self] , identifier[name] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[_get_elements] ( identifier[self] . identifier[metrics] , literal[string] , identifier[Metric] , identifier[name] = identifier[name] )
def get_metrics(self, name=None): """Get metrics for this operator. Args: name(str, optional): Only return metrics matching `name`, where `name` can be a regular expression. If `name` is not supplied, then all metrics for this operator are returned. Returns: list(Metric): List of matching metrics. Retrieving a list of metrics whose name contains the string "temperatureSensor" could be performed as followed Example: >>> from streamsx import rest >>> sc = rest.StreamingAnalyticsConnection() >>> instances = sc.get_instances() >>> operator = instances[0].get_operators()[0] >>> metrics = op.get_metrics(name='*temperatureSensor*') """ return self._get_elements(self.metrics, 'metrics', Metric, name=name)
def get_instance(self, instance, project_id=None): """ Retrieves a resource containing information about a Cloud SQL instance. :param instance: Database instance ID. This does not include the project ID. :type instance: str :param project_id: Project ID of the project that contains the instance. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: A Cloud SQL instance resource. :rtype: dict """ return self.get_conn().instances().get( project=project_id, instance=instance ).execute(num_retries=self.num_retries)
def function[get_instance, parameter[self, instance, project_id]]: constant[ Retrieves a resource containing information about a Cloud SQL instance. :param instance: Database instance ID. This does not include the project ID. :type instance: str :param project_id: Project ID of the project that contains the instance. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: A Cloud SQL instance resource. :rtype: dict ] return[call[call[call[call[name[self].get_conn, parameter[]].instances, parameter[]].get, parameter[]].execute, parameter[]]]
keyword[def] identifier[get_instance] ( identifier[self] , identifier[instance] , identifier[project_id] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[get_conn] (). identifier[instances] (). identifier[get] ( identifier[project] = identifier[project_id] , identifier[instance] = identifier[instance] ). identifier[execute] ( identifier[num_retries] = identifier[self] . identifier[num_retries] )
def get_instance(self, instance, project_id=None): """ Retrieves a resource containing information about a Cloud SQL instance. :param instance: Database instance ID. This does not include the project ID. :type instance: str :param project_id: Project ID of the project that contains the instance. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: A Cloud SQL instance resource. :rtype: dict """ return self.get_conn().instances().get(project=project_id, instance=instance).execute(num_retries=self.num_retries)
def _map_unity_proxy_to_object(value): """ Map returning value, if it is unity SFrame, SArray, map it """ vtype = type(value) if vtype in _proxy_map: return _proxy_map[vtype](value) elif vtype == list: return [_map_unity_proxy_to_object(v) for v in value] elif vtype == dict: return {k:_map_unity_proxy_to_object(v) for k,v in value.items()} else: return value
def function[_map_unity_proxy_to_object, parameter[value]]: constant[ Map returning value, if it is unity SFrame, SArray, map it ] variable[vtype] assign[=] call[name[type], parameter[name[value]]] if compare[name[vtype] in name[_proxy_map]] begin[:] return[call[call[name[_proxy_map]][name[vtype]], parameter[name[value]]]]
keyword[def] identifier[_map_unity_proxy_to_object] ( identifier[value] ): literal[string] identifier[vtype] = identifier[type] ( identifier[value] ) keyword[if] identifier[vtype] keyword[in] identifier[_proxy_map] : keyword[return] identifier[_proxy_map] [ identifier[vtype] ]( identifier[value] ) keyword[elif] identifier[vtype] == identifier[list] : keyword[return] [ identifier[_map_unity_proxy_to_object] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[value] ] keyword[elif] identifier[vtype] == identifier[dict] : keyword[return] { identifier[k] : identifier[_map_unity_proxy_to_object] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[value] . identifier[items] ()} keyword[else] : keyword[return] identifier[value]
def _map_unity_proxy_to_object(value): """ Map returning value, if it is unity SFrame, SArray, map it """ vtype = type(value) if vtype in _proxy_map: return _proxy_map[vtype](value) # depends on [control=['if'], data=['vtype', '_proxy_map']] elif vtype == list: return [_map_unity_proxy_to_object(v) for v in value] # depends on [control=['if'], data=[]] elif vtype == dict: return {k: _map_unity_proxy_to_object(v) for (k, v) in value.items()} # depends on [control=['if'], data=[]] else: return value
def get_model(with_pipeline=False): """Get a multi-layer perceptron model. Optionally, put it in a pipeline that scales the data. """ model = NeuralNetClassifier(MLPClassifier) if with_pipeline: model = Pipeline([ ('scale', FeatureUnion([ ('minmax', MinMaxScaler()), ('normalize', Normalizer()), ])), ('select', SelectKBest(k=N_FEATURES)), # keep input size constant ('net', model), ]) return model
def function[get_model, parameter[with_pipeline]]: constant[Get a multi-layer perceptron model. Optionally, put it in a pipeline that scales the data. ] variable[model] assign[=] call[name[NeuralNetClassifier], parameter[name[MLPClassifier]]] if name[with_pipeline] begin[:] variable[model] assign[=] call[name[Pipeline], parameter[list[[<ast.Tuple object at 0x7da18eb54af0>, <ast.Tuple object at 0x7da18eb55b40>, <ast.Tuple object at 0x7da18eb56cb0>]]]] return[name[model]]
keyword[def] identifier[get_model] ( identifier[with_pipeline] = keyword[False] ): literal[string] identifier[model] = identifier[NeuralNetClassifier] ( identifier[MLPClassifier] ) keyword[if] identifier[with_pipeline] : identifier[model] = identifier[Pipeline] ([ ( literal[string] , identifier[FeatureUnion] ([ ( literal[string] , identifier[MinMaxScaler] ()), ( literal[string] , identifier[Normalizer] ()), ])), ( literal[string] , identifier[SelectKBest] ( identifier[k] = identifier[N_FEATURES] )), ( literal[string] , identifier[model] ), ]) keyword[return] identifier[model]
def get_model(with_pipeline=False): """Get a multi-layer perceptron model. Optionally, put it in a pipeline that scales the data. """ model = NeuralNetClassifier(MLPClassifier) if with_pipeline: # keep input size constant model = Pipeline([('scale', FeatureUnion([('minmax', MinMaxScaler()), ('normalize', Normalizer())])), ('select', SelectKBest(k=N_FEATURES)), ('net', model)]) # depends on [control=['if'], data=[]] return model
def get_shrink_data_info(in_data, api_key=None): """Shrink binary data of a png returns api_info """ if api_key: return _shrink_info(in_data, api_key) api_keys = find_keys() for key in api_keys: try: return _shrink_info(in_data, key) except ValueError: pass raise ValueError('No valid api key found')
def function[get_shrink_data_info, parameter[in_data, api_key]]: constant[Shrink binary data of a png returns api_info ] if name[api_key] begin[:] return[call[name[_shrink_info], parameter[name[in_data], name[api_key]]]] variable[api_keys] assign[=] call[name[find_keys], parameter[]] for taget[name[key]] in starred[name[api_keys]] begin[:] <ast.Try object at 0x7da18bc72d40> <ast.Raise object at 0x7da18bc70c10>
keyword[def] identifier[get_shrink_data_info] ( identifier[in_data] , identifier[api_key] = keyword[None] ): literal[string] keyword[if] identifier[api_key] : keyword[return] identifier[_shrink_info] ( identifier[in_data] , identifier[api_key] ) identifier[api_keys] = identifier[find_keys] () keyword[for] identifier[key] keyword[in] identifier[api_keys] : keyword[try] : keyword[return] identifier[_shrink_info] ( identifier[in_data] , identifier[key] ) keyword[except] identifier[ValueError] : keyword[pass] keyword[raise] identifier[ValueError] ( literal[string] )
def get_shrink_data_info(in_data, api_key=None): """Shrink binary data of a png returns api_info """ if api_key: return _shrink_info(in_data, api_key) # depends on [control=['if'], data=[]] api_keys = find_keys() for key in api_keys: try: return _shrink_info(in_data, key) # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['key']] raise ValueError('No valid api key found')
def add_parameters(traj): """Adds all necessary parameters to the `traj` container. You can choose between two parameter sets. One for the Lorenz attractor and one for the Roessler attractor. The former is chosen for `traj.diff_name=='diff_lorenz'`, the latter for `traj.diff_name=='diff_roessler'`. You can use parameter presetting to switch between the two cases. :raises: A ValueError if `traj.diff_name` is none of the above """ traj.f_add_parameter('steps', 10000, comment='Number of time steps to simulate') traj.f_add_parameter('dt', 0.01, comment='Step size') # Here we want to add the initial conditions as an array parameter, since we will simulate # a 3-D differential equation, that is the Roessler attractor # (https://en.wikipedia.org/wiki/R%C3%B6ssler_attractor) traj.f_add_parameter(ArrayParameter,'initial_conditions', np.array([0.0,0.0,0.0]), comment = 'Our initial conditions, as default we will start from' ' origin!') # Per default we choose the name `'diff_lorenz'` as in the last example traj.f_add_parameter('diff_name','diff_lorenz', comment= 'Name of our differential equation') # We want some control flow depending on which name we really choose if traj.diff_name == 'diff_lorenz': # These parameters are for the Lorenz differential equation traj.f_add_parameter('func_params.sigma', 10.0) traj.f_add_parameter('func_params.beta', 8.0/3.0) traj.f_add_parameter('func_params.rho', 28.0) elif traj.diff_name == 'diff_roessler': # If we use the Roessler system we need different parameters traj.f_add_parameter('func_params.a', 0.1) traj.f_add_parameter('func_params.c', 14.0) else: raise ValueError('I don\'t know what %s is.' % traj.diff_name)
def function[add_parameters, parameter[traj]]: constant[Adds all necessary parameters to the `traj` container. You can choose between two parameter sets. One for the Lorenz attractor and one for the Roessler attractor. The former is chosen for `traj.diff_name=='diff_lorenz'`, the latter for `traj.diff_name=='diff_roessler'`. You can use parameter presetting to switch between the two cases. :raises: A ValueError if `traj.diff_name` is none of the above ] call[name[traj].f_add_parameter, parameter[constant[steps], constant[10000]]] call[name[traj].f_add_parameter, parameter[constant[dt], constant[0.01]]] call[name[traj].f_add_parameter, parameter[name[ArrayParameter], constant[initial_conditions], call[name[np].array, parameter[list[[<ast.Constant object at 0x7da1b26aca30>, <ast.Constant object at 0x7da1b26aee90>, <ast.Constant object at 0x7da1b26ae800>]]]]]] call[name[traj].f_add_parameter, parameter[constant[diff_name], constant[diff_lorenz]]] if compare[name[traj].diff_name equal[==] constant[diff_lorenz]] begin[:] call[name[traj].f_add_parameter, parameter[constant[func_params.sigma], constant[10.0]]] call[name[traj].f_add_parameter, parameter[constant[func_params.beta], binary_operation[constant[8.0] / constant[3.0]]]] call[name[traj].f_add_parameter, parameter[constant[func_params.rho], constant[28.0]]]
keyword[def] identifier[add_parameters] ( identifier[traj] ): literal[string] identifier[traj] . identifier[f_add_parameter] ( literal[string] , literal[int] , identifier[comment] = literal[string] ) identifier[traj] . identifier[f_add_parameter] ( literal[string] , literal[int] , identifier[comment] = literal[string] ) identifier[traj] . identifier[f_add_parameter] ( identifier[ArrayParameter] , literal[string] , identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] ]), identifier[comment] = literal[string] literal[string] ) identifier[traj] . identifier[f_add_parameter] ( literal[string] , literal[string] , identifier[comment] = literal[string] ) keyword[if] identifier[traj] . identifier[diff_name] == literal[string] : identifier[traj] . identifier[f_add_parameter] ( literal[string] , literal[int] ) identifier[traj] . identifier[f_add_parameter] ( literal[string] , literal[int] / literal[int] ) identifier[traj] . identifier[f_add_parameter] ( literal[string] , literal[int] ) keyword[elif] identifier[traj] . identifier[diff_name] == literal[string] : identifier[traj] . identifier[f_add_parameter] ( literal[string] , literal[int] ) identifier[traj] . identifier[f_add_parameter] ( literal[string] , literal[int] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[traj] . identifier[diff_name] )
def add_parameters(traj): """Adds all necessary parameters to the `traj` container. You can choose between two parameter sets. One for the Lorenz attractor and one for the Roessler attractor. The former is chosen for `traj.diff_name=='diff_lorenz'`, the latter for `traj.diff_name=='diff_roessler'`. You can use parameter presetting to switch between the two cases. :raises: A ValueError if `traj.diff_name` is none of the above """ traj.f_add_parameter('steps', 10000, comment='Number of time steps to simulate') traj.f_add_parameter('dt', 0.01, comment='Step size') # Here we want to add the initial conditions as an array parameter, since we will simulate # a 3-D differential equation, that is the Roessler attractor # (https://en.wikipedia.org/wiki/R%C3%B6ssler_attractor) traj.f_add_parameter(ArrayParameter, 'initial_conditions', np.array([0.0, 0.0, 0.0]), comment='Our initial conditions, as default we will start from origin!') # Per default we choose the name `'diff_lorenz'` as in the last example traj.f_add_parameter('diff_name', 'diff_lorenz', comment='Name of our differential equation') # We want some control flow depending on which name we really choose if traj.diff_name == 'diff_lorenz': # These parameters are for the Lorenz differential equation traj.f_add_parameter('func_params.sigma', 10.0) traj.f_add_parameter('func_params.beta', 8.0 / 3.0) traj.f_add_parameter('func_params.rho', 28.0) # depends on [control=['if'], data=[]] elif traj.diff_name == 'diff_roessler': # If we use the Roessler system we need different parameters traj.f_add_parameter('func_params.a', 0.1) traj.f_add_parameter('func_params.c', 14.0) # depends on [control=['if'], data=[]] else: raise ValueError("I don't know what %s is." % traj.diff_name)
def simDeath(self):
    '''
    Determines which agents in the current population "die" or should be
    replaced.  Takes no inputs, returns a Boolean array of size
    self.AgentCount, which has True for agents who die and False for those
    that survive.  As implemented here, the default returns all True
    (``np.ones(..., dtype=bool)``), i.e. every agent is marked for
    replacement; must be overwritten by a subclass to implement an actual
    mortality/replacement rule.

    Parameters
    ----------
    None

    Returns
    -------
    who_dies : np.array
        Boolean array of size self.AgentCount indicating which agents die and
        are replaced.
    '''
    # Warn loudly that the subclass has not provided its own mortality rule.
    print('AgentType subclass must define method simDeath!')
    # All True: every agent "dies" under the placeholder default.
    who_dies = np.ones(self.AgentCount,dtype=bool)
    return who_dies
def function[simDeath, parameter[self]]: constant[ Determines which agents in the current population "die" or should be replaced. Takes no inputs, returns a Boolean array of size self.AgentCount, which has True for agents who die and False for those that survive. Returns all False by default, must be overwritten by a subclass to have replacement events. Parameters ---------- None Returns ------- who_dies : np.array Boolean array of size self.AgentCount indicating which agents die and are replaced. ] call[name[print], parameter[constant[AgentType subclass must define method simDeath!]]] variable[who_dies] assign[=] call[name[np].ones, parameter[name[self].AgentCount]] return[name[who_dies]]
keyword[def] identifier[simDeath] ( identifier[self] ): literal[string] identifier[print] ( literal[string] ) identifier[who_dies] = identifier[np] . identifier[ones] ( identifier[self] . identifier[AgentCount] , identifier[dtype] = identifier[bool] ) keyword[return] identifier[who_dies]
def simDeath(self): """ Determines which agents in the current population "die" or should be replaced. Takes no inputs, returns a Boolean array of size self.AgentCount, which has True for agents who die and False for those that survive. Returns all False by default, must be overwritten by a subclass to have replacement events. Parameters ---------- None Returns ------- who_dies : np.array Boolean array of size self.AgentCount indicating which agents die and are replaced. """ print('AgentType subclass must define method simDeath!') who_dies = np.ones(self.AgentCount, dtype=bool) return who_dies
def has_all_changes_covered(self):
    """
    Return `True` if all changes have been covered, `False` otherwise.
    """
    for source_file in self.files():
        for chunk in self.file_source_hunks(source_file):
            for source_line in chunk:
                # Lines with no `reason` were untouched by the change set;
                # only touched lines whose `status` is False are uncovered.
                if source_line.reason is not None and source_line.status is False:
                    return False
    return True
def function[has_all_changes_covered, parameter[self]]: constant[ Return `True` if all changes have been covered, `False` otherwise. ] for taget[name[filename]] in starred[call[name[self].files, parameter[]]] begin[:] for taget[name[hunk]] in starred[call[name[self].file_source_hunks, parameter[name[filename]]]] begin[:] for taget[name[line]] in starred[name[hunk]] begin[:] if compare[name[line].reason is constant[None]] begin[:] continue if compare[name[line].status is constant[False]] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[has_all_changes_covered] ( identifier[self] ): literal[string] keyword[for] identifier[filename] keyword[in] identifier[self] . identifier[files] (): keyword[for] identifier[hunk] keyword[in] identifier[self] . identifier[file_source_hunks] ( identifier[filename] ): keyword[for] identifier[line] keyword[in] identifier[hunk] : keyword[if] identifier[line] . identifier[reason] keyword[is] keyword[None] : keyword[continue] keyword[if] identifier[line] . identifier[status] keyword[is] keyword[False] : keyword[return] keyword[False] keyword[return] keyword[True]
def has_all_changes_covered(self): """ Return `True` if all changes have been covered, `False` otherwise. """ for filename in self.files(): for hunk in self.file_source_hunks(filename): for line in hunk: if line.reason is None: continue # line untouched # depends on [control=['if'], data=[]] if line.status is False: return False # line not covered # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['for'], data=['hunk']] # depends on [control=['for'], data=['filename']] return True
def analysis(self, morf):
    """Like `analysis2` but doesn't return excluded line numbers."""
    # Drop the third element (excluded lines) from the analysis2 tuple.
    filename, statements, _excluded, missing, missing_fmt = self.analysis2(morf)
    return filename, statements, missing, missing_fmt
def function[analysis, parameter[self, morf]]: constant[Like `analysis2` but doesn't return excluded line numbers.] <ast.Tuple object at 0x7da18f09cdc0> assign[=] call[name[self].analysis2, parameter[name[morf]]] return[tuple[[<ast.Name object at 0x7da18dc05f30>, <ast.Name object at 0x7da18dc058a0>, <ast.Name object at 0x7da18dc04610>, <ast.Name object at 0x7da18dc05cf0>]]]
keyword[def] identifier[analysis] ( identifier[self] , identifier[morf] ): literal[string] identifier[f] , identifier[s] , identifier[_] , identifier[m] , identifier[mf] = identifier[self] . identifier[analysis2] ( identifier[morf] ) keyword[return] identifier[f] , identifier[s] , identifier[m] , identifier[mf]
def analysis(self, morf): """Like `analysis2` but doesn't return excluded line numbers.""" (f, s, _, m, mf) = self.analysis2(morf) return (f, s, m, mf)
def set_scanner(type, scanner):
    """ Sets a scanner class that will be used for this 'type'.
    """
    if __debug__:
        # Imported lazily so the dependency is only paid in debug runs.
        from .scanner import Scanner
        assert isinstance(type, basestring)
        assert issubclass(scanner, Scanner)
    validate(type)
    entry = __types[type]
    entry['scanner'] = scanner
def function[set_scanner, parameter[type, scanner]]: constant[ Sets a scanner class that will be used for this 'type'. ] if name[__debug__] begin[:] from relative_module[scanner] import module[Scanner] assert[call[name[isinstance], parameter[name[type], name[basestring]]]] assert[call[name[issubclass], parameter[name[scanner], name[Scanner]]]] call[name[validate], parameter[name[type]]] call[call[name[__types]][name[type]]][constant[scanner]] assign[=] name[scanner]
keyword[def] identifier[set_scanner] ( identifier[type] , identifier[scanner] ): literal[string] keyword[if] identifier[__debug__] : keyword[from] . identifier[scanner] keyword[import] identifier[Scanner] keyword[assert] identifier[isinstance] ( identifier[type] , identifier[basestring] ) keyword[assert] identifier[issubclass] ( identifier[scanner] , identifier[Scanner] ) identifier[validate] ( identifier[type] ) identifier[__types] [ identifier[type] ][ literal[string] ]= identifier[scanner]
def set_scanner(type, scanner): """ Sets a scanner class that will be used for this 'type'. """ if __debug__: from .scanner import Scanner assert isinstance(type, basestring) assert issubclass(scanner, Scanner) # depends on [control=['if'], data=[]] validate(type) __types[type]['scanner'] = scanner
def _tag_ebs(self, conn, role): """ set tags, carrying the cluster name, instance role and instance id for the EBS storage """ tags = {'Name': 'spilo_' + self.cluster_name, 'Role': role, 'Instance': self.instance_id} volumes = conn.get_all_volumes(filters={'attachment.instance-id': self.instance_id}) conn.create_tags([v.id for v in volumes], tags)
def function[_tag_ebs, parameter[self, conn, role]]: constant[ set tags, carrying the cluster name, instance role and instance id for the EBS storage ] variable[tags] assign[=] dictionary[[<ast.Constant object at 0x7da1b21e1000>, <ast.Constant object at 0x7da1b21e1f90>, <ast.Constant object at 0x7da1b21e37c0>], [<ast.BinOp object at 0x7da1b21e04c0>, <ast.Name object at 0x7da1b21e37f0>, <ast.Attribute object at 0x7da1b21e3cd0>]] variable[volumes] assign[=] call[name[conn].get_all_volumes, parameter[]] call[name[conn].create_tags, parameter[<ast.ListComp object at 0x7da1b21e12a0>, name[tags]]]
keyword[def] identifier[_tag_ebs] ( identifier[self] , identifier[conn] , identifier[role] ): literal[string] identifier[tags] ={ literal[string] : literal[string] + identifier[self] . identifier[cluster_name] , literal[string] : identifier[role] , literal[string] : identifier[self] . identifier[instance_id] } identifier[volumes] = identifier[conn] . identifier[get_all_volumes] ( identifier[filters] ={ literal[string] : identifier[self] . identifier[instance_id] }) identifier[conn] . identifier[create_tags] ([ identifier[v] . identifier[id] keyword[for] identifier[v] keyword[in] identifier[volumes] ], identifier[tags] )
def _tag_ebs(self, conn, role): """ set tags, carrying the cluster name, instance role and instance id for the EBS storage """ tags = {'Name': 'spilo_' + self.cluster_name, 'Role': role, 'Instance': self.instance_id} volumes = conn.get_all_volumes(filters={'attachment.instance-id': self.instance_id}) conn.create_tags([v.id for v in volumes], tags)
def fn_int8(self, value):
    """
    Return the value cast to an 8-bit signed integer (numpy array) or a Python int (single value)

    :param value: The number or array
    :return: The number or array as int/int8
    """
    # Scalars take the plain-int path; anything array-like is converted
    # to an ndarray and cast to int8.
    if not (is_ndarray(value) or isinstance(value, (list, tuple))):
        return int(value)
    return self._to_ndarray(value).astype(numpy.int8)
def function[fn_int8, parameter[self, value]]: constant[ Return the value cast to an 8-bit signed integer (numpy array) or a Python int (single value) :param value: The number or array :return: The number or array as int/int8 ] if <ast.BoolOp object at 0x7da18f813f70> begin[:] return[call[call[name[self]._to_ndarray, parameter[name[value]]].astype, parameter[name[numpy].int8]]]
keyword[def] identifier[fn_int8] ( identifier[self] , identifier[value] ): literal[string] keyword[if] identifier[is_ndarray] ( identifier[value] ) keyword[or] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[tuple] )): keyword[return] identifier[self] . identifier[_to_ndarray] ( identifier[value] ). identifier[astype] ( identifier[numpy] . identifier[int8] ) keyword[else] : keyword[return] identifier[int] ( identifier[value] )
def fn_int8(self, value): """ Return the value cast to an 8-bit signed integer (numpy array) or a Python int (single value) :param value: The number or array :return: The number or array as int/int8 """ if is_ndarray(value) or isinstance(value, (list, tuple)): return self._to_ndarray(value).astype(numpy.int8) # depends on [control=['if'], data=[]] else: return int(value)
def fetch(bank, key):
    '''
    Fetch a key value.

    Returns the deserialized value stored under
    ``<path_prefix>/<bank>/<key>``, an empty dict when the key does not
    exist, and raises ``SaltCacheError`` on any other read failure.
    '''
    _init_client()
    etcd_key = '{0}/{1}/{2}'.format(path_prefix, bank, key)
    try:
        # Values are stored base64-encoded and serialized; reverse both steps.
        raw = client.read(etcd_key).value
        return __context__['serial'].loads(base64.b64decode(raw))
    except etcd.EtcdKeyNotFound:
        # A missing key is not an error for the cache layer.
        return {}
    except Exception as exc:
        raise SaltCacheError(
            'There was an error reading the key, {0}: {1}'.format(
                etcd_key, exc
            )
        )
def function[fetch, parameter[bank, key]]: constant[ Fetch a key value. ] call[name[_init_client], parameter[]] variable[etcd_key] assign[=] call[constant[{0}/{1}/{2}].format, parameter[name[path_prefix], name[bank], name[key]]] <ast.Try object at 0x7da1b1c644c0>
keyword[def] identifier[fetch] ( identifier[bank] , identifier[key] ): literal[string] identifier[_init_client] () identifier[etcd_key] = literal[string] . identifier[format] ( identifier[path_prefix] , identifier[bank] , identifier[key] ) keyword[try] : identifier[value] = identifier[client] . identifier[read] ( identifier[etcd_key] ). identifier[value] keyword[return] identifier[__context__] [ literal[string] ]. identifier[loads] ( identifier[base64] . identifier[b64decode] ( identifier[value] )) keyword[except] identifier[etcd] . identifier[EtcdKeyNotFound] : keyword[return] {} keyword[except] identifier[Exception] keyword[as] identifier[exc] : keyword[raise] identifier[SaltCacheError] ( literal[string] . identifier[format] ( identifier[etcd_key] , identifier[exc] ) )
def fetch(bank, key): """ Fetch a key value. """ _init_client() etcd_key = '{0}/{1}/{2}'.format(path_prefix, bank, key) try: value = client.read(etcd_key).value return __context__['serial'].loads(base64.b64decode(value)) # depends on [control=['try'], data=[]] except etcd.EtcdKeyNotFound: return {} # depends on [control=['except'], data=[]] except Exception as exc: raise SaltCacheError('There was an error reading the key, {0}: {1}'.format(etcd_key, exc)) # depends on [control=['except'], data=['exc']]
def _bor16(ins):
    ''' Pops top 2 operands out of the stack, and performs
        1st operand OR (bitwise) 2nd operand (top of the stack),
        pushes result (16 bit in HL).

        16 bit un/signed version

        Optimizations:

        If any of the operators are constants: Returns either
        0xFFFF or the other operand
    '''
    op1, op2 = tuple(ins.quad[2:])

    if _int_ops(op1, op2) is not None:
        # After _int_ops, op2 is guaranteed to be the constant operand.
        op1, op2 = _int_ops(op1, op2)

        if op2 == 0:  # X | 0 = X
            output = _16bit_oper(op1)
            output.append('push hl')
            return output

        if op2 == 0xFFFF:  # X | 0xFFFF = 0xFFFF
            # op1 is still evaluated (it may have side effects); its result
            # in HL is simply overwritten with the constant.
            output = _16bit_oper(op1)
            output.append('ld hl, 0FFFFh')
            output.append('push hl')
            return output
        # Constant is neither 0 nor 0xFFFF: fall through to the generic
        # path below (the original emitted and discarded _16bit_oper(op1)
        # here, which was redundant work).

    # Generic runtime OR via the library routine.
    output = _16bit_oper(op1, op2)
    output.append('call __BOR16')
    output.append('push hl')
    REQUIRES.add('bor16.asm')
    return output
def function[_bor16, parameter[ins]]: constant[ Pops top 2 operands out of the stack, and performs 1st operand OR (bitwise) 2nd operand (top of the stack), pushes result (16 bit in HL). 16 bit un/signed version Optimizations: If any of the operators are constants: Returns either 0 or the other operand ] <ast.Tuple object at 0x7da20cabf130> assign[=] call[name[tuple], parameter[call[name[ins].quad][<ast.Slice object at 0x7da20cabe200>]]] if compare[call[name[_int_ops], parameter[name[op1], name[op2]]] is_not constant[None]] begin[:] <ast.Tuple object at 0x7da20cabdf90> assign[=] call[name[_int_ops], parameter[name[op1], name[op2]]] variable[output] assign[=] call[name[_16bit_oper], parameter[name[op1]]] if compare[name[op2] equal[==] constant[0]] begin[:] call[name[output].append, parameter[constant[push hl]]] return[name[output]] if compare[name[op2] equal[==] constant[65535]] begin[:] call[name[output].append, parameter[constant[ld hl, 0FFFFh]]] call[name[output].append, parameter[constant[push hl]]] return[name[output]] variable[output] assign[=] call[name[_16bit_oper], parameter[name[op1], name[op2]]] call[name[output].append, parameter[constant[call __BOR16]]] call[name[output].append, parameter[constant[push hl]]] call[name[REQUIRES].add, parameter[constant[bor16.asm]]] return[name[output]]
keyword[def] identifier[_bor16] ( identifier[ins] ): literal[string] identifier[op1] , identifier[op2] = identifier[tuple] ( identifier[ins] . identifier[quad] [ literal[int] :]) keyword[if] identifier[_int_ops] ( identifier[op1] , identifier[op2] ) keyword[is] keyword[not] keyword[None] : identifier[op1] , identifier[op2] = identifier[_int_ops] ( identifier[op1] , identifier[op2] ) identifier[output] = identifier[_16bit_oper] ( identifier[op1] ) keyword[if] identifier[op2] == literal[int] : identifier[output] . identifier[append] ( literal[string] ) keyword[return] identifier[output] keyword[if] identifier[op2] == literal[int] : identifier[output] . identifier[append] ( literal[string] ) identifier[output] . identifier[append] ( literal[string] ) keyword[return] identifier[output] identifier[output] = identifier[_16bit_oper] ( identifier[op1] , identifier[op2] ) identifier[output] . identifier[append] ( literal[string] ) identifier[output] . identifier[append] ( literal[string] ) identifier[REQUIRES] . identifier[add] ( literal[string] ) keyword[return] identifier[output]
def _bor16(ins): """ Pops top 2 operands out of the stack, and performs 1st operand OR (bitwise) 2nd operand (top of the stack), pushes result (16 bit in HL). 16 bit un/signed version Optimizations: If any of the operators are constants: Returns either 0 or the other operand """ (op1, op2) = tuple(ins.quad[2:]) if _int_ops(op1, op2) is not None: (op1, op2) = _int_ops(op1, op2) output = _16bit_oper(op1) if op2 == 0: # X | 0 = X output.append('push hl') return output # depends on [control=['if'], data=[]] if op2 == 65535: # X & 0xFFFF = 0xFFFF output.append('ld hl, 0FFFFh') output.append('push hl') return output # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] output = _16bit_oper(op1, op2) output.append('call __BOR16') output.append('push hl') REQUIRES.add('bor16.asm') return output
def audio_card(card: AudioCard) -> Attachment:
    """
    Returns an attachment for an audio card. Will raise a TypeError if 'card' argument is not an AudioCard.
    :param card:
    :return:
    """
    if isinstance(card, AudioCard):
        return Attachment(content_type=CardFactory.content_types.audio_card,
                          content=card)
    raise TypeError('CardFactory.audio_card(): `card` argument is not an instance of an AudioCard, '
                    'unable to prepare attachment.')
def function[audio_card, parameter[card]]: constant[ Returns an attachment for an audio card. Will raise a TypeError if 'card' argument is not an AudioCard. :param card: :return: ] if <ast.UnaryOp object at 0x7da1b03e33d0> begin[:] <ast.Raise object at 0x7da1b03e3bb0> return[call[name[Attachment], parameter[]]]
keyword[def] identifier[audio_card] ( identifier[card] : identifier[AudioCard] )-> identifier[Attachment] : literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[card] , identifier[AudioCard] ): keyword[raise] identifier[TypeError] ( literal[string] literal[string] ) keyword[return] identifier[Attachment] ( identifier[content_type] = identifier[CardFactory] . identifier[content_types] . identifier[audio_card] , identifier[content] = identifier[card] )
def audio_card(card: AudioCard) -> Attachment: """ Returns an attachment for an audio card. Will raise a TypeError if 'card' argument is not an AudioCard. :param card: :return: """ if not isinstance(card, AudioCard): raise TypeError('CardFactory.audio_card(): `card` argument is not an instance of an AudioCard, unable to prepare attachment.') # depends on [control=['if'], data=[]] return Attachment(content_type=CardFactory.content_types.audio_card, content=card)
def get_info_of_object(self, obj, selector=None):
    """Return the info dictionary of *obj*, or one field of it.

    When *selector* is given (truthy), only that entry of the info
    dictionary is returned (``None`` if the key is absent); otherwise
    the whole dictionary is returned.

    The info dictionary typically contains entries such as
    ``contentDescription``, ``checked``, ``scrollable``, ``text``,
    ``packageName``, ``selected``, ``enabled``, ``bounds``,
    ``className``, ``focusable``, ``focused``, ``clickable``,
    ``checkable``, ``chileCount``, ``longClickable`` and
    ``visibleBounds``.
    """
    details = obj.info
    if not selector:
        return details
    return details.get(selector)
def function[get_info_of_object, parameter[self, obj, selector]]: constant[ return info dictionary of the *obj* The info example: { u'contentDescription': u'', u'checked': False, u'scrollable': True, u'text': u'', u'packageName': u'com.android.launcher', u'selected': False, u'enabled': True, u'bounds': { u'top': 231, u'left': 0, u'right': 1080, u'bottom': 1776 }, u'className': u'android.view.View', u'focusable': False, u'focused': False, u'clickable': False, u'checkable': False, u'chileCount': 1, u'longClickable': False, u'visibleBounds': { u'top': 231, u'left': 0, u'right': 1080, u'bottom': 1776 } } ] if name[selector] begin[:] return[call[name[obj].info.get, parameter[name[selector]]]]
keyword[def] identifier[get_info_of_object] ( identifier[self] , identifier[obj] , identifier[selector] = keyword[None] ): literal[string] keyword[if] identifier[selector] : keyword[return] identifier[obj] . identifier[info] . identifier[get] ( identifier[selector] ) keyword[else] : keyword[return] identifier[obj] . identifier[info]
def get_info_of_object(self, obj, selector=None): """ return info dictionary of the *obj* The info example: { u'contentDescription': u'', u'checked': False, u'scrollable': True, u'text': u'', u'packageName': u'com.android.launcher', u'selected': False, u'enabled': True, u'bounds': { u'top': 231, u'left': 0, u'right': 1080, u'bottom': 1776 }, u'className': u'android.view.View', u'focusable': False, u'focused': False, u'clickable': False, u'checkable': False, u'chileCount': 1, u'longClickable': False, u'visibleBounds': { u'top': 231, u'left': 0, u'right': 1080, u'bottom': 1776 } } """ if selector: return obj.info.get(selector) # depends on [control=['if'], data=[]] else: return obj.info
def newidf(version=None):
    """open a new idf file

    easy way to open a new idf file for particular version. Works only id
    Energyplus of that version is installed.

    Parameters
    ----------
    version: string
        version of the new file you want to create. Will work only if this
        version of Energyplus has been installed.

    Returns
    -------
    idf file of type eppy.modelmake.IDF
    """  # noqa: E501
    import eppy.easyopen as easyopen

    if not version:
        version = "8.9"
    # A minimal in-memory IDF containing only the Version object is enough
    # for easyopen to pick the matching EnergyPlus IDD.
    handle = StringIO(" Version,{};".format(str(version)))
    return easyopen.easyopen(handle)
def function[newidf, parameter[version]]: constant[open a new idf file easy way to open a new idf file for particular version. Works only id Energyplus of that version is installed. Parameters ---------- version: string version of the new file you want to create. Will work only if this version of Energyplus has been installed. Returns ------- idf file of type eppy.modelmake.IDF ] if <ast.UnaryOp object at 0x7da18dc994e0> begin[:] variable[version] assign[=] constant[8.9] import module[eppy.easyopen] as alias[easyopen] variable[idfstring] assign[=] call[constant[ Version,{};].format, parameter[call[name[str], parameter[name[version]]]]] variable[fhandle] assign[=] call[name[StringIO], parameter[name[idfstring]]] return[call[name[easyopen].easyopen, parameter[name[fhandle]]]]
keyword[def] identifier[newidf] ( identifier[version] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[version] : identifier[version] = literal[string] keyword[import] identifier[eppy] . identifier[easyopen] keyword[as] identifier[easyopen] identifier[idfstring] = literal[string] . identifier[format] ( identifier[str] ( identifier[version] )) identifier[fhandle] = identifier[StringIO] ( identifier[idfstring] ) keyword[return] identifier[easyopen] . identifier[easyopen] ( identifier[fhandle] )
def newidf(version=None): """open a new idf file easy way to open a new idf file for particular version. Works only id Energyplus of that version is installed. Parameters ---------- version: string version of the new file you want to create. Will work only if this version of Energyplus has been installed. Returns ------- idf file of type eppy.modelmake.IDF """ # noqa: E501 if not version: version = '8.9' # depends on [control=['if'], data=[]] import eppy.easyopen as easyopen idfstring = ' Version,{};'.format(str(version)) fhandle = StringIO(idfstring) return easyopen.easyopen(fhandle)
def get(self, account_id):
    """ Return a specific account given its ID """
    endpoint = '/accounts/{0}'.format(account_id)
    return self.client._make_request(endpoint).json()
def function[get, parameter[self, account_id]]: constant[ Return a specific account given its ID ] variable[response] assign[=] call[name[self].client._make_request, parameter[call[constant[/accounts/{0}].format, parameter[name[account_id]]]]] return[call[name[response].json, parameter[]]]
keyword[def] identifier[get] ( identifier[self] , identifier[account_id] ): literal[string] identifier[response] = identifier[self] . identifier[client] . identifier[_make_request] ( literal[string] . identifier[format] ( identifier[account_id] )) keyword[return] identifier[response] . identifier[json] ()
def get(self, account_id): """ Return a specific account given its ID """ response = self.client._make_request('/accounts/{0}'.format(account_id)) return response.json()
def _set_alarm_falling_event_index(self, v, load=False):
    """
    Setter method for alarm_falling_event_index, mapped from YANG variable /rmon/alarm_entry/alarm_falling_event_index (alarm-falling-event-index-type)
    If this variable is read-only (config: false) in the source YANG file, then _set_alarm_falling_event_index is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_alarm_falling_event_index() directly.
    """
    # NOTE(review): pyangbind-generated setter -- keep in sync with the YANG
    # model ('brocade-rmon') rather than hand-editing the validation call.
    if hasattr(v, "_utype"):
      # Unwrap an already-wrapped YANG value back to its base type before
      # re-validating it against this leaf's restrictions below.
      v = v._utype(v)
    try:
      # Validate/coerce `v` into the restricted integer leaf type
      # (range 1..65535 on a 32-bit signed base, per the YANG model).
      t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['-2147483648..2147483647']}, int_size=32), restriction_dict={'range': [u'1 .. 65535']}), is_leaf=True, yang_name="alarm-falling-event-index", rest_name="event", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Event for falling alarm', u'alt-name': u'event'}}, namespace='urn:brocade.com:mgmt:brocade-rmon', defining_module='brocade-rmon', yang_type='alarm-falling-event-index-type', is_config=True)
    except (TypeError, ValueError):
      # Surface a structured error that names the expected generated type.
      raise ValueError({
        'error-string': """alarm_falling_event_index must be of a type compatible with alarm-falling-event-index-type""",
        'defined-type': "brocade-rmon:alarm-falling-event-index-type",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['-2147483648..2147483647']}, int_size=32), restriction_dict={'range': [u'1 .. 
65535']}), is_leaf=True, yang_name="alarm-falling-event-index", rest_name="event", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Event for falling alarm', u'alt-name': u'event'}}, namespace='urn:brocade.com:mgmt:brocade-rmon', defining_module='brocade-rmon', yang_type='alarm-falling-event-index-type', is_config=True)""",
      })
    # Store the validated value and notify the parent object, if supported.
    self.__alarm_falling_event_index = t
    if hasattr(self, '_set'):
      self._set()
def function[_set_alarm_falling_event_index, parameter[self, v, load]]: constant[ Setter method for alarm_falling_event_index, mapped from YANG variable /rmon/alarm_entry/alarm_falling_event_index (alarm-falling-event-index-type) If this variable is read-only (config: false) in the source YANG file, then _set_alarm_falling_event_index is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_alarm_falling_event_index() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da20c6c5c60> name[self].__alarm_falling_event_index assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_alarm_falling_event_index] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[long] , identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}, identifier[int_size] = literal[int] ), identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__alarm_falling_event_index] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_alarm_falling_event_index(self, v, load=False): """ Setter method for alarm_falling_event_index, mapped from YANG variable /rmon/alarm_entry/alarm_falling_event_index (alarm-falling-event-index-type) If this variable is read-only (config: false) in the source YANG file, then _set_alarm_falling_event_index is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_alarm_falling_event_index() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['-2147483648..2147483647']}, int_size=32), restriction_dict={'range': [u'1 .. 65535']}), is_leaf=True, yang_name='alarm-falling-event-index', rest_name='event', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Event for falling alarm', u'alt-name': u'event'}}, namespace='urn:brocade.com:mgmt:brocade-rmon', defining_module='brocade-rmon', yang_type='alarm-falling-event-index-type', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'alarm_falling_event_index must be of a type compatible with alarm-falling-event-index-type', 'defined-type': 'brocade-rmon:alarm-falling-event-index-type', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={\'range\': [\'-2147483648..2147483647\']}, int_size=32), restriction_dict={\'range\': [u\'1 .. 
65535\']}), is_leaf=True, yang_name="alarm-falling-event-index", rest_name="event", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Event for falling alarm\', u\'alt-name\': u\'event\'}}, namespace=\'urn:brocade.com:mgmt:brocade-rmon\', defining_module=\'brocade-rmon\', yang_type=\'alarm-falling-event-index-type\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__alarm_falling_event_index = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def _query(action=None,
           command=None,
           args=None,
           method='GET',
           header_dict=None,
           data=None):
    '''
    Make a web call to GoGrid

    .. versionadded:: 2015.8.0

    action
        Top-level API path component (e.g. ``grid/server``).
    command
        Optional sub-command appended to the action path.
    args
        Query parameters; replaced with an empty dict when not a dict.
    method
        HTTP method; non-POST requests ask for a JSON response.
    header_dict
        Extra HTTP headers to send.
    data
        Request body payload.

    Returns the decoded JSON response as a dictionary.
    '''
    vm_ = get_configured_provider()
    apikey = config.get_cloud_config_value(
        'apikey', vm_, __opts__, search_global=False
    )
    sharedsecret = config.get_cloud_config_value(
        'sharedsecret', vm_, __opts__, search_global=False
    )

    # Build the request URL: https://api.gogrid.com/api/<action>/<command>
    path = 'https://api.gogrid.com/api/'

    if action:
        path += action

    if command:
        path += '/{0}'.format(command)

    log.debug('GoGrid URL: %s', path)

    if not isinstance(args, dict):
        args = {}

    # GoGrid request signature: md5 of apikey + shared secret + epoch seconds
    epoch = six.text_type(int(time.time()))
    hashtext = ''.join((apikey, sharedsecret, epoch))
    args['sig'] = salt.utils.hashutils.md5_digest(hashtext)
    args['format'] = 'json'
    args['v'] = '1.0'
    args['api_key'] = apikey

    if header_dict is None:
        header_dict = {}

    if method != 'POST':
        header_dict['Accept'] = 'application/json'

    # DELETE responses are not decoded; every other method expects JSON.
    decode = method != 'DELETE'

    result = salt.utils.http.query(
        path,
        method,
        params=args,
        data=data,
        header_dict=header_dict,
        decode=decode,
        decode_type='json',
        text=True,
        status=True,
        opts=__opts__,
    )
    log.debug('GoGrid Response Status Code: %s', result['status'])

    return result['dict']
def function[_query, parameter[action, command, args, method, header_dict, data]]: constant[ Make a web call to GoGrid .. versionadded:: 2015.8.0 ] variable[vm_] assign[=] call[name[get_configured_provider], parameter[]] variable[apikey] assign[=] call[name[config].get_cloud_config_value, parameter[constant[apikey], name[vm_], name[__opts__]]] variable[sharedsecret] assign[=] call[name[config].get_cloud_config_value, parameter[constant[sharedsecret], name[vm_], name[__opts__]]] variable[path] assign[=] constant[https://api.gogrid.com/api/] if name[action] begin[:] <ast.AugAssign object at 0x7da1b1cd6710> if name[command] begin[:] <ast.AugAssign object at 0x7da1b1cd73d0> call[name[log].debug, parameter[constant[GoGrid URL: %s], name[path]]] if <ast.UnaryOp object at 0x7da1b1cd7730> begin[:] variable[args] assign[=] dictionary[[], []] variable[epoch] assign[=] call[name[six].text_type, parameter[call[name[int], parameter[call[name[time].time, parameter[]]]]]] variable[hashtext] assign[=] call[constant[].join, parameter[tuple[[<ast.Name object at 0x7da1b1f48ac0>, <ast.Name object at 0x7da1b1f49de0>, <ast.Name object at 0x7da1b1f48f40>]]]] call[name[args]][constant[sig]] assign[=] call[name[salt].utils.hashutils.md5_digest, parameter[name[hashtext]]] call[name[args]][constant[format]] assign[=] constant[json] call[name[args]][constant[v]] assign[=] constant[1.0] call[name[args]][constant[api_key]] assign[=] name[apikey] if compare[name[header_dict] is constant[None]] begin[:] variable[header_dict] assign[=] dictionary[[], []] if compare[name[method] not_equal[!=] constant[POST]] begin[:] call[name[header_dict]][constant[Accept]] assign[=] constant[application/json] variable[decode] assign[=] constant[True] if compare[name[method] equal[==] constant[DELETE]] begin[:] variable[decode] assign[=] constant[False] variable[return_content] assign[=] constant[None] variable[result] assign[=] call[name[salt].utils.http.query, parameter[name[path], name[method]]] 
call[name[log].debug, parameter[constant[GoGrid Response Status Code: %s], call[name[result]][constant[status]]]] return[call[name[result]][constant[dict]]]
keyword[def] identifier[_query] ( identifier[action] = keyword[None] , identifier[command] = keyword[None] , identifier[args] = keyword[None] , identifier[method] = literal[string] , identifier[header_dict] = keyword[None] , identifier[data] = keyword[None] ): literal[string] identifier[vm_] = identifier[get_configured_provider] () identifier[apikey] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False] ) identifier[sharedsecret] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False] ) identifier[path] = literal[string] keyword[if] identifier[action] : identifier[path] += identifier[action] keyword[if] identifier[command] : identifier[path] += literal[string] . identifier[format] ( identifier[command] ) identifier[log] . identifier[debug] ( literal[string] , identifier[path] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[args] , identifier[dict] ): identifier[args] ={} identifier[epoch] = identifier[six] . identifier[text_type] ( identifier[int] ( identifier[time] . identifier[time] ())) identifier[hashtext] = literal[string] . identifier[join] (( identifier[apikey] , identifier[sharedsecret] , identifier[epoch] )) identifier[args] [ literal[string] ]= identifier[salt] . identifier[utils] . identifier[hashutils] . 
identifier[md5_digest] ( identifier[hashtext] ) identifier[args] [ literal[string] ]= literal[string] identifier[args] [ literal[string] ]= literal[string] identifier[args] [ literal[string] ]= identifier[apikey] keyword[if] identifier[header_dict] keyword[is] keyword[None] : identifier[header_dict] ={} keyword[if] identifier[method] != literal[string] : identifier[header_dict] [ literal[string] ]= literal[string] identifier[decode] = keyword[True] keyword[if] identifier[method] == literal[string] : identifier[decode] = keyword[False] identifier[return_content] = keyword[None] identifier[result] = identifier[salt] . identifier[utils] . identifier[http] . identifier[query] ( identifier[path] , identifier[method] , identifier[params] = identifier[args] , identifier[data] = identifier[data] , identifier[header_dict] = identifier[header_dict] , identifier[decode] = identifier[decode] , identifier[decode_type] = literal[string] , identifier[text] = keyword[True] , identifier[status] = keyword[True] , identifier[opts] = identifier[__opts__] , ) identifier[log] . identifier[debug] ( literal[string] , identifier[result] [ literal[string] ]) keyword[return] identifier[result] [ literal[string] ]
def _query(action=None, command=None, args=None, method='GET', header_dict=None, data=None): """ Make a web call to GoGrid .. versionadded:: 2015.8.0 """ vm_ = get_configured_provider() apikey = config.get_cloud_config_value('apikey', vm_, __opts__, search_global=False) sharedsecret = config.get_cloud_config_value('sharedsecret', vm_, __opts__, search_global=False) path = 'https://api.gogrid.com/api/' if action: path += action # depends on [control=['if'], data=[]] if command: path += '/{0}'.format(command) # depends on [control=['if'], data=[]] log.debug('GoGrid URL: %s', path) if not isinstance(args, dict): args = {} # depends on [control=['if'], data=[]] epoch = six.text_type(int(time.time())) hashtext = ''.join((apikey, sharedsecret, epoch)) args['sig'] = salt.utils.hashutils.md5_digest(hashtext) args['format'] = 'json' args['v'] = '1.0' args['api_key'] = apikey if header_dict is None: header_dict = {} # depends on [control=['if'], data=['header_dict']] if method != 'POST': header_dict['Accept'] = 'application/json' # depends on [control=['if'], data=[]] decode = True if method == 'DELETE': decode = False # depends on [control=['if'], data=[]] return_content = None result = salt.utils.http.query(path, method, params=args, data=data, header_dict=header_dict, decode=decode, decode_type='json', text=True, status=True, opts=__opts__) log.debug('GoGrid Response Status Code: %s', result['status']) return result['dict']
def update_col(input, **params):
    """
    Updates document with value from another document/collection/constant

    :param input: mapping of collections; may itself be the target collection
    :param params: optional ``target`` naming the collection inside ``input``
        to update, plus ``update``: a list of descriptors each carrying
        ``src.type`` ('doc' or 'const') and ``dest.field``, and either
        ``src.col``/``src.field`` (for 'doc') or ``const.value`` (for 'const')
    :return: the updated collection (documents are mutated in place)
    """
    descriptors = params.get('update')

    # When no explicit target is named, the input itself is the collection.
    if 'target' in params:
        collection = input[params.get('target')]
    else:
        collection = input

    for document in collection:
        for desc in descriptors:
            src_kind = desc['src.type']
            if src_kind == 'doc':
                # Copy a field out of another document in the input.
                source_doc = input[desc['src.col']]
                document[desc['dest.field']] = source_doc[desc['src.field']]
            elif src_kind == 'const':
                # Assign a literal constant value.
                document[desc['dest.field']] = desc['const.value']

    return collection
def function[update_col, parameter[input]]: constant[ Updates document with value from another document/collection/constant :param input: :param params: :return: ] variable[PARAM_TARGET] assign[=] constant[target] variable[PARAM_UPDATE] assign[=] constant[update] variable[SOURCE_TYPE] assign[=] constant[src.type] variable[SOURCE_COL] assign[=] constant[src.col] variable[SOURCE_FIELD] assign[=] constant[src.field] variable[DEST_FIELD] assign[=] constant[dest.field] variable[SOURCE_TYPE_DOC] assign[=] constant[doc] variable[SOURCE_TYPE_CONST] assign[=] constant[const] variable[CONST_VALUE] assign[=] constant[const.value] variable[update_list] assign[=] call[name[params].get, parameter[name[PARAM_UPDATE]]] variable[res] assign[=] <ast.IfExp object at 0x7da2054a4ca0> for taget[name[row]] in starred[name[res]] begin[:] for taget[name[update_desc]] in starred[name[update_list]] begin[:] if compare[call[name[update_desc]][name[SOURCE_TYPE]] equal[==] name[SOURCE_TYPE_DOC]] begin[:] call[name[row]][call[name[update_desc]][name[DEST_FIELD]]] assign[=] call[call[name[input]][call[name[update_desc]][name[SOURCE_COL]]]][call[name[update_desc]][name[SOURCE_FIELD]]] return[name[res]]
keyword[def] identifier[update_col] ( identifier[input] ,** identifier[params] ): literal[string] identifier[PARAM_TARGET] = literal[string] identifier[PARAM_UPDATE] = literal[string] identifier[SOURCE_TYPE] = literal[string] identifier[SOURCE_COL] = literal[string] identifier[SOURCE_FIELD] = literal[string] identifier[DEST_FIELD] = literal[string] identifier[SOURCE_TYPE_DOC] = literal[string] identifier[SOURCE_TYPE_CONST] = literal[string] identifier[CONST_VALUE] = literal[string] identifier[update_list] = identifier[params] . identifier[get] ( identifier[PARAM_UPDATE] ) identifier[res] = identifier[input] [ identifier[params] . identifier[get] ( identifier[PARAM_TARGET] )] keyword[if] identifier[PARAM_TARGET] keyword[in] identifier[params] keyword[else] identifier[input] keyword[for] identifier[row] keyword[in] identifier[res] : keyword[for] identifier[update_desc] keyword[in] identifier[update_list] : keyword[if] identifier[update_desc] [ identifier[SOURCE_TYPE] ]== identifier[SOURCE_TYPE_DOC] : identifier[row] [ identifier[update_desc] [ identifier[DEST_FIELD] ]]= identifier[input] [ identifier[update_desc] [ identifier[SOURCE_COL] ]][ identifier[update_desc] [ identifier[SOURCE_FIELD] ]] keyword[elif] identifier[update_desc] [ identifier[SOURCE_TYPE] ]== identifier[SOURCE_TYPE_CONST] : identifier[row] [ identifier[update_desc] [ identifier[DEST_FIELD] ]]= identifier[update_desc] [ identifier[CONST_VALUE] ] keyword[return] identifier[res]
def update_col(input, **params): """ Updates document with value from another document/collection/constant :param input: :param params: :return: """ PARAM_TARGET = 'target' PARAM_UPDATE = 'update' SOURCE_TYPE = 'src.type' SOURCE_COL = 'src.col' SOURCE_FIELD = 'src.field' DEST_FIELD = 'dest.field' SOURCE_TYPE_DOC = 'doc' SOURCE_TYPE_CONST = 'const' CONST_VALUE = 'const.value' update_list = params.get(PARAM_UPDATE) res = input[params.get(PARAM_TARGET)] if PARAM_TARGET in params else input for row in res: for update_desc in update_list: if update_desc[SOURCE_TYPE] == SOURCE_TYPE_DOC: row[update_desc[DEST_FIELD]] = input[update_desc[SOURCE_COL]][update_desc[SOURCE_FIELD]] # depends on [control=['if'], data=[]] elif update_desc[SOURCE_TYPE] == SOURCE_TYPE_CONST: row[update_desc[DEST_FIELD]] = update_desc[CONST_VALUE] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['update_desc']] # depends on [control=['for'], data=['row']] return res
def get_run_number():
    """
    Get a run number for this execution of the model system, for
    identifying the output hdf5 files).

    The current number is read from a ``RUNNUM`` file in ``$DATA_HOME``
    (defaulting to the current directory); the file is then rewritten
    with the next number for the following run.

    Returns
    -------
    The integer number for this run of the model system.
    """
    runnum_path = os.path.join(os.getenv('DATA_HOME', "."), 'RUNNUM')
    try:
        # Context manager guarantees the handle is closed even on error.
        with open(runnum_path, 'r') as f:
            num = int(f.read())
    except (IOError, OSError, ValueError):
        # Missing or unreadable/corrupt RUNNUM file: start over at run 1.
        num = 1
    with open(runnum_path, 'w') as f:
        f.write(str(num + 1))
    return num
def function[get_run_number, parameter[]]: constant[ Get a run number for this execution of the model system, for identifying the output hdf5 files). Returns ------- The integer number for this run of the model system. ] <ast.Try object at 0x7da20c76c400> variable[f] assign[=] call[name[open], parameter[call[name[os].path.join, parameter[call[name[os].getenv, parameter[constant[DATA_HOME], constant[.]]], constant[RUNNUM]]], constant[w]]] call[name[f].write, parameter[call[name[str], parameter[binary_operation[name[num] + constant[1]]]]]] call[name[f].close, parameter[]] return[name[num]]
keyword[def] identifier[get_run_number] (): literal[string] keyword[try] : identifier[f] = identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getenv] ( literal[string] , literal[string] ), literal[string] ), literal[string] ) identifier[num] = identifier[int] ( identifier[f] . identifier[read] ()) identifier[f] . identifier[close] () keyword[except] identifier[Exception] : identifier[num] = literal[int] identifier[f] = identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getenv] ( literal[string] , literal[string] ), literal[string] ), literal[string] ) identifier[f] . identifier[write] ( identifier[str] ( identifier[num] + literal[int] )) identifier[f] . identifier[close] () keyword[return] identifier[num]
def get_run_number(): """ Get a run number for this execution of the model system, for identifying the output hdf5 files). Returns ------- The integer number for this run of the model system. """ try: f = open(os.path.join(os.getenv('DATA_HOME', '.'), 'RUNNUM'), 'r') num = int(f.read()) f.close() # depends on [control=['try'], data=[]] except Exception: num = 1 # depends on [control=['except'], data=[]] f = open(os.path.join(os.getenv('DATA_HOME', '.'), 'RUNNUM'), 'w') f.write(str(num + 1)) f.close() return num
def ensure_compliance(self):
    """Ensures that the modules are not loaded.

    Checks the currently loaded apache modules, disables any that appear
    in ``self.modules``, and restarts apache once so the change takes
    effect.  Failures of the underlying commands are logged, not raised.
    """
    if not self.modules:
        return

    try:
        loaded_modules = self._get_loaded_modules()
        non_compliant_modules = []
        for module in self.modules:
            if module in loaded_modules:
                log("Module '%s' is enabled but should not be." %
                    (module), level=INFO)
                non_compliant_modules.append(module)

        # Nothing out of compliance - avoid a needless apache restart.
        if not non_compliant_modules:
            return

        for module in non_compliant_modules:
            self._disable_module(module)
        self._restart_apache()
    except subprocess.CalledProcessError as e:
        log('Error occurred auditing apache module compliance. '
            'This may have been already reported. '
            'Output is: %s' % e.output, level=ERROR)
def function[ensure_compliance, parameter[self]]: constant[Ensures that the modules are not loaded.] if <ast.UnaryOp object at 0x7da18f813fd0> begin[:] return[None] <ast.Try object at 0x7da18f810850>
keyword[def] identifier[ensure_compliance] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[modules] : keyword[return] keyword[try] : identifier[loaded_modules] = identifier[self] . identifier[_get_loaded_modules] () identifier[non_compliant_modules] =[] keyword[for] identifier[module] keyword[in] identifier[self] . identifier[modules] : keyword[if] identifier[module] keyword[in] identifier[loaded_modules] : identifier[log] ( literal[string] % ( identifier[module] ), identifier[level] = identifier[INFO] ) identifier[non_compliant_modules] . identifier[append] ( identifier[module] ) keyword[if] identifier[len] ( identifier[non_compliant_modules] )== literal[int] : keyword[return] keyword[for] identifier[module] keyword[in] identifier[non_compliant_modules] : identifier[self] . identifier[_disable_module] ( identifier[module] ) identifier[self] . identifier[_restart_apache] () keyword[except] identifier[subprocess] . identifier[CalledProcessError] keyword[as] identifier[e] : identifier[log] ( literal[string] literal[string] literal[string] % identifier[e] . identifier[output] , identifier[level] = identifier[ERROR] )
def ensure_compliance(self): """Ensures that the modules are not loaded.""" if not self.modules: return # depends on [control=['if'], data=[]] try: loaded_modules = self._get_loaded_modules() non_compliant_modules = [] for module in self.modules: if module in loaded_modules: log("Module '%s' is enabled but should not be." % module, level=INFO) non_compliant_modules.append(module) # depends on [control=['if'], data=['module']] # depends on [control=['for'], data=['module']] if len(non_compliant_modules) == 0: return # depends on [control=['if'], data=[]] for module in non_compliant_modules: self._disable_module(module) # depends on [control=['for'], data=['module']] self._restart_apache() # depends on [control=['try'], data=[]] except subprocess.CalledProcessError as e: log('Error occurred auditing apache module compliance. This may have been already reported. Output is: %s' % e.output, level=ERROR) # depends on [control=['except'], data=['e']]
def theta(v):
    """Neutrino direction in polar coordinates.

    Downgoing event: theta = 180deg

    Horizont: 90deg

    Upgoing: theta = 0

    Angles in radians.

    """
    # Promote a single vector to a 2D array so the column slice works
    # uniformly, then delegate to the dir_z-based computation.
    vectors = np.atleast_2d(v)
    return theta_separg(vectors[:, 2])
def function[theta, parameter[v]]: constant[Neutrino direction in polar coordinates. Downgoing event: theta = 180deg Horizont: 90deg Upgoing: theta = 0 Angles in radians. ] variable[v] assign[=] call[name[np].atleast_2d, parameter[name[v]]] variable[dir_z] assign[=] call[name[v]][tuple[[<ast.Slice object at 0x7da1b23499c0>, <ast.Constant object at 0x7da1b234ad40>]]] return[call[name[theta_separg], parameter[name[dir_z]]]]
keyword[def] identifier[theta] ( identifier[v] ): literal[string] identifier[v] = identifier[np] . identifier[atleast_2d] ( identifier[v] ) identifier[dir_z] = identifier[v] [:, literal[int] ] keyword[return] identifier[theta_separg] ( identifier[dir_z] )
def theta(v): """Neutrino direction in polar coordinates. Downgoing event: theta = 180deg Horizont: 90deg Upgoing: theta = 0 Angles in radians. """ v = np.atleast_2d(v) dir_z = v[:, 2] return theta_separg(dir_z)
def get_max_size(pool, num_option, item_length):
    """
    Calculate the max number of item that an option can stored in the pool
    at give time. This is to limit the pool size to POOL_SIZE

    Args:
        option_index (int): the index of the option to calculate the size for
        pool (dict): answer pool
        num_option (int): total number of options available for the question
        item_length (int): the length of the item

    Returns:
        int: the max number of items that `option_index` can have
    """
    # How many items of this length fit in the pool overall.
    capacity = POOL_SIZE / item_length

    # POOL_OPTION_MIN_SIZE slots are reserved for every option; on top of
    # that, count how far each option's current contents exceed 5 items.
    reserved = POOL_OPTION_MIN_SIZE * num_option
    overflow = sum(max(0, len(pool.get(index, {})) - 5)
                   for index in xrange(num_option))

    return int(capacity - (reserved + overflow))
def function[get_max_size, parameter[pool, num_option, item_length]]: constant[ Calculate the max number of item that an option can stored in the pool at give time. This is to limit the pool size to POOL_SIZE Args: option_index (int): the index of the option to calculate the size for pool (dict): answer pool num_option (int): total number of options available for the question item_length (int): the length of the item Returns: int: the max number of items that `option_index` can have ] variable[max_items] assign[=] binary_operation[name[POOL_SIZE] / name[item_length]] variable[existing] assign[=] binary_operation[binary_operation[name[POOL_OPTION_MIN_SIZE] * name[num_option]] + call[name[sum], parameter[<ast.ListComp object at 0x7da1b0d18610>]]] return[call[name[int], parameter[binary_operation[name[max_items] - name[existing]]]]]
keyword[def] identifier[get_max_size] ( identifier[pool] , identifier[num_option] , identifier[item_length] ): literal[string] identifier[max_items] = identifier[POOL_SIZE] / identifier[item_length] identifier[existing] = identifier[POOL_OPTION_MIN_SIZE] * identifier[num_option] + identifier[sum] ([ identifier[max] ( literal[int] , identifier[len] ( identifier[pool] . identifier[get] ( identifier[i] ,{}))- literal[int] ) keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[num_option] )]) keyword[return] identifier[int] ( identifier[max_items] - identifier[existing] )
def get_max_size(pool, num_option, item_length): """ Calculate the max number of item that an option can stored in the pool at give time. This is to limit the pool size to POOL_SIZE Args: option_index (int): the index of the option to calculate the size for pool (dict): answer pool num_option (int): total number of options available for the question item_length (int): the length of the item Returns: int: the max number of items that `option_index` can have """ max_items = POOL_SIZE / item_length # existing items plus the reserved for min size. If there is an option has 1 item, POOL_OPTION_MIN_SIZE - 1 space # is reserved. existing = POOL_OPTION_MIN_SIZE * num_option + sum([max(0, len(pool.get(i, {})) - 5) for i in xrange(num_option)]) return int(max_items - existing)
def schedule(self):
    """Start scheduling jobs.

    In async mode the scheduler and its event listener are started as
    workers; otherwise scheduling runs synchronously.
    """
    if not self.async_mode:
        self._scheduler.schedule()
        return
    self._scheduler.start()
    self._listener.start()
def function[schedule, parameter[self]]: constant[Start scheduling jobs.] if name[self].async_mode begin[:] call[name[self]._scheduler.start, parameter[]] call[name[self]._listener.start, parameter[]]
keyword[def] identifier[schedule] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[async_mode] : identifier[self] . identifier[_scheduler] . identifier[start] () identifier[self] . identifier[_listener] . identifier[start] () keyword[else] : identifier[self] . identifier[_scheduler] . identifier[schedule] ()
def schedule(self): """Start scheduling jobs.""" if self.async_mode: self._scheduler.start() self._listener.start() # depends on [control=['if'], data=[]] else: self._scheduler.schedule()
def sort_by_ref(vcf_file, data):
    """Sort a VCF file by genome reference and position, adding contig information.

    :param vcf_file: path to the input VCF; may be plain or gzipped
        (gzipped inputs are detected by the ``vcf.gz`` suffix)
    :param data: sample dictionary providing the reference file and the
        program configuration under ``data["config"]``
    :returns: path to the bgzipped and indexed sorted output VCF
    """
    out_file = "%s-prep.vcf.gz" % utils.splitext_plus(vcf_file)[0]
    # Skip the work when an up-to-date sorted output already exists.
    if not utils.file_uptodate(out_file, vcf_file):
        with file_transaction(data, out_file) as tx_out_file:
            # Build a header file of ##contig lines from the reference
            # genome's contig names and sizes.
            header_file = "%s-header.txt" % utils.splitext_plus(tx_out_file)[0]
            with open(header_file, "w") as out_handle:
                for region in ref.file_contigs(dd.get_ref_file(data), data["config"]):
                    out_handle.write("##contig=<ID=%s,length=%s>\n" % (region.name, region.size))
            cat_cmd = "zcat" if vcf_file.endswith("vcf.gz") else "cat"
            # Pipeline: strip any existing ##contig headers, attach the
            # freshly generated ones, then sort in reference order with vt.
            # NOTE: cmd.format(**locals()) depends on the exact local names
            # above (cat_cmd, vcf_file, header_file, tx_out_file).
            cmd = ("{cat_cmd} {vcf_file} | grep -v ^##contig | bcftools annotate -h {header_file} | "
                   "vt sort -m full -o {tx_out_file} -")
            with utils.chdir(os.path.dirname(tx_out_file)):
                do.run(cmd.format(**locals()), "Sort VCF by reference")
    return bgzip_and_index(out_file, data["config"])
def function[sort_by_ref, parameter[vcf_file, data]]: constant[Sort a VCF file by genome reference and position, adding contig information. ] variable[out_file] assign[=] binary_operation[constant[%s-prep.vcf.gz] <ast.Mod object at 0x7da2590d6920> call[call[name[utils].splitext_plus, parameter[name[vcf_file]]]][constant[0]]] if <ast.UnaryOp object at 0x7da1b1897100> begin[:] with call[name[file_transaction], parameter[name[data], name[out_file]]] begin[:] variable[header_file] assign[=] binary_operation[constant[%s-header.txt] <ast.Mod object at 0x7da2590d6920> call[call[name[utils].splitext_plus, parameter[name[tx_out_file]]]][constant[0]]] with call[name[open], parameter[name[header_file], constant[w]]] begin[:] for taget[name[region]] in starred[call[name[ref].file_contigs, parameter[call[name[dd].get_ref_file, parameter[name[data]]], call[name[data]][constant[config]]]]] begin[:] call[name[out_handle].write, parameter[binary_operation[constant[##contig=<ID=%s,length=%s> ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b1833880>, <ast.Attribute object at 0x7da1b1831e70>]]]]] variable[cat_cmd] assign[=] <ast.IfExp object at 0x7da1b18336a0> variable[cmd] assign[=] constant[{cat_cmd} {vcf_file} | grep -v ^##contig | bcftools annotate -h {header_file} | vt sort -m full -o {tx_out_file} -] with call[name[utils].chdir, parameter[call[name[os].path.dirname, parameter[name[tx_out_file]]]]] begin[:] call[name[do].run, parameter[call[name[cmd].format, parameter[]], constant[Sort VCF by reference]]] return[call[name[bgzip_and_index], parameter[name[out_file], call[name[data]][constant[config]]]]]
keyword[def] identifier[sort_by_ref] ( identifier[vcf_file] , identifier[data] ): literal[string] identifier[out_file] = literal[string] % identifier[utils] . identifier[splitext_plus] ( identifier[vcf_file] )[ literal[int] ] keyword[if] keyword[not] identifier[utils] . identifier[file_uptodate] ( identifier[out_file] , identifier[vcf_file] ): keyword[with] identifier[file_transaction] ( identifier[data] , identifier[out_file] ) keyword[as] identifier[tx_out_file] : identifier[header_file] = literal[string] % identifier[utils] . identifier[splitext_plus] ( identifier[tx_out_file] )[ literal[int] ] keyword[with] identifier[open] ( identifier[header_file] , literal[string] ) keyword[as] identifier[out_handle] : keyword[for] identifier[region] keyword[in] identifier[ref] . identifier[file_contigs] ( identifier[dd] . identifier[get_ref_file] ( identifier[data] ), identifier[data] [ literal[string] ]): identifier[out_handle] . identifier[write] ( literal[string] %( identifier[region] . identifier[name] , identifier[region] . identifier[size] )) identifier[cat_cmd] = literal[string] keyword[if] identifier[vcf_file] . identifier[endswith] ( literal[string] ) keyword[else] literal[string] identifier[cmd] =( literal[string] literal[string] ) keyword[with] identifier[utils] . identifier[chdir] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[tx_out_file] )): identifier[do] . identifier[run] ( identifier[cmd] . identifier[format] (** identifier[locals] ()), literal[string] ) keyword[return] identifier[bgzip_and_index] ( identifier[out_file] , identifier[data] [ literal[string] ])
def sort_by_ref(vcf_file, data): """Sort a VCF file by genome reference and position, adding contig information. """ out_file = '%s-prep.vcf.gz' % utils.splitext_plus(vcf_file)[0] if not utils.file_uptodate(out_file, vcf_file): with file_transaction(data, out_file) as tx_out_file: header_file = '%s-header.txt' % utils.splitext_plus(tx_out_file)[0] with open(header_file, 'w') as out_handle: for region in ref.file_contigs(dd.get_ref_file(data), data['config']): out_handle.write('##contig=<ID=%s,length=%s>\n' % (region.name, region.size)) # depends on [control=['for'], data=['region']] # depends on [control=['with'], data=['out_handle']] cat_cmd = 'zcat' if vcf_file.endswith('vcf.gz') else 'cat' cmd = '{cat_cmd} {vcf_file} | grep -v ^##contig | bcftools annotate -h {header_file} | vt sort -m full -o {tx_out_file} -' with utils.chdir(os.path.dirname(tx_out_file)): do.run(cmd.format(**locals()), 'Sort VCF by reference') # depends on [control=['with'], data=[]] # depends on [control=['with'], data=['tx_out_file']] # depends on [control=['if'], data=[]] return bgzip_and_index(out_file, data['config'])
def _domain_event_tunable_cb(conn, domain, params, opaque):
    '''
    Domain tunable events handler
    '''
    # Forward the tunable parameters as event payload under the event tag
    # carried in the opaque callback data.
    payload = {'params': params}
    _salt_send_domain_event(opaque, conn, domain, opaque['event'], payload)
def function[_domain_event_tunable_cb, parameter[conn, domain, params, opaque]]: constant[ Domain tunable events handler ] call[name[_salt_send_domain_event], parameter[name[opaque], name[conn], name[domain], call[name[opaque]][constant[event]], dictionary[[<ast.Constant object at 0x7da20c7cbe80>], [<ast.Name object at 0x7da20c7c9fc0>]]]]
keyword[def] identifier[_domain_event_tunable_cb] ( identifier[conn] , identifier[domain] , identifier[params] , identifier[opaque] ): literal[string] identifier[_salt_send_domain_event] ( identifier[opaque] , identifier[conn] , identifier[domain] , identifier[opaque] [ literal[string] ],{ literal[string] : identifier[params] })
def _domain_event_tunable_cb(conn, domain, params, opaque): """ Domain tunable events handler """ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {'params': params})
def raise_on_wrong_settings(self):
    """
    Validates the configuration settings and raises RuntimeError on error

    """
    # The working directory and every include directory must exist.
    self.__ensure_dir_exists(self.working_directory, 'working directory')
    for include_dir in self.include_paths:
        self.__ensure_dir_exists(include_dir, 'include directory')
    # Only the two supported XML generators are accepted.
    if self.__xml_generator in ("castxml", "gccxml"):
        return
    raise RuntimeError(
        ('xml_generator("%s") should either be ' +
         '"castxml" or "gccxml".') % self.xml_generator)
def function[raise_on_wrong_settings, parameter[self]]: constant[ Validates the configuration settings and raises RuntimeError on error ] call[name[self].__ensure_dir_exists, parameter[name[self].working_directory, constant[working directory]]] for taget[name[idir]] in starred[name[self].include_paths] begin[:] call[name[self].__ensure_dir_exists, parameter[name[idir], constant[include directory]]] if compare[name[self].__xml_generator <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b13a4610>, <ast.Constant object at 0x7da1b13a7a00>]]] begin[:] variable[msg] assign[=] binary_operation[binary_operation[constant[xml_generator("%s") should either be ] + constant["castxml" or "gccxml".]] <ast.Mod object at 0x7da2590d6920> name[self].xml_generator] <ast.Raise object at 0x7da1b13a5750>
keyword[def] identifier[raise_on_wrong_settings] ( identifier[self] ): literal[string] identifier[self] . identifier[__ensure_dir_exists] ( identifier[self] . identifier[working_directory] , literal[string] ) keyword[for] identifier[idir] keyword[in] identifier[self] . identifier[include_paths] : identifier[self] . identifier[__ensure_dir_exists] ( identifier[idir] , literal[string] ) keyword[if] identifier[self] . identifier[__xml_generator] keyword[not] keyword[in] [ literal[string] , literal[string] ]: identifier[msg] =( literal[string] + literal[string] )% identifier[self] . identifier[xml_generator] keyword[raise] identifier[RuntimeError] ( identifier[msg] )
def raise_on_wrong_settings(self): """ Validates the configuration settings and raises RuntimeError on error """ self.__ensure_dir_exists(self.working_directory, 'working directory') for idir in self.include_paths: self.__ensure_dir_exists(idir, 'include directory') # depends on [control=['for'], data=['idir']] if self.__xml_generator not in ['castxml', 'gccxml']: msg = ('xml_generator("%s") should either be ' + '"castxml" or "gccxml".') % self.xml_generator raise RuntimeError(msg) # depends on [control=['if'], data=[]]
def check_backends(self, service_id, version_number):
    """Performs a health check against each backend in version.

    If the backend has a specific type of healthcheck, that one is
    performed, otherwise a HEAD request to / is performed.

    The first item is the details on the Backend itself. The second item
    is details of the specific HTTP request performed as a health check.
    The third item is the response details."""
    path = "/service/%s/version/%d/backend/check_all" % (
        service_id, version_number)
    # TODO: Use a strong-typed class for output?
    return self._fetch(path)
def function[check_backends, parameter[self, service_id, version_number]]: constant[Performs a health check against each backend in version. If the backend has a specific type of healthcheck, that one is performed, otherwise a HEAD request to / is performed. The first item is the details on the Backend itself. The second item is details of the specific HTTP request performed as a health check. The third item is the response details.] variable[content] assign[=] call[name[self]._fetch, parameter[binary_operation[constant[/service/%s/version/%d/backend/check_all] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0f122c0>, <ast.Name object at 0x7da1b0f11630>]]]]] return[name[content]]
keyword[def] identifier[check_backends] ( identifier[self] , identifier[service_id] , identifier[version_number] ): literal[string] identifier[content] = identifier[self] . identifier[_fetch] ( literal[string] %( identifier[service_id] , identifier[version_number] )) keyword[return] identifier[content]
def check_backends(self, service_id, version_number): """Performs a health check against each backend in version. If the backend has a specific type of healthcheck, that one is performed, otherwise a HEAD request to / is performed. The first item is the details on the Backend itself. The second item is details of the specific HTTP request performed as a health check. The third item is the response details.""" content = self._fetch('/service/%s/version/%d/backend/check_all' % (service_id, version_number)) # TODO: Use a strong-typed class for output? return content
def colorbar(fig, ax, im,
             width=0.05, height=1.0,
             hoffset=0.01, voffset=0.0,
             orientation='vertical'):
    '''
    draw colorbar without resizing the axes object to make room

    kwargs:
    ::

        fig : matplotlib.figure.Figure
        ax : matplotlib.axes.AxesSubplot
        im : matplotlib.image.AxesImage
        width : float, colorbar width in fraction of ax width
        height : float, colorbar height in fraction of ax height
        hoffset : float, horizontal spacing to main axes in fraction of width
        voffset : float, vertical spacing to main axis in fraction of height
        orientation : str, 'horizontal' or 'vertical'

    return:
    ::

        object : colorbar handle

    '''
    # Bounds of the parent axes in figure coordinates: [x0, y0, w, h].
    # (The original computed this twice; the duplicate was removed.)
    rect = np.array(ax.get_position().bounds)
    # Build the colorbar axes rectangle just outside the parent axes,
    # scaled relative to the parent's width/height.
    caxrect = [0] * 4
    caxrect[0] = rect[0] + rect[2] + hoffset * rect[2]  # left edge
    caxrect[1] = rect[1] + voffset * rect[3]            # bottom edge
    caxrect[2] = rect[2] * width                        # width
    caxrect[3] = rect[3] * height                       # height
    # Adding a dedicated axes avoids fig.colorbar() stealing space from ax.
    cax = fig.add_axes(caxrect)
    cb = fig.colorbar(im, cax=cax, orientation=orientation)
    return cb
def function[colorbar, parameter[fig, ax, im, width, height, hoffset, voffset, orientation]]: constant[ draw colorbar without resizing the axes object to make room kwargs: :: fig : matplotlib.figure.Figure ax : matplotlib.axes.AxesSubplot im : matplotlib.image.AxesImage width : float, colorbar width in fraction of ax width height : float, colorbar height in fraction of ax height hoffset : float, horizontal spacing to main axes in fraction of width voffset : float, vertical spacing to main axis in fraction of height orientation : str, 'horizontal' or 'vertical' return: :: object : colorbar handle ] variable[rect] assign[=] call[name[np].array, parameter[call[name[ax].get_position, parameter[]].bounds]] variable[rect] assign[=] call[name[np].array, parameter[call[name[ax].get_position, parameter[]].bounds]] variable[caxrect] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b0b56590>]] * constant[4]] call[name[caxrect]][constant[0]] assign[=] binary_operation[binary_operation[call[name[rect]][constant[0]] + call[name[rect]][constant[2]]] + binary_operation[name[hoffset] * call[name[rect]][constant[2]]]] call[name[caxrect]][constant[1]] assign[=] binary_operation[call[name[rect]][constant[1]] + binary_operation[name[voffset] * call[name[rect]][constant[3]]]] call[name[caxrect]][constant[2]] assign[=] binary_operation[call[name[rect]][constant[2]] * name[width]] call[name[caxrect]][constant[3]] assign[=] binary_operation[call[name[rect]][constant[3]] * name[height]] variable[cax] assign[=] call[name[fig].add_axes, parameter[name[caxrect]]] variable[cb] assign[=] call[name[fig].colorbar, parameter[name[im]]] return[name[cb]]
keyword[def] identifier[colorbar] ( identifier[fig] , identifier[ax] , identifier[im] , identifier[width] = literal[int] , identifier[height] = literal[int] , identifier[hoffset] = literal[int] , identifier[voffset] = literal[int] , identifier[orientation] = literal[string] ): literal[string] identifier[rect] = identifier[np] . identifier[array] ( identifier[ax] . identifier[get_position] (). identifier[bounds] ) identifier[rect] = identifier[np] . identifier[array] ( identifier[ax] . identifier[get_position] (). identifier[bounds] ) identifier[caxrect] =[ literal[int] ]* literal[int] identifier[caxrect] [ literal[int] ]= identifier[rect] [ literal[int] ]+ identifier[rect] [ literal[int] ]+ identifier[hoffset] * identifier[rect] [ literal[int] ] identifier[caxrect] [ literal[int] ]= identifier[rect] [ literal[int] ]+ identifier[voffset] * identifier[rect] [ literal[int] ] identifier[caxrect] [ literal[int] ]= identifier[rect] [ literal[int] ]* identifier[width] identifier[caxrect] [ literal[int] ]= identifier[rect] [ literal[int] ]* identifier[height] identifier[cax] = identifier[fig] . identifier[add_axes] ( identifier[caxrect] ) identifier[cb] = identifier[fig] . identifier[colorbar] ( identifier[im] , identifier[cax] = identifier[cax] , identifier[orientation] = identifier[orientation] ) keyword[return] identifier[cb]
def colorbar(fig, ax, im, width=0.05, height=1.0, hoffset=0.01, voffset=0.0, orientation='vertical'): """ draw colorbar without resizing the axes object to make room kwargs: :: fig : matplotlib.figure.Figure ax : matplotlib.axes.AxesSubplot im : matplotlib.image.AxesImage width : float, colorbar width in fraction of ax width height : float, colorbar height in fraction of ax height hoffset : float, horizontal spacing to main axes in fraction of width voffset : float, vertical spacing to main axis in fraction of height orientation : str, 'horizontal' or 'vertical' return: :: object : colorbar handle """ rect = np.array(ax.get_position().bounds) rect = np.array(ax.get_position().bounds) caxrect = [0] * 4 caxrect[0] = rect[0] + rect[2] + hoffset * rect[2] caxrect[1] = rect[1] + voffset * rect[3] caxrect[2] = rect[2] * width caxrect[3] = rect[3] * height cax = fig.add_axes(caxrect) cb = fig.colorbar(im, cax=cax, orientation=orientation) return cb
def _build_url(self):
    """Build url based on searching by date or by show."""
    # Positional substitution into SEARCH_URL: base, category query,
    # then the date parts held on the instance.
    return SEARCH_URL.format(
        BASE_URL,
        self.category + ' ratings',
        self.day,
        self.year,
        self.month,
    )
def function[_build_url, parameter[self]]: constant[Build url based on searching by date or by show.] variable[url_params] assign[=] list[[<ast.Name object at 0x7da1b271e0b0>, <ast.BinOp object at 0x7da1b271f1f0>, <ast.Attribute object at 0x7da1b271ea40>, <ast.Attribute object at 0x7da1b271f1c0>, <ast.Attribute object at 0x7da1b271e4d0>]] return[call[name[SEARCH_URL].format, parameter[<ast.Starred object at 0x7da1b271e050>]]]
keyword[def] identifier[_build_url] ( identifier[self] ): literal[string] identifier[url_params] =[ identifier[BASE_URL] , identifier[self] . identifier[category] + literal[string] , identifier[self] . identifier[day] , identifier[self] . identifier[year] , identifier[self] . identifier[month] ] keyword[return] identifier[SEARCH_URL] . identifier[format] (* identifier[url_params] )
def _build_url(self): """Build url based on searching by date or by show.""" url_params = [BASE_URL, self.category + ' ratings', self.day, self.year, self.month] return SEARCH_URL.format(*url_params)
def iteritems(self):
    "x.iteritems() -> an iterator over the (key, value) items of x in sorted order"
    # A child stored as an integer is a (pruned) hash value; otherwise it
    # is a nested subtree node that must be recursed into.
    left_is_hash = isinstance(self.left, numbers.Integral)
    right_is_hash = isinstance(self.right, numbers.Integral)
    if all([left_is_hash, right_is_hash]) or self.right is None:
        # Both children are plain hashes (or there is no right child):
        # yield them directly, skipping whichever side is pruned.
        if all((self.size>=1, self.length, self.prune is not self.LEFT_NODE)):
            yield(0, self.left)
        if all((self.size>=2, self.length, self.prune is not self.RIGHT_NODE)):
            yield(1, self.right)
    if self.left is not None and not left_is_hash:
        # Recurse into the left subtree; its indices pass through unchanged.
        for idx,hash_ in self.left.iteritems():
            yield(idx, hash_)
    if self.right is not None and not right_is_hash:
        # Right-subtree indices are shifted by the left subtree's span —
        # apparently the largest power of two not exceeding size-1
        # (TODO confirm against the tree's layout invariant).
        offset = 2 ** ((self.size-1).bit_length() - 1)
        for idx,hash_ in self.right.iteritems():
            yield(offset+idx, hash_)
def function[iteritems, parameter[self]]: constant[x.iteritems() -> an iterator over the (key, value) items of x in sorted order] variable[left_is_hash] assign[=] call[name[isinstance], parameter[name[self].left, name[numbers].Integral]] variable[right_is_hash] assign[=] call[name[isinstance], parameter[name[self].right, name[numbers].Integral]] if <ast.BoolOp object at 0x7da1b055b6d0> begin[:] if call[name[all], parameter[tuple[[<ast.Compare object at 0x7da1b055b700>, <ast.Attribute object at 0x7da1b055b8b0>, <ast.Compare object at 0x7da1b0559ff0>]]]] begin[:] <ast.Yield object at 0x7da1b0559bd0> if call[name[all], parameter[tuple[[<ast.Compare object at 0x7da1b0559000>, <ast.Attribute object at 0x7da1b0558070>, <ast.Compare object at 0x7da1b05595a0>]]]] begin[:] <ast.Yield object at 0x7da1b05599c0> if <ast.BoolOp object at 0x7da1b055b340> begin[:] for taget[tuple[[<ast.Name object at 0x7da1b055a4a0>, <ast.Name object at 0x7da1b055ae60>]]] in starred[call[name[self].left.iteritems, parameter[]]] begin[:] <ast.Yield object at 0x7da1b0559c60> if <ast.BoolOp object at 0x7da1b0558340> begin[:] variable[offset] assign[=] binary_operation[constant[2] ** binary_operation[call[binary_operation[name[self].size - constant[1]].bit_length, parameter[]] - constant[1]]] for taget[tuple[[<ast.Name object at 0x7da1b055b850>, <ast.Name object at 0x7da1b055b640>]]] in starred[call[name[self].right.iteritems, parameter[]]] begin[:] <ast.Yield object at 0x7da1b055abc0>
keyword[def] identifier[iteritems] ( identifier[self] ): literal[string] identifier[left_is_hash] = identifier[isinstance] ( identifier[self] . identifier[left] , identifier[numbers] . identifier[Integral] ) identifier[right_is_hash] = identifier[isinstance] ( identifier[self] . identifier[right] , identifier[numbers] . identifier[Integral] ) keyword[if] identifier[all] ([ identifier[left_is_hash] , identifier[right_is_hash] ]) keyword[or] identifier[self] . identifier[right] keyword[is] keyword[None] : keyword[if] identifier[all] (( identifier[self] . identifier[size] >= literal[int] , identifier[self] . identifier[length] , identifier[self] . identifier[prune] keyword[is] keyword[not] identifier[self] . identifier[LEFT_NODE] )): keyword[yield] ( literal[int] , identifier[self] . identifier[left] ) keyword[if] identifier[all] (( identifier[self] . identifier[size] >= literal[int] , identifier[self] . identifier[length] , identifier[self] . identifier[prune] keyword[is] keyword[not] identifier[self] . identifier[RIGHT_NODE] )): keyword[yield] ( literal[int] , identifier[self] . identifier[right] ) keyword[if] identifier[self] . identifier[left] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[left_is_hash] : keyword[for] identifier[idx] , identifier[hash_] keyword[in] identifier[self] . identifier[left] . identifier[iteritems] (): keyword[yield] ( identifier[idx] , identifier[hash_] ) keyword[if] identifier[self] . identifier[right] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[right_is_hash] : identifier[offset] = literal[int] **(( identifier[self] . identifier[size] - literal[int] ). identifier[bit_length] ()- literal[int] ) keyword[for] identifier[idx] , identifier[hash_] keyword[in] identifier[self] . identifier[right] . identifier[iteritems] (): keyword[yield] ( identifier[offset] + identifier[idx] , identifier[hash_] )
def iteritems(self): """x.iteritems() -> an iterator over the (key, value) items of x in sorted order""" left_is_hash = isinstance(self.left, numbers.Integral) right_is_hash = isinstance(self.right, numbers.Integral) if all([left_is_hash, right_is_hash]) or self.right is None: if all((self.size >= 1, self.length, self.prune is not self.LEFT_NODE)): yield (0, self.left) # depends on [control=['if'], data=[]] if all((self.size >= 2, self.length, self.prune is not self.RIGHT_NODE)): yield (1, self.right) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if self.left is not None and (not left_is_hash): for (idx, hash_) in self.left.iteritems(): yield (idx, hash_) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] if self.right is not None and (not right_is_hash): offset = 2 ** ((self.size - 1).bit_length() - 1) for (idx, hash_) in self.right.iteritems(): yield (offset + idx, hash_) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
def add_delete(self, value):
    """Delete a tag or populator by value - these are processed before upserts.

    The value is stripped of surrounding whitespace and lower-cased for
    matching; the stripped original spelling is remembered in
    ``lower_val_to_val``.

    :param value: tag/populator value to delete; must be non-empty after
        stripping
    :raises ValueError: if the stripped value is empty
    """
    value = value.strip()
    v = value.lower()
    # Validate BEFORE touching any state: the original recorded the
    # lower->original mapping first, leaving a '' entry behind on error.
    if not v:
        raise ValueError("Invalid value for delete. Value is empty.")
    self.lower_val_to_val[v] = value
    self.deletes.add(v)
def function[add_delete, parameter[self, value]]: constant[Delete a tag or populator by value - these are processed before upserts] variable[value] assign[=] call[name[value].strip, parameter[]] variable[v] assign[=] call[name[value].lower, parameter[]] call[name[self].lower_val_to_val][name[v]] assign[=] name[value] if compare[call[name[len], parameter[name[v]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b2555e10> call[name[self].deletes.add, parameter[name[v]]]
keyword[def] identifier[add_delete] ( identifier[self] , identifier[value] ): literal[string] identifier[value] = identifier[value] . identifier[strip] () identifier[v] = identifier[value] . identifier[lower] () identifier[self] . identifier[lower_val_to_val] [ identifier[v] ]= identifier[value] keyword[if] identifier[len] ( identifier[v] )== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[self] . identifier[deletes] . identifier[add] ( identifier[v] )
def add_delete(self, value): """Delete a tag or populator by value - these are processed before upserts""" value = value.strip() v = value.lower() self.lower_val_to_val[v] = value if len(v) == 0: raise ValueError('Invalid value for delete. Value is empty.') # depends on [control=['if'], data=[]] self.deletes.add(v)
def _unit(g):
    """Applies the UNIT rule to 'g' (see top comment).

    Repeatedly removes non-terminal unit rules until none remain.
    """
    while True:
        rule = get_any_nt_unit_rule(g)
        if not rule:
            break
        g = _remove_unit_rule(g, rule)
    return g
def function[_unit, parameter[g]]: constant[Applies the UNIT rule to 'g' (see top comment).] variable[nt_unit_rule] assign[=] call[name[get_any_nt_unit_rule], parameter[name[g]]] while name[nt_unit_rule] begin[:] variable[g] assign[=] call[name[_remove_unit_rule], parameter[name[g], name[nt_unit_rule]]] variable[nt_unit_rule] assign[=] call[name[get_any_nt_unit_rule], parameter[name[g]]] return[name[g]]
keyword[def] identifier[_unit] ( identifier[g] ): literal[string] identifier[nt_unit_rule] = identifier[get_any_nt_unit_rule] ( identifier[g] ) keyword[while] identifier[nt_unit_rule] : identifier[g] = identifier[_remove_unit_rule] ( identifier[g] , identifier[nt_unit_rule] ) identifier[nt_unit_rule] = identifier[get_any_nt_unit_rule] ( identifier[g] ) keyword[return] identifier[g]
def _unit(g): """Applies the UNIT rule to 'g' (see top comment).""" nt_unit_rule = get_any_nt_unit_rule(g) while nt_unit_rule: g = _remove_unit_rule(g, nt_unit_rule) nt_unit_rule = get_any_nt_unit_rule(g) # depends on [control=['while'], data=[]] return g
def main():
    """
    NAME
        di_vgp.py
    DESCRIPTION
      converts declination/inclination to virtual geomagnetic pole

    SYNTAX
        di_vgp.py [-h] [options]

    OPTIONS
        -h prints help message and quits
        -i interactive data entry
        -f FILE to specify intput file
        -F FILE to specify output file
        <filename to read/write from/to standard input

    INPUT
      for file entry:
        D I SLAT SLON
      where:
         D: declination
         I: inclination
         SLAT: site latitude (positive north)
         SLON: site longitude (positive east)

    OUTPUT
       PLON PLAT
       where:
           PLAT: pole latitude
           PLON: pole longitude (positive east)
    """
    if '-h' in sys.argv:
        print(main.__doc__)
        sys.exit()
    # Optional output file; out == '' means "write to standard output".
    if '-F' in sys.argv:
        ind = sys.argv.index('-F')
        out = open(sys.argv[ind + 1], 'w')
    else:
        out = ''
    if '-i' in sys.argv:  # interactive entry, one record at a time
        a95 = 0
        while True:
            try:
                Dec = float(input("Input Declination: <cntrl-D to quit> "))
                Inc = float(input("Input Inclination: "))
                slat = float(input("Input Site Latitude: "))
                slong = float(input("Input Site Longitude: "))
                output = pmag.dia_vgp(Dec, Inc, a95, slat, slong)
                print('%7.1f %7.1f' % (output[0], output[1]))
            # Ctrl-D (EOFError), a non-numeric entry (ValueError) or
            # Ctrl-C all terminate the session (was a bare `except:`).
            except (EOFError, ValueError, KeyboardInterrupt):
                print("\n Good-bye\n")
                sys.exit()
    elif '-f' in sys.argv:  # read records from a named file
        ind = sys.argv.index('-f')
        data = numpy.loadtxt(sys.argv[ind + 1])
    else:  # read records from standard input
        # `numpy.float` was removed in NumPy 1.24; the builtin is equivalent.
        data = numpy.loadtxt(sys.stdin, dtype=float)
    if len(data.shape) > 1:  # 2-D array: many records
        N = data.shape[0]
        if data.shape[1] == 4:
            # Only dec, inc, site lat, site lon given: insert alpha95=0.
            cols = data.transpose()
            inlist = numpy.array([cols[0], cols[1], numpy.zeros(N),
                                  cols[2], cols[3]]).transpose()
        else:
            # Assume alpha95 is already present as the third column
            # (the original left `inlist` undefined here -> NameError).
            inlist = data
        output = pmag.dia_vgp(inlist)
        for k in range(N):
            if out == '':
                print('%7.1f %7.1f' % (output[0][k], output[1][k]))
            else:
                out.write('%7.1f %7.1f\n' % (output[0][k], output[1][k]))
    else:  # single line of data
        if len(data) == 4:
            data = [data[0], data[1], 0, data[2], data[3]]
        output = pmag.dia_vgp(data)
        if out == '':  # spit to standard output
            print('%7.1f %7.1f' % (output[0], output[1]))
        else:  # write to file
            out.write('%7.1f %7.1f\n' % (output[0], output[1]))
def function[main, parameter[]]: constant[ NAME di_vgp.py DESCRIPTION converts declination/inclination to virtual geomagnetic pole SYNTAX di_vgp.py [-h] [options] OPTIONS -h prints help message and quits -i interactive data entry -f FILE to specify intput file -F FILE to specify output file <filename to read/write from/to standard input INPUT for file entry: D I SLAT SLON where: D: declination I: inclination SLAT: site latitude (positive north) SLON: site longitude (positive east) OUTPUT PLON PLAT where: PLAT: pole latitude PLON: pole longitude (positive east) ] if compare[constant[-h] in name[sys].argv] begin[:] call[name[print], parameter[name[main].__doc__]] call[name[sys].exit, parameter[]] if compare[constant[-F] in name[sys].argv] begin[:] variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-F]]] variable[ofile] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]] variable[out] assign[=] call[name[open], parameter[name[ofile], constant[w]]] if compare[constant[-i] in name[sys].argv] begin[:] variable[a95] assign[=] constant[0] while constant[1] begin[:] <ast.Try object at 0x7da1b04cbd30> if compare[call[name[len], parameter[name[data].shape]] greater[>] constant[1]] begin[:] variable[N] assign[=] call[name[data].shape][constant[0]] if compare[call[name[data].shape][constant[1]] equal[==] constant[4]] begin[:] variable[data] assign[=] call[name[data].transpose, parameter[]] variable[inlist] assign[=] call[call[name[numpy].array, parameter[list[[<ast.Subscript object at 0x7da1b0455f30>, <ast.Subscript object at 0x7da1b0455fc0>, <ast.Call object at 0x7da1b0456050>, <ast.Subscript object at 0x7da1b0456110>, <ast.Subscript object at 0x7da1b04561a0>]]]].transpose, parameter[]] variable[output] assign[=] call[name[pmag].dia_vgp, parameter[name[inlist]]] for taget[name[k]] in starred[call[name[range], parameter[name[N]]]] begin[:] if compare[name[out] equal[==] constant[]] begin[:] call[name[print], 
parameter[binary_operation[constant[%7.1f %7.1f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b04570a0>, <ast.Subscript object at 0x7da1b0457190>]]]]]
keyword[def] identifier[main] (): literal[string] keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[print] ( identifier[main] . identifier[__doc__] ) identifier[sys] . identifier[exit] () keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] ) identifier[ofile] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ] identifier[out] = identifier[open] ( identifier[ofile] , literal[string] ) keyword[else] : identifier[out] = literal[string] keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[a95] = literal[int] keyword[while] literal[int] : keyword[try] : identifier[ans] = identifier[input] ( literal[string] ) identifier[Dec] = identifier[float] ( identifier[ans] ) identifier[ans] = identifier[input] ( literal[string] ) identifier[Inc] = identifier[float] ( identifier[ans] ) identifier[ans] = identifier[input] ( literal[string] ) identifier[slat] = identifier[float] ( identifier[ans] ) identifier[ans] = identifier[input] ( literal[string] ) identifier[slong] = identifier[float] ( identifier[ans] ) identifier[output] = identifier[pmag] . identifier[dia_vgp] ( identifier[Dec] , identifier[Inc] , identifier[a95] , identifier[slat] , identifier[slong] ) identifier[print] ( literal[string] %( identifier[output] [ literal[int] ], identifier[output] [ literal[int] ])) keyword[except] : identifier[print] ( literal[string] ) identifier[sys] . identifier[exit] () keyword[elif] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] ) identifier[file] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ] identifier[data] = identifier[numpy] . identifier[loadtxt] ( identifier[file] ) keyword[else] : identifier[data] = identifier[numpy] . 
identifier[loadtxt] ( identifier[sys] . identifier[stdin] , identifier[dtype] = identifier[numpy] . identifier[float] ) keyword[if] identifier[len] ( identifier[data] . identifier[shape] )> literal[int] : identifier[N] = identifier[data] . identifier[shape] [ literal[int] ] keyword[if] identifier[data] . identifier[shape] [ literal[int] ]== literal[int] : identifier[data] = identifier[data] . identifier[transpose] () identifier[inlist] = identifier[numpy] . identifier[array] ([ identifier[data] [ literal[int] ], identifier[data] [ literal[int] ], identifier[numpy] . identifier[zeros] ( identifier[N] ), identifier[data] [ literal[int] ], identifier[data] [ literal[int] ]]). identifier[transpose] () identifier[output] = identifier[pmag] . identifier[dia_vgp] ( identifier[inlist] ) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[N] ): keyword[if] identifier[out] == literal[string] : identifier[print] ( literal[string] %( identifier[output] [ literal[int] ][ identifier[k] ], identifier[output] [ literal[int] ][ identifier[k] ])) keyword[else] : identifier[out] . identifier[write] ( literal[string] %( identifier[output] [ literal[int] ][ identifier[k] ], identifier[output] [ literal[int] ][ identifier[k] ])) keyword[else] : keyword[if] identifier[len] ( identifier[data] )== literal[int] : identifier[data] =[ identifier[data] [ literal[int] ], identifier[data] [ literal[int] ], literal[int] , identifier[data] [ literal[int] ], identifier[data] [ literal[int] ]] identifier[output] = identifier[pmag] . identifier[dia_vgp] ( identifier[data] ) keyword[if] identifier[out] == literal[string] : identifier[print] ( literal[string] %( identifier[output] [ literal[int] ], identifier[output] [ literal[int] ])) keyword[else] : identifier[out] . identifier[write] ( literal[string] %( identifier[output] [ literal[int] ], identifier[output] [ literal[int] ]))
def main(): """ NAME di_vgp.py DESCRIPTION converts declination/inclination to virtual geomagnetic pole SYNTAX di_vgp.py [-h] [options] OPTIONS -h prints help message and quits -i interactive data entry -f FILE to specify intput file -F FILE to specify output file <filename to read/write from/to standard input INPUT for file entry: D I SLAT SLON where: D: declination I: inclination SLAT: site latitude (positive north) SLON: site longitude (positive east) OUTPUT PLON PLAT where: PLAT: pole latitude PLON: pole longitude (positive east) """ if '-h' in sys.argv: print(main.__doc__) sys.exit() # depends on [control=['if'], data=[]] if '-F' in sys.argv: ind = sys.argv.index('-F') ofile = sys.argv[ind + 1] out = open(ofile, 'w') # depends on [control=['if'], data=[]] else: out = '' if '-i' in sys.argv: # if one is -i a95 = 0 while 1: try: ans = input('Input Declination: <cntrl-D to quit> ') Dec = float(ans) # assign input to Dec, after conversion to floating point ans = input('Input Inclination: ') Inc = float(ans) ans = input('Input Site Latitude: ') slat = float(ans) ans = input('Input Site Longitude: ') slong = float(ans) output = pmag.dia_vgp(Dec, Inc, a95, slat, slong) print('%7.1f %7.1f' % (output[0], output[1])) # depends on [control=['try'], data=[]] except: print('\n Good-bye\n') sys.exit() # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] elif '-f' in sys.argv: # input of file name ind = sys.argv.index('-f') file = sys.argv[ind + 1] data = numpy.loadtxt(file) # depends on [control=['if'], data=[]] else: # data = numpy.loadtxt(sys.stdin, dtype=numpy.float) # read from S/I if len(data.shape) > 1: # 2-D array N = data.shape[0] if data.shape[1] == 4: # only dec,inc,sitelat, site long -no alpha95 data = data.transpose() inlist = numpy.array([data[0], data[1], numpy.zeros(N), data[2], data[3]]).transpose() # depends on [control=['if'], data=[]] output = pmag.dia_vgp(inlist) for k in range(N): if 
out == '': print('%7.1f %7.1f' % (output[0][k], output[1][k])) # depends on [control=['if'], data=[]] else: out.write('%7.1f %7.1f\n' % (output[0][k], output[1][k])) # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]] else: # single line of data if len(data) == 4: data = [data[0], data[1], 0, data[2], data[3]] # depends on [control=['if'], data=[]] output = pmag.dia_vgp(data) if out == '': # spit to standard output print('%7.1f %7.1f' % (output[0], output[1])) # depends on [control=['if'], data=[]] else: # write to file out.write('%7.1f %7.1f\n' % (output[0], output[1]))
def battery_update(self, SYS_STATUS):
    '''update battery level'''
    # Copy the main flight-battery telemetry off the status message.
    self.battery_level = SYS_STATUS.battery_remaining
    self.voltage_level = SYS_STATUS.voltage_battery
    self.current_battery = SYS_STATUS.current_battery
    # Derive per-cell voltage (voltage scaled by 0.001 — presumably
    # millivolts to volts) when a cell count has been configured.
    cells = self.settings.numcells
    if cells != 0:
        self.per_cell = self.voltage_level * 0.001 / cells
def function[battery_update, parameter[self, SYS_STATUS]]: constant[update battery level] name[self].battery_level assign[=] name[SYS_STATUS].battery_remaining name[self].voltage_level assign[=] name[SYS_STATUS].voltage_battery name[self].current_battery assign[=] name[SYS_STATUS].current_battery if compare[name[self].settings.numcells not_equal[!=] constant[0]] begin[:] name[self].per_cell assign[=] binary_operation[binary_operation[name[self].voltage_level * constant[0.001]] / name[self].settings.numcells]
keyword[def] identifier[battery_update] ( identifier[self] , identifier[SYS_STATUS] ): literal[string] identifier[self] . identifier[battery_level] = identifier[SYS_STATUS] . identifier[battery_remaining] identifier[self] . identifier[voltage_level] = identifier[SYS_STATUS] . identifier[voltage_battery] identifier[self] . identifier[current_battery] = identifier[SYS_STATUS] . identifier[current_battery] keyword[if] identifier[self] . identifier[settings] . identifier[numcells] != literal[int] : identifier[self] . identifier[per_cell] =( identifier[self] . identifier[voltage_level] * literal[int] )/ identifier[self] . identifier[settings] . identifier[numcells]
def battery_update(self, SYS_STATUS): """update battery level""" # main flight battery self.battery_level = SYS_STATUS.battery_remaining self.voltage_level = SYS_STATUS.voltage_battery self.current_battery = SYS_STATUS.current_battery if self.settings.numcells != 0: self.per_cell = self.voltage_level * 0.001 / self.settings.numcells # depends on [control=['if'], data=[]]
def factorize(self):
    """ Factorize s.t. CUR = data

        Updated Values
        --------------
        .C : updated values for C.
        .U : updated values for U.
        .R : updated values for R.
    """
    # Sampling probabilities for rows and columns, then draw the
    # row/column index sets of the requested ranks.
    prow, pcol = self.sample_probability()
    self._rid = self.sample(self._rrank, prow)
    self._cid = self.sample(self._crank, pcol)
    # Each sampled row/column is weighted once.
    self._rcnt = np.ones(len(self._rid))
    self._ccnt = np.ones(len(self._cid))
    self.computeUCR()
def function[factorize, parameter[self]]: constant[ Factorize s.t. CUR = data Updated Values -------------- .C : updated values for C. .U : updated values for U. .R : updated values for R. ] <ast.List object at 0x7da1b03e3520> assign[=] call[name[self].sample_probability, parameter[]] name[self]._rid assign[=] call[name[self].sample, parameter[name[self]._rrank, name[prow]]] name[self]._cid assign[=] call[name[self].sample, parameter[name[self]._crank, name[pcol]]] name[self]._rcnt assign[=] call[name[np].ones, parameter[call[name[len], parameter[name[self]._rid]]]] name[self]._ccnt assign[=] call[name[np].ones, parameter[call[name[len], parameter[name[self]._cid]]]] call[name[self].computeUCR, parameter[]]
keyword[def] identifier[factorize] ( identifier[self] ): literal[string] [ identifier[prow] , identifier[pcol] ]= identifier[self] . identifier[sample_probability] () identifier[self] . identifier[_rid] = identifier[self] . identifier[sample] ( identifier[self] . identifier[_rrank] , identifier[prow] ) identifier[self] . identifier[_cid] = identifier[self] . identifier[sample] ( identifier[self] . identifier[_crank] , identifier[pcol] ) identifier[self] . identifier[_rcnt] = identifier[np] . identifier[ones] ( identifier[len] ( identifier[self] . identifier[_rid] )) identifier[self] . identifier[_ccnt] = identifier[np] . identifier[ones] ( identifier[len] ( identifier[self] . identifier[_cid] )) identifier[self] . identifier[computeUCR] ()
def factorize(self): """ Factorize s.t. CUR = data Updated Values -------------- .C : updated values for C. .U : updated values for U. .R : updated values for R. """ [prow, pcol] = self.sample_probability() self._rid = self.sample(self._rrank, prow) self._cid = self.sample(self._crank, pcol) self._rcnt = np.ones(len(self._rid)) self._ccnt = np.ones(len(self._cid)) self.computeUCR()
def clear(self):
    """Empties DEPQ. Performance: O(1)"""
    # Take the lock so concurrent users never observe a half-cleared
    # structure.
    with self.lock:
        for container in (self.data, self.items):
            container.clear()
def function[clear, parameter[self]]: constant[Empties DEPQ. Performance: O(1)] with name[self].lock begin[:] call[name[self].data.clear, parameter[]] call[name[self].items.clear, parameter[]]
keyword[def] identifier[clear] ( identifier[self] ): literal[string] keyword[with] identifier[self] . identifier[lock] : identifier[self] . identifier[data] . identifier[clear] () identifier[self] . identifier[items] . identifier[clear] ()
def clear(self): """Empties DEPQ. Performance: O(1)""" with self.lock: self.data.clear() self.items.clear() # depends on [control=['with'], data=[]]
def _convert_etree_element_to_queue(entry_element): ''' Converts entry element to queue object. The format of xml response for queue: <QueueDescription xmlns=\"http://schemas.microsoft.com/netservices/2010/10/servicebus/connect\"> <MaxSizeInBytes>10000</MaxSizeInBytes> <DefaultMessageTimeToLive>PT5M</DefaultMessageTimeToLive> <LockDuration>PT2M</LockDuration> <RequiresGroupedReceives>False</RequiresGroupedReceives> <SupportsDuplicateDetection>False</SupportsDuplicateDetection> ... </QueueDescription> ''' queue = Queue() # get node for each attribute in Queue class, if nothing found then the # response is not valid xml for Queue. invalid_queue = True queue_element = entry_element.find('./atom:content/sb:QueueDescription', _etree_sb_feed_namespaces) if queue_element is not None: mappings = [ ('LockDuration', 'lock_duration', None), ('MaxSizeInMegabytes', 'max_size_in_megabytes', int), ('RequiresDuplicateDetection', 'requires_duplicate_detection', _parse_bool), ('RequiresSession', 'requires_session', _parse_bool), ('DefaultMessageTimeToLive', 'default_message_time_to_live', None), ('DeadLetteringOnMessageExpiration', 'dead_lettering_on_message_expiration', _parse_bool), ('DuplicateDetectionHistoryTimeWindow', 'duplicate_detection_history_time_window', None), ('EnableBatchedOperations', 'enable_batched_operations', _parse_bool), ('MaxDeliveryCount', 'max_delivery_count', int), ('MessageCount', 'message_count', int), ('SizeInBytes', 'size_in_bytes', int), ] for mapping in mappings: if _read_etree_element(queue_element, mapping[0], queue, mapping[1], mapping[2]): invalid_queue = False if invalid_queue: raise AzureServiceBusResourceNotFound(_ERROR_QUEUE_NOT_FOUND) # extract id, updated and name value from feed entry and set them of queue. for name, value in _ETreeXmlToObject.get_entry_properties_from_element( entry_element, True).items(): setattr(queue, name, value) return queue
def function[_convert_etree_element_to_queue, parameter[entry_element]]: constant[ Converts entry element to queue object. The format of xml response for queue: <QueueDescription xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> <MaxSizeInBytes>10000</MaxSizeInBytes> <DefaultMessageTimeToLive>PT5M</DefaultMessageTimeToLive> <LockDuration>PT2M</LockDuration> <RequiresGroupedReceives>False</RequiresGroupedReceives> <SupportsDuplicateDetection>False</SupportsDuplicateDetection> ... </QueueDescription> ] variable[queue] assign[=] call[name[Queue], parameter[]] variable[invalid_queue] assign[=] constant[True] variable[queue_element] assign[=] call[name[entry_element].find, parameter[constant[./atom:content/sb:QueueDescription], name[_etree_sb_feed_namespaces]]] if compare[name[queue_element] is_not constant[None]] begin[:] variable[mappings] assign[=] list[[<ast.Tuple object at 0x7da18f58fac0>, <ast.Tuple object at 0x7da18f58ce50>, <ast.Tuple object at 0x7da18f58ed70>, <ast.Tuple object at 0x7da18f58e050>, <ast.Tuple object at 0x7da18f58dc60>, <ast.Tuple object at 0x7da18f58ca00>, <ast.Tuple object at 0x7da18f58cfd0>, <ast.Tuple object at 0x7da18f58d810>, <ast.Tuple object at 0x7da18f58e0e0>, <ast.Tuple object at 0x7da18f58fa30>, <ast.Tuple object at 0x7da18f58e590>]] for taget[name[mapping]] in starred[name[mappings]] begin[:] if call[name[_read_etree_element], parameter[name[queue_element], call[name[mapping]][constant[0]], name[queue], call[name[mapping]][constant[1]], call[name[mapping]][constant[2]]]] begin[:] variable[invalid_queue] assign[=] constant[False] if name[invalid_queue] begin[:] <ast.Raise object at 0x7da204345db0> for taget[tuple[[<ast.Name object at 0x7da204347430>, <ast.Name object at 0x7da204345150>]]] in starred[call[call[name[_ETreeXmlToObject].get_entry_properties_from_element, parameter[name[entry_element], constant[True]]].items, parameter[]]] begin[:] call[name[setattr], parameter[name[queue], name[name], 
name[value]]] return[name[queue]]
keyword[def] identifier[_convert_etree_element_to_queue] ( identifier[entry_element] ): literal[string] identifier[queue] = identifier[Queue] () identifier[invalid_queue] = keyword[True] identifier[queue_element] = identifier[entry_element] . identifier[find] ( literal[string] , identifier[_etree_sb_feed_namespaces] ) keyword[if] identifier[queue_element] keyword[is] keyword[not] keyword[None] : identifier[mappings] =[ ( literal[string] , literal[string] , keyword[None] ), ( literal[string] , literal[string] , identifier[int] ), ( literal[string] , literal[string] , identifier[_parse_bool] ), ( literal[string] , literal[string] , identifier[_parse_bool] ), ( literal[string] , literal[string] , keyword[None] ), ( literal[string] , literal[string] , identifier[_parse_bool] ), ( literal[string] , literal[string] , keyword[None] ), ( literal[string] , literal[string] , identifier[_parse_bool] ), ( literal[string] , literal[string] , identifier[int] ), ( literal[string] , literal[string] , identifier[int] ), ( literal[string] , literal[string] , identifier[int] ), ] keyword[for] identifier[mapping] keyword[in] identifier[mappings] : keyword[if] identifier[_read_etree_element] ( identifier[queue_element] , identifier[mapping] [ literal[int] ], identifier[queue] , identifier[mapping] [ literal[int] ], identifier[mapping] [ literal[int] ]): identifier[invalid_queue] = keyword[False] keyword[if] identifier[invalid_queue] : keyword[raise] identifier[AzureServiceBusResourceNotFound] ( identifier[_ERROR_QUEUE_NOT_FOUND] ) keyword[for] identifier[name] , identifier[value] keyword[in] identifier[_ETreeXmlToObject] . identifier[get_entry_properties_from_element] ( identifier[entry_element] , keyword[True] ). identifier[items] (): identifier[setattr] ( identifier[queue] , identifier[name] , identifier[value] ) keyword[return] identifier[queue]
def _convert_etree_element_to_queue(entry_element): """ Converts entry element to queue object. The format of xml response for queue: <QueueDescription xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> <MaxSizeInBytes>10000</MaxSizeInBytes> <DefaultMessageTimeToLive>PT5M</DefaultMessageTimeToLive> <LockDuration>PT2M</LockDuration> <RequiresGroupedReceives>False</RequiresGroupedReceives> <SupportsDuplicateDetection>False</SupportsDuplicateDetection> ... </QueueDescription> """ queue = Queue() # get node for each attribute in Queue class, if nothing found then the # response is not valid xml for Queue. invalid_queue = True queue_element = entry_element.find('./atom:content/sb:QueueDescription', _etree_sb_feed_namespaces) if queue_element is not None: mappings = [('LockDuration', 'lock_duration', None), ('MaxSizeInMegabytes', 'max_size_in_megabytes', int), ('RequiresDuplicateDetection', 'requires_duplicate_detection', _parse_bool), ('RequiresSession', 'requires_session', _parse_bool), ('DefaultMessageTimeToLive', 'default_message_time_to_live', None), ('DeadLetteringOnMessageExpiration', 'dead_lettering_on_message_expiration', _parse_bool), ('DuplicateDetectionHistoryTimeWindow', 'duplicate_detection_history_time_window', None), ('EnableBatchedOperations', 'enable_batched_operations', _parse_bool), ('MaxDeliveryCount', 'max_delivery_count', int), ('MessageCount', 'message_count', int), ('SizeInBytes', 'size_in_bytes', int)] for mapping in mappings: if _read_etree_element(queue_element, mapping[0], queue, mapping[1], mapping[2]): invalid_queue = False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mapping']] # depends on [control=['if'], data=['queue_element']] if invalid_queue: raise AzureServiceBusResourceNotFound(_ERROR_QUEUE_NOT_FOUND) # depends on [control=['if'], data=[]] # extract id, updated and name value from feed entry and set them of queue. 
for (name, value) in _ETreeXmlToObject.get_entry_properties_from_element(entry_element, True).items(): setattr(queue, name, value) # depends on [control=['for'], data=[]] return queue
def countdown(template, duration=datetime.timedelta(seconds=5)): """ Do a countdown for duration, printing the template (which may accept one positional argument). Template should be something like ``countdown complete in {} seconds.`` """ now = datetime.datetime.now() deadline = now + duration remaining = deadline - datetime.datetime.now() while remaining: remaining = deadline - datetime.datetime.now() remaining = max(datetime.timedelta(), remaining) msg = template.format(remaining.total_seconds()) print(msg, end=' ' * 10) sys.stdout.flush() time.sleep(.1) print('\b' * 80, end='') sys.stdout.flush() print()
def function[countdown, parameter[template, duration]]: constant[ Do a countdown for duration, printing the template (which may accept one positional argument). Template should be something like ``countdown complete in {} seconds.`` ] variable[now] assign[=] call[name[datetime].datetime.now, parameter[]] variable[deadline] assign[=] binary_operation[name[now] + name[duration]] variable[remaining] assign[=] binary_operation[name[deadline] - call[name[datetime].datetime.now, parameter[]]] while name[remaining] begin[:] variable[remaining] assign[=] binary_operation[name[deadline] - call[name[datetime].datetime.now, parameter[]]] variable[remaining] assign[=] call[name[max], parameter[call[name[datetime].timedelta, parameter[]], name[remaining]]] variable[msg] assign[=] call[name[template].format, parameter[call[name[remaining].total_seconds, parameter[]]]] call[name[print], parameter[name[msg]]] call[name[sys].stdout.flush, parameter[]] call[name[time].sleep, parameter[constant[0.1]]] call[name[print], parameter[binary_operation[constant[] * constant[80]]]] call[name[sys].stdout.flush, parameter[]] call[name[print], parameter[]]
keyword[def] identifier[countdown] ( identifier[template] , identifier[duration] = identifier[datetime] . identifier[timedelta] ( identifier[seconds] = literal[int] )): literal[string] identifier[now] = identifier[datetime] . identifier[datetime] . identifier[now] () identifier[deadline] = identifier[now] + identifier[duration] identifier[remaining] = identifier[deadline] - identifier[datetime] . identifier[datetime] . identifier[now] () keyword[while] identifier[remaining] : identifier[remaining] = identifier[deadline] - identifier[datetime] . identifier[datetime] . identifier[now] () identifier[remaining] = identifier[max] ( identifier[datetime] . identifier[timedelta] (), identifier[remaining] ) identifier[msg] = identifier[template] . identifier[format] ( identifier[remaining] . identifier[total_seconds] ()) identifier[print] ( identifier[msg] , identifier[end] = literal[string] * literal[int] ) identifier[sys] . identifier[stdout] . identifier[flush] () identifier[time] . identifier[sleep] ( literal[int] ) identifier[print] ( literal[string] * literal[int] , identifier[end] = literal[string] ) identifier[sys] . identifier[stdout] . identifier[flush] () identifier[print] ()
def countdown(template, duration=datetime.timedelta(seconds=5)): """ Do a countdown for duration, printing the template (which may accept one positional argument). Template should be something like ``countdown complete in {} seconds.`` """ now = datetime.datetime.now() deadline = now + duration remaining = deadline - datetime.datetime.now() while remaining: remaining = deadline - datetime.datetime.now() remaining = max(datetime.timedelta(), remaining) msg = template.format(remaining.total_seconds()) print(msg, end=' ' * 10) sys.stdout.flush() time.sleep(0.1) print('\x08' * 80, end='') sys.stdout.flush() # depends on [control=['while'], data=[]] print()
def encode_request(self, fields, files): """ Encode fields and files for posting to an HTTP server. :param fields: The fields to send as a list of (fieldname, value) tuples. :param files: The files to send as a list of (fieldname, filename, file_bytes) tuple. """ # Adapted from packaging, which in turn was adapted from # http://code.activestate.com/recipes/146306 parts = [] boundary = self.boundary for k, values in fields: if not isinstance(values, (list, tuple)): values = [values] for v in values: parts.extend(( b'--' + boundary, ('Content-Disposition: form-data; name="%s"' % k).encode('utf-8'), b'', v.encode('utf-8'))) for key, filename, value in files: parts.extend(( b'--' + boundary, ('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename)).encode('utf-8'), b'', value)) parts.extend((b'--' + boundary + b'--', b'')) body = b'\r\n'.join(parts) ct = b'multipart/form-data; boundary=' + boundary headers = { 'Content-type': ct, 'Content-length': str(len(body)) } return Request(self.url, body, headers)
def function[encode_request, parameter[self, fields, files]]: constant[ Encode fields and files for posting to an HTTP server. :param fields: The fields to send as a list of (fieldname, value) tuples. :param files: The files to send as a list of (fieldname, filename, file_bytes) tuple. ] variable[parts] assign[=] list[[]] variable[boundary] assign[=] name[self].boundary for taget[tuple[[<ast.Name object at 0x7da20e749720>, <ast.Name object at 0x7da20e74b2b0>]]] in starred[name[fields]] begin[:] if <ast.UnaryOp object at 0x7da20e749b40> begin[:] variable[values] assign[=] list[[<ast.Name object at 0x7da20e74b850>]] for taget[name[v]] in starred[name[values]] begin[:] call[name[parts].extend, parameter[tuple[[<ast.BinOp object at 0x7da20c6c4550>, <ast.Call object at 0x7da20c6c6080>, <ast.Constant object at 0x7da20c6c5540>, <ast.Call object at 0x7da20c6c5750>]]]] for taget[tuple[[<ast.Name object at 0x7da20c6c73a0>, <ast.Name object at 0x7da20c6c4220>, <ast.Name object at 0x7da20c6c7460>]]] in starred[name[files]] begin[:] call[name[parts].extend, parameter[tuple[[<ast.BinOp object at 0x7da20c6c77f0>, <ast.Call object at 0x7da20c6c6f20>, <ast.Constant object at 0x7da1b1de2620>, <ast.Name object at 0x7da20c6c71c0>]]]] call[name[parts].extend, parameter[tuple[[<ast.BinOp object at 0x7da1b1f82980>, <ast.Constant object at 0x7da1b1f821d0>]]]] variable[body] assign[=] call[constant[b'\r\n'].join, parameter[name[parts]]] variable[ct] assign[=] binary_operation[constant[b'multipart/form-data; boundary='] + name[boundary]] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b1de3eb0>, <ast.Constant object at 0x7da1b1de1a80>], [<ast.Name object at 0x7da1b1de1c00>, <ast.Call object at 0x7da1b1de1e10>]] return[call[name[Request], parameter[name[self].url, name[body], name[headers]]]]
keyword[def] identifier[encode_request] ( identifier[self] , identifier[fields] , identifier[files] ): literal[string] identifier[parts] =[] identifier[boundary] = identifier[self] . identifier[boundary] keyword[for] identifier[k] , identifier[values] keyword[in] identifier[fields] : keyword[if] keyword[not] identifier[isinstance] ( identifier[values] ,( identifier[list] , identifier[tuple] )): identifier[values] =[ identifier[values] ] keyword[for] identifier[v] keyword[in] identifier[values] : identifier[parts] . identifier[extend] (( literal[string] + identifier[boundary] , ( literal[string] % identifier[k] ). identifier[encode] ( literal[string] ), literal[string] , identifier[v] . identifier[encode] ( literal[string] ))) keyword[for] identifier[key] , identifier[filename] , identifier[value] keyword[in] identifier[files] : identifier[parts] . identifier[extend] (( literal[string] + identifier[boundary] , ( literal[string] % ( identifier[key] , identifier[filename] )). identifier[encode] ( literal[string] ), literal[string] , identifier[value] )) identifier[parts] . identifier[extend] (( literal[string] + identifier[boundary] + literal[string] , literal[string] )) identifier[body] = literal[string] . identifier[join] ( identifier[parts] ) identifier[ct] = literal[string] + identifier[boundary] identifier[headers] ={ literal[string] : identifier[ct] , literal[string] : identifier[str] ( identifier[len] ( identifier[body] )) } keyword[return] identifier[Request] ( identifier[self] . identifier[url] , identifier[body] , identifier[headers] )
def encode_request(self, fields, files): """ Encode fields and files for posting to an HTTP server. :param fields: The fields to send as a list of (fieldname, value) tuples. :param files: The files to send as a list of (fieldname, filename, file_bytes) tuple. """ # Adapted from packaging, which in turn was adapted from # http://code.activestate.com/recipes/146306 parts = [] boundary = self.boundary for (k, values) in fields: if not isinstance(values, (list, tuple)): values = [values] # depends on [control=['if'], data=[]] for v in values: parts.extend((b'--' + boundary, ('Content-Disposition: form-data; name="%s"' % k).encode('utf-8'), b'', v.encode('utf-8'))) # depends on [control=['for'], data=['v']] # depends on [control=['for'], data=[]] for (key, filename, value) in files: parts.extend((b'--' + boundary, ('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename)).encode('utf-8'), b'', value)) # depends on [control=['for'], data=[]] parts.extend((b'--' + boundary + b'--', b'')) body = b'\r\n'.join(parts) ct = b'multipart/form-data; boundary=' + boundary headers = {'Content-type': ct, 'Content-length': str(len(body))} return Request(self.url, body, headers)
def sanity_check(self): """ Check block and throw PyrtlError or PyrtlInternalError if there is an issue. Should not modify anything, only check data structures to make sure they have been built according to the assumptions stated in the Block comments.""" # TODO: check that the wirevector_by_name is sane from .wire import Input, Const, Output from .helperfuncs import get_stack, get_stacks # check for valid LogicNets (and wires) for net in self.logic: self.sanity_check_net(net) for w in self.wirevector_subset(): if w.bitwidth is None: raise PyrtlError( 'error, missing bitwidth for WireVector "%s" \n\n %s' % (w.name, get_stack(w))) # check for unique names wirevector_names_set = set(x.name for x in self.wirevector_set) if len(self.wirevector_set) != len(wirevector_names_set): wirevector_names_list = [x.name for x in self.wirevector_set] for w in wirevector_names_set: wirevector_names_list.remove(w) raise PyrtlError('Duplicate wire names found for the following ' 'different signals: %s' % repr(wirevector_names_list)) # check for dead input wires (not connected to anything) all_input_and_consts = self.wirevector_subset((Input, Const)) # The following line also checks for duplicate wire drivers wire_src_dict, wire_dst_dict = self.net_connections() dest_set = set(wire_src_dict.keys()) arg_set = set(wire_dst_dict.keys()) full_set = dest_set | arg_set connected_minus_allwires = full_set.difference(self.wirevector_set) if len(connected_minus_allwires) > 0: bad_wire_names = '\n '.join(str(x) for x in connected_minus_allwires) raise PyrtlError('Unknown wires found in net:\n %s \n\n %s' % (bad_wire_names, get_stacks(*connected_minus_allwires))) allwires_minus_connected = self.wirevector_set.difference(full_set) allwires_minus_connected = allwires_minus_connected.difference(all_input_and_consts) # ^ allow inputs and consts to be unconnected if len(allwires_minus_connected) > 0: bad_wire_names = '\n '.join(str(x) for x in allwires_minus_connected) raise PyrtlError('Wires 
declared but not connected:\n %s \n\n %s' % (bad_wire_names, get_stacks(*allwires_minus_connected))) # Check for wires that are inputs to a logicNet, but are not block inputs and are never # driven. ins = arg_set.difference(dest_set) undriven = ins.difference(all_input_and_consts) if len(undriven) > 0: raise PyrtlError('Wires used but never driven: %s \n\n %s' % ([w.name for w in undriven], get_stacks(*undriven))) # Check for async memories not specified as such self.sanity_check_memory_sync(wire_src_dict) if debug_mode: # Check for wires that are destinations of a logicNet, but are not outputs and are never # used as args. outs = dest_set.difference(arg_set) unused = outs.difference(self.wirevector_subset(Output)) if len(unused) > 0: names = [w.name for w in unused] print('Warning: Wires driven but never used { %s } ' % names) print(get_stacks(*unused))
def function[sanity_check, parameter[self]]: constant[ Check block and throw PyrtlError or PyrtlInternalError if there is an issue. Should not modify anything, only check data structures to make sure they have been built according to the assumptions stated in the Block comments.] from relative_module[wire] import module[Input], module[Const], module[Output] from relative_module[helperfuncs] import module[get_stack], module[get_stacks] for taget[name[net]] in starred[name[self].logic] begin[:] call[name[self].sanity_check_net, parameter[name[net]]] for taget[name[w]] in starred[call[name[self].wirevector_subset, parameter[]]] begin[:] if compare[name[w].bitwidth is constant[None]] begin[:] <ast.Raise object at 0x7da20c6aaf20> variable[wirevector_names_set] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da20c6abb80>]] if compare[call[name[len], parameter[name[self].wirevector_set]] not_equal[!=] call[name[len], parameter[name[wirevector_names_set]]]] begin[:] variable[wirevector_names_list] assign[=] <ast.ListComp object at 0x7da20c6a9c30> for taget[name[w]] in starred[name[wirevector_names_set]] begin[:] call[name[wirevector_names_list].remove, parameter[name[w]]] <ast.Raise object at 0x7da20c6a96c0> variable[all_input_and_consts] assign[=] call[name[self].wirevector_subset, parameter[tuple[[<ast.Name object at 0x7da20c6a8280>, <ast.Name object at 0x7da20c6aa650>]]]] <ast.Tuple object at 0x7da20c6abca0> assign[=] call[name[self].net_connections, parameter[]] variable[dest_set] assign[=] call[name[set], parameter[call[name[wire_src_dict].keys, parameter[]]]] variable[arg_set] assign[=] call[name[set], parameter[call[name[wire_dst_dict].keys, parameter[]]]] variable[full_set] assign[=] binary_operation[name[dest_set] <ast.BitOr object at 0x7da2590d6aa0> name[arg_set]] variable[connected_minus_allwires] assign[=] call[name[full_set].difference, parameter[name[self].wirevector_set]] if compare[call[name[len], 
parameter[name[connected_minus_allwires]]] greater[>] constant[0]] begin[:] variable[bad_wire_names] assign[=] call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da20c6a8730>]] <ast.Raise object at 0x7da20c6a9e70> variable[allwires_minus_connected] assign[=] call[name[self].wirevector_set.difference, parameter[name[full_set]]] variable[allwires_minus_connected] assign[=] call[name[allwires_minus_connected].difference, parameter[name[all_input_and_consts]]] if compare[call[name[len], parameter[name[allwires_minus_connected]]] greater[>] constant[0]] begin[:] variable[bad_wire_names] assign[=] call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da2041d9d20>]] <ast.Raise object at 0x7da2041daf20> variable[ins] assign[=] call[name[arg_set].difference, parameter[name[dest_set]]] variable[undriven] assign[=] call[name[ins].difference, parameter[name[all_input_and_consts]]] if compare[call[name[len], parameter[name[undriven]]] greater[>] constant[0]] begin[:] <ast.Raise object at 0x7da2041db370> call[name[self].sanity_check_memory_sync, parameter[name[wire_src_dict]]] if name[debug_mode] begin[:] variable[outs] assign[=] call[name[dest_set].difference, parameter[name[arg_set]]] variable[unused] assign[=] call[name[outs].difference, parameter[call[name[self].wirevector_subset, parameter[name[Output]]]]] if compare[call[name[len], parameter[name[unused]]] greater[>] constant[0]] begin[:] variable[names] assign[=] <ast.ListComp object at 0x7da2041d9bd0> call[name[print], parameter[binary_operation[constant[Warning: Wires driven but never used { %s } ] <ast.Mod object at 0x7da2590d6920> name[names]]]] call[name[print], parameter[call[name[get_stacks], parameter[<ast.Starred object at 0x7da2041d95d0>]]]]
keyword[def] identifier[sanity_check] ( identifier[self] ): literal[string] keyword[from] . identifier[wire] keyword[import] identifier[Input] , identifier[Const] , identifier[Output] keyword[from] . identifier[helperfuncs] keyword[import] identifier[get_stack] , identifier[get_stacks] keyword[for] identifier[net] keyword[in] identifier[self] . identifier[logic] : identifier[self] . identifier[sanity_check_net] ( identifier[net] ) keyword[for] identifier[w] keyword[in] identifier[self] . identifier[wirevector_subset] (): keyword[if] identifier[w] . identifier[bitwidth] keyword[is] keyword[None] : keyword[raise] identifier[PyrtlError] ( literal[string] %( identifier[w] . identifier[name] , identifier[get_stack] ( identifier[w] ))) identifier[wirevector_names_set] = identifier[set] ( identifier[x] . identifier[name] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[wirevector_set] ) keyword[if] identifier[len] ( identifier[self] . identifier[wirevector_set] )!= identifier[len] ( identifier[wirevector_names_set] ): identifier[wirevector_names_list] =[ identifier[x] . identifier[name] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[wirevector_set] ] keyword[for] identifier[w] keyword[in] identifier[wirevector_names_set] : identifier[wirevector_names_list] . identifier[remove] ( identifier[w] ) keyword[raise] identifier[PyrtlError] ( literal[string] literal[string] % identifier[repr] ( identifier[wirevector_names_list] )) identifier[all_input_and_consts] = identifier[self] . identifier[wirevector_subset] (( identifier[Input] , identifier[Const] )) identifier[wire_src_dict] , identifier[wire_dst_dict] = identifier[self] . identifier[net_connections] () identifier[dest_set] = identifier[set] ( identifier[wire_src_dict] . identifier[keys] ()) identifier[arg_set] = identifier[set] ( identifier[wire_dst_dict] . 
identifier[keys] ()) identifier[full_set] = identifier[dest_set] | identifier[arg_set] identifier[connected_minus_allwires] = identifier[full_set] . identifier[difference] ( identifier[self] . identifier[wirevector_set] ) keyword[if] identifier[len] ( identifier[connected_minus_allwires] )> literal[int] : identifier[bad_wire_names] = literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[connected_minus_allwires] ) keyword[raise] identifier[PyrtlError] ( literal[string] %( identifier[bad_wire_names] , identifier[get_stacks] (* identifier[connected_minus_allwires] ))) identifier[allwires_minus_connected] = identifier[self] . identifier[wirevector_set] . identifier[difference] ( identifier[full_set] ) identifier[allwires_minus_connected] = identifier[allwires_minus_connected] . identifier[difference] ( identifier[all_input_and_consts] ) keyword[if] identifier[len] ( identifier[allwires_minus_connected] )> literal[int] : identifier[bad_wire_names] = literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[allwires_minus_connected] ) keyword[raise] identifier[PyrtlError] ( literal[string] %( identifier[bad_wire_names] , identifier[get_stacks] (* identifier[allwires_minus_connected] ))) identifier[ins] = identifier[arg_set] . identifier[difference] ( identifier[dest_set] ) identifier[undriven] = identifier[ins] . identifier[difference] ( identifier[all_input_and_consts] ) keyword[if] identifier[len] ( identifier[undriven] )> literal[int] : keyword[raise] identifier[PyrtlError] ( literal[string] % ([ identifier[w] . identifier[name] keyword[for] identifier[w] keyword[in] identifier[undriven] ], identifier[get_stacks] (* identifier[undriven] ))) identifier[self] . identifier[sanity_check_memory_sync] ( identifier[wire_src_dict] ) keyword[if] identifier[debug_mode] : identifier[outs] = identifier[dest_set] . 
identifier[difference] ( identifier[arg_set] ) identifier[unused] = identifier[outs] . identifier[difference] ( identifier[self] . identifier[wirevector_subset] ( identifier[Output] )) keyword[if] identifier[len] ( identifier[unused] )> literal[int] : identifier[names] =[ identifier[w] . identifier[name] keyword[for] identifier[w] keyword[in] identifier[unused] ] identifier[print] ( literal[string] % identifier[names] ) identifier[print] ( identifier[get_stacks] (* identifier[unused] ))
def sanity_check(self): """ Check block and throw PyrtlError or PyrtlInternalError if there is an issue. Should not modify anything, only check data structures to make sure they have been built according to the assumptions stated in the Block comments.""" # TODO: check that the wirevector_by_name is sane from .wire import Input, Const, Output from .helperfuncs import get_stack, get_stacks # check for valid LogicNets (and wires) for net in self.logic: self.sanity_check_net(net) # depends on [control=['for'], data=['net']] for w in self.wirevector_subset(): if w.bitwidth is None: raise PyrtlError('error, missing bitwidth for WireVector "%s" \n\n %s' % (w.name, get_stack(w))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['w']] # check for unique names wirevector_names_set = set((x.name for x in self.wirevector_set)) if len(self.wirevector_set) != len(wirevector_names_set): wirevector_names_list = [x.name for x in self.wirevector_set] for w in wirevector_names_set: wirevector_names_list.remove(w) # depends on [control=['for'], data=['w']] raise PyrtlError('Duplicate wire names found for the following different signals: %s' % repr(wirevector_names_list)) # depends on [control=['if'], data=[]] # check for dead input wires (not connected to anything) all_input_and_consts = self.wirevector_subset((Input, Const)) # The following line also checks for duplicate wire drivers (wire_src_dict, wire_dst_dict) = self.net_connections() dest_set = set(wire_src_dict.keys()) arg_set = set(wire_dst_dict.keys()) full_set = dest_set | arg_set connected_minus_allwires = full_set.difference(self.wirevector_set) if len(connected_minus_allwires) > 0: bad_wire_names = '\n '.join((str(x) for x in connected_minus_allwires)) raise PyrtlError('Unknown wires found in net:\n %s \n\n %s' % (bad_wire_names, get_stacks(*connected_minus_allwires))) # depends on [control=['if'], data=[]] allwires_minus_connected = self.wirevector_set.difference(full_set) 
allwires_minus_connected = allwires_minus_connected.difference(all_input_and_consts) # ^ allow inputs and consts to be unconnected if len(allwires_minus_connected) > 0: bad_wire_names = '\n '.join((str(x) for x in allwires_minus_connected)) raise PyrtlError('Wires declared but not connected:\n %s \n\n %s' % (bad_wire_names, get_stacks(*allwires_minus_connected))) # depends on [control=['if'], data=[]] # Check for wires that are inputs to a logicNet, but are not block inputs and are never # driven. ins = arg_set.difference(dest_set) undriven = ins.difference(all_input_and_consts) if len(undriven) > 0: raise PyrtlError('Wires used but never driven: %s \n\n %s' % ([w.name for w in undriven], get_stacks(*undriven))) # depends on [control=['if'], data=[]] # Check for async memories not specified as such self.sanity_check_memory_sync(wire_src_dict) if debug_mode: # Check for wires that are destinations of a logicNet, but are not outputs and are never # used as args. outs = dest_set.difference(arg_set) unused = outs.difference(self.wirevector_subset(Output)) if len(unused) > 0: names = [w.name for w in unused] print('Warning: Wires driven but never used { %s } ' % names) print(get_stacks(*unused)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def create_drop_query(self, tokens): """ Parse tokens of drop query :param tokens: A list of InfluxDB query tokens """ if not tokens[Keyword.SERIES]: return None return DropQuery(self.parse_keyword(Keyword.SERIES, tokens))
def function[create_drop_query, parameter[self, tokens]]: constant[ Parse tokens of drop query :param tokens: A list of InfluxDB query tokens ] if <ast.UnaryOp object at 0x7da1affe5a20> begin[:] return[constant[None]] return[call[name[DropQuery], parameter[call[name[self].parse_keyword, parameter[name[Keyword].SERIES, name[tokens]]]]]]
keyword[def] identifier[create_drop_query] ( identifier[self] , identifier[tokens] ): literal[string] keyword[if] keyword[not] identifier[tokens] [ identifier[Keyword] . identifier[SERIES] ]: keyword[return] keyword[None] keyword[return] identifier[DropQuery] ( identifier[self] . identifier[parse_keyword] ( identifier[Keyword] . identifier[SERIES] , identifier[tokens] ))
def create_drop_query(self, tokens): """ Parse tokens of drop query :param tokens: A list of InfluxDB query tokens """ if not tokens[Keyword.SERIES]: return None # depends on [control=['if'], data=[]] return DropQuery(self.parse_keyword(Keyword.SERIES, tokens))
def assign_bus_results(grid, bus_data):
    """
    Write results obtained from PF to graph

    Parameters
    ----------
    grid: ding0.network
    bus_data: pandas.DataFrame
        DataFrame containing voltage levels obtained from PF analysis
        (indexed by the node's ``pypsa_id``, column ``'v_mag_pu'``)
    """
    # iterate over nodes and assign voltage obtained from power flow analysis
    for node in grid._graph.nodes():
        # only consider nodes connected to the graph; LV load area centres
        # are excluded from result assignment here
        if (node not in grid.graph_isolated_nodes()
                and not isinstance(node, LVLoadAreaCentreDing0)):
            if isinstance(node, LVStationDing0):
                node.voltage_res = bus_data.loc[node.pypsa_id, 'v_mag_pu']
            # FIXME(review): this branch is unreachable — LVStationDing0 is
            # caught by the branch above and LVLoadAreaCentreDing0 is excluded
            # by the outer condition. The first isinstance() was presumably
            # meant to test a different station type (e.g. the MV station);
            # confirm against the original intent before removing.
            elif isinstance(node, (LVStationDing0, LVLoadAreaCentreDing0)):
                if node.lv_load_area.is_aggregated:
                    node.voltage_res = bus_data.loc[node.pypsa_id, 'v_mag_pu']
            elif not isinstance(node, CircuitBreakerDing0):
                node.voltage_res = bus_data.loc[node.pypsa_id, 'v_mag_pu']
            else:
                # bug fix: the '{}' placeholder was never filled, so the log
                # message did not identify the skipped object
                logger.warning("Object {} has been skipped while importing "
                               "results!".format(node))
def function[assign_bus_results, parameter[grid, bus_data]]: constant[ Write results obtained from PF to graph Parameters ---------- grid: ding0.network bus_data: pandas.DataFrame DataFrame containing voltage levels obtained from PF analysis ] for taget[name[node]] in starred[call[name[grid]._graph.nodes, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da18dc98be0> begin[:] if call[name[isinstance], parameter[name[node], name[LVStationDing0]]] begin[:] name[node].voltage_res assign[=] call[name[bus_data].loc][tuple[[<ast.Attribute object at 0x7da18dc9bfd0>, <ast.Constant object at 0x7da18dc9a950>]]]
keyword[def] identifier[assign_bus_results] ( identifier[grid] , identifier[bus_data] ): literal[string] keyword[for] identifier[node] keyword[in] identifier[grid] . identifier[_graph] . identifier[nodes] (): keyword[if] ( identifier[node] keyword[not] keyword[in] identifier[grid] . identifier[graph_isolated_nodes] () keyword[and] keyword[not] identifier[isinstance] ( identifier[node] , identifier[LVLoadAreaCentreDing0] )): keyword[if] identifier[isinstance] ( identifier[node] , identifier[LVStationDing0] ): identifier[node] . identifier[voltage_res] = identifier[bus_data] . identifier[loc] [ identifier[node] . identifier[pypsa_id] , literal[string] ] keyword[elif] identifier[isinstance] ( identifier[node] ,( identifier[LVStationDing0] , identifier[LVLoadAreaCentreDing0] )): keyword[if] identifier[node] . identifier[lv_load_area] . identifier[is_aggregated] : identifier[node] . identifier[voltage_res] = identifier[bus_data] . identifier[loc] [ identifier[node] . identifier[pypsa_id] , literal[string] ] keyword[elif] keyword[not] identifier[isinstance] ( identifier[node] , identifier[CircuitBreakerDing0] ): identifier[node] . identifier[voltage_res] = identifier[bus_data] . identifier[loc] [ identifier[node] . identifier[pypsa_id] , literal[string] ] keyword[else] : identifier[logger] . identifier[warning] ( literal[string] literal[string] )
def assign_bus_results(grid, bus_data): """ Write results obtained from PF to graph Parameters ---------- grid: ding0.network bus_data: pandas.DataFrame DataFrame containing voltage levels obtained from PF analysis """ # iterate of nodes and assign voltage obtained from power flow analysis for node in grid._graph.nodes(): # check if node is connected to graph if node not in grid.graph_isolated_nodes() and (not isinstance(node, LVLoadAreaCentreDing0)): if isinstance(node, LVStationDing0): node.voltage_res = bus_data.loc[node.pypsa_id, 'v_mag_pu'] # depends on [control=['if'], data=[]] elif isinstance(node, (LVStationDing0, LVLoadAreaCentreDing0)): if node.lv_load_area.is_aggregated: node.voltage_res = bus_data.loc[node.pypsa_id, 'v_mag_pu'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif not isinstance(node, CircuitBreakerDing0): node.voltage_res = bus_data.loc[node.pypsa_id, 'v_mag_pu'] # depends on [control=['if'], data=[]] else: logger.warning('Object {} has been skipped while importing results!') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']]
def _filter_version_specific_options(self, tmos_ver, **kwargs): '''Filter version-specific optional parameters Some optional parameters only exist in v12.1.0 and greater, filter these out for earlier versions to allow backward comatibility. ''' if LooseVersion(tmos_ver) < LooseVersion('12.1.0'): for k, parms in self._meta_data['optional_parameters'].items(): for r in kwargs.get(k, []): for parm in parms: value = r.pop(parm, None) if value is not None: logger.info( "Policy parameter %s:%s is invalid for v%s", k, parm, tmos_ver)
def function[_filter_version_specific_options, parameter[self, tmos_ver]]: constant[Filter version-specific optional parameters Some optional parameters only exist in v12.1.0 and greater, filter these out for earlier versions to allow backward comatibility. ] if compare[call[name[LooseVersion], parameter[name[tmos_ver]]] less[<] call[name[LooseVersion], parameter[constant[12.1.0]]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da2043456c0>, <ast.Name object at 0x7da204346b90>]]] in starred[call[call[name[self]._meta_data][constant[optional_parameters]].items, parameter[]]] begin[:] for taget[name[r]] in starred[call[name[kwargs].get, parameter[name[k], list[[]]]]] begin[:] for taget[name[parm]] in starred[name[parms]] begin[:] variable[value] assign[=] call[name[r].pop, parameter[name[parm], constant[None]]] if compare[name[value] is_not constant[None]] begin[:] call[name[logger].info, parameter[constant[Policy parameter %s:%s is invalid for v%s], name[k], name[parm], name[tmos_ver]]]
keyword[def] identifier[_filter_version_specific_options] ( identifier[self] , identifier[tmos_ver] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[LooseVersion] ( identifier[tmos_ver] )< identifier[LooseVersion] ( literal[string] ): keyword[for] identifier[k] , identifier[parms] keyword[in] identifier[self] . identifier[_meta_data] [ literal[string] ]. identifier[items] (): keyword[for] identifier[r] keyword[in] identifier[kwargs] . identifier[get] ( identifier[k] ,[]): keyword[for] identifier[parm] keyword[in] identifier[parms] : identifier[value] = identifier[r] . identifier[pop] ( identifier[parm] , keyword[None] ) keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : identifier[logger] . identifier[info] ( literal[string] , identifier[k] , identifier[parm] , identifier[tmos_ver] )
def _filter_version_specific_options(self, tmos_ver, **kwargs): """Filter version-specific optional parameters Some optional parameters only exist in v12.1.0 and greater, filter these out for earlier versions to allow backward comatibility. """ if LooseVersion(tmos_ver) < LooseVersion('12.1.0'): for (k, parms) in self._meta_data['optional_parameters'].items(): for r in kwargs.get(k, []): for parm in parms: value = r.pop(parm, None) if value is not None: logger.info('Policy parameter %s:%s is invalid for v%s', k, parm, tmos_ver) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['parm']] # depends on [control=['for'], data=['r']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
def axisar(axis, angle):
    """
    Construct a rotation matrix that rotates vectors by a specified
    angle about a specified axis.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/axisar_c.html

    :param axis: Rotation axis.
    :type axis: 3 Element vector (list, tuple, numpy array)
    :param angle: Rotation angle, in radians.
    :type angle: float
    :return: Rotation matrix corresponding to axis and angle.
    :rtype: numpy array ((3, 3))
    """
    # allocate the 3x3 output buffer the C routine will fill
    out_matrix = stypes.emptyDoubleMatrix()
    # marshal the Python inputs into ctypes-compatible values
    c_axis = stypes.toDoubleVector(axis)
    c_angle = ctypes.c_double(angle)
    libspice.axisar_c(c_axis, c_angle, out_matrix)
    return stypes.cMatrixToNumpy(out_matrix)
def function[axisar, parameter[axis, angle]]: constant[ Construct a rotation matrix that rotates vectors by a specified angle about a specified axis. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/axisar_c.html :param axis: Rotation axis. :type axis: 3 Element vector (list, tuple, numpy array) :param angle: Rotation angle, in radians. :type angle: float :return: Rotation matrix corresponding to axis and angle. :rtype: numpy array ((3, 3)) ] variable[axis] assign[=] call[name[stypes].toDoubleVector, parameter[name[axis]]] variable[angle] assign[=] call[name[ctypes].c_double, parameter[name[angle]]] variable[r] assign[=] call[name[stypes].emptyDoubleMatrix, parameter[]] call[name[libspice].axisar_c, parameter[name[axis], name[angle], name[r]]] return[call[name[stypes].cMatrixToNumpy, parameter[name[r]]]]
keyword[def] identifier[axisar] ( identifier[axis] , identifier[angle] ): literal[string] identifier[axis] = identifier[stypes] . identifier[toDoubleVector] ( identifier[axis] ) identifier[angle] = identifier[ctypes] . identifier[c_double] ( identifier[angle] ) identifier[r] = identifier[stypes] . identifier[emptyDoubleMatrix] () identifier[libspice] . identifier[axisar_c] ( identifier[axis] , identifier[angle] , identifier[r] ) keyword[return] identifier[stypes] . identifier[cMatrixToNumpy] ( identifier[r] )
def axisar(axis, angle): """ Construct a rotation matrix that rotates vectors by a specified angle about a specified axis. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/axisar_c.html :param axis: Rotation axis. :type axis: 3 Element vector (list, tuple, numpy array) :param angle: Rotation angle, in radians. :type angle: float :return: Rotation matrix corresponding to axis and angle. :rtype: numpy array ((3, 3)) """ axis = stypes.toDoubleVector(axis) angle = ctypes.c_double(angle) r = stypes.emptyDoubleMatrix() libspice.axisar_c(axis, angle, r) return stypes.cMatrixToNumpy(r)
def create_teams(obj, user, access):
    """
    Will create new teams associated with the referenced obj and set the
    resulting relation to the correct attribute. The naming convention for
    team foreign keys is pluralname_team (for example, instructors_team).

    This function will take the access dictionary and apply the specified
    access types as follows:

    access = {
        'trainees_team': ('open', 'add someone'),
    }

    Where the key name is the team name and the tuple contains the access
    types for member access and manager access respectively.

    If the foreign key already has a value associated with it, this
    function will NOT create a new team to replace it.
    """
    for field_name, access_types in access.items():
        fk_id_attr = "{}_id".format(field_name)
        # Skip fields that are not FK-backed on this object, and fields
        # whose team relation is already populated.
        if not hasattr(obj, fk_id_attr) or getattr(obj, fk_id_attr) is not None:
            continue
        # TODO - the team name needs to be able to create a unique
        # slug that's < 50 characters long.
        # TODO - this is just a workaround: guess the next pk from the
        # current highest pk of this model.
        pks_desc = (instance.pk for instance in obj.__class__.objects.order_by("-pk"))
        next_pk = next(pks_desc, 0) + 1
        team = Team(
            name=u"{} for {} {}".format(field_name, obj._meta.model_name, next_pk),
            member_access=access_types[0],
            manager_access=access_types[1],
            creator=user)
        team.save()
        setattr(obj, field_name, team)
    return obj
def function[create_teams, parameter[obj, user, access]]: constant[ Will create new teams associated with the referenced obj and set the resulting relation to the correct attribute. The naming convention for team foreign keys is pluralname_team (for example, instructors_team). This function will take the access dictionary and apply the specified access types as follows: access = { 'trainees_team': ('open', 'add someone'), } Where the key name is the team name and the tuple contains the access types for member access and manager access respectively. If the foreign key already has a value associated with it, this function will NOT create a new team to replace it. ] for taget[tuple[[<ast.Name object at 0x7da1b192eb60>, <ast.Name object at 0x7da1b192eb30>]]] in starred[call[name[access].items, parameter[]]] begin[:] variable[id_field] assign[=] call[constant[{}_id].format, parameter[name[field_name]]] if <ast.BoolOp object at 0x7da1b192f4f0> begin[:] variable[next_pk] assign[=] binary_operation[call[name[next], parameter[call[name[iter], parameter[<ast.GeneratorExp object at 0x7da1b192ff70>]], constant[0]]] + constant[1]] variable[team_name] assign[=] call[constant[{} for {} {}].format, parameter[name[field_name], name[obj]._meta.model_name, name[next_pk]]] variable[new_team] assign[=] call[name[Team], parameter[]] call[name[new_team].save, parameter[]] call[name[setattr], parameter[name[obj], name[field_name], name[new_team]]] return[name[obj]]
keyword[def] identifier[create_teams] ( identifier[obj] , identifier[user] , identifier[access] ): literal[string] keyword[for] identifier[field_name] , identifier[access_types] keyword[in] identifier[access] . identifier[items] (): identifier[id_field] = literal[string] . identifier[format] ( identifier[field_name] ) keyword[if] identifier[hasattr] ( identifier[obj] , identifier[id_field] ) keyword[and] identifier[getattr] ( identifier[obj] , identifier[id_field] ) keyword[is] keyword[None] : identifier[next_pk] = identifier[next] ( identifier[iter] ( identifier[instance] . identifier[pk] keyword[for] identifier[instance] keyword[in] identifier[obj] . identifier[__class__] . identifier[objects] . identifier[order_by] ( literal[string] )), literal[int] )+ literal[int] identifier[team_name] = literal[string] . identifier[format] ( identifier[field_name] , identifier[obj] . identifier[_meta] . identifier[model_name] , identifier[next_pk] ) identifier[new_team] = identifier[Team] ( identifier[name] = identifier[team_name] , identifier[member_access] = identifier[access_types] [ literal[int] ], identifier[manager_access] = identifier[access_types] [ literal[int] ], identifier[creator] = identifier[user] ) identifier[new_team] . identifier[save] () identifier[setattr] ( identifier[obj] , identifier[field_name] , identifier[new_team] ) keyword[return] identifier[obj]
def create_teams(obj, user, access): """ Will create new teams associated with the referenced obj and set the resulting relation to the correct attribute. The naming convention for team foreign keys is pluralname_team (for example, instructors_team). This function will take the access dictionary and apply the specified access types as follows: access = { 'trainees_team': ('open', 'add someone'), } Where the key name is the team name and the tuple contains the access types for member access and manager access respectively. If the foreign key already has a value associated with it, this function will NOT create a new team to replace it. """ for (field_name, access_types) in access.items(): id_field = '{}_id'.format(field_name) # Check that the team is associated with the object via a FK... if hasattr(obj, id_field) and getattr(obj, id_field) is None: # ...and there is no existing related team. # TODO - the team name needs to be able to create a unique # slug that's < 50 characters long. # TODO - this is just a workaround: next_pk = next(iter((instance.pk for instance in obj.__class__.objects.order_by('-pk'))), 0) + 1 # this is a thing a beauty. ;-) team_name = u'{} for {} {}'.format(field_name, obj._meta.model_name, next_pk) new_team = Team(name=team_name, member_access=access_types[0], manager_access=access_types[1], creator=user) new_team.save() setattr(obj, field_name, new_team) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return obj
def parse_zone_file(text, ignore_invalid=False):
    """
    Parse a zonefile into a dict
    """
    # run the raw text through each preprocessing pass, in order
    for transform in (remove_comments, flatten, remove_class, add_default_name):
        text = transform(text)
    return parse_lines(text, ignore_invalid=ignore_invalid)
def function[parse_zone_file, parameter[text, ignore_invalid]]: constant[ Parse a zonefile into a dict ] variable[text] assign[=] call[name[remove_comments], parameter[name[text]]] variable[text] assign[=] call[name[flatten], parameter[name[text]]] variable[text] assign[=] call[name[remove_class], parameter[name[text]]] variable[text] assign[=] call[name[add_default_name], parameter[name[text]]] variable[json_zone_file] assign[=] call[name[parse_lines], parameter[name[text]]] return[name[json_zone_file]]
keyword[def] identifier[parse_zone_file] ( identifier[text] , identifier[ignore_invalid] = keyword[False] ): literal[string] identifier[text] = identifier[remove_comments] ( identifier[text] ) identifier[text] = identifier[flatten] ( identifier[text] ) identifier[text] = identifier[remove_class] ( identifier[text] ) identifier[text] = identifier[add_default_name] ( identifier[text] ) identifier[json_zone_file] = identifier[parse_lines] ( identifier[text] , identifier[ignore_invalid] = identifier[ignore_invalid] ) keyword[return] identifier[json_zone_file]
def parse_zone_file(text, ignore_invalid=False): """ Parse a zonefile into a dict """ text = remove_comments(text) text = flatten(text) text = remove_class(text) text = add_default_name(text) json_zone_file = parse_lines(text, ignore_invalid=ignore_invalid) return json_zone_file
def get(self, segids, remove_duplicate_vertices=True, fuse=True, chunk_size=None):
    """
    Merge fragments derived from these segids into a single vertex and face list.

    Why merge multiple segids into one mesh? For example, if you have a set of
    segids that belong to the same neuron.

    segids: (iterable or int) segids to render into a single mesh

    Optional:
      remove_duplicate_vertices: bool, fuse exactly matching vertices
      fuse: bool, merge all downloaded meshes into a single mesh
      chunk_size: [chunk_x, chunk_y, chunk_z] if pass only merge at chunk boundaries

    Returns: {
      num_vertices: int,
      vertices: [ (x,y,z), ... ]  # floats
      faces: [ int, int, int, ... ]  # int = vertex_index, 3 to a face
    }
    """
    segids = toiter(segids)
    # Fail early with an explicit error if any segid lacks a mesh manifest.
    dne = self._check_missing_manifests(segids)

    if dne:
        missing = ', '.join([ str(segid) for segid in dne ])
        raise ValueError(red(
            'Segment ID(s) {} are missing corresponding mesh manifests.\nAborted.' \
            .format(missing)
        ))

    # Collect every fragment listed across all manifests into one flat list.
    fragments = self._get_manifests(segids)
    fragments = fragments.values()
    fragments = list(itertools.chain.from_iterable(fragments)) # flatten
    fragments = self._get_mesh_fragments(fragments)
    fragments = sorted(fragments, key=lambda frag: frag['filename']) # make decoding deterministic

    # decode all the fragments, grouping the resulting meshes by segid
    meshdata = defaultdict(list)
    for frag in tqdm(fragments, disable=(not self.vol.progress), desc="Decoding Mesh Buffer"):
        segid = filename_to_segid(frag['filename'])
        mesh = decode_mesh_buffer(frag['filename'], frag['content'])
        meshdata[segid].append(mesh)

    def produce_output(mdata):
        # Merge a list of decoded meshes into one vertex/face dict.
        # vertexct[i] is the total vertex count of meshes 0..i-1, used to
        # offset each mesh's face indices into the concatenated vertex array.
        vertexct = np.zeros(len(mdata) + 1, np.uint32)
        vertexct[1:] = np.cumsum([ x['num_vertices'] for x in mdata ])
        vertices = np.concatenate([ x['vertices'] for x in mdata ])
        faces = np.concatenate([ mesh['faces'] + vertexct[i] for i, mesh in enumerate(mdata) ])

        if remove_duplicate_vertices:
            if chunk_size:
                # only weld vertices that coincide across chunk boundaries
                vertices, faces = remove_duplicate_vertices_cross_chunks(vertices, faces, chunk_size)
            else:
                # weld all exactly matching vertices; np.unique's inverse
                # mapping becomes the new face index array
                vertices, faces = np.unique(vertices[faces], return_inverse=True, axis=0)
                # NOTE(review): the astype sits inside this else branch per the
                # source, so the chunked path returns whatever dtype the helper
                # produces — confirm that is intended.
                faces = faces.astype(np.uint32)

        return {
            'num_vertices': len(vertices),
            'vertices': vertices,
            'faces': faces,
        }

    if fuse:
        # Flatten per-segid mesh lists into one list, ordered by segid for
        # deterministic output, then merge into a single mesh.
        meshdata = [ (segid, mdata) for segid, mdata in six.iteritems(meshdata) ]
        meshdata = sorted(meshdata, key=lambda sm: sm[0])
        meshdata = [ mdata for segid, mdata in meshdata ]
        meshdata = list(itertools.chain.from_iterable(meshdata)) # flatten
        return produce_output(meshdata)
    else:
        # One merged mesh per segid.
        return { segid: produce_output(mdata) for segid, mdata in six.iteritems(meshdata) }
def function[get, parameter[self, segids, remove_duplicate_vertices, fuse, chunk_size]]: constant[ Merge fragments derived from these segids into a single vertex and face list. Why merge multiple segids into one mesh? For example, if you have a set of segids that belong to the same neuron. segids: (iterable or int) segids to render into a single mesh Optional: remove_duplicate_vertices: bool, fuse exactly matching vertices fuse: bool, merge all downloaded meshes into a single mesh chunk_size: [chunk_x, chunk_y, chunk_z] if pass only merge at chunk boundaries Returns: { num_vertices: int, vertices: [ (x,y,z), ... ] # floats faces: [ int, int, int, ... ] # int = vertex_index, 3 to a face } ] variable[segids] assign[=] call[name[toiter], parameter[name[segids]]] variable[dne] assign[=] call[name[self]._check_missing_manifests, parameter[name[segids]]] if name[dne] begin[:] variable[missing] assign[=] call[constant[, ].join, parameter[<ast.ListComp object at 0x7da20c794850>]] <ast.Raise object at 0x7da20c795c90> variable[fragments] assign[=] call[name[self]._get_manifests, parameter[name[segids]]] variable[fragments] assign[=] call[name[fragments].values, parameter[]] variable[fragments] assign[=] call[name[list], parameter[call[name[itertools].chain.from_iterable, parameter[name[fragments]]]]] variable[fragments] assign[=] call[name[self]._get_mesh_fragments, parameter[name[fragments]]] variable[fragments] assign[=] call[name[sorted], parameter[name[fragments]]] variable[meshdata] assign[=] call[name[defaultdict], parameter[name[list]]] for taget[name[frag]] in starred[call[name[tqdm], parameter[name[fragments]]]] begin[:] variable[segid] assign[=] call[name[filename_to_segid], parameter[call[name[frag]][constant[filename]]]] variable[mesh] assign[=] call[name[decode_mesh_buffer], parameter[call[name[frag]][constant[filename]], call[name[frag]][constant[content]]]] call[call[name[meshdata]][name[segid]].append, parameter[name[mesh]]] def function[produce_output, 
parameter[mdata]]: variable[vertexct] assign[=] call[name[np].zeros, parameter[binary_operation[call[name[len], parameter[name[mdata]]] + constant[1]], name[np].uint32]] call[name[vertexct]][<ast.Slice object at 0x7da2045646d0>] assign[=] call[name[np].cumsum, parameter[<ast.ListComp object at 0x7da204564eb0>]] variable[vertices] assign[=] call[name[np].concatenate, parameter[<ast.ListComp object at 0x7da204566e90>]] variable[faces] assign[=] call[name[np].concatenate, parameter[<ast.ListComp object at 0x7da204565180>]] if name[remove_duplicate_vertices] begin[:] if name[chunk_size] begin[:] <ast.Tuple object at 0x7da204564ee0> assign[=] call[name[remove_duplicate_vertices_cross_chunks], parameter[name[vertices], name[faces], name[chunk_size]]] return[dictionary[[<ast.Constant object at 0x7da2045660b0>, <ast.Constant object at 0x7da204566290>, <ast.Constant object at 0x7da204565780>], [<ast.Call object at 0x7da204566e00>, <ast.Name object at 0x7da204566260>, <ast.Name object at 0x7da204567b50>]]] if name[fuse] begin[:] variable[meshdata] assign[=] <ast.ListComp object at 0x7da2045671f0> variable[meshdata] assign[=] call[name[sorted], parameter[name[meshdata]]] variable[meshdata] assign[=] <ast.ListComp object at 0x7da18dc99150> variable[meshdata] assign[=] call[name[list], parameter[call[name[itertools].chain.from_iterable, parameter[name[meshdata]]]]] return[call[name[produce_output], parameter[name[meshdata]]]]
keyword[def] identifier[get] ( identifier[self] , identifier[segids] , identifier[remove_duplicate_vertices] = keyword[True] , identifier[fuse] = keyword[True] , identifier[chunk_size] = keyword[None] ): literal[string] identifier[segids] = identifier[toiter] ( identifier[segids] ) identifier[dne] = identifier[self] . identifier[_check_missing_manifests] ( identifier[segids] ) keyword[if] identifier[dne] : identifier[missing] = literal[string] . identifier[join] ([ identifier[str] ( identifier[segid] ) keyword[for] identifier[segid] keyword[in] identifier[dne] ]) keyword[raise] identifier[ValueError] ( identifier[red] ( literal[string] . identifier[format] ( identifier[missing] ) )) identifier[fragments] = identifier[self] . identifier[_get_manifests] ( identifier[segids] ) identifier[fragments] = identifier[fragments] . identifier[values] () identifier[fragments] = identifier[list] ( identifier[itertools] . identifier[chain] . identifier[from_iterable] ( identifier[fragments] )) identifier[fragments] = identifier[self] . identifier[_get_mesh_fragments] ( identifier[fragments] ) identifier[fragments] = identifier[sorted] ( identifier[fragments] , identifier[key] = keyword[lambda] identifier[frag] : identifier[frag] [ literal[string] ]) identifier[meshdata] = identifier[defaultdict] ( identifier[list] ) keyword[for] identifier[frag] keyword[in] identifier[tqdm] ( identifier[fragments] , identifier[disable] =( keyword[not] identifier[self] . identifier[vol] . identifier[progress] ), identifier[desc] = literal[string] ): identifier[segid] = identifier[filename_to_segid] ( identifier[frag] [ literal[string] ]) identifier[mesh] = identifier[decode_mesh_buffer] ( identifier[frag] [ literal[string] ], identifier[frag] [ literal[string] ]) identifier[meshdata] [ identifier[segid] ]. identifier[append] ( identifier[mesh] ) keyword[def] identifier[produce_output] ( identifier[mdata] ): identifier[vertexct] = identifier[np] . 
identifier[zeros] ( identifier[len] ( identifier[mdata] )+ literal[int] , identifier[np] . identifier[uint32] ) identifier[vertexct] [ literal[int] :]= identifier[np] . identifier[cumsum] ([ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[mdata] ]) identifier[vertices] = identifier[np] . identifier[concatenate] ([ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[mdata] ]) identifier[faces] = identifier[np] . identifier[concatenate] ([ identifier[mesh] [ literal[string] ]+ identifier[vertexct] [ identifier[i] ] keyword[for] identifier[i] , identifier[mesh] keyword[in] identifier[enumerate] ( identifier[mdata] ) ]) keyword[if] identifier[remove_duplicate_vertices] : keyword[if] identifier[chunk_size] : identifier[vertices] , identifier[faces] = identifier[remove_duplicate_vertices_cross_chunks] ( identifier[vertices] , identifier[faces] , identifier[chunk_size] ) keyword[else] : identifier[vertices] , identifier[faces] = identifier[np] . identifier[unique] ( identifier[vertices] [ identifier[faces] ], identifier[return_inverse] = keyword[True] , identifier[axis] = literal[int] ) identifier[faces] = identifier[faces] . identifier[astype] ( identifier[np] . identifier[uint32] ) keyword[return] { literal[string] : identifier[len] ( identifier[vertices] ), literal[string] : identifier[vertices] , literal[string] : identifier[faces] , } keyword[if] identifier[fuse] : identifier[meshdata] =[( identifier[segid] , identifier[mdata] ) keyword[for] identifier[segid] , identifier[mdata] keyword[in] identifier[six] . identifier[iteritems] ( identifier[meshdata] )] identifier[meshdata] = identifier[sorted] ( identifier[meshdata] , identifier[key] = keyword[lambda] identifier[sm] : identifier[sm] [ literal[int] ]) identifier[meshdata] =[ identifier[mdata] keyword[for] identifier[segid] , identifier[mdata] keyword[in] identifier[meshdata] ] identifier[meshdata] = identifier[list] ( identifier[itertools] . 
identifier[chain] . identifier[from_iterable] ( identifier[meshdata] )) keyword[return] identifier[produce_output] ( identifier[meshdata] ) keyword[else] : keyword[return] { identifier[segid] : identifier[produce_output] ( identifier[mdata] ) keyword[for] identifier[segid] , identifier[mdata] keyword[in] identifier[six] . identifier[iteritems] ( identifier[meshdata] )}
def get(self, segids, remove_duplicate_vertices=True, fuse=True, chunk_size=None): """ Merge fragments derived from these segids into a single vertex and face list. Why merge multiple segids into one mesh? For example, if you have a set of segids that belong to the same neuron. segids: (iterable or int) segids to render into a single mesh Optional: remove_duplicate_vertices: bool, fuse exactly matching vertices fuse: bool, merge all downloaded meshes into a single mesh chunk_size: [chunk_x, chunk_y, chunk_z] if pass only merge at chunk boundaries Returns: { num_vertices: int, vertices: [ (x,y,z), ... ] # floats faces: [ int, int, int, ... ] # int = vertex_index, 3 to a face } """ segids = toiter(segids) dne = self._check_missing_manifests(segids) if dne: missing = ', '.join([str(segid) for segid in dne]) raise ValueError(red('Segment ID(s) {} are missing corresponding mesh manifests.\nAborted.'.format(missing))) # depends on [control=['if'], data=[]] fragments = self._get_manifests(segids) fragments = fragments.values() fragments = list(itertools.chain.from_iterable(fragments)) # flatten fragments = self._get_mesh_fragments(fragments) fragments = sorted(fragments, key=lambda frag: frag['filename']) # make decoding deterministic # decode all the fragments meshdata = defaultdict(list) for frag in tqdm(fragments, disable=not self.vol.progress, desc='Decoding Mesh Buffer'): segid = filename_to_segid(frag['filename']) mesh = decode_mesh_buffer(frag['filename'], frag['content']) meshdata[segid].append(mesh) # depends on [control=['for'], data=['frag']] def produce_output(mdata): vertexct = np.zeros(len(mdata) + 1, np.uint32) vertexct[1:] = np.cumsum([x['num_vertices'] for x in mdata]) vertices = np.concatenate([x['vertices'] for x in mdata]) faces = np.concatenate([mesh['faces'] + vertexct[i] for (i, mesh) in enumerate(mdata)]) if remove_duplicate_vertices: if chunk_size: (vertices, faces) = remove_duplicate_vertices_cross_chunks(vertices, faces, chunk_size) # depends on 
[control=['if'], data=[]] else: (vertices, faces) = np.unique(vertices[faces], return_inverse=True, axis=0) faces = faces.astype(np.uint32) # depends on [control=['if'], data=[]] return {'num_vertices': len(vertices), 'vertices': vertices, 'faces': faces} if fuse: meshdata = [(segid, mdata) for (segid, mdata) in six.iteritems(meshdata)] meshdata = sorted(meshdata, key=lambda sm: sm[0]) meshdata = [mdata for (segid, mdata) in meshdata] meshdata = list(itertools.chain.from_iterable(meshdata)) # flatten return produce_output(meshdata) # depends on [control=['if'], data=[]] else: return {segid: produce_output(mdata) for (segid, mdata) in six.iteritems(meshdata)}
def _make_sj_out_dict(fns, jxns=None, define_sample_name=None): """Read multiple sj_outs, return dict with keys as sample names and values as sj_out dataframes. Parameters ---------- fns : list of strs of filenames or file handles List of filename of the SJ.out.tab files to read in jxns : set If provided, only keep junctions in this set. define_sample_name : function that takes string as input Function mapping filename to sample name. For instance, you may have the sample name in the path and use a regex to extract it. The sample names will be used as the column names. If this is not provided, the columns will be named as the input files. Returns ------- sj_outD : dict Dict whose keys are sample names and values are sj_out dataframes """ if define_sample_name == None: define_sample_name = lambda x: x else: assert len(set([define_sample_name(x) for x in fns])) == len(fns) sj_outD = dict() for fn in fns: sample = define_sample_name(fn) df = read_sj_out_tab(fn) # Remove any junctions that don't have any uniquely mapped junction # reads. Even if a junction passes the cutoff in other samples, we are # only concerned with unique counts. df = df[df.unique_junction_reads > 0] index = (df.chrom + ':' + df.start.astype(str) + '-' + df.end.astype(str)) assert len(index) == len(set(index)) df.index = index # If jxns is provided, only keep those. if jxns: df = df.ix[set(df.index) & jxns] sj_outD[sample] = df return sj_outD
def function[_make_sj_out_dict, parameter[fns, jxns, define_sample_name]]: constant[Read multiple sj_outs, return dict with keys as sample names and values as sj_out dataframes. Parameters ---------- fns : list of strs of filenames or file handles List of filename of the SJ.out.tab files to read in jxns : set If provided, only keep junctions in this set. define_sample_name : function that takes string as input Function mapping filename to sample name. For instance, you may have the sample name in the path and use a regex to extract it. The sample names will be used as the column names. If this is not provided, the columns will be named as the input files. Returns ------- sj_outD : dict Dict whose keys are sample names and values are sj_out dataframes ] if compare[name[define_sample_name] equal[==] constant[None]] begin[:] variable[define_sample_name] assign[=] <ast.Lambda object at 0x7da1b168ed70> variable[sj_outD] assign[=] call[name[dict], parameter[]] for taget[name[fn]] in starred[name[fns]] begin[:] variable[sample] assign[=] call[name[define_sample_name], parameter[name[fn]]] variable[df] assign[=] call[name[read_sj_out_tab], parameter[name[fn]]] variable[df] assign[=] call[name[df]][compare[name[df].unique_junction_reads greater[>] constant[0]]] variable[index] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[df].chrom + constant[:]] + call[name[df].start.astype, parameter[name[str]]]] + constant[-]] + call[name[df].end.astype, parameter[name[str]]]] assert[compare[call[name[len], parameter[name[index]]] equal[==] call[name[len], parameter[call[name[set], parameter[name[index]]]]]]] name[df].index assign[=] name[index] if name[jxns] begin[:] variable[df] assign[=] call[name[df].ix][binary_operation[call[name[set], parameter[name[df].index]] <ast.BitAnd object at 0x7da2590d6b60> name[jxns]]] call[name[sj_outD]][name[sample]] assign[=] name[df] return[name[sj_outD]]
keyword[def] identifier[_make_sj_out_dict] ( identifier[fns] , identifier[jxns] = keyword[None] , identifier[define_sample_name] = keyword[None] ): literal[string] keyword[if] identifier[define_sample_name] == keyword[None] : identifier[define_sample_name] = keyword[lambda] identifier[x] : identifier[x] keyword[else] : keyword[assert] identifier[len] ( identifier[set] ([ identifier[define_sample_name] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[fns] ]))== identifier[len] ( identifier[fns] ) identifier[sj_outD] = identifier[dict] () keyword[for] identifier[fn] keyword[in] identifier[fns] : identifier[sample] = identifier[define_sample_name] ( identifier[fn] ) identifier[df] = identifier[read_sj_out_tab] ( identifier[fn] ) identifier[df] = identifier[df] [ identifier[df] . identifier[unique_junction_reads] > literal[int] ] identifier[index] =( identifier[df] . identifier[chrom] + literal[string] + identifier[df] . identifier[start] . identifier[astype] ( identifier[str] )+ literal[string] + identifier[df] . identifier[end] . identifier[astype] ( identifier[str] )) keyword[assert] identifier[len] ( identifier[index] )== identifier[len] ( identifier[set] ( identifier[index] )) identifier[df] . identifier[index] = identifier[index] keyword[if] identifier[jxns] : identifier[df] = identifier[df] . identifier[ix] [ identifier[set] ( identifier[df] . identifier[index] )& identifier[jxns] ] identifier[sj_outD] [ identifier[sample] ]= identifier[df] keyword[return] identifier[sj_outD]
def _make_sj_out_dict(fns, jxns=None, define_sample_name=None): """Read multiple sj_outs, return dict with keys as sample names and values as sj_out dataframes. Parameters ---------- fns : list of strs of filenames or file handles List of filename of the SJ.out.tab files to read in jxns : set If provided, only keep junctions in this set. define_sample_name : function that takes string as input Function mapping filename to sample name. For instance, you may have the sample name in the path and use a regex to extract it. The sample names will be used as the column names. If this is not provided, the columns will be named as the input files. Returns ------- sj_outD : dict Dict whose keys are sample names and values are sj_out dataframes """ if define_sample_name == None: define_sample_name = lambda x: x # depends on [control=['if'], data=['define_sample_name']] else: assert len(set([define_sample_name(x) for x in fns])) == len(fns) sj_outD = dict() for fn in fns: sample = define_sample_name(fn) df = read_sj_out_tab(fn) # Remove any junctions that don't have any uniquely mapped junction # reads. Even if a junction passes the cutoff in other samples, we are # only concerned with unique counts. df = df[df.unique_junction_reads > 0] index = df.chrom + ':' + df.start.astype(str) + '-' + df.end.astype(str) assert len(index) == len(set(index)) df.index = index # If jxns is provided, only keep those. if jxns: df = df.ix[set(df.index) & jxns] # depends on [control=['if'], data=[]] sj_outD[sample] = df # depends on [control=['for'], data=['fn']] return sj_outD
def InitUI(self): """ initialize window """ self.main_sizer = wx.BoxSizer(wx.VERTICAL) if self.grid_type in self.contribution.tables: dataframe = self.contribution.tables[self.grid_type] else: dataframe = None self.grid_builder = GridBuilder(self.contribution, self.grid_type, self.panel, parent_type=self.parent_type, reqd_headers=self.reqd_headers, exclude_cols=self.exclude_cols, huge=self.huge) self.grid = self.grid_builder.make_grid() self.grid.InitUI() ## Column management buttons self.add_cols_button = wx.Button(self.panel, label="Add additional columns", name='add_cols_btn', size=(170, 20)) self.Bind(wx.EVT_BUTTON, self.on_add_cols, self.add_cols_button) self.remove_cols_button = wx.Button(self.panel, label="Remove columns", name='remove_cols_btn', size=(170, 20)) self.Bind(wx.EVT_BUTTON, self.on_remove_cols, self.remove_cols_button) ## Row management buttons self.remove_row_button = wx.Button(self.panel, label="Remove last row", name='remove_last_row_btn') self.Bind(wx.EVT_BUTTON, self.on_remove_row, self.remove_row_button) many_rows_box = wx.BoxSizer(wx.HORIZONTAL) self.add_many_rows_button = wx.Button(self.panel, label="Add row(s)", name='add_many_rows_btn') self.rows_spin_ctrl = wx.SpinCtrl(self.panel, value='1', initial=1, name='rows_spin_ctrl') many_rows_box.Add(self.add_many_rows_button, flag=wx.ALIGN_CENTRE) many_rows_box.Add(self.rows_spin_ctrl) self.Bind(wx.EVT_BUTTON, self.on_add_rows, self.add_many_rows_button) self.deleteRowButton = wx.Button(self.panel, id=-1, label='Delete selected row(s)', name='delete_row_btn') self.Bind(wx.EVT_BUTTON, lambda event: self.on_remove_row(event, False), self.deleteRowButton) self.deleteRowButton.Disable() # measurements table should not be able to add new rows # that should be done elsewhere if self.huge: self.add_many_rows_button.Disable() self.rows_spin_ctrl.Disable() self.remove_row_button.Disable() # can't remove cols (seg fault), but can add them #self.add_cols_button.Disable() 
self.remove_cols_button.Disable() ## Data management buttons self.importButton = wx.Button(self.panel, id=-1, label='Import MagIC-format file', name='import_btn') self.Bind(wx.EVT_BUTTON, self.onImport, self.importButton) self.exitButton = wx.Button(self.panel, id=-1, label='Save and close grid', name='save_and_quit_btn') self.Bind(wx.EVT_BUTTON, self.onSave, self.exitButton) self.cancelButton = wx.Button(self.panel, id=-1, label='Cancel', name='cancel_btn') self.Bind(wx.EVT_BUTTON, self.onCancelButton, self.cancelButton) self.Bind(wx.EVT_CLOSE, self.onCancelButton) ## Input/output buttons self.copyButton = wx.Button(self.panel, id=-1, label="Start copy mode", name="copy_mode_btn") self.Bind(wx.EVT_BUTTON, self.onCopyMode, self.copyButton) self.selectAllButton = wx.Button(self.panel, id=-1, label="Copy all cells", name="select_all_btn") self.Bind(wx.EVT_BUTTON, self.onSelectAll, self.selectAllButton) self.copySelectionButton = wx.Button(self.panel, id=-1, label="Copy selected cells", name="copy_selection_btn") self.Bind(wx.EVT_BUTTON, self.onCopySelection, self.copySelectionButton) self.copySelectionButton.Disable() ## Help message and button # button self.toggle_help_btn = wx.Button(self.panel, id=-1, label="Show help", name='toggle_help_btn') self.Bind(wx.EVT_BUTTON, self.toggle_help, self.toggle_help_btn) # message self.help_msg_boxsizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, name='help_msg_boxsizer'), wx.VERTICAL) if self.grid_type == 'measurements': self.default_msg_text = "Edit measurements here.\nIn general, measurements should be imported directly into Pmag GUI,\nwhich has protocols for converting many lab formats into the MagIC format.\nIf we are missing your particular lab format, please let us know: https://github.com/PmagPy/PmagPy/issues.\nThis grid is just meant for looking at your measurements and doing small edits.\nCurrently, you can't add/remove rows here. You can add columns and edit cell values." 
else: self.default_msg_text = 'Edit {} here.\nYou can add or remove both rows and columns, however required columns may not be deleted.\nControlled vocabularies are indicated by **, and will have drop-down-menus.\nSuggested vocabularies are indicated by ^^, and also have drop-down-menus.\nTo edit all values in a column, click the column header.\nYou can cut and paste a block of cells from an Excel-like file.\nJust click the top left cell and use command "v".'.format(self.grid_type) txt = '' if self.grid_type == 'locations': txt = '\n\nNote: you can fill in location start/end latitude/longitude here.\nHowever, if you add sites in step 2, the program will calculate those values automatically,\nbased on site latitudes/logitudes.\nThese values will be written to your upload file.' if self.grid_type == 'samples': txt = "\n\nNote: you can fill in lithology, class, and type for each sample here.\nHowever, if the sample's class, lithology, and type are the same as its parent site,\nthose values will propagate down, and will be written to your sample file automatically." if self.grid_type == 'specimens': txt = "\n\nNote: you can fill in lithology, class, and type for each specimen here.\nHowever, if the specimen's class, lithology, and type are the same as its parent sample,\nthose values will propagate down, and will be written to your specimen file automatically." if self.grid_type == 'ages': txt = "\n\nNote: only ages for which you provide data will be written to your upload file." 
self.default_msg_text += txt self.msg_text = wx.StaticText(self.panel, label=self.default_msg_text, style=wx.TE_CENTER, name='msg text') self.help_msg_boxsizer.Add(self.msg_text) self.help_msg_boxsizer.ShowItems(False) ## Code message and button # button self.toggle_codes_btn = wx.Button(self.panel, id=-1, label="Show method codes", name='toggle_codes_btn') self.Bind(wx.EVT_BUTTON, self.toggle_codes, self.toggle_codes_btn) # message self.code_msg_boxsizer = pw.MethodCodeDemystifier(self.panel, self.contribution.vocab) self.code_msg_boxsizer.ShowItems(False) ## Add content to sizers self.hbox = wx.BoxSizer(wx.HORIZONTAL) col_btn_vbox = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, label='Columns', name='manage columns'), wx.VERTICAL) row_btn_vbox = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, label='Rows', name='manage rows'), wx.VERTICAL) self.main_btn_vbox = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, label='Manage data', name='manage data'), wx.VERTICAL) input_output_vbox = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, label='In/Out', name='manage in out'), wx.VERTICAL) col_btn_vbox.Add(self.add_cols_button, flag=wx.ALL, border=5) col_btn_vbox.Add(self.remove_cols_button, flag=wx.ALL, border=5) row_btn_vbox.Add(many_rows_box, flag=wx.ALL, border=5) row_btn_vbox.Add(self.remove_row_button, flag=wx.ALL, border=5) row_btn_vbox.Add(self.deleteRowButton, flag=wx.ALL, border=5) self.main_btn_vbox.Add(self.importButton, flag=wx.ALL, border=5) self.main_btn_vbox.Add(self.exitButton, flag=wx.ALL, border=5) self.main_btn_vbox.Add(self.cancelButton, flag=wx.ALL, border=5) input_output_vbox.Add(self.copyButton, flag=wx.ALL, border=5) input_output_vbox.Add(self.selectAllButton, flag=wx.ALL, border=5) input_output_vbox.Add(self.copySelectionButton, flag=wx.ALL, border=5) self.hbox.Add(col_btn_vbox) self.hbox.Add(row_btn_vbox) self.hbox.Add(self.main_btn_vbox) self.hbox.Add(input_output_vbox) #self.panel.Bind(wx.grid.EVT_GRID_LABEL_LEFT_CLICK, self.onLeftClickLabel, 
self.grid) self.grid.Bind(wx.grid.EVT_GRID_LABEL_LEFT_CLICK, self.onLeftClickLabel, self.grid) # self.Bind(wx.EVT_KEY_DOWN, self.on_key_down) self.panel.Bind(wx.EVT_TEXT_PASTE, self.do_fit) # add actual data! self.grid_builder.add_data_to_grid(self.grid, self.grid_type) # fill in some default values self.grid_builder.fill_defaults() # set scrollbars self.grid.set_scrollbars() ## this would be a way to prevent editing ## some cells in age grid. ## with multiple types of ages, though, ## this doesn't make much sense #if self.grid_type == 'ages': # attr = wx.grid.GridCellAttr() # attr.SetReadOnly(True) # self.grid.SetColAttr(1, attr) self.drop_down_menu = drop_down_menus.Menus(self.grid_type, self.contribution, self.grid) self.grid_box = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, name='grid container'), wx.VERTICAL) self.grid_box.Add(self.grid, 1, flag=wx.ALL|wx.EXPAND, border=5) # final layout, set size self.main_sizer.Add(self.hbox, flag=wx.ALL|wx.ALIGN_CENTER,#|wx.SHAPED, border=20) self.main_sizer.Add(self.toggle_help_btn, 0, flag=wx.BOTTOM|wx.ALIGN_CENTRE,#|wx.SHAPED, border=5) self.main_sizer.Add(self.help_msg_boxsizer, 0, flag=wx.BOTTOM|wx.ALIGN_CENTRE, border=10) self.main_sizer.Add(self.toggle_codes_btn, 0, flag=wx.BOTTOM|wx.ALIGN_CENTRE,#|wx.SHAPED, border=5) self.main_sizer.Add(self.code_msg_boxsizer, 0, flag=wx.BOTTOM|wx.ALIGN_CENTRE,#|wx.SHAPED, border=5) self.main_sizer.Add(self.grid_box, 2, flag=wx.ALL|wx.ALIGN_CENTER|wx.EXPAND, border=10) self.panel.SetSizer(self.main_sizer) panel_sizer = wx.BoxSizer(wx.VERTICAL) panel_sizer.Add(self.panel, 1, wx.EXPAND) self.SetSizer(panel_sizer) panel_sizer.Fit(self) ## this keeps sizing correct if the user resizes the window manually #self.Bind(wx.EVT_SIZE, self.do_fit) # self.Centre() self.Show()
def function[InitUI, parameter[self]]: constant[ initialize window ] name[self].main_sizer assign[=] call[name[wx].BoxSizer, parameter[name[wx].VERTICAL]] if compare[name[self].grid_type in name[self].contribution.tables] begin[:] variable[dataframe] assign[=] call[name[self].contribution.tables][name[self].grid_type] name[self].grid_builder assign[=] call[name[GridBuilder], parameter[name[self].contribution, name[self].grid_type, name[self].panel]] name[self].grid assign[=] call[name[self].grid_builder.make_grid, parameter[]] call[name[self].grid.InitUI, parameter[]] name[self].add_cols_button assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].on_add_cols, name[self].add_cols_button]] name[self].remove_cols_button assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].on_remove_cols, name[self].remove_cols_button]] name[self].remove_row_button assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].on_remove_row, name[self].remove_row_button]] variable[many_rows_box] assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]] name[self].add_many_rows_button assign[=] call[name[wx].Button, parameter[name[self].panel]] name[self].rows_spin_ctrl assign[=] call[name[wx].SpinCtrl, parameter[name[self].panel]] call[name[many_rows_box].Add, parameter[name[self].add_many_rows_button]] call[name[many_rows_box].Add, parameter[name[self].rows_spin_ctrl]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].on_add_rows, name[self].add_many_rows_button]] name[self].deleteRowButton assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, <ast.Lambda object at 0x7da20c9919f0>, name[self].deleteRowButton]] call[name[self].deleteRowButton.Disable, parameter[]] if name[self].huge begin[:] 
call[name[self].add_many_rows_button.Disable, parameter[]] call[name[self].rows_spin_ctrl.Disable, parameter[]] call[name[self].remove_row_button.Disable, parameter[]] call[name[self].remove_cols_button.Disable, parameter[]] name[self].importButton assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].onImport, name[self].importButton]] name[self].exitButton assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].onSave, name[self].exitButton]] name[self].cancelButton assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].onCancelButton, name[self].cancelButton]] call[name[self].Bind, parameter[name[wx].EVT_CLOSE, name[self].onCancelButton]] name[self].copyButton assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].onCopyMode, name[self].copyButton]] name[self].selectAllButton assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].onSelectAll, name[self].selectAllButton]] name[self].copySelectionButton assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].onCopySelection, name[self].copySelectionButton]] call[name[self].copySelectionButton.Disable, parameter[]] name[self].toggle_help_btn assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].toggle_help, name[self].toggle_help_btn]] name[self].help_msg_boxsizer assign[=] call[name[wx].StaticBoxSizer, parameter[call[name[wx].StaticBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da1b0552a10>]], name[wx].VERTICAL]] if compare[name[self].grid_type equal[==] constant[measurements]] begin[:] name[self].default_msg_text 
assign[=] constant[Edit measurements here. In general, measurements should be imported directly into Pmag GUI, which has protocols for converting many lab formats into the MagIC format. If we are missing your particular lab format, please let us know: https://github.com/PmagPy/PmagPy/issues. This grid is just meant for looking at your measurements and doing small edits. Currently, you can't add/remove rows here. You can add columns and edit cell values.] variable[txt] assign[=] constant[] if compare[name[self].grid_type equal[==] constant[locations]] begin[:] variable[txt] assign[=] constant[ Note: you can fill in location start/end latitude/longitude here. However, if you add sites in step 2, the program will calculate those values automatically, based on site latitudes/logitudes. These values will be written to your upload file.] if compare[name[self].grid_type equal[==] constant[samples]] begin[:] variable[txt] assign[=] constant[ Note: you can fill in lithology, class, and type for each sample here. However, if the sample's class, lithology, and type are the same as its parent site, those values will propagate down, and will be written to your sample file automatically.] if compare[name[self].grid_type equal[==] constant[specimens]] begin[:] variable[txt] assign[=] constant[ Note: you can fill in lithology, class, and type for each specimen here. However, if the specimen's class, lithology, and type are the same as its parent sample, those values will propagate down, and will be written to your specimen file automatically.] if compare[name[self].grid_type equal[==] constant[ages]] begin[:] variable[txt] assign[=] constant[ Note: only ages for which you provide data will be written to your upload file.] 
<ast.AugAssign object at 0x7da1b05538e0> name[self].msg_text assign[=] call[name[wx].StaticText, parameter[name[self].panel]] call[name[self].help_msg_boxsizer.Add, parameter[name[self].msg_text]] call[name[self].help_msg_boxsizer.ShowItems, parameter[constant[False]]] name[self].toggle_codes_btn assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].toggle_codes, name[self].toggle_codes_btn]] name[self].code_msg_boxsizer assign[=] call[name[pw].MethodCodeDemystifier, parameter[name[self].panel, name[self].contribution.vocab]] call[name[self].code_msg_boxsizer.ShowItems, parameter[constant[False]]] name[self].hbox assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]] variable[col_btn_vbox] assign[=] call[name[wx].StaticBoxSizer, parameter[call[name[wx].StaticBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da1b0553e50>]], name[wx].VERTICAL]] variable[row_btn_vbox] assign[=] call[name[wx].StaticBoxSizer, parameter[call[name[wx].StaticBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da1b056c790>]], name[wx].VERTICAL]] name[self].main_btn_vbox assign[=] call[name[wx].StaticBoxSizer, parameter[call[name[wx].StaticBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da1b056d150>]], name[wx].VERTICAL]] variable[input_output_vbox] assign[=] call[name[wx].StaticBoxSizer, parameter[call[name[wx].StaticBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da1b056c040>]], name[wx].VERTICAL]] call[name[col_btn_vbox].Add, parameter[name[self].add_cols_button]] call[name[col_btn_vbox].Add, parameter[name[self].remove_cols_button]] call[name[row_btn_vbox].Add, parameter[name[many_rows_box]]] call[name[row_btn_vbox].Add, parameter[name[self].remove_row_button]] call[name[row_btn_vbox].Add, parameter[name[self].deleteRowButton]] call[name[self].main_btn_vbox.Add, parameter[name[self].importButton]] call[name[self].main_btn_vbox.Add, parameter[name[self].exitButton]] 
call[name[self].main_btn_vbox.Add, parameter[name[self].cancelButton]] call[name[input_output_vbox].Add, parameter[name[self].copyButton]] call[name[input_output_vbox].Add, parameter[name[self].selectAllButton]] call[name[input_output_vbox].Add, parameter[name[self].copySelectionButton]] call[name[self].hbox.Add, parameter[name[col_btn_vbox]]] call[name[self].hbox.Add, parameter[name[row_btn_vbox]]] call[name[self].hbox.Add, parameter[name[self].main_btn_vbox]] call[name[self].hbox.Add, parameter[name[input_output_vbox]]] call[name[self].grid.Bind, parameter[name[wx].grid.EVT_GRID_LABEL_LEFT_CLICK, name[self].onLeftClickLabel, name[self].grid]] call[name[self].Bind, parameter[name[wx].EVT_KEY_DOWN, name[self].on_key_down]] call[name[self].panel.Bind, parameter[name[wx].EVT_TEXT_PASTE, name[self].do_fit]] call[name[self].grid_builder.add_data_to_grid, parameter[name[self].grid, name[self].grid_type]] call[name[self].grid_builder.fill_defaults, parameter[]] call[name[self].grid.set_scrollbars, parameter[]] name[self].drop_down_menu assign[=] call[name[drop_down_menus].Menus, parameter[name[self].grid_type, name[self].contribution, name[self].grid]] name[self].grid_box assign[=] call[name[wx].StaticBoxSizer, parameter[call[name[wx].StaticBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da18bcc8b80>]], name[wx].VERTICAL]] call[name[self].grid_box.Add, parameter[name[self].grid, constant[1]]] call[name[self].main_sizer.Add, parameter[name[self].hbox]] call[name[self].main_sizer.Add, parameter[name[self].toggle_help_btn, constant[0]]] call[name[self].main_sizer.Add, parameter[name[self].help_msg_boxsizer, constant[0]]] call[name[self].main_sizer.Add, parameter[name[self].toggle_codes_btn, constant[0]]] call[name[self].main_sizer.Add, parameter[name[self].code_msg_boxsizer, constant[0]]] call[name[self].main_sizer.Add, parameter[name[self].grid_box, constant[2]]] call[name[self].panel.SetSizer, parameter[name[self].main_sizer]] variable[panel_sizer] assign[=] 
call[name[wx].BoxSizer, parameter[name[wx].VERTICAL]] call[name[panel_sizer].Add, parameter[name[self].panel, constant[1], name[wx].EXPAND]] call[name[self].SetSizer, parameter[name[panel_sizer]]] call[name[panel_sizer].Fit, parameter[name[self]]] call[name[self].Show, parameter[]]
keyword[def] identifier[InitUI] ( identifier[self] ): literal[string] identifier[self] . identifier[main_sizer] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[VERTICAL] ) keyword[if] identifier[self] . identifier[grid_type] keyword[in] identifier[self] . identifier[contribution] . identifier[tables] : identifier[dataframe] = identifier[self] . identifier[contribution] . identifier[tables] [ identifier[self] . identifier[grid_type] ] keyword[else] : identifier[dataframe] = keyword[None] identifier[self] . identifier[grid_builder] = identifier[GridBuilder] ( identifier[self] . identifier[contribution] , identifier[self] . identifier[grid_type] , identifier[self] . identifier[panel] , identifier[parent_type] = identifier[self] . identifier[parent_type] , identifier[reqd_headers] = identifier[self] . identifier[reqd_headers] , identifier[exclude_cols] = identifier[self] . identifier[exclude_cols] , identifier[huge] = identifier[self] . identifier[huge] ) identifier[self] . identifier[grid] = identifier[self] . identifier[grid_builder] . identifier[make_grid] () identifier[self] . identifier[grid] . identifier[InitUI] () identifier[self] . identifier[add_cols_button] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[label] = literal[string] , identifier[name] = literal[string] , identifier[size] =( literal[int] , literal[int] )) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[on_add_cols] , identifier[self] . identifier[add_cols_button] ) identifier[self] . identifier[remove_cols_button] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[label] = literal[string] , identifier[name] = literal[string] , identifier[size] =( literal[int] , literal[int] )) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[on_remove_cols] , identifier[self] . 
identifier[remove_cols_button] ) identifier[self] . identifier[remove_row_button] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[label] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[on_remove_row] , identifier[self] . identifier[remove_row_button] ) identifier[many_rows_box] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] ) identifier[self] . identifier[add_many_rows_button] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[label] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[rows_spin_ctrl] = identifier[wx] . identifier[SpinCtrl] ( identifier[self] . identifier[panel] , identifier[value] = literal[string] , identifier[initial] = literal[int] , identifier[name] = literal[string] ) identifier[many_rows_box] . identifier[Add] ( identifier[self] . identifier[add_many_rows_button] , identifier[flag] = identifier[wx] . identifier[ALIGN_CENTRE] ) identifier[many_rows_box] . identifier[Add] ( identifier[self] . identifier[rows_spin_ctrl] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[on_add_rows] , identifier[self] . identifier[add_many_rows_button] ) identifier[self] . identifier[deleteRowButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , keyword[lambda] identifier[event] : identifier[self] . identifier[on_remove_row] ( identifier[event] , keyword[False] ), identifier[self] . identifier[deleteRowButton] ) identifier[self] . identifier[deleteRowButton] . identifier[Disable] () keyword[if] identifier[self] . 
identifier[huge] : identifier[self] . identifier[add_many_rows_button] . identifier[Disable] () identifier[self] . identifier[rows_spin_ctrl] . identifier[Disable] () identifier[self] . identifier[remove_row_button] . identifier[Disable] () identifier[self] . identifier[remove_cols_button] . identifier[Disable] () identifier[self] . identifier[importButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[onImport] , identifier[self] . identifier[importButton] ) identifier[self] . identifier[exitButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[onSave] , identifier[self] . identifier[exitButton] ) identifier[self] . identifier[cancelButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[onCancelButton] , identifier[self] . identifier[cancelButton] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_CLOSE] , identifier[self] . identifier[onCancelButton] ) identifier[self] . identifier[copyButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . 
identifier[onCopyMode] , identifier[self] . identifier[copyButton] ) identifier[self] . identifier[selectAllButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[onSelectAll] , identifier[self] . identifier[selectAllButton] ) identifier[self] . identifier[copySelectionButton] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[onCopySelection] , identifier[self] . identifier[copySelectionButton] ) identifier[self] . identifier[copySelectionButton] . identifier[Disable] () identifier[self] . identifier[toggle_help_btn] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[toggle_help] , identifier[self] . identifier[toggle_help_btn] ) identifier[self] . identifier[help_msg_boxsizer] = identifier[wx] . identifier[StaticBoxSizer] ( identifier[wx] . identifier[StaticBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[name] = literal[string] ), identifier[wx] . identifier[VERTICAL] ) keyword[if] identifier[self] . identifier[grid_type] == literal[string] : identifier[self] . identifier[default_msg_text] = literal[string] keyword[else] : identifier[self] . identifier[default_msg_text] = literal[string] . identifier[format] ( identifier[self] . 
identifier[grid_type] ) identifier[txt] = literal[string] keyword[if] identifier[self] . identifier[grid_type] == literal[string] : identifier[txt] = literal[string] keyword[if] identifier[self] . identifier[grid_type] == literal[string] : identifier[txt] = literal[string] keyword[if] identifier[self] . identifier[grid_type] == literal[string] : identifier[txt] = literal[string] keyword[if] identifier[self] . identifier[grid_type] == literal[string] : identifier[txt] = literal[string] identifier[self] . identifier[default_msg_text] += identifier[txt] identifier[self] . identifier[msg_text] = identifier[wx] . identifier[StaticText] ( identifier[self] . identifier[panel] , identifier[label] = identifier[self] . identifier[default_msg_text] , identifier[style] = identifier[wx] . identifier[TE_CENTER] , identifier[name] = literal[string] ) identifier[self] . identifier[help_msg_boxsizer] . identifier[Add] ( identifier[self] . identifier[msg_text] ) identifier[self] . identifier[help_msg_boxsizer] . identifier[ShowItems] ( keyword[False] ) identifier[self] . identifier[toggle_codes_btn] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[toggle_codes] , identifier[self] . identifier[toggle_codes_btn] ) identifier[self] . identifier[code_msg_boxsizer] = identifier[pw] . identifier[MethodCodeDemystifier] ( identifier[self] . identifier[panel] , identifier[self] . identifier[contribution] . identifier[vocab] ) identifier[self] . identifier[code_msg_boxsizer] . identifier[ShowItems] ( keyword[False] ) identifier[self] . identifier[hbox] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] ) identifier[col_btn_vbox] = identifier[wx] . identifier[StaticBoxSizer] ( identifier[wx] . 
identifier[StaticBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ), identifier[wx] . identifier[VERTICAL] ) identifier[row_btn_vbox] = identifier[wx] . identifier[StaticBoxSizer] ( identifier[wx] . identifier[StaticBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ), identifier[wx] . identifier[VERTICAL] ) identifier[self] . identifier[main_btn_vbox] = identifier[wx] . identifier[StaticBoxSizer] ( identifier[wx] . identifier[StaticBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ), identifier[wx] . identifier[VERTICAL] ) identifier[input_output_vbox] = identifier[wx] . identifier[StaticBoxSizer] ( identifier[wx] . identifier[StaticBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[label] = literal[string] , identifier[name] = literal[string] ), identifier[wx] . identifier[VERTICAL] ) identifier[col_btn_vbox] . identifier[Add] ( identifier[self] . identifier[add_cols_button] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] ) identifier[col_btn_vbox] . identifier[Add] ( identifier[self] . identifier[remove_cols_button] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] ) identifier[row_btn_vbox] . identifier[Add] ( identifier[many_rows_box] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] ) identifier[row_btn_vbox] . identifier[Add] ( identifier[self] . identifier[remove_row_button] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] ) identifier[row_btn_vbox] . identifier[Add] ( identifier[self] . identifier[deleteRowButton] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] ) identifier[self] . 
identifier[main_btn_vbox] . identifier[Add] ( identifier[self] . identifier[importButton] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] ) identifier[self] . identifier[main_btn_vbox] . identifier[Add] ( identifier[self] . identifier[exitButton] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] ) identifier[self] . identifier[main_btn_vbox] . identifier[Add] ( identifier[self] . identifier[cancelButton] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] ) identifier[input_output_vbox] . identifier[Add] ( identifier[self] . identifier[copyButton] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] ) identifier[input_output_vbox] . identifier[Add] ( identifier[self] . identifier[selectAllButton] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] ) identifier[input_output_vbox] . identifier[Add] ( identifier[self] . identifier[copySelectionButton] , identifier[flag] = identifier[wx] . identifier[ALL] , identifier[border] = literal[int] ) identifier[self] . identifier[hbox] . identifier[Add] ( identifier[col_btn_vbox] ) identifier[self] . identifier[hbox] . identifier[Add] ( identifier[row_btn_vbox] ) identifier[self] . identifier[hbox] . identifier[Add] ( identifier[self] . identifier[main_btn_vbox] ) identifier[self] . identifier[hbox] . identifier[Add] ( identifier[input_output_vbox] ) identifier[self] . identifier[grid] . identifier[Bind] ( identifier[wx] . identifier[grid] . identifier[EVT_GRID_LABEL_LEFT_CLICK] , identifier[self] . identifier[onLeftClickLabel] , identifier[self] . identifier[grid] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_KEY_DOWN] , identifier[self] . identifier[on_key_down] ) identifier[self] . identifier[panel] . identifier[Bind] ( identifier[wx] . identifier[EVT_TEXT_PASTE] , identifier[self] . 
identifier[do_fit] ) identifier[self] . identifier[grid_builder] . identifier[add_data_to_grid] ( identifier[self] . identifier[grid] , identifier[self] . identifier[grid_type] ) identifier[self] . identifier[grid_builder] . identifier[fill_defaults] () identifier[self] . identifier[grid] . identifier[set_scrollbars] () identifier[self] . identifier[drop_down_menu] = identifier[drop_down_menus] . identifier[Menus] ( identifier[self] . identifier[grid_type] , identifier[self] . identifier[contribution] , identifier[self] . identifier[grid] ) identifier[self] . identifier[grid_box] = identifier[wx] . identifier[StaticBoxSizer] ( identifier[wx] . identifier[StaticBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[name] = literal[string] ), identifier[wx] . identifier[VERTICAL] ) identifier[self] . identifier[grid_box] . identifier[Add] ( identifier[self] . identifier[grid] , literal[int] , identifier[flag] = identifier[wx] . identifier[ALL] | identifier[wx] . identifier[EXPAND] , identifier[border] = literal[int] ) identifier[self] . identifier[main_sizer] . identifier[Add] ( identifier[self] . identifier[hbox] , identifier[flag] = identifier[wx] . identifier[ALL] | identifier[wx] . identifier[ALIGN_CENTER] , identifier[border] = literal[int] ) identifier[self] . identifier[main_sizer] . identifier[Add] ( identifier[self] . identifier[toggle_help_btn] , literal[int] , identifier[flag] = identifier[wx] . identifier[BOTTOM] | identifier[wx] . identifier[ALIGN_CENTRE] , identifier[border] = literal[int] ) identifier[self] . identifier[main_sizer] . identifier[Add] ( identifier[self] . identifier[help_msg_boxsizer] , literal[int] , identifier[flag] = identifier[wx] . identifier[BOTTOM] | identifier[wx] . identifier[ALIGN_CENTRE] , identifier[border] = literal[int] ) identifier[self] . identifier[main_sizer] . identifier[Add] ( identifier[self] . identifier[toggle_codes_btn] , literal[int] , identifier[flag] = identifier[wx] . 
identifier[BOTTOM] | identifier[wx] . identifier[ALIGN_CENTRE] , identifier[border] = literal[int] ) identifier[self] . identifier[main_sizer] . identifier[Add] ( identifier[self] . identifier[code_msg_boxsizer] , literal[int] , identifier[flag] = identifier[wx] . identifier[BOTTOM] | identifier[wx] . identifier[ALIGN_CENTRE] , identifier[border] = literal[int] ) identifier[self] . identifier[main_sizer] . identifier[Add] ( identifier[self] . identifier[grid_box] , literal[int] , identifier[flag] = identifier[wx] . identifier[ALL] | identifier[wx] . identifier[ALIGN_CENTER] | identifier[wx] . identifier[EXPAND] , identifier[border] = literal[int] ) identifier[self] . identifier[panel] . identifier[SetSizer] ( identifier[self] . identifier[main_sizer] ) identifier[panel_sizer] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[VERTICAL] ) identifier[panel_sizer] . identifier[Add] ( identifier[self] . identifier[panel] , literal[int] , identifier[wx] . identifier[EXPAND] ) identifier[self] . identifier[SetSizer] ( identifier[panel_sizer] ) identifier[panel_sizer] . identifier[Fit] ( identifier[self] ) identifier[self] . identifier[Show] ()
def InitUI(self): """ initialize window """ self.main_sizer = wx.BoxSizer(wx.VERTICAL) if self.grid_type in self.contribution.tables: dataframe = self.contribution.tables[self.grid_type] # depends on [control=['if'], data=[]] else: dataframe = None self.grid_builder = GridBuilder(self.contribution, self.grid_type, self.panel, parent_type=self.parent_type, reqd_headers=self.reqd_headers, exclude_cols=self.exclude_cols, huge=self.huge) self.grid = self.grid_builder.make_grid() self.grid.InitUI() ## Column management buttons self.add_cols_button = wx.Button(self.panel, label='Add additional columns', name='add_cols_btn', size=(170, 20)) self.Bind(wx.EVT_BUTTON, self.on_add_cols, self.add_cols_button) self.remove_cols_button = wx.Button(self.panel, label='Remove columns', name='remove_cols_btn', size=(170, 20)) self.Bind(wx.EVT_BUTTON, self.on_remove_cols, self.remove_cols_button) ## Row management buttons self.remove_row_button = wx.Button(self.panel, label='Remove last row', name='remove_last_row_btn') self.Bind(wx.EVT_BUTTON, self.on_remove_row, self.remove_row_button) many_rows_box = wx.BoxSizer(wx.HORIZONTAL) self.add_many_rows_button = wx.Button(self.panel, label='Add row(s)', name='add_many_rows_btn') self.rows_spin_ctrl = wx.SpinCtrl(self.panel, value='1', initial=1, name='rows_spin_ctrl') many_rows_box.Add(self.add_many_rows_button, flag=wx.ALIGN_CENTRE) many_rows_box.Add(self.rows_spin_ctrl) self.Bind(wx.EVT_BUTTON, self.on_add_rows, self.add_many_rows_button) self.deleteRowButton = wx.Button(self.panel, id=-1, label='Delete selected row(s)', name='delete_row_btn') self.Bind(wx.EVT_BUTTON, lambda event: self.on_remove_row(event, False), self.deleteRowButton) self.deleteRowButton.Disable() # measurements table should not be able to add new rows # that should be done elsewhere if self.huge: self.add_many_rows_button.Disable() self.rows_spin_ctrl.Disable() self.remove_row_button.Disable() # can't remove cols (seg fault), but can add them 
#self.add_cols_button.Disable() self.remove_cols_button.Disable() # depends on [control=['if'], data=[]] ## Data management buttons self.importButton = wx.Button(self.panel, id=-1, label='Import MagIC-format file', name='import_btn') self.Bind(wx.EVT_BUTTON, self.onImport, self.importButton) self.exitButton = wx.Button(self.panel, id=-1, label='Save and close grid', name='save_and_quit_btn') self.Bind(wx.EVT_BUTTON, self.onSave, self.exitButton) self.cancelButton = wx.Button(self.panel, id=-1, label='Cancel', name='cancel_btn') self.Bind(wx.EVT_BUTTON, self.onCancelButton, self.cancelButton) self.Bind(wx.EVT_CLOSE, self.onCancelButton) ## Input/output buttons self.copyButton = wx.Button(self.panel, id=-1, label='Start copy mode', name='copy_mode_btn') self.Bind(wx.EVT_BUTTON, self.onCopyMode, self.copyButton) self.selectAllButton = wx.Button(self.panel, id=-1, label='Copy all cells', name='select_all_btn') self.Bind(wx.EVT_BUTTON, self.onSelectAll, self.selectAllButton) self.copySelectionButton = wx.Button(self.panel, id=-1, label='Copy selected cells', name='copy_selection_btn') self.Bind(wx.EVT_BUTTON, self.onCopySelection, self.copySelectionButton) self.copySelectionButton.Disable() ## Help message and button # button self.toggle_help_btn = wx.Button(self.panel, id=-1, label='Show help', name='toggle_help_btn') self.Bind(wx.EVT_BUTTON, self.toggle_help, self.toggle_help_btn) # message self.help_msg_boxsizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, name='help_msg_boxsizer'), wx.VERTICAL) if self.grid_type == 'measurements': self.default_msg_text = "Edit measurements here.\nIn general, measurements should be imported directly into Pmag GUI,\nwhich has protocols for converting many lab formats into the MagIC format.\nIf we are missing your particular lab format, please let us know: https://github.com/PmagPy/PmagPy/issues.\nThis grid is just meant for looking at your measurements and doing small edits.\nCurrently, you can't add/remove rows here. 
You can add columns and edit cell values." # depends on [control=['if'], data=[]] else: self.default_msg_text = 'Edit {} here.\nYou can add or remove both rows and columns, however required columns may not be deleted.\nControlled vocabularies are indicated by **, and will have drop-down-menus.\nSuggested vocabularies are indicated by ^^, and also have drop-down-menus.\nTo edit all values in a column, click the column header.\nYou can cut and paste a block of cells from an Excel-like file.\nJust click the top left cell and use command "v".'.format(self.grid_type) txt = '' if self.grid_type == 'locations': txt = '\n\nNote: you can fill in location start/end latitude/longitude here.\nHowever, if you add sites in step 2, the program will calculate those values automatically,\nbased on site latitudes/logitudes.\nThese values will be written to your upload file.' # depends on [control=['if'], data=[]] if self.grid_type == 'samples': txt = "\n\nNote: you can fill in lithology, class, and type for each sample here.\nHowever, if the sample's class, lithology, and type are the same as its parent site,\nthose values will propagate down, and will be written to your sample file automatically." # depends on [control=['if'], data=[]] if self.grid_type == 'specimens': txt = "\n\nNote: you can fill in lithology, class, and type for each specimen here.\nHowever, if the specimen's class, lithology, and type are the same as its parent sample,\nthose values will propagate down, and will be written to your specimen file automatically." # depends on [control=['if'], data=[]] if self.grid_type == 'ages': txt = '\n\nNote: only ages for which you provide data will be written to your upload file.' 
# depends on [control=['if'], data=[]] self.default_msg_text += txt self.msg_text = wx.StaticText(self.panel, label=self.default_msg_text, style=wx.TE_CENTER, name='msg text') self.help_msg_boxsizer.Add(self.msg_text) self.help_msg_boxsizer.ShowItems(False) ## Code message and button # button self.toggle_codes_btn = wx.Button(self.panel, id=-1, label='Show method codes', name='toggle_codes_btn') self.Bind(wx.EVT_BUTTON, self.toggle_codes, self.toggle_codes_btn) # message self.code_msg_boxsizer = pw.MethodCodeDemystifier(self.panel, self.contribution.vocab) self.code_msg_boxsizer.ShowItems(False) ## Add content to sizers self.hbox = wx.BoxSizer(wx.HORIZONTAL) col_btn_vbox = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, label='Columns', name='manage columns'), wx.VERTICAL) row_btn_vbox = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, label='Rows', name='manage rows'), wx.VERTICAL) self.main_btn_vbox = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, label='Manage data', name='manage data'), wx.VERTICAL) input_output_vbox = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, label='In/Out', name='manage in out'), wx.VERTICAL) col_btn_vbox.Add(self.add_cols_button, flag=wx.ALL, border=5) col_btn_vbox.Add(self.remove_cols_button, flag=wx.ALL, border=5) row_btn_vbox.Add(many_rows_box, flag=wx.ALL, border=5) row_btn_vbox.Add(self.remove_row_button, flag=wx.ALL, border=5) row_btn_vbox.Add(self.deleteRowButton, flag=wx.ALL, border=5) self.main_btn_vbox.Add(self.importButton, flag=wx.ALL, border=5) self.main_btn_vbox.Add(self.exitButton, flag=wx.ALL, border=5) self.main_btn_vbox.Add(self.cancelButton, flag=wx.ALL, border=5) input_output_vbox.Add(self.copyButton, flag=wx.ALL, border=5) input_output_vbox.Add(self.selectAllButton, flag=wx.ALL, border=5) input_output_vbox.Add(self.copySelectionButton, flag=wx.ALL, border=5) self.hbox.Add(col_btn_vbox) self.hbox.Add(row_btn_vbox) self.hbox.Add(self.main_btn_vbox) self.hbox.Add(input_output_vbox) 
#self.panel.Bind(wx.grid.EVT_GRID_LABEL_LEFT_CLICK, self.onLeftClickLabel, self.grid) self.grid.Bind(wx.grid.EVT_GRID_LABEL_LEFT_CLICK, self.onLeftClickLabel, self.grid) # self.Bind(wx.EVT_KEY_DOWN, self.on_key_down) self.panel.Bind(wx.EVT_TEXT_PASTE, self.do_fit) # add actual data! self.grid_builder.add_data_to_grid(self.grid, self.grid_type) # fill in some default values self.grid_builder.fill_defaults() # set scrollbars self.grid.set_scrollbars() ## this would be a way to prevent editing ## some cells in age grid. ## with multiple types of ages, though, ## this doesn't make much sense #if self.grid_type == 'ages': # attr = wx.grid.GridCellAttr() # attr.SetReadOnly(True) # self.grid.SetColAttr(1, attr) self.drop_down_menu = drop_down_menus.Menus(self.grid_type, self.contribution, self.grid) self.grid_box = wx.StaticBoxSizer(wx.StaticBox(self.panel, -1, name='grid container'), wx.VERTICAL) self.grid_box.Add(self.grid, 1, flag=wx.ALL | wx.EXPAND, border=5) # final layout, set size #|wx.SHAPED, self.main_sizer.Add(self.hbox, flag=wx.ALL | wx.ALIGN_CENTER, border=20) #|wx.SHAPED, self.main_sizer.Add(self.toggle_help_btn, 0, flag=wx.BOTTOM | wx.ALIGN_CENTRE, border=5) self.main_sizer.Add(self.help_msg_boxsizer, 0, flag=wx.BOTTOM | wx.ALIGN_CENTRE, border=10) #|wx.SHAPED, self.main_sizer.Add(self.toggle_codes_btn, 0, flag=wx.BOTTOM | wx.ALIGN_CENTRE, border=5) #|wx.SHAPED, self.main_sizer.Add(self.code_msg_boxsizer, 0, flag=wx.BOTTOM | wx.ALIGN_CENTRE, border=5) self.main_sizer.Add(self.grid_box, 2, flag=wx.ALL | wx.ALIGN_CENTER | wx.EXPAND, border=10) self.panel.SetSizer(self.main_sizer) panel_sizer = wx.BoxSizer(wx.VERTICAL) panel_sizer.Add(self.panel, 1, wx.EXPAND) self.SetSizer(panel_sizer) panel_sizer.Fit(self) ## this keeps sizing correct if the user resizes the window manually #self.Bind(wx.EVT_SIZE, self.do_fit) # self.Centre() self.Show()
def convert_model_from_external_data(model):  # type: (ModelProto) -> None
    """Embed all externally-stored tensor data directly into *model*.

    After this call, save_model serializes every tensor with embedded
    raw data rather than references to external files.

    @params
    model: ModelProto to be converted.
    """
    for tensor in _get_all_tensors(model):
        # Tensors that already carry embedded data are left untouched.
        if not uses_external_data(tensor):
            continue
        # The external payload must already have been loaded into raw_data.
        if not tensor.HasField("raw_data"):
            raise ValueError("raw_data field doesn't exist.")
        # Drop the external-file reference and mark the data as inline.
        del tensor.external_data[:]
        tensor.data_location = TensorProto.DEFAULT
def function[convert_model_from_external_data, parameter[model]]: constant[ call to set all tensors data as embedded data. save_model saves all the tensors data as embedded data after calling this function. @params model: ModelProto to be converted. ] for taget[name[tensor]] in starred[call[name[_get_all_tensors], parameter[name[model]]]] begin[:] if call[name[uses_external_data], parameter[name[tensor]]] begin[:] if <ast.UnaryOp object at 0x7da1b1ef2170> begin[:] <ast.Raise object at 0x7da1b1ef1e70> <ast.Delete object at 0x7da20c992bc0> name[tensor].data_location assign[=] name[TensorProto].DEFAULT
keyword[def] identifier[convert_model_from_external_data] ( identifier[model] ): literal[string] keyword[for] identifier[tensor] keyword[in] identifier[_get_all_tensors] ( identifier[model] ): keyword[if] identifier[uses_external_data] ( identifier[tensor] ): keyword[if] keyword[not] identifier[tensor] . identifier[HasField] ( literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[del] identifier[tensor] . identifier[external_data] [:] identifier[tensor] . identifier[data_location] = identifier[TensorProto] . identifier[DEFAULT]
def convert_model_from_external_data(model): # type: (ModelProto) -> None '\n call to set all tensors data as embedded data. save_model saves all the tensors data as embedded data after calling this function.\n @params\n model: ModelProto to be converted.\n ' for tensor in _get_all_tensors(model): if uses_external_data(tensor): if not tensor.HasField('raw_data'): raise ValueError("raw_data field doesn't exist.") # depends on [control=['if'], data=[]] del tensor.external_data[:] tensor.data_location = TensorProto.DEFAULT # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tensor']]
def sanitize(vpc_config):
    """Validate a VpcConfig dict and strip any unexpected keys.

    Args:
        vpc_config (dict): a VpcConfig dict containing 'Subnets' and
            'SecurityGroupIds'.

    Returns:
        A valid VpcConfig dict containing only 'Subnets' and
        'SecurityGroupIds' from the vpc_config parameter, or None when
        vpc_config is None.

    Raises:
        ValueError: if vpc_config is not a non-empty dict, or either
            required key is missing or maps to something other than a
            non-empty list.
    """
    if vpc_config is None:
        return None
    if type(vpc_config) is not dict:
        raise ValueError('vpc_config is not a dict: {}'.format(vpc_config))
    if not vpc_config:
        raise ValueError('vpc_config is empty')

    def _checked_list(key):
        # Shared validation for both required keys; error strings match
        # the original wording exactly.
        value = vpc_config.get(key)
        if value is None:
            raise ValueError('vpc_config is missing key: {}'.format(key))
        if type(value) is not list:
            raise ValueError('vpc_config value for {} is not a list: {}'.format(key, value))
        if not value:
            raise ValueError('vpc_config value for {} is empty'.format(key))
        return value

    subnets = _checked_list(SUBNETS_KEY)
    security_group_ids = _checked_list(SECURITY_GROUP_IDS_KEY)
    return to_dict(subnets, security_group_ids)
def function[sanitize, parameter[vpc_config]]: constant[ Checks that an instance of VpcConfig has the expected keys and values, removes unexpected keys, and raises ValueErrors if any expectations are violated Args: vpc_config (dict): a VpcConfig dict containing 'Subnets' and 'SecurityGroupIds' Returns: A valid VpcConfig dict containing only 'Subnets' and 'SecurityGroupIds' from the vpc_config parameter If vpc_config parameter is None, returns None Raises: ValueError if any expectations are violated: * vpc_config must be a non-empty dict * vpc_config must have key `Subnets` and the value must be a non-empty list * vpc_config must have key `SecurityGroupIds` and the value must be a non-empty list ] if compare[name[vpc_config] is constant[None]] begin[:] return[name[vpc_config]] variable[subnets] assign[=] call[name[vpc_config].get, parameter[name[SUBNETS_KEY]]] if compare[name[subnets] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1c1a1d0> if compare[call[name[type], parameter[name[subnets]]] is_not name[list]] begin[:] <ast.Raise object at 0x7da1b1c1ae90> variable[security_group_ids] assign[=] call[name[vpc_config].get, parameter[name[SECURITY_GROUP_IDS_KEY]]] if compare[name[security_group_ids] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1c18fa0> if compare[call[name[type], parameter[name[security_group_ids]]] is_not name[list]] begin[:] <ast.Raise object at 0x7da1b1c185b0> return[call[name[to_dict], parameter[name[subnets], name[security_group_ids]]]]
keyword[def] identifier[sanitize] ( identifier[vpc_config] ): literal[string] keyword[if] identifier[vpc_config] keyword[is] keyword[None] : keyword[return] identifier[vpc_config] keyword[elif] identifier[type] ( identifier[vpc_config] ) keyword[is] keyword[not] identifier[dict] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[vpc_config] )) keyword[elif] keyword[not] identifier[vpc_config] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[subnets] = identifier[vpc_config] . identifier[get] ( identifier[SUBNETS_KEY] ) keyword[if] identifier[subnets] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[SUBNETS_KEY] )) keyword[if] identifier[type] ( identifier[subnets] ) keyword[is] keyword[not] identifier[list] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[SUBNETS_KEY] , identifier[subnets] )) keyword[elif] keyword[not] identifier[subnets] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[SUBNETS_KEY] )) identifier[security_group_ids] = identifier[vpc_config] . identifier[get] ( identifier[SECURITY_GROUP_IDS_KEY] ) keyword[if] identifier[security_group_ids] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[SECURITY_GROUP_IDS_KEY] )) keyword[if] identifier[type] ( identifier[security_group_ids] ) keyword[is] keyword[not] identifier[list] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[SECURITY_GROUP_IDS_KEY] , identifier[security_group_ids] )) keyword[elif] keyword[not] identifier[security_group_ids] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[SECURITY_GROUP_IDS_KEY] )) keyword[return] identifier[to_dict] ( identifier[subnets] , identifier[security_group_ids] )
def sanitize(vpc_config): """ Checks that an instance of VpcConfig has the expected keys and values, removes unexpected keys, and raises ValueErrors if any expectations are violated Args: vpc_config (dict): a VpcConfig dict containing 'Subnets' and 'SecurityGroupIds' Returns: A valid VpcConfig dict containing only 'Subnets' and 'SecurityGroupIds' from the vpc_config parameter If vpc_config parameter is None, returns None Raises: ValueError if any expectations are violated: * vpc_config must be a non-empty dict * vpc_config must have key `Subnets` and the value must be a non-empty list * vpc_config must have key `SecurityGroupIds` and the value must be a non-empty list """ if vpc_config is None: return vpc_config # depends on [control=['if'], data=['vpc_config']] elif type(vpc_config) is not dict: raise ValueError('vpc_config is not a dict: {}'.format(vpc_config)) # depends on [control=['if'], data=[]] elif not vpc_config: raise ValueError('vpc_config is empty') # depends on [control=['if'], data=[]] subnets = vpc_config.get(SUBNETS_KEY) if subnets is None: raise ValueError('vpc_config is missing key: {}'.format(SUBNETS_KEY)) # depends on [control=['if'], data=[]] if type(subnets) is not list: raise ValueError('vpc_config value for {} is not a list: {}'.format(SUBNETS_KEY, subnets)) # depends on [control=['if'], data=[]] elif not subnets: raise ValueError('vpc_config value for {} is empty'.format(SUBNETS_KEY)) # depends on [control=['if'], data=[]] security_group_ids = vpc_config.get(SECURITY_GROUP_IDS_KEY) if security_group_ids is None: raise ValueError('vpc_config is missing key: {}'.format(SECURITY_GROUP_IDS_KEY)) # depends on [control=['if'], data=[]] if type(security_group_ids) is not list: raise ValueError('vpc_config value for {} is not a list: {}'.format(SECURITY_GROUP_IDS_KEY, security_group_ids)) # depends on [control=['if'], data=[]] elif not security_group_ids: raise ValueError('vpc_config value for {} is empty'.format(SECURITY_GROUP_IDS_KEY)) # depends 
on [control=['if'], data=[]] return to_dict(subnets, security_group_ids)
def _index_key_for(self, att, value=None): """Returns a key based on the attribute and its value. The key is used for indexing. """ if value is None: value = getattr(self, att) if callable(value): value = value() if value is None: return None if att not in self.lists: return self._get_index_key_for_non_list_attr(att, value) else: return self._tuple_for_index_key_attr_list(att, value)
def function[_index_key_for, parameter[self, att, value]]: constant[Returns a key based on the attribute and its value. The key is used for indexing. ] if compare[name[value] is constant[None]] begin[:] variable[value] assign[=] call[name[getattr], parameter[name[self], name[att]]] if call[name[callable], parameter[name[value]]] begin[:] variable[value] assign[=] call[name[value], parameter[]] if compare[name[value] is constant[None]] begin[:] return[constant[None]] if compare[name[att] <ast.NotIn object at 0x7da2590d7190> name[self].lists] begin[:] return[call[name[self]._get_index_key_for_non_list_attr, parameter[name[att], name[value]]]]
keyword[def] identifier[_index_key_for] ( identifier[self] , identifier[att] , identifier[value] = keyword[None] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[None] : identifier[value] = identifier[getattr] ( identifier[self] , identifier[att] ) keyword[if] identifier[callable] ( identifier[value] ): identifier[value] = identifier[value] () keyword[if] identifier[value] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[if] identifier[att] keyword[not] keyword[in] identifier[self] . identifier[lists] : keyword[return] identifier[self] . identifier[_get_index_key_for_non_list_attr] ( identifier[att] , identifier[value] ) keyword[else] : keyword[return] identifier[self] . identifier[_tuple_for_index_key_attr_list] ( identifier[att] , identifier[value] )
def _index_key_for(self, att, value=None): """Returns a key based on the attribute and its value. The key is used for indexing. """ if value is None: value = getattr(self, att) if callable(value): value = value() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['value']] if value is None: return None # depends on [control=['if'], data=[]] if att not in self.lists: return self._get_index_key_for_non_list_attr(att, value) # depends on [control=['if'], data=['att']] else: return self._tuple_for_index_key_attr_list(att, value)
def get_fobj(fname, mode='w+'):
    """Resolve *fname* into a file object.

    Parameters
    ----------
    fname : string, file object, file descriptor
        A path string or file descriptor is opened into a file object;
        an existing file-like object is returned unchanged (*mode* is
        ignored in that case).
    mode : str
        The mode used when a file must be opened.

    Returns
    -------
    fobj : file object
        The resolved file object.
    close : bool
        True only when this function opened the file itself (i.e. *fname*
        was a string), meaning the caller should close it after use.
    """
    if is_string_like(fname):
        # A path: we open it ourselves, so the caller must close it.
        return open(fname, mode), True
    if hasattr(fname, 'write'):
        # Already file-like (e.g. a StringIO); the caller owns it.
        return fname, False
    # Otherwise assume an OS-level file descriptor.
    return os.fdopen(fname, mode), False
def function[get_fobj, parameter[fname, mode]]: constant[Obtain a proper file object. Parameters ---------- fname : string, file object, file descriptor If a string or file descriptor, then we create a file object. If *fname* is a file object, then we do nothing and ignore the specified *mode* parameter. mode : str The mode of the file to be opened. Returns ------- fobj : file object The file object. close : bool If *fname* was a string, then *close* will be *True* to signify that the file object should be closed after writing to it. Otherwise, *close* will be *False* signifying that the user, in essence, created the file object already and that subsequent operations should not close it. ] if call[name[is_string_like], parameter[name[fname]]] begin[:] variable[fobj] assign[=] call[name[open], parameter[name[fname], name[mode]]] variable[close] assign[=] constant[True] return[tuple[[<ast.Name object at 0x7da1b10d4f10>, <ast.Name object at 0x7da1b10d5240>]]]
keyword[def] identifier[get_fobj] ( identifier[fname] , identifier[mode] = literal[string] ): literal[string] keyword[if] identifier[is_string_like] ( identifier[fname] ): identifier[fobj] = identifier[open] ( identifier[fname] , identifier[mode] ) identifier[close] = keyword[True] keyword[elif] identifier[hasattr] ( identifier[fname] , literal[string] ): identifier[fobj] = identifier[fname] identifier[close] = keyword[False] keyword[else] : identifier[fobj] = identifier[os] . identifier[fdopen] ( identifier[fname] , identifier[mode] ) identifier[close] = keyword[False] keyword[return] identifier[fobj] , identifier[close]
def get_fobj(fname, mode='w+'): """Obtain a proper file object. Parameters ---------- fname : string, file object, file descriptor If a string or file descriptor, then we create a file object. If *fname* is a file object, then we do nothing and ignore the specified *mode* parameter. mode : str The mode of the file to be opened. Returns ------- fobj : file object The file object. close : bool If *fname* was a string, then *close* will be *True* to signify that the file object should be closed after writing to it. Otherwise, *close* will be *False* signifying that the user, in essence, created the file object already and that subsequent operations should not close it. """ if is_string_like(fname): fobj = open(fname, mode) close = True # depends on [control=['if'], data=[]] elif hasattr(fname, 'write'): # fname is a file-like object, perhaps a StringIO (for example) fobj = fname close = False # depends on [control=['if'], data=[]] else: # assume it is a file descriptor fobj = os.fdopen(fname, mode) close = False return (fobj, close)
def _check_series_convert_timestamps_internal(s, timezone):
    """
    Convert a tz-naive timestamp in the specified timezone or local timezone
    to UTC normalized for Spark internal storage

    :param s: a pandas.Series
    :param timezone: the timezone to convert. if None then use local timezone
    :return pandas.Series where if it is a timestamp, has been UTC normalized
        without a time zone
    """
    from pyspark.sql.utils import require_minimum_pandas_version
    require_minimum_pandas_version()

    from pandas.api.types import is_datetime64_dtype, is_datetime64tz_dtype
    # TODO: handle nested timestamps, such as ArrayType(TimestampType())?
    # The two dtype predicates below are mutually exclusive: tz-aware columns
    # use DatetimeTZDtype, not plain datetime64, so branch order is free.
    if is_datetime64tz_dtype(s.dtype):
        # Already tz-aware: just normalize to UTC.
        return s.dt.tz_convert('UTC')
    if is_datetime64_dtype(s.dtype):
        # Localizing a tz-naive timestamp is ambiguous when it falls in the
        # hour repeated by the dst-to-standard clock adjustment. E.g. for
        # America/New_York the clock moves back on 2015-11-01 from 2:00 to
        # 1:00, so a naive 2015-11-01 1:30 could mean either the dst time
        # (01:30-0400) or the standard time (01:30-0500).
        #
        # ``ambiguous=False`` explicitly picks standard time, which matches
        # the default behavior of pytz:
        # >>> str(pytz.timezone('America/New_York')
        # ...         .localize(datetime.datetime(2015, 11, 1, 1, 30)))
        # '2015-11-01 01:30:00-05:00'
        target_tz = timezone or _get_local_timezone()
        localized = s.dt.tz_localize(target_tz, ambiguous=False)
        return localized.dt.tz_convert('UTC')
    # Not a timestamp column at all: return it untouched.
    return s
def function[_check_series_convert_timestamps_internal, parameter[s, timezone]]: constant[ Convert a tz-naive timestamp in the specified timezone or local timezone to UTC normalized for Spark internal storage :param s: a pandas.Series :param timezone: the timezone to convert. if None then use local timezone :return pandas.Series where if it is a timestamp, has been UTC normalized without a time zone ] from relative_module[pyspark.sql.utils] import module[require_minimum_pandas_version] call[name[require_minimum_pandas_version], parameter[]] from relative_module[pandas.api.types] import module[is_datetime64_dtype], module[is_datetime64tz_dtype] if call[name[is_datetime64_dtype], parameter[name[s].dtype]] begin[:] variable[tz] assign[=] <ast.BoolOp object at 0x7da18dc9abc0> return[call[call[name[s].dt.tz_localize, parameter[name[tz]]].dt.tz_convert, parameter[constant[UTC]]]]
keyword[def] identifier[_check_series_convert_timestamps_internal] ( identifier[s] , identifier[timezone] ): literal[string] keyword[from] identifier[pyspark] . identifier[sql] . identifier[utils] keyword[import] identifier[require_minimum_pandas_version] identifier[require_minimum_pandas_version] () keyword[from] identifier[pandas] . identifier[api] . identifier[types] keyword[import] identifier[is_datetime64_dtype] , identifier[is_datetime64tz_dtype] keyword[if] identifier[is_datetime64_dtype] ( identifier[s] . identifier[dtype] ): identifier[tz] = identifier[timezone] keyword[or] identifier[_get_local_timezone] () keyword[return] identifier[s] . identifier[dt] . identifier[tz_localize] ( identifier[tz] , identifier[ambiguous] = keyword[False] ). identifier[dt] . identifier[tz_convert] ( literal[string] ) keyword[elif] identifier[is_datetime64tz_dtype] ( identifier[s] . identifier[dtype] ): keyword[return] identifier[s] . identifier[dt] . identifier[tz_convert] ( literal[string] ) keyword[else] : keyword[return] identifier[s]
def _check_series_convert_timestamps_internal(s, timezone): """ Convert a tz-naive timestamp in the specified timezone or local timezone to UTC normalized for Spark internal storage :param s: a pandas.Series :param timezone: the timezone to convert. if None then use local timezone :return pandas.Series where if it is a timestamp, has been UTC normalized without a time zone """ from pyspark.sql.utils import require_minimum_pandas_version require_minimum_pandas_version() from pandas.api.types import is_datetime64_dtype, is_datetime64tz_dtype # TODO: handle nested timestamps, such as ArrayType(TimestampType())? if is_datetime64_dtype(s.dtype): # When tz_localize a tz-naive timestamp, the result is ambiguous if the tz-naive # timestamp is during the hour when the clock is adjusted backward during due to # daylight saving time (dst). # E.g., for America/New_York, the clock is adjusted backward on 2015-11-01 2:00 to # 2015-11-01 1:00 from dst-time to standard time, and therefore, when tz_localize # a tz-naive timestamp 2015-11-01 1:30 with America/New_York timezone, it can be either # dst time (2015-01-01 1:30-0400) or standard time (2015-11-01 1:30-0500). # # Here we explicit choose to use standard time. This matches the default behavior of # pytz. 
# # Here are some code to help understand this behavior: # >>> import datetime # >>> import pandas as pd # >>> import pytz # >>> # >>> t = datetime.datetime(2015, 11, 1, 1, 30) # >>> ts = pd.Series([t]) # >>> tz = pytz.timezone('America/New_York') # >>> # >>> ts.dt.tz_localize(tz, ambiguous=True) # 0 2015-11-01 01:30:00-04:00 # dtype: datetime64[ns, America/New_York] # >>> # >>> ts.dt.tz_localize(tz, ambiguous=False) # 0 2015-11-01 01:30:00-05:00 # dtype: datetime64[ns, America/New_York] # >>> # >>> str(tz.localize(t)) # '2015-11-01 01:30:00-05:00' tz = timezone or _get_local_timezone() return s.dt.tz_localize(tz, ambiguous=False).dt.tz_convert('UTC') # depends on [control=['if'], data=[]] elif is_datetime64tz_dtype(s.dtype): return s.dt.tz_convert('UTC') # depends on [control=['if'], data=[]] else: return s
def _create_conda_cmd(conda_cmd, args=None, env=None, user=None):
    '''
    Utility to create a valid conda command

    conda_cmd
        The conda sub-command to run (e.g. ``install``, ``list``).

    args
        Optional extra command-line arguments; appended only when this
        is a non-empty list, matching the original behavior.

    env
        Optional conda environment name, passed via ``-n``.

    user
        User whose conda installation is used to locate the executable.
    '''
    cmd = [_get_conda_path(user=user), conda_cmd]
    if env:
        cmd.extend(['-n', env])
    # isinstance() already rejects None and a non-empty list is truthy, so
    # the original ``args is not None and isinstance(args, list) and
    # args != []`` collapses to this single, equivalent condition.
    if isinstance(args, list) and args:
        cmd.extend(args)
    return cmd
def function[_create_conda_cmd, parameter[conda_cmd, args, env, user]]: constant[ Utility to create a valid conda command ] variable[cmd] assign[=] list[[<ast.Call object at 0x7da1b0af8520>, <ast.Name object at 0x7da1b0af8130>]] if name[env] begin[:] call[name[cmd].extend, parameter[list[[<ast.Constant object at 0x7da1b0af8940>, <ast.Name object at 0x7da1b0af8370>]]]] if <ast.BoolOp object at 0x7da1b0af82b0> begin[:] call[name[cmd].extend, parameter[name[args]]] return[name[cmd]]
keyword[def] identifier[_create_conda_cmd] ( identifier[conda_cmd] , identifier[args] = keyword[None] , identifier[env] = keyword[None] , identifier[user] = keyword[None] ): literal[string] identifier[cmd] =[ identifier[_get_conda_path] ( identifier[user] = identifier[user] ), identifier[conda_cmd] ] keyword[if] identifier[env] : identifier[cmd] . identifier[extend] ([ literal[string] , identifier[env] ]) keyword[if] identifier[args] keyword[is] keyword[not] keyword[None] keyword[and] identifier[isinstance] ( identifier[args] , identifier[list] ) keyword[and] identifier[args] !=[]: identifier[cmd] . identifier[extend] ( identifier[args] ) keyword[return] identifier[cmd]
def _create_conda_cmd(conda_cmd, args=None, env=None, user=None): """ Utility to create a valid conda command """ cmd = [_get_conda_path(user=user), conda_cmd] if env: cmd.extend(['-n', env]) # depends on [control=['if'], data=[]] if args is not None and isinstance(args, list) and (args != []): cmd.extend(args) # depends on [control=['if'], data=[]] return cmd
def avgMultiplePlots(data, calc_mean=True, calc_std=False, calc_density=False, nsample=None):
    """
    Return the average (x, y) for a set of multiple x, y arrays which can
    have different lengths and resolutions.

    data = ((xvals, yvals), (), ...)
    Assumes the x values are sorted.

    Depending on the flags, the returned tuple starts with the common x
    axis and then contains, in order: the NaN-aware mean, the NaN-aware
    standard deviation, and the per-position count of non-NaN samples.
    """
    common_x, stacked_y = bringPlotOverSameX(data, nsample)
    results = [common_x]
    if calc_mean:
        results.append(np.nanmean(stacked_y, axis=0))
    if calc_std:
        results.append(np.nanstd(stacked_y, axis=0))
    if calc_density:
        # Number of curves contributing a real (non-NaN) value per position.
        missing = np.isnan(stacked_y).sum(axis=0)
        results.append(len(stacked_y) - missing)
    return tuple(results)
def function[avgMultiplePlots, parameter[data, calc_mean, calc_std, calc_density, nsample]]: constant[ return the average (x,y) for a set of multiple x,y arrays which can have a different length and resolution data = ((xvals,yvals),(),,,) assumes that x values are sorted ] <ast.Tuple object at 0x7da18c4ce1a0> assign[=] call[name[bringPlotOverSameX], parameter[name[data], name[nsample]]] variable[out] assign[=] list[[<ast.Name object at 0x7da18c4cc700>]] if name[calc_mean] begin[:] call[name[out].append, parameter[call[name[np].nanmean, parameter[name[yArr]]]]] if name[calc_std] begin[:] call[name[out].append, parameter[call[name[np].nanstd, parameter[name[yArr]]]]] if name[calc_density] begin[:] call[name[out].append, parameter[binary_operation[call[name[len], parameter[name[yArr]]] - call[call[name[np].isnan, parameter[name[yArr]]].sum, parameter[]]]]] return[call[name[tuple], parameter[name[out]]]]
keyword[def] identifier[avgMultiplePlots] ( identifier[data] , identifier[calc_mean] = keyword[True] , identifier[calc_std] = keyword[False] , identifier[calc_density] = keyword[False] , identifier[nsample] = keyword[None] ): literal[string] identifier[xArr] , identifier[yArr] = identifier[bringPlotOverSameX] ( identifier[data] , identifier[nsample] ) identifier[out] =[ identifier[xArr] ] keyword[if] identifier[calc_mean] : identifier[out] . identifier[append] ( identifier[np] . identifier[nanmean] ( identifier[yArr] , identifier[axis] = literal[int] )) keyword[if] identifier[calc_std] : identifier[out] . identifier[append] ( identifier[np] . identifier[nanstd] ( identifier[yArr] , identifier[axis] = literal[int] )) keyword[if] identifier[calc_density] : identifier[out] . identifier[append] ( identifier[len] ( identifier[yArr] )- identifier[np] . identifier[isnan] ( identifier[yArr] ). identifier[sum] ( identifier[axis] = literal[int] )) keyword[return] identifier[tuple] ( identifier[out] )
def avgMultiplePlots(data, calc_mean=True, calc_std=False, calc_density=False, nsample=None): """ return the average (x,y) for a set of multiple x,y arrays which can have a different length and resolution data = ((xvals,yvals),(),,,) assumes that x values are sorted """ (xArr, yArr) = bringPlotOverSameX(data, nsample) out = [xArr] if calc_mean: out.append(np.nanmean(yArr, axis=0)) # depends on [control=['if'], data=[]] if calc_std: out.append(np.nanstd(yArr, axis=0)) # depends on [control=['if'], data=[]] if calc_density: out.append(len(yArr) - np.isnan(yArr).sum(axis=0)) # depends on [control=['if'], data=[]] return tuple(out)
def fcoe_get_login_input_fcoe_login_vlan(self, **kwargs):
    """Auto Generated Code

    Build an ``fcoe_get_login`` RPC element carrying the required
    ``fcoe_login_vlan`` value and dispatch it through the callback.

    Keyword arguments:
        fcoe_login_vlan: required; text content for the
            ``fcoe-login-vlan`` node.
        callback: optional callable that receives the built element
            (defaults to ``self._callback``).
    """
    # The auto-generated original allocated an unused ET.Element("config")
    # and immediately rebound the name to the element below; that dead
    # allocation is removed here.
    config = ET.Element("fcoe_get_login")
    input = ET.SubElement(config, "input")
    fcoe_login_vlan = ET.SubElement(input, "fcoe-login-vlan")
    fcoe_login_vlan.text = kwargs.pop('fcoe_login_vlan')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def function[fcoe_get_login_input_fcoe_login_vlan, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[fcoe_get_login] assign[=] call[name[ET].Element, parameter[constant[fcoe_get_login]]] variable[config] assign[=] name[fcoe_get_login] variable[input] assign[=] call[name[ET].SubElement, parameter[name[fcoe_get_login], constant[input]]] variable[fcoe_login_vlan] assign[=] call[name[ET].SubElement, parameter[name[input], constant[fcoe-login-vlan]]] name[fcoe_login_vlan].text assign[=] call[name[kwargs].pop, parameter[constant[fcoe_login_vlan]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[fcoe_get_login_input_fcoe_login_vlan] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[fcoe_get_login] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[fcoe_get_login] identifier[input] = identifier[ET] . identifier[SubElement] ( identifier[fcoe_get_login] , literal[string] ) identifier[fcoe_login_vlan] = identifier[ET] . identifier[SubElement] ( identifier[input] , literal[string] ) identifier[fcoe_login_vlan] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def fcoe_get_login_input_fcoe_login_vlan(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') fcoe_get_login = ET.Element('fcoe_get_login') config = fcoe_get_login input = ET.SubElement(fcoe_get_login, 'input') fcoe_login_vlan = ET.SubElement(input, 'fcoe-login-vlan') fcoe_login_vlan.text = kwargs.pop('fcoe_login_vlan') callback = kwargs.pop('callback', self._callback) return callback(config)