code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def add(self, rule_name, session, **kwargs):
    """taobao.crm.rule.add — add a customer-grouping rule.

    Creates a rule used to filter shop members by criteria such as relation
    source, member grade, trade count/amount, last trade time, average unit
    price, item count, province and closed-trade count.  At least one filter
    criterion must be supplied; a rule may be attached to multiple groups and
    a user may hold at most 5 rules (per the API description).

    :param rule_name: name of the new rule
    :param session: TOP API session key
    :param kwargs: optional filter criteria; only the keys listed in
        ``allowed`` below are meaningful to the API
    :return: tuple of (self.is_success, self.rule_id) populated by
        ``self.create`` from the API response
    """
    request = TOPRequest('taobao.crm.rule.add')
    request['rule_name'] = rule_name
    # Filter-criterion keys the taobao.crm.rule.add API accepts.
    allowed = (
        'relation_source', 'grade', 'min_trade_amount', 'max_trade_amount',
        'min_trade_count', 'max_trade_count', 'min_last_trade_time',
        'max_last_trade_time', 'min_item_num', 'min_avg_price',
        'min_close_trade_num', 'province', 'group_ids', 'max_avg_price',
        'max_item_num', 'max_close_trade_num',
    )
    # .items() behaves identically to the original .iteritems() here and
    # also works on Python 3.
    for k, v in kwargs.items():
        # NOTE(review): the original skips a kwarg only when it is BOTH
        # unknown AND None ("and"); this looks like it was meant to be
        # "or" (skip unknown keys, skip None values), but the behavior is
        # preserved unchanged here — confirm against the TOP SDK before
        # altering.
        if k not in allowed and v is None:
            continue
        request[k] = v
    self.create(self.execute(request, session))
    return self.is_success, self.rule_id
def function[add, parameter[self, rule_name, session]]: constant[taobao.crm.rule.add 分组规则添加 添加分组规则,规则可用于筛选一定条件的会员。过滤条件可以选择客户来源、会员级别 、交易笔数、交易额、上次交易时间、平均客单价、宝贝件数、省份、关闭交易数等,新建规则时必须至少选择一个以上筛选条件。如果输入的规则的筛选条件不正确则不会进行处理,可以将某些分组挂在这个规则下,对被挂在该规则下的分组,系统对现有满足规则的客户都划分到这个分组(异步任务),若某些会员分组数或规则数超最大限额,则该会员不被操作,同时不影响其余会员操作,接口调用依然返回成功。每个规则可以应用到多个分组,一个用户的规则上限为5个。] variable[request] assign[=] call[name[TOPRequest], parameter[constant[taobao.crm.rule.add]]] call[name[request]][constant[rule_name]] assign[=] name[rule_name] for taget[tuple[[<ast.Name object at 0x7da1b2614e20>, <ast.Name object at 0x7da1b26175e0>]]] in starred[call[name[kwargs].iteritems, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b2614940> begin[:] continue call[name[request]][name[k]] assign[=] name[v] call[name[self].create, parameter[call[name[self].execute, parameter[name[request], name[session]]]]] return[tuple[[<ast.Attribute object at 0x7da1b2593d00>, <ast.Attribute object at 0x7da1b2592b90>]]]
keyword[def] identifier[add] ( identifier[self] , identifier[rule_name] , identifier[session] ,** identifier[kwargs] ): literal[string] identifier[request] = identifier[TOPRequest] ( literal[string] ) identifier[request] [ literal[string] ]= identifier[rule_name] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[iteritems] (): keyword[if] identifier[k] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ) keyword[and] identifier[v] == keyword[None] : keyword[continue] identifier[request] [ identifier[k] ]= identifier[v] identifier[self] . identifier[create] ( identifier[self] . identifier[execute] ( identifier[request] , identifier[session] )) keyword[return] identifier[self] . identifier[is_success] , identifier[self] . identifier[rule_id]
def add(self, rule_name, session, **kwargs): """taobao.crm.rule.add 分组规则添加 添加分组规则,规则可用于筛选一定条件的会员。过滤条件可以选择客户来源、会员级别 、交易笔数、交易额、上次交易时间、平均客单价、宝贝件数、省份、关闭交易数等,新建规则时必须至少选择一个以上筛选条件。如果输入的规则的筛选条件不正确则不会进行处理,可以将某些分组挂在这个规则下,对被挂在该规则下的分组,系统对现有满足规则的客户都划分到这个分组(异步任务),若某些会员分组数或规则数超最大限额,则该会员不被操作,同时不影响其余会员操作,接口调用依然返回成功。每个规则可以应用到多个分组,一个用户的规则上限为5个。""" request = TOPRequest('taobao.crm.rule.add') request['rule_name'] = rule_name for (k, v) in kwargs.iteritems(): if k not in ('relation_source', 'grade', 'min_trade_amount', 'max_trade_amount', 'min_trade_count', 'max_trade_count', 'min_last_trade_time', 'max_last_trade_time', 'min_item_num', 'min_avg_price', 'min_close_trade_num', 'province', 'group_ids', 'max_avg_price', 'max_item_num', 'max_close_trade_num') and v == None: continue # depends on [control=['if'], data=[]] request[k] = v # depends on [control=['for'], data=[]] self.create(self.execute(request, session)) return (self.is_success, self.rule_id)
def _action_enabled(self, event, action): """Check if an action for a notification is enabled.""" event_actions = self._aconfig.get(event) if event_actions is None: return True if event_actions is False: return False return action in event_actions
def function[_action_enabled, parameter[self, event, action]]: constant[Check if an action for a notification is enabled.] variable[event_actions] assign[=] call[name[self]._aconfig.get, parameter[name[event]]] if compare[name[event_actions] is constant[None]] begin[:] return[constant[True]] if compare[name[event_actions] is constant[False]] begin[:] return[constant[False]] return[compare[name[action] in name[event_actions]]]
keyword[def] identifier[_action_enabled] ( identifier[self] , identifier[event] , identifier[action] ): literal[string] identifier[event_actions] = identifier[self] . identifier[_aconfig] . identifier[get] ( identifier[event] ) keyword[if] identifier[event_actions] keyword[is] keyword[None] : keyword[return] keyword[True] keyword[if] identifier[event_actions] keyword[is] keyword[False] : keyword[return] keyword[False] keyword[return] identifier[action] keyword[in] identifier[event_actions]
def _action_enabled(self, event, action): """Check if an action for a notification is enabled.""" event_actions = self._aconfig.get(event) if event_actions is None: return True # depends on [control=['if'], data=[]] if event_actions is False: return False # depends on [control=['if'], data=[]] return action in event_actions
def detach(self):
    """
    Detach the underlying LLVM resource without disposing of it.

    After a successful call the object no longer exposes
    ``_as_parameter_`` and its pointer is cleared; repeated calls are
    no-ops.
    """
    if self._closed:
        return
    del self._as_parameter_
    self._closed = True
    self._ptr = None
def function[detach, parameter[self]]: constant[ Detach the underlying LLVM resource without disposing of it. ] if <ast.UnaryOp object at 0x7da1b1950820> begin[:] <ast.Delete object at 0x7da1b1950f10> name[self]._closed assign[=] constant[True] name[self]._ptr assign[=] constant[None]
keyword[def] identifier[detach] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_closed] : keyword[del] identifier[self] . identifier[_as_parameter_] identifier[self] . identifier[_closed] = keyword[True] identifier[self] . identifier[_ptr] = keyword[None]
def detach(self): """ Detach the underlying LLVM resource without disposing of it. """ if not self._closed: del self._as_parameter_ self._closed = True self._ptr = None # depends on [control=['if'], data=[]]
def get_page_properties(self, page_id):
    """
    Get the page (content) properties
    :param page_id: content_id format
    :return: get properties
    """
    endpoint = 'rest/api/content/{page_id}/property'.format(page_id=page_id)
    return self.get(path=endpoint)
def function[get_page_properties, parameter[self, page_id]]: constant[ Get the page (content) properties :param page_id: content_id format :return: get properties ] variable[url] assign[=] call[constant[rest/api/content/{page_id}/property].format, parameter[]] return[call[name[self].get, parameter[]]]
keyword[def] identifier[get_page_properties] ( identifier[self] , identifier[page_id] ): literal[string] identifier[url] = literal[string] . identifier[format] ( identifier[page_id] = identifier[page_id] ) keyword[return] identifier[self] . identifier[get] ( identifier[path] = identifier[url] )
def get_page_properties(self, page_id): """ Get the page (content) properties :param page_id: content_id format :return: get properties """ url = 'rest/api/content/{page_id}/property'.format(page_id=page_id) return self.get(path=url)
def teardown(self): """ Stop and remove the container if it exists. """ while self._http_clients: self._http_clients.pop().close() if self.created: self.halt()
def function[teardown, parameter[self]]: constant[ Stop and remove the container if it exists. ] while name[self]._http_clients begin[:] call[call[name[self]._http_clients.pop, parameter[]].close, parameter[]] if name[self].created begin[:] call[name[self].halt, parameter[]]
keyword[def] identifier[teardown] ( identifier[self] ): literal[string] keyword[while] identifier[self] . identifier[_http_clients] : identifier[self] . identifier[_http_clients] . identifier[pop] (). identifier[close] () keyword[if] identifier[self] . identifier[created] : identifier[self] . identifier[halt] ()
def teardown(self): """ Stop and remove the container if it exists. """ while self._http_clients: self._http_clients.pop().close() # depends on [control=['while'], data=[]] if self.created: self.halt() # depends on [control=['if'], data=[]]
def show_status(self):
    """Show status of unregistered migrations.

    Bails out when the migrations directory check fails; otherwise logs
    either the filename of every unregistered migration or the
    no-migrations message.
    """
    if not self.check_directory():
        return
    unregistered = self.get_unregistered_migrations()
    if not unregistered:
        logger.info(self.NO_MIGRATIONS_MSG)
        return
    logger.info('Unregistered migrations:')
    for entry in unregistered:
        logger.info(entry.filename)
def function[show_status, parameter[self]]: constant[Show status of unregistered migrations] if <ast.UnaryOp object at 0x7da18dc9b0a0> begin[:] return[None] variable[migrations] assign[=] call[name[self].get_unregistered_migrations, parameter[]] if name[migrations] begin[:] call[name[logger].info, parameter[constant[Unregistered migrations:]]] for taget[name[migration]] in starred[name[migrations]] begin[:] call[name[logger].info, parameter[name[migration].filename]]
keyword[def] identifier[show_status] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[check_directory] (): keyword[return] identifier[migrations] = identifier[self] . identifier[get_unregistered_migrations] () keyword[if] identifier[migrations] : identifier[logger] . identifier[info] ( literal[string] ) keyword[for] identifier[migration] keyword[in] identifier[migrations] : identifier[logger] . identifier[info] ( identifier[migration] . identifier[filename] ) keyword[else] : identifier[logger] . identifier[info] ( identifier[self] . identifier[NO_MIGRATIONS_MSG] )
def show_status(self): """Show status of unregistered migrations""" if not self.check_directory(): return # depends on [control=['if'], data=[]] migrations = self.get_unregistered_migrations() if migrations: logger.info('Unregistered migrations:') for migration in migrations: logger.info(migration.filename) # depends on [control=['for'], data=['migration']] # depends on [control=['if'], data=[]] else: logger.info(self.NO_MIGRATIONS_MSG)
def stem(self, word, early_english=False):
    """Return the Porter2 (Snowball English) stem.

    Parameters
    ----------
    word : str
        The word to stem
    early_english : bool
        Set to True in order to remove -eth & -est (2nd & 3rd person
        singular verbal agreement suffixes)

    Returns
    -------
    str
        Word stem

    Examples
    --------
    >>> stmr = Porter2()
    >>> stmr.stem('reading')
    'read'
    >>> stmr.stem('suspension')
    'suspens'
    >>> stmr.stem('elusiveness')
    'elus'
    >>> stmr.stem('eateth', early_english=True)
    'eat'

    """
    # lowercase, normalize, and compose
    word = normalize('NFC', text_type(word.lower()))
    # replace apostrophe-like characters with U+0027, per
    # http://snowball.tartarus.org/texts/apostrophe.html
    word = word.replace('’', '\'')
    word = word.replace('’', '\'')

    # Exceptions 1: irregular forms mapped directly, invariant forms
    # returned untouched.
    if word in self._exception1dict:
        return self._exception1dict[word]
    elif word in self._exception1set:
        return word

    # Return word if stem is shorter than 3
    if len(word) < 3:
        return word

    # Remove initial ', if present.
    while word and word[0] == '\'':
        word = word[1:]
    # Return word if stem is shorter than 2
    if len(word) < 2:
        return word

    # Re-map vocalic Y to y (Y will be C, y will be V)
    if word[0] == 'y':
        word = 'Y' + word[1:]
    for i in range(1, len(word)):
        if word[i] == 'y' and word[i - 1] in self._vowels:
            word = word[:i] + 'Y' + word[i + 1 :]

    r1_start = self._sb_r1(word, self._r1_prefixes)
    r2_start = self._sb_r2(word, self._r1_prefixes)

    # Step 0: strip possessive endings ('s', 's', ').
    if word[-3:] == '\'s\'':
        word = word[:-3]
    elif word[-2:] == '\'s':
        word = word[:-2]
    elif word[-1:] == '\'':
        word = word[:-1]
    # Return word if stem is shorter than 2
    if len(word) < 3:
        return word

    # Step 1a: plural suffixes.
    if word[-4:] == 'sses':
        word = word[:-2]
    elif word[-3:] in {'ied', 'ies'}:
        if len(word) > 4:
            word = word[:-2]
        else:
            word = word[:-1]
    elif word[-2:] in {'us', 'ss'}:
        pass
    elif word[-1] == 's':
        if self._sb_has_vowel(word[:-2]):
            word = word[:-1]

    # Exceptions 2
    if word in self._exception2set:
        return word

    # Step 1b: verbal suffixes (-eed(ly), -ed(ly), -ing(ly); plus
    # archaic -est/-eth when early_english is set).
    step1b_flag = False
    if word[-5:] == 'eedly':
        if len(word[r1_start:]) >= 5:
            word = word[:-3]
    elif word[-5:] == 'ingly':
        if self._sb_has_vowel(word[:-5]):
            word = word[:-5]
            step1b_flag = True
    elif word[-4:] == 'edly':
        if self._sb_has_vowel(word[:-4]):
            word = word[:-4]
            step1b_flag = True
    elif word[-3:] == 'eed':
        if len(word[r1_start:]) >= 3:
            word = word[:-1]
    elif word[-3:] == 'ing':
        if self._sb_has_vowel(word[:-3]):
            word = word[:-3]
            step1b_flag = True
    elif word[-2:] == 'ed':
        if self._sb_has_vowel(word[:-2]):
            word = word[:-2]
            step1b_flag = True
    elif early_english:
        if word[-3:] == 'est':
            if self._sb_has_vowel(word[:-3]):
                word = word[:-3]
                step1b_flag = True
        elif word[-3:] == 'eth':
            if self._sb_has_vowel(word[:-3]):
                word = word[:-3]
                step1b_flag = True

    # After removing -ed/-ing, possibly restore an 'e' or undouble a
    # final consonant.
    if step1b_flag:
        if word[-2:] in {'at', 'bl', 'iz'}:
            word += 'e'
        elif word[-2:] in self._doubles:
            word = word[:-1]
        elif self._sb_short_word(word, self._r1_prefixes):
            word += 'e'

    # Step 1c: final y/Y after a consonant becomes i.
    if (
        len(word) > 2
        and word[-1] in {'Y', 'y'}
        and word[-2] not in self._vowels
    ):
        word = word[:-1] + 'i'

    # Step 2: map derivational suffixes, dispatched on the penultimate
    # character for speed.
    if word[-2] == 'a':
        if word[-7:] == 'ational':
            if len(word[r1_start:]) >= 7:
                word = word[:-5] + 'e'
        elif word[-6:] == 'tional':
            if len(word[r1_start:]) >= 6:
                word = word[:-2]
    elif word[-2] == 'c':
        if word[-4:] in {'enci', 'anci'}:
            if len(word[r1_start:]) >= 4:
                word = word[:-1] + 'e'
    elif word[-2] == 'e':
        if word[-4:] == 'izer':
            if len(word[r1_start:]) >= 4:
                word = word[:-1]
    elif word[-2] == 'g':
        if word[-3:] == 'ogi':
            if (
                r1_start >= 1
                and len(word[r1_start:]) >= 3
                and word[-4] == 'l'
            ):
                word = word[:-1]
    elif word[-2] == 'l':
        if word[-6:] == 'lessli':
            if len(word[r1_start:]) >= 6:
                word = word[:-2]
        elif word[-5:] in {'entli', 'fulli', 'ousli'}:
            if len(word[r1_start:]) >= 5:
                word = word[:-2]
        elif word[-4:] == 'abli':
            if len(word[r1_start:]) >= 4:
                word = word[:-1] + 'e'
        elif word[-4:] == 'alli':
            if len(word[r1_start:]) >= 4:
                word = word[:-2]
        elif word[-3:] == 'bli':
            if len(word[r1_start:]) >= 3:
                word = word[:-1] + 'e'
        elif word[-2:] == 'li':
            if (
                r1_start >= 1
                and len(word[r1_start:]) >= 2
                and word[-3] in self._li
            ):
                word = word[:-2]
    elif word[-2] == 'o':
        if word[-7:] == 'ization':
            if len(word[r1_start:]) >= 7:
                word = word[:-5] + 'e'
        elif word[-5:] == 'ation':
            if len(word[r1_start:]) >= 5:
                word = word[:-3] + 'e'
        elif word[-4:] == 'ator':
            if len(word[r1_start:]) >= 4:
                word = word[:-2] + 'e'
    elif word[-2] == 's':
        if word[-7:] in {'fulness', 'ousness', 'iveness'}:
            if len(word[r1_start:]) >= 7:
                word = word[:-4]
        elif word[-5:] == 'alism':
            if len(word[r1_start:]) >= 5:
                word = word[:-3]
    elif word[-2] == 't':
        if word[-6:] == 'biliti':
            if len(word[r1_start:]) >= 6:
                word = word[:-5] + 'le'
        elif word[-5:] == 'aliti':
            if len(word[r1_start:]) >= 5:
                word = word[:-3]
        elif word[-5:] == 'iviti':
            if len(word[r1_start:]) >= 5:
                word = word[:-3] + 'e'

    # Step 3: further suffix reductions within R1 (or R2 for -ative).
    if word[-7:] == 'ational':
        if len(word[r1_start:]) >= 7:
            word = word[:-5] + 'e'
    elif word[-6:] == 'tional':
        if len(word[r1_start:]) >= 6:
            word = word[:-2]
    elif word[-5:] in {'alize', 'icate', 'iciti'}:
        if len(word[r1_start:]) >= 5:
            word = word[:-3]
    elif word[-5:] == 'ative':
        if len(word[r2_start:]) >= 5:
            word = word[:-5]
    elif word[-4:] == 'ical':
        if len(word[r1_start:]) >= 4:
            word = word[:-2]
    elif word[-4:] == 'ness':
        if len(word[r1_start:]) >= 4:
            word = word[:-4]
    elif word[-3:] == 'ful':
        if len(word[r1_start:]) >= 3:
            word = word[:-3]

    # Step 4: delete residual suffixes found in R2.  The for/else runs
    # the -ion check only when no listed suffix matched at all.
    for suffix in (
        'ement',
        'ance',
        'ence',
        'able',
        'ible',
        'ment',
        'ant',
        'ent',
        'ism',
        'ate',
        'iti',
        'ous',
        'ive',
        'ize',
        'al',
        'er',
        'ic',
    ):
        if word[-len(suffix) :] == suffix:
            if len(word[r2_start:]) >= len(suffix):
                word = word[: -len(suffix)]
            # break even when the R2 condition fails: only the longest
            # matching suffix is considered
            break
    else:
        if word[-3:] == 'ion':
            if (
                len(word[r2_start:]) >= 3
                and len(word) >= 4
                and word[-4] in tuple('st')
            ):
                word = word[:-3]

    # Step 5: drop final -e or the second of a double -ll.
    if word[-1] == 'e':
        if len(word[r2_start:]) >= 1 or (
            len(word[r1_start:]) >= 1
            and not self._sb_ends_in_short_syllable(word[:-1])
        ):
            word = word[:-1]
    elif word[-1] == 'l':
        if len(word[r2_start:]) >= 1 and word[-2] == 'l':
            word = word[:-1]

    # Change 'Y' back to 'y' if it survived stemming
    for i in range(0, len(word)):
        if word[i] == 'Y':
            word = word[:i] + 'y' + word[i + 1 :]

    return word
def function[stem, parameter[self, word, early_english]]: constant[Return the Porter2 (Snowball English) stem. Parameters ---------- word : str The word to stem early_english : bool Set to True in order to remove -eth & -est (2nd & 3rd person singular verbal agreement suffixes) Returns ------- str Word stem Examples -------- >>> stmr = Porter2() >>> stmr.stem('reading') 'read' >>> stmr.stem('suspension') 'suspens' >>> stmr.stem('elusiveness') 'elus' >>> stmr.stem('eateth', early_english=True) 'eat' ] variable[word] assign[=] call[name[normalize], parameter[constant[NFC], call[name[text_type], parameter[call[name[word].lower, parameter[]]]]]] variable[word] assign[=] call[name[word].replace, parameter[constant[’], constant[']]] variable[word] assign[=] call[name[word].replace, parameter[constant[’], constant[']]] if compare[name[word] in name[self]._exception1dict] begin[:] return[call[name[self]._exception1dict][name[word]]] if compare[call[name[len], parameter[name[word]]] less[<] constant[3]] begin[:] return[name[word]] while <ast.BoolOp object at 0x7da1b0023400> begin[:] variable[word] assign[=] call[name[word]][<ast.Slice object at 0x7da1b00231f0>] if compare[call[name[len], parameter[name[word]]] less[<] constant[2]] begin[:] return[name[word]] if compare[call[name[word]][constant[0]] equal[==] constant[y]] begin[:] variable[word] assign[=] binary_operation[constant[Y] + call[name[word]][<ast.Slice object at 0x7da1b0022da0>]] for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[word]]]]]] begin[:] if <ast.BoolOp object at 0x7da1b0022b90> begin[:] variable[word] assign[=] binary_operation[binary_operation[call[name[word]][<ast.Slice object at 0x7da20e9b3b50>] + constant[Y]] + call[name[word]][<ast.Slice object at 0x7da1b01c6380>]] variable[r1_start] assign[=] call[name[self]._sb_r1, parameter[name[word], name[self]._r1_prefixes]] variable[r2_start] assign[=] call[name[self]._sb_r2, parameter[name[word], 
name[self]._r1_prefixes]] if compare[call[name[word]][<ast.Slice object at 0x7da1b01c5a80>] equal[==] constant['s']] begin[:] variable[word] assign[=] call[name[word]][<ast.Slice object at 0x7da1b01c42e0>] if compare[call[name[len], parameter[name[word]]] less[<] constant[3]] begin[:] return[name[word]] if compare[call[name[word]][<ast.Slice object at 0x7da1b00223e0>] equal[==] constant[sses]] begin[:] variable[word] assign[=] call[name[word]][<ast.Slice object at 0x7da1b0022260>] if compare[name[word] in name[self]._exception2set] begin[:] return[name[word]] variable[step1b_flag] assign[=] constant[False] if compare[call[name[word]][<ast.Slice object at 0x7da1b0142980>] equal[==] constant[eedly]] begin[:] if compare[call[name[len], parameter[call[name[word]][<ast.Slice object at 0x7da1b0142b60>]]] greater_or_equal[>=] constant[5]] begin[:] variable[word] assign[=] call[name[word]][<ast.Slice object at 0x7da1b0142cb0>] if name[step1b_flag] begin[:] if compare[call[name[word]][<ast.Slice object at 0x7da1b011bee0>] in <ast.Set object at 0x7da1b011ba90>] begin[:] <ast.AugAssign object at 0x7da1b011bbb0> if <ast.BoolOp object at 0x7da1b011b3a0> begin[:] variable[word] assign[=] binary_operation[call[name[word]][<ast.Slice object at 0x7da1b011b4f0>] + constant[i]] if compare[call[name[word]][<ast.UnaryOp object at 0x7da1b011b6a0>] equal[==] constant[a]] begin[:] if compare[call[name[word]][<ast.Slice object at 0x7da1b011a3b0>] equal[==] constant[ational]] begin[:] if compare[call[name[len], parameter[call[name[word]][<ast.Slice object at 0x7da1b011a320>]]] greater_or_equal[>=] constant[7]] begin[:] variable[word] assign[=] binary_operation[call[name[word]][<ast.Slice object at 0x7da1b011a260>] + constant[e]] if compare[call[name[word]][<ast.Slice object at 0x7da1b0196290>] equal[==] constant[ational]] begin[:] if compare[call[name[len], parameter[call[name[word]][<ast.Slice object at 0x7da1b0197520>]]] greater_or_equal[>=] constant[7]] begin[:] variable[word] assign[=] 
binary_operation[call[name[word]][<ast.Slice object at 0x7da1b0197700>] + constant[e]] for taget[name[suffix]] in starred[tuple[[<ast.Constant object at 0x7da1b0195d80>, <ast.Constant object at 0x7da1b0195d50>, <ast.Constant object at 0x7da1b0195db0>, <ast.Constant object at 0x7da1b0195de0>, <ast.Constant object at 0x7da1b0195d20>, <ast.Constant object at 0x7da1b0195ae0>, <ast.Constant object at 0x7da1b0195ab0>, <ast.Constant object at 0x7da1b01969b0>, <ast.Constant object at 0x7da1b0196a10>, <ast.Constant object at 0x7da1b0195a80>, <ast.Constant object at 0x7da204565c30>, <ast.Constant object at 0x7da204564100>, <ast.Constant object at 0x7da204566f20>, <ast.Constant object at 0x7da204566080>, <ast.Constant object at 0x7da2045669e0>, <ast.Constant object at 0x7da204567a90>, <ast.Constant object at 0x7da204564910>]]] begin[:] if compare[call[name[word]][<ast.Slice object at 0x7da2045650c0>] equal[==] name[suffix]] begin[:] if compare[call[name[len], parameter[call[name[word]][<ast.Slice object at 0x7da204565cf0>]]] greater_or_equal[>=] call[name[len], parameter[name[suffix]]]] begin[:] variable[word] assign[=] call[name[word]][<ast.Slice object at 0x7da204567dc0>] break if compare[call[name[word]][<ast.UnaryOp object at 0x7da204566920>] equal[==] constant[e]] begin[:] if <ast.BoolOp object at 0x7da204565060> begin[:] variable[word] assign[=] call[name[word]][<ast.Slice object at 0x7da1b01c6080>] for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[word]]]]]] begin[:] if compare[call[name[word]][name[i]] equal[==] constant[Y]] begin[:] variable[word] assign[=] binary_operation[binary_operation[call[name[word]][<ast.Slice object at 0x7da1b01c7100>] + constant[y]] + call[name[word]][<ast.Slice object at 0x7da1b01c7190>]] return[name[word]]
keyword[def] identifier[stem] ( identifier[self] , identifier[word] , identifier[early_english] = keyword[False] ): literal[string] identifier[word] = identifier[normalize] ( literal[string] , identifier[text_type] ( identifier[word] . identifier[lower] ())) identifier[word] = identifier[word] . identifier[replace] ( literal[string] , literal[string] ) identifier[word] = identifier[word] . identifier[replace] ( literal[string] , literal[string] ) keyword[if] identifier[word] keyword[in] identifier[self] . identifier[_exception1dict] : keyword[return] identifier[self] . identifier[_exception1dict] [ identifier[word] ] keyword[elif] identifier[word] keyword[in] identifier[self] . identifier[_exception1set] : keyword[return] identifier[word] keyword[if] identifier[len] ( identifier[word] )< literal[int] : keyword[return] identifier[word] keyword[while] identifier[word] keyword[and] identifier[word] [ literal[int] ]== literal[string] : identifier[word] = identifier[word] [ literal[int] :] keyword[if] identifier[len] ( identifier[word] )< literal[int] : keyword[return] identifier[word] keyword[if] identifier[word] [ literal[int] ]== literal[string] : identifier[word] = literal[string] + identifier[word] [ literal[int] :] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[word] )): keyword[if] identifier[word] [ identifier[i] ]== literal[string] keyword[and] identifier[word] [ identifier[i] - literal[int] ] keyword[in] identifier[self] . identifier[_vowels] : identifier[word] = identifier[word] [: identifier[i] ]+ literal[string] + identifier[word] [ identifier[i] + literal[int] :] identifier[r1_start] = identifier[self] . identifier[_sb_r1] ( identifier[word] , identifier[self] . identifier[_r1_prefixes] ) identifier[r2_start] = identifier[self] . identifier[_sb_r2] ( identifier[word] , identifier[self] . 
identifier[_r1_prefixes] ) keyword[if] identifier[word] [- literal[int] :]== literal[string] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : identifier[word] = identifier[word] [:- literal[int] ] keyword[if] identifier[len] ( identifier[word] )< literal[int] : keyword[return] identifier[word] keyword[if] identifier[word] [- literal[int] :]== literal[string] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :] keyword[in] { literal[string] , literal[string] }: keyword[if] identifier[len] ( identifier[word] )> literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[else] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :] keyword[in] { literal[string] , literal[string] }: keyword[pass] keyword[elif] identifier[word] [- literal[int] ]== literal[string] : keyword[if] identifier[self] . identifier[_sb_has_vowel] ( identifier[word] [:- literal[int] ]): identifier[word] = identifier[word] [:- literal[int] ] keyword[if] identifier[word] keyword[in] identifier[self] . identifier[_exception2set] : keyword[return] identifier[word] identifier[step1b_flag] = keyword[False] keyword[if] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[self] . identifier[_sb_has_vowel] ( identifier[word] [:- literal[int] ]): identifier[word] = identifier[word] [:- literal[int] ] identifier[step1b_flag] = keyword[True] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[self] . 
identifier[_sb_has_vowel] ( identifier[word] [:- literal[int] ]): identifier[word] = identifier[word] [:- literal[int] ] identifier[step1b_flag] = keyword[True] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[self] . identifier[_sb_has_vowel] ( identifier[word] [:- literal[int] ]): identifier[word] = identifier[word] [:- literal[int] ] identifier[step1b_flag] = keyword[True] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[self] . identifier[_sb_has_vowel] ( identifier[word] [:- literal[int] ]): identifier[word] = identifier[word] [:- literal[int] ] identifier[step1b_flag] = keyword[True] keyword[elif] identifier[early_english] : keyword[if] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[self] . identifier[_sb_has_vowel] ( identifier[word] [:- literal[int] ]): identifier[word] = identifier[word] [:- literal[int] ] identifier[step1b_flag] = keyword[True] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[self] . identifier[_sb_has_vowel] ( identifier[word] [:- literal[int] ]): identifier[word] = identifier[word] [:- literal[int] ] identifier[step1b_flag] = keyword[True] keyword[if] identifier[step1b_flag] : keyword[if] identifier[word] [- literal[int] :] keyword[in] { literal[string] , literal[string] , literal[string] }: identifier[word] += literal[string] keyword[elif] identifier[word] [- literal[int] :] keyword[in] identifier[self] . identifier[_doubles] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[self] . identifier[_sb_short_word] ( identifier[word] , identifier[self] . 
identifier[_r1_prefixes] ): identifier[word] += literal[string] keyword[if] ( identifier[len] ( identifier[word] )> literal[int] keyword[and] identifier[word] [- literal[int] ] keyword[in] { literal[string] , literal[string] } keyword[and] identifier[word] [- literal[int] ] keyword[not] keyword[in] identifier[self] . identifier[_vowels] ): identifier[word] = identifier[word] [:- literal[int] ]+ literal[string] keyword[if] identifier[word] [- literal[int] ]== literal[string] : keyword[if] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ]+ literal[string] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] ]== literal[string] : keyword[if] identifier[word] [- literal[int] :] keyword[in] { literal[string] , literal[string] }: keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ]+ literal[string] keyword[elif] identifier[word] [- literal[int] ]== literal[string] : keyword[if] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] ]== literal[string] : keyword[if] identifier[word] [- literal[int] :]== literal[string] : keyword[if] ( identifier[r1_start] >= literal[int] keyword[and] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] keyword[and] identifier[word] [- literal[int] ]== literal[string] ): identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] ]== 
literal[string] : keyword[if] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :] keyword[in] { literal[string] , literal[string] , literal[string] }: keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ]+ literal[string] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ]+ literal[string] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] ( identifier[r1_start] >= literal[int] keyword[and] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] keyword[and] identifier[word] [- literal[int] ] keyword[in] identifier[self] . 
identifier[_li] ): identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] ]== literal[string] : keyword[if] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ]+ literal[string] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ]+ literal[string] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ]+ literal[string] keyword[elif] identifier[word] [- literal[int] ]== literal[string] : keyword[if] identifier[word] [- literal[int] :] keyword[in] { literal[string] , literal[string] , literal[string] }: keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] ]== literal[string] : keyword[if] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ]+ literal[string] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] 
identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ]+ literal[string] keyword[if] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ]+ literal[string] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :] keyword[in] { literal[string] , literal[string] , literal[string] }: keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r2_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] :]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] : identifier[word] = identifier[word] [:- literal[int] ] keyword[for] identifier[suffix] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , 
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , ): keyword[if] identifier[word] [- identifier[len] ( identifier[suffix] ):]== identifier[suffix] : keyword[if] identifier[len] ( identifier[word] [ identifier[r2_start] :])>= identifier[len] ( identifier[suffix] ): identifier[word] = identifier[word] [:- identifier[len] ( identifier[suffix] )] keyword[break] keyword[else] : keyword[if] identifier[word] [- literal[int] :]== literal[string] : keyword[if] ( identifier[len] ( identifier[word] [ identifier[r2_start] :])>= literal[int] keyword[and] identifier[len] ( identifier[word] )>= literal[int] keyword[and] identifier[word] [- literal[int] ] keyword[in] identifier[tuple] ( literal[string] ) ): identifier[word] = identifier[word] [:- literal[int] ] keyword[if] identifier[word] [- literal[int] ]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r2_start] :])>= literal[int] keyword[or] ( identifier[len] ( identifier[word] [ identifier[r1_start] :])>= literal[int] keyword[and] keyword[not] identifier[self] . identifier[_sb_ends_in_short_syllable] ( identifier[word] [:- literal[int] ]) ): identifier[word] = identifier[word] [:- literal[int] ] keyword[elif] identifier[word] [- literal[int] ]== literal[string] : keyword[if] identifier[len] ( identifier[word] [ identifier[r2_start] :])>= literal[int] keyword[and] identifier[word] [- literal[int] ]== literal[string] : identifier[word] = identifier[word] [:- literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[word] )): keyword[if] identifier[word] [ identifier[i] ]== literal[string] : identifier[word] = identifier[word] [: identifier[i] ]+ literal[string] + identifier[word] [ identifier[i] + literal[int] :] keyword[return] identifier[word]
def stem(self, word, early_english=False): """Return the Porter2 (Snowball English) stem. Parameters ---------- word : str The word to stem early_english : bool Set to True in order to remove -eth & -est (2nd & 3rd person singular verbal agreement suffixes) Returns ------- str Word stem Examples -------- >>> stmr = Porter2() >>> stmr.stem('reading') 'read' >>> stmr.stem('suspension') 'suspens' >>> stmr.stem('elusiveness') 'elus' >>> stmr.stem('eateth', early_english=True) 'eat' """ # lowercase, normalize, and compose word = normalize('NFC', text_type(word.lower())) # replace apostrophe-like characters with U+0027, per # http://snowball.tartarus.org/texts/apostrophe.html word = word.replace('’', "'") word = word.replace('’', "'") # Exceptions 1 if word in self._exception1dict: return self._exception1dict[word] # depends on [control=['if'], data=['word']] elif word in self._exception1set: return word # depends on [control=['if'], data=['word']] # Return word if stem is shorter than 3 if len(word) < 3: return word # depends on [control=['if'], data=[]] # Remove initial ', if present. 
while word and word[0] == "'": word = word[1:] # Return word if stem is shorter than 2 if len(word) < 2: return word # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # Re-map vocalic Y to y (Y will be C, y will be V) if word[0] == 'y': word = 'Y' + word[1:] # depends on [control=['if'], data=[]] for i in range(1, len(word)): if word[i] == 'y' and word[i - 1] in self._vowels: word = word[:i] + 'Y' + word[i + 1:] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] r1_start = self._sb_r1(word, self._r1_prefixes) r2_start = self._sb_r2(word, self._r1_prefixes) # Step 0 if word[-3:] == "'s'": word = word[:-3] # depends on [control=['if'], data=[]] elif word[-2:] == "'s": word = word[:-2] # depends on [control=['if'], data=[]] elif word[-1:] == "'": word = word[:-1] # depends on [control=['if'], data=[]] # Return word if stem is shorter than 2 if len(word) < 3: return word # depends on [control=['if'], data=[]] # Step 1a if word[-4:] == 'sses': word = word[:-2] # depends on [control=['if'], data=[]] elif word[-3:] in {'ied', 'ies'}: if len(word) > 4: word = word[:-2] # depends on [control=['if'], data=[]] else: word = word[:-1] # depends on [control=['if'], data=[]] elif word[-2:] in {'us', 'ss'}: pass # depends on [control=['if'], data=[]] elif word[-1] == 's': if self._sb_has_vowel(word[:-2]): word = word[:-1] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Exceptions 2 if word in self._exception2set: return word # depends on [control=['if'], data=['word']] # Step 1b step1b_flag = False if word[-5:] == 'eedly': if len(word[r1_start:]) >= 5: word = word[:-3] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-5:] == 'ingly': if self._sb_has_vowel(word[:-5]): word = word[:-5] step1b_flag = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-4:] == 'edly': if self._sb_has_vowel(word[:-4]): word = 
word[:-4] step1b_flag = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-3:] == 'eed': if len(word[r1_start:]) >= 3: word = word[:-1] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-3:] == 'ing': if self._sb_has_vowel(word[:-3]): word = word[:-3] step1b_flag = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-2:] == 'ed': if self._sb_has_vowel(word[:-2]): word = word[:-2] step1b_flag = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif early_english: if word[-3:] == 'est': if self._sb_has_vowel(word[:-3]): word = word[:-3] step1b_flag = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-3:] == 'eth': if self._sb_has_vowel(word[:-3]): word = word[:-3] step1b_flag = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if step1b_flag: if word[-2:] in {'at', 'bl', 'iz'}: word += 'e' # depends on [control=['if'], data=[]] elif word[-2:] in self._doubles: word = word[:-1] # depends on [control=['if'], data=[]] elif self._sb_short_word(word, self._r1_prefixes): word += 'e' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Step 1c if len(word) > 2 and word[-1] in {'Y', 'y'} and (word[-2] not in self._vowels): word = word[:-1] + 'i' # depends on [control=['if'], data=[]] # Step 2 if word[-2] == 'a': if word[-7:] == 'ational': if len(word[r1_start:]) >= 7: word = word[:-5] + 'e' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-6:] == 'tional': if len(word[r1_start:]) >= 6: word = word[:-2] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-2] == 'c': if word[-4:] in {'enci', 'anci'}: if len(word[r1_start:]) >= 4: word = word[:-1] + 'e' # depends on 
[control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-2] == 'e': if word[-4:] == 'izer': if len(word[r1_start:]) >= 4: word = word[:-1] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-2] == 'g': if word[-3:] == 'ogi': if r1_start >= 1 and len(word[r1_start:]) >= 3 and (word[-4] == 'l'): word = word[:-1] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-2] == 'l': if word[-6:] == 'lessli': if len(word[r1_start:]) >= 6: word = word[:-2] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-5:] in {'entli', 'fulli', 'ousli'}: if len(word[r1_start:]) >= 5: word = word[:-2] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-4:] == 'abli': if len(word[r1_start:]) >= 4: word = word[:-1] + 'e' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-4:] == 'alli': if len(word[r1_start:]) >= 4: word = word[:-2] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-3:] == 'bli': if len(word[r1_start:]) >= 3: word = word[:-1] + 'e' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-2:] == 'li': if r1_start >= 1 and len(word[r1_start:]) >= 2 and (word[-3] in self._li): word = word[:-2] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-2] == 'o': if word[-7:] == 'ization': if len(word[r1_start:]) >= 7: word = word[:-5] + 'e' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-5:] == 'ation': if len(word[r1_start:]) >= 5: word = word[:-3] + 'e' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-4:] == 'ator': if len(word[r1_start:]) >= 4: 
word = word[:-2] + 'e' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-2] == 's': if word[-7:] in {'fulness', 'ousness', 'iveness'}: if len(word[r1_start:]) >= 7: word = word[:-4] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-5:] == 'alism': if len(word[r1_start:]) >= 5: word = word[:-3] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-2] == 't': if word[-6:] == 'biliti': if len(word[r1_start:]) >= 6: word = word[:-5] + 'le' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-5:] == 'aliti': if len(word[r1_start:]) >= 5: word = word[:-3] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-5:] == 'iviti': if len(word[r1_start:]) >= 5: word = word[:-3] + 'e' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Step 3 if word[-7:] == 'ational': if len(word[r1_start:]) >= 7: word = word[:-5] + 'e' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-6:] == 'tional': if len(word[r1_start:]) >= 6: word = word[:-2] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-5:] in {'alize', 'icate', 'iciti'}: if len(word[r1_start:]) >= 5: word = word[:-3] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-5:] == 'ative': if len(word[r2_start:]) >= 5: word = word[:-5] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-4:] == 'ical': if len(word[r1_start:]) >= 4: word = word[:-2] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-4:] == 'ness': if len(word[r1_start:]) >= 4: word = word[:-4] # depends on [control=['if'], data=[]] # depends on [control=['if'], 
data=[]] elif word[-3:] == 'ful': if len(word[r1_start:]) >= 3: word = word[:-3] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Step 4 for suffix in ('ement', 'ance', 'ence', 'able', 'ible', 'ment', 'ant', 'ent', 'ism', 'ate', 'iti', 'ous', 'ive', 'ize', 'al', 'er', 'ic'): if word[-len(suffix):] == suffix: if len(word[r2_start:]) >= len(suffix): word = word[:-len(suffix)] # depends on [control=['if'], data=[]] break # depends on [control=['if'], data=['suffix']] # depends on [control=['for'], data=['suffix']] else: if word[-3:] == 'ion': if len(word[r2_start:]) >= 3 and len(word) >= 4 and (word[-4] in tuple('st')): word = word[:-3] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Step 5 if word[-1] == 'e': if len(word[r2_start:]) >= 1 or (len(word[r1_start:]) >= 1 and (not self._sb_ends_in_short_syllable(word[:-1]))): word = word[:-1] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif word[-1] == 'l': if len(word[r2_start:]) >= 1 and word[-2] == 'l': word = word[:-1] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Change 'Y' back to 'y' if it survived stemming for i in range(0, len(word)): if word[i] == 'Y': word = word[:i] + 'y' + word[i + 1:] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] return word
def build_container_vm(self, container, disk, zone="us-east1-b", tags=None, preemptible=True):
    """Build kwargs for a container VM.

    :param container: Container declaration.
    :type container: ``dict``
    :param disk: Disk definition structure.
    :type disk: ``dict``
    :param zone: The zone in which the instance should run.
    :type zone: ``str``
    :param tags: Tags associated with the instance; defaults to an empty list.
    :type tags: ``list``
    :param preemptible: Whether the instance is preemptible or not.
    :type preemptible: ``bool``
    :return: Keyword arguments suitable for the node-creation call.
    :rtype: ``dict``
    :raises ComputeEngineManagerException: If *container* or *disk* is None.
    """
    # Validate required arguments before doing any other work.
    if container is None:
        raise ComputeEngineManagerException("Container declaration must not be None.")
    if disk is None:
        raise ComputeEngineManagerException("Disk structure must not be None.")
    if tags is None:
        # Avoid a mutable default argument; each call gets its own list.
        tags = []
    return {
        'ex_metadata': {
            "gce-container-declaration": container,
            "google-logging-enabled": "true"
        },
        'location': zone,
        'ex_tags': tags,
        'ex_disks_gce_struct': [disk],
        'ex_preemptible': preemptible
    }
def function[build_container_vm, parameter[self, container, disk, zone, tags, preemptible]]: constant[Build kwargs for a container VM. :param container: Container declaration. :type container: ``dict`` :param disk: Disk definition structure. :type disk: ``dict`` :param zone: The zone in which the instance should run. :type zone: ``str`` :param tags: Tags associated with the instance. :type tags: ``dict`` :param preemptible: Wether the instance is a preemtible or not. :type preemptible: ``bool`` ] if compare[name[tags] is constant[None]] begin[:] variable[tags] assign[=] list[[]] if compare[name[container] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1395240> if compare[name[disk] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1395f90> return[dictionary[[<ast.Constant object at 0x7da1b1396e30>, <ast.Constant object at 0x7da1b1395ff0>, <ast.Constant object at 0x7da1b1395bd0>, <ast.Constant object at 0x7da1b1395e70>, <ast.Constant object at 0x7da1b1394f40>], [<ast.Dict object at 0x7da1b13948b0>, <ast.Name object at 0x7da1b1395f00>, <ast.Name object at 0x7da1b1396e00>, <ast.List object at 0x7da1b1395d20>, <ast.Name object at 0x7da1b13942b0>]]]
keyword[def] identifier[build_container_vm] ( identifier[self] , identifier[container] , identifier[disk] , identifier[zone] = literal[string] , identifier[tags] = keyword[None] , identifier[preemptible] = keyword[True] ): literal[string] keyword[if] identifier[tags] keyword[is] keyword[None] : identifier[tags] =[] keyword[if] identifier[container] keyword[is] keyword[None] : keyword[raise] identifier[ComputeEngineManagerException] ( literal[string] ) keyword[if] identifier[disk] keyword[is] keyword[None] : keyword[raise] identifier[ComputeEngineManagerException] ( literal[string] ) keyword[return] { literal[string] :{ literal[string] : identifier[container] , literal[string] : literal[string] }, literal[string] : identifier[zone] , literal[string] : identifier[tags] , literal[string] :[ identifier[disk] ], literal[string] : identifier[preemptible] }
def build_container_vm(self, container, disk, zone='us-east1-b', tags=None, preemptible=True): """Build kwargs for a container VM. :param container: Container declaration. :type container: ``dict`` :param disk: Disk definition structure. :type disk: ``dict`` :param zone: The zone in which the instance should run. :type zone: ``str`` :param tags: Tags associated with the instance. :type tags: ``dict`` :param preemptible: Wether the instance is a preemtible or not. :type preemptible: ``bool`` """ if tags is None: tags = [] # depends on [control=['if'], data=['tags']] if container is None: raise ComputeEngineManagerException('Container declaration must not be None.') # depends on [control=['if'], data=[]] if disk is None: raise ComputeEngineManagerException('Disk structure must not be None.') # depends on [control=['if'], data=[]] return {'ex_metadata': {'gce-container-declaration': container, 'google-logging-enabled': 'true'}, 'location': zone, 'ex_tags': tags, 'ex_disks_gce_struct': [disk], 'ex_preemptible': preemptible}
def sorted_query_paths(self):
    """
    RETURN A LIST OF ALL SCHEMA'S IN DEPTH-FIRST TOPOLOGICAL ORDER
    """
    # Take the leading element of every query path registered under this
    # schema's alias, then order them descending (reverse lexicographic).
    query_paths = self.namespace.alias_to_query_paths.get(self.name)
    heads = [path[0] for path in query_paths]
    heads.sort(reverse=True)
    return heads
def function[sorted_query_paths, parameter[self]]: constant[ RETURN A LIST OF ALL SCHEMA'S IN DEPTH-FIRST TOPOLOGICAL ORDER ] return[call[name[list], parameter[call[name[reversed], parameter[call[name[sorted], parameter[<ast.GeneratorExp object at 0x7da1b0b70b80>]]]]]]]
keyword[def] identifier[sorted_query_paths] ( identifier[self] ): literal[string] keyword[return] identifier[list] ( identifier[reversed] ( identifier[sorted] ( identifier[p] [ literal[int] ] keyword[for] identifier[p] keyword[in] identifier[self] . identifier[namespace] . identifier[alias_to_query_paths] . identifier[get] ( identifier[self] . identifier[name] ))))
def sorted_query_paths(self): """ RETURN A LIST OF ALL SCHEMA'S IN DEPTH-FIRST TOPOLOGICAL ORDER """ return list(reversed(sorted((p[0] for p in self.namespace.alias_to_query_paths.get(self.name)))))
def add_ephemeral_listener(self, callback, event_type=None):
    """Add a callback handler for ephemeral events going to this room.

    Args:
        callback (func(room, event)): Callback called when an ephemeral
            event arrives.
        event_type (str): The event_type to filter for; ``None`` matches
            every ephemeral event.

    Returns:
        uuid.UUID: Unique id of the listener, can be used to identify
            the listener.
    """
    new_id = uuid4()
    entry = {
        'uid': new_id,
        'callback': callback,
        'event_type': event_type,
    }
    self.ephemeral_listeners.append(entry)
    return new_id
def function[add_ephemeral_listener, parameter[self, callback, event_type]]: constant[Add a callback handler for ephemeral events going to this room. Args: callback (func(room, event)): Callback called when an ephemeral event arrives. event_type (str): The event_type to filter for. Returns: uuid.UUID: Unique id of the listener, can be used to identify the listener. ] variable[listener_id] assign[=] call[name[uuid4], parameter[]] call[name[self].ephemeral_listeners.append, parameter[dictionary[[<ast.Constant object at 0x7da1b16c2e00>, <ast.Constant object at 0x7da1b16c1960>, <ast.Constant object at 0x7da1b16c22c0>], [<ast.Name object at 0x7da1b16c31f0>, <ast.Name object at 0x7da1b16c2b30>, <ast.Name object at 0x7da1b16c2cb0>]]]] return[name[listener_id]]
keyword[def] identifier[add_ephemeral_listener] ( identifier[self] , identifier[callback] , identifier[event_type] = keyword[None] ): literal[string] identifier[listener_id] = identifier[uuid4] () identifier[self] . identifier[ephemeral_listeners] . identifier[append] ( { literal[string] : identifier[listener_id] , literal[string] : identifier[callback] , literal[string] : identifier[event_type] } ) keyword[return] identifier[listener_id]
def add_ephemeral_listener(self, callback, event_type=None): """Add a callback handler for ephemeral events going to this room. Args: callback (func(room, event)): Callback called when an ephemeral event arrives. event_type (str): The event_type to filter for. Returns: uuid.UUID: Unique id of the listener, can be used to identify the listener. """ listener_id = uuid4() self.ephemeral_listeners.append({'uid': listener_id, 'callback': callback, 'event_type': event_type}) return listener_id
def encode_request(uuid, address, interrupt):
    """ Encode request into client_message"""
    # Size the message up front, then stamp the protocol header fields.
    message = ClientMessage(payload_size=calculate_size(uuid, address, interrupt))
    message.set_message_type(REQUEST_TYPE)
    message.set_retryable(RETRYABLE)
    # Payload: uuid string, encoded address, interrupt flag — order matters.
    message.append_str(uuid)
    AddressCodec.encode(message, address)
    message.append_bool(interrupt)
    message.update_frame_length()
    return message
def function[encode_request, parameter[uuid, address, interrupt]]: constant[ Encode request into client_message] variable[client_message] assign[=] call[name[ClientMessage], parameter[]] call[name[client_message].set_message_type, parameter[name[REQUEST_TYPE]]] call[name[client_message].set_retryable, parameter[name[RETRYABLE]]] call[name[client_message].append_str, parameter[name[uuid]]] call[name[AddressCodec].encode, parameter[name[client_message], name[address]]] call[name[client_message].append_bool, parameter[name[interrupt]]] call[name[client_message].update_frame_length, parameter[]] return[name[client_message]]
keyword[def] identifier[encode_request] ( identifier[uuid] , identifier[address] , identifier[interrupt] ): literal[string] identifier[client_message] = identifier[ClientMessage] ( identifier[payload_size] = identifier[calculate_size] ( identifier[uuid] , identifier[address] , identifier[interrupt] )) identifier[client_message] . identifier[set_message_type] ( identifier[REQUEST_TYPE] ) identifier[client_message] . identifier[set_retryable] ( identifier[RETRYABLE] ) identifier[client_message] . identifier[append_str] ( identifier[uuid] ) identifier[AddressCodec] . identifier[encode] ( identifier[client_message] , identifier[address] ) identifier[client_message] . identifier[append_bool] ( identifier[interrupt] ) identifier[client_message] . identifier[update_frame_length] () keyword[return] identifier[client_message]
def encode_request(uuid, address, interrupt): """ Encode request into client_message""" client_message = ClientMessage(payload_size=calculate_size(uuid, address, interrupt)) client_message.set_message_type(REQUEST_TYPE) client_message.set_retryable(RETRYABLE) client_message.append_str(uuid) AddressCodec.encode(client_message, address) client_message.append_bool(interrupt) client_message.update_frame_length() return client_message
def delete(self, refobj):
    """Delete the content of the given refobj

    :param refobj: the refobj that represents the content that should be deleted
    :type refobj: refobj
    :returns: None
    :rtype: None
    :raises: None
    """
    reference = self.get_refobjinter().get_reference(refobj)
    if reference:
        # Referenced content: resolve the namespace from the reference
        # node, then unload the reference itself.
        fullns = cmds.referenceQuery(reference, namespace=True)
        cmds.file(removeReference=True, referenceNode=reference)
    else:
        # Imported content: rebuild the full namespace from the refobj's
        # parent namespace plus its stored namespace attribute.
        parent = common.get_namespace(refobj)
        child = cmds.getAttr("%s.namespace" % refobj)
        fullns = "%s:%s" % (parent.rstrip(":"), child.lstrip(":"))
    # Finally drop the namespace together with everything inside it.
    cmds.namespace(removeNamespace=fullns, deleteNamespaceContent=True)
def function[delete, parameter[self, refobj]]: constant[Delete the content of the given refobj :param refobj: the refobj that represents the content that should be deleted :type refobj: refobj :returns: None :rtype: None :raises: None ] variable[refobjinter] assign[=] call[name[self].get_refobjinter, parameter[]] variable[reference] assign[=] call[name[refobjinter].get_reference, parameter[name[refobj]]] if name[reference] begin[:] variable[fullns] assign[=] call[name[cmds].referenceQuery, parameter[name[reference]]] call[name[cmds].file, parameter[]] call[name[cmds].namespace, parameter[]]
keyword[def] identifier[delete] ( identifier[self] , identifier[refobj] ): literal[string] identifier[refobjinter] = identifier[self] . identifier[get_refobjinter] () identifier[reference] = identifier[refobjinter] . identifier[get_reference] ( identifier[refobj] ) keyword[if] identifier[reference] : identifier[fullns] = identifier[cmds] . identifier[referenceQuery] ( identifier[reference] , identifier[namespace] = keyword[True] ) identifier[cmds] . identifier[file] ( identifier[removeReference] = keyword[True] , identifier[referenceNode] = identifier[reference] ) keyword[else] : identifier[parentns] = identifier[common] . identifier[get_namespace] ( identifier[refobj] ) identifier[ns] = identifier[cmds] . identifier[getAttr] ( literal[string] % identifier[refobj] ) identifier[fullns] = literal[string] . identifier[join] (( identifier[parentns] . identifier[rstrip] ( literal[string] ), identifier[ns] . identifier[lstrip] ( literal[string] ))) identifier[cmds] . identifier[namespace] ( identifier[removeNamespace] = identifier[fullns] , identifier[deleteNamespaceContent] = keyword[True] )
def delete(self, refobj): """Delete the content of the given refobj :param refobj: the refobj that represents the content that should be deleted :type refobj: refobj :returns: None :rtype: None :raises: None """ refobjinter = self.get_refobjinter() reference = refobjinter.get_reference(refobj) if reference: fullns = cmds.referenceQuery(reference, namespace=True) cmds.file(removeReference=True, referenceNode=reference) # depends on [control=['if'], data=[]] else: parentns = common.get_namespace(refobj) ns = cmds.getAttr('%s.namespace' % refobj) fullns = ':'.join((parentns.rstrip(':'), ns.lstrip(':'))) cmds.namespace(removeNamespace=fullns, deleteNamespaceContent=True)
def smartypants(text):
    """
    Transforms sequences of characters into HTML entities.

    =================================== ===================== =========
    Markdown                            HTML                  Result
    =================================== ===================== =========
    ``'s`` (s, t, m, d, re, ll, ve)     &rsquo;s              ’s
    ``"Quotes"``                        &ldquo;Quotes&rdquo;  “Quotes”
    ``---``                             &mdash;               —
    ``--``                              &ndash;               –
    ``...``                             &hellip;              …
    ``. . .``                           &hellip;              …
    ``(c)``                             &copy;                ©
    ``(r)``                             &reg;                 ®
    ``(tm)``                            &trade;               ™
    ``3/4``                             &frac34;              ¾
    ``1/2``                             &frac12;              ½
    ``1/4``                             &frac14;              ¼
    =================================== ===================== =========
    """
    data = text.encode('utf-8')
    # Allocate an output buffer on the C side and run smartypants over
    # the UTF-8 bytes.
    buf = lib.hoedown_buffer_new(OUNIT)
    lib.hoedown_html_smartypants(buf, data, len(data))
    try:
        return to_string(buf)
    finally:
        # Always release the C buffer, even if decoding fails.
        lib.hoedown_buffer_free(buf)
def function[smartypants, parameter[text]]: constant[ Transforms sequences of characters into HTML entities. =================================== ===================== ========= Markdown HTML Result =================================== ===================== ========= ``'s`` (s, t, m, d, re, ll, ve) &rsquo;s ’s ``"Quotes"`` &ldquo;Quotes&rdquo; “Quotes” ``---`` &mdash; — ``--`` &ndash; – ``...`` &hellip; … ``. . .`` &hellip; … ``(c)`` &copy; © ``(r)`` &reg; ® ``(tm)`` &trade; ™ ``3/4`` &frac34; ¾ ``1/2`` &frac12; ½ ``1/4`` &frac14; ¼ =================================== ===================== ========= ] variable[byte_str] assign[=] call[name[text].encode, parameter[constant[utf-8]]] variable[ob] assign[=] call[name[lib].hoedown_buffer_new, parameter[name[OUNIT]]] call[name[lib].hoedown_html_smartypants, parameter[name[ob], name[byte_str], call[name[len], parameter[name[byte_str]]]]] <ast.Try object at 0x7da18fe90cd0>
keyword[def] identifier[smartypants] ( identifier[text] ): literal[string] identifier[byte_str] = identifier[text] . identifier[encode] ( literal[string] ) identifier[ob] = identifier[lib] . identifier[hoedown_buffer_new] ( identifier[OUNIT] ) identifier[lib] . identifier[hoedown_html_smartypants] ( identifier[ob] , identifier[byte_str] , identifier[len] ( identifier[byte_str] )) keyword[try] : keyword[return] identifier[to_string] ( identifier[ob] ) keyword[finally] : identifier[lib] . identifier[hoedown_buffer_free] ( identifier[ob] );
def smartypants(text): """ Transforms sequences of characters into HTML entities. =================================== ===================== ========= Markdown HTML Result =================================== ===================== ========= ``'s`` (s, t, m, d, re, ll, ve) &rsquo;s ’s ``"Quotes"`` &ldquo;Quotes&rdquo; “Quotes” ``---`` &mdash; — ``--`` &ndash; – ``...`` &hellip; … ``. . .`` &hellip; … ``(c)`` &copy; © ``(r)`` &reg; ® ``(tm)`` &trade; ™ ``3/4`` &frac34; ¾ ``1/2`` &frac12; ½ ``1/4`` &frac14; ¼ =================================== ===================== ========= """ byte_str = text.encode('utf-8') ob = lib.hoedown_buffer_new(OUNIT) lib.hoedown_html_smartypants(ob, byte_str, len(byte_str)) try: return to_string(ob) # depends on [control=['try'], data=[]] finally: lib.hoedown_buffer_free(ob)
def always_iterable(obj, base_type=(str, bytes)):
    """If *obj* is iterable, return an iterator over its items::

        >>> obj = (1, 2, 3)
        >>> list(always_iterable(obj))
        [1, 2, 3]

    If *obj* is not iterable, return a one-item iterable containing *obj*::

        >>> obj = 1
        >>> list(always_iterable(obj))
        [1]

    If *obj* is ``None``, return an empty iterable:

        >>> obj = None
        >>> list(always_iterable(None))
        []

    By default, binary and text strings are not considered iterable::

        >>> obj = 'foo'
        >>> list(always_iterable(obj))
        ['foo']

    If *base_type* is set, objects for which ``isinstance(obj, base_type)``
    returns ``True`` won't be considered iterable.

        >>> obj = {'a': 1}
        >>> list(always_iterable(obj))  # Iterate over the dict's keys
        ['a']
        >>> list(always_iterable(obj, base_type=dict))  # Treat dicts as a unit
        [{'a': 1}]

    Set *base_type* to ``None`` to avoid any special handling and treat objects
    Python considers iterable as iterable:

        >>> obj = 'foo'
        >>> list(always_iterable(obj, base_type=None))
        ['f', 'o', 'o']
    """
    # None yields nothing at all.
    if obj is None:
        return iter(())
    # Objects of an excluded base type are wrapped as a single item, as is
    # anything that turns out not to be iterable.
    wrap = base_type is not None and isinstance(obj, base_type)
    if not wrap:
        try:
            return iter(obj)
        except TypeError:
            wrap = True
    return iter((obj,))
def function[always_iterable, parameter[obj, base_type]]: constant[If *obj* is iterable, return an iterator over its items:: >>> obj = (1, 2, 3) >>> list(always_iterable(obj)) [1, 2, 3] If *obj* is not iterable, return a one-item iterable containing *obj*:: >>> obj = 1 >>> list(always_iterable(obj)) [1] If *obj* is ``None``, return an empty iterable: >>> obj = None >>> list(always_iterable(None)) [] By default, binary and text strings are not considered iterable:: >>> obj = 'foo' >>> list(always_iterable(obj)) ['foo'] If *base_type* is set, objects for which ``isinstance(obj, base_type)`` returns ``True`` won't be considered iterable. >>> obj = {'a': 1} >>> list(always_iterable(obj)) # Iterate over the dict's keys ['a'] >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit [{'a': 1}] Set *base_type* to ``None`` to avoid any special handling and treat objects Python considers iterable as iterable: >>> obj = 'foo' >>> list(always_iterable(obj, base_type=None)) ['f', 'o', 'o'] ] if compare[name[obj] is constant[None]] begin[:] return[call[name[iter], parameter[tuple[[]]]]] if <ast.BoolOp object at 0x7da1b1da2b00> begin[:] return[call[name[iter], parameter[tuple[[<ast.Name object at 0x7da1b1da3a60>]]]]] <ast.Try object at 0x7da1b1da3370>
keyword[def] identifier[always_iterable] ( identifier[obj] , identifier[base_type] =( identifier[str] , identifier[bytes] )): literal[string] keyword[if] identifier[obj] keyword[is] keyword[None] : keyword[return] identifier[iter] (()) keyword[if] ( identifier[base_type] keyword[is] keyword[not] keyword[None] ) keyword[and] identifier[isinstance] ( identifier[obj] , identifier[base_type] ): keyword[return] identifier[iter] (( identifier[obj] ,)) keyword[try] : keyword[return] identifier[iter] ( identifier[obj] ) keyword[except] identifier[TypeError] : keyword[return] identifier[iter] (( identifier[obj] ,))
def always_iterable(obj, base_type=(str, bytes)): """If *obj* is iterable, return an iterator over its items:: >>> obj = (1, 2, 3) >>> list(always_iterable(obj)) [1, 2, 3] If *obj* is not iterable, return a one-item iterable containing *obj*:: >>> obj = 1 >>> list(always_iterable(obj)) [1] If *obj* is ``None``, return an empty iterable: >>> obj = None >>> list(always_iterable(None)) [] By default, binary and text strings are not considered iterable:: >>> obj = 'foo' >>> list(always_iterable(obj)) ['foo'] If *base_type* is set, objects for which ``isinstance(obj, base_type)`` returns ``True`` won't be considered iterable. >>> obj = {'a': 1} >>> list(always_iterable(obj)) # Iterate over the dict's keys ['a'] >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit [{'a': 1}] Set *base_type* to ``None`` to avoid any special handling and treat objects Python considers iterable as iterable: >>> obj = 'foo' >>> list(always_iterable(obj, base_type=None)) ['f', 'o', 'o'] """ if obj is None: return iter(()) # depends on [control=['if'], data=[]] if base_type is not None and isinstance(obj, base_type): return iter((obj,)) # depends on [control=['if'], data=[]] try: return iter(obj) # depends on [control=['try'], data=[]] except TypeError: return iter((obj,)) # depends on [control=['except'], data=[]]
def make_order_and_cancel(api_svr_ip, api_svr_port, unlock_password, test_code, trade_env, acc_id): """ 使用请先配置正确参数: :param api_svr_ip: (string) ip :param api_svr_port: (string) ip :param unlock_password: (string) 交易解锁密码, 必需修改! :param test_code: (string) 股票 :param trade_env: 参见 ft.TrdEnv的定义 :param acc_id: 交易子账号id """ if unlock_password == "": raise Exception("请先配置交易解锁密码!") quote_ctx = ft.OpenQuoteContext(host=api_svr_ip, port=api_svr_port) # 创建行情api quote_ctx.subscribe(test_code, ft.SubType.ORDER_BOOK) # 定阅摆盘 # 创建交易api is_hk_trade = 'HK.' in test_code if is_hk_trade: trade_ctx = ft.OpenHKTradeContext(host=api_svr_ip, port=api_svr_port) else: trade_ctx = ft.OpenUSTradeContext(host=api_svr_ip, port=api_svr_port) # 每手股数 lot_size = 0 is_unlock_trade = False is_fire_trade = False while not is_fire_trade: sleep(2) # 解锁交易 if not is_unlock_trade and trade_env == ft.TrdEnv.REAL: print("unlocking trade...") ret_code, ret_data = trade_ctx.unlock_trade(unlock_password) is_unlock_trade = (ret_code == ft.RET_OK) if not is_unlock_trade: print("请求交易解锁失败:{}".format(ret_data)) break if lot_size == 0: print("get lotsize...") ret, data = quote_ctx.get_market_snapshot(test_code) lot_size = data.iloc[0]['lot_size'] if ret == ft.RET_OK else 0 if ret != ft.RET_OK: print("取不到每手信息,重试中: {}".format(data)) continue elif lot_size <= 0: raise BaseException("该股票每手信息错误,可能不支持交易 code ={}".format(test_code)) print("get order book...") ret, data = quote_ctx.get_order_book(test_code) # 得到第十档数据 if ret != ft.RET_OK: continue # 计算交易价格 bid_order_arr = data['Bid'] if is_hk_trade: if len(bid_order_arr) != 10: continue # 港股下单: 价格定为第十档 price, _, _ = bid_order_arr[9] else: if len(bid_order_arr) == 0: continue # 美股下单: 价格定为一档降10% price, _, _ = bid_order_arr[0] price = round(price * 0.9, 2) qty = lot_size # 价格和数量判断 if qty == 0 or price == 0.0: continue # 下单 order_id = 0 print("place order : price={} qty={} code={}".format(price, qty, test_code)) ret_code, ret_data = trade_ctx.place_order(price=price, qty=qty, 
code=test_code, trd_side=ft.TrdSide.BUY, order_type=ft.OrderType.NORMAL, trd_env=trade_env, acc_id=acc_id) is_fire_trade = True print('下单ret={} data={}'.format(ret_code, ret_data)) if ret_code == ft.RET_OK: row = ret_data.iloc[0] order_id = row['order_id'] # 循环撤单 sleep(2) if order_id: while True: ret_code, ret_data = trade_ctx.order_list_query(order_id=order_id, status_filter_list=[], code='', start='', end='', trd_env=trade_env, acc_id=acc_id) if ret_code != ft.RET_OK: sleep(2) continue order_status = ret_data.iloc[0]['order_status'] if order_status in [ft.OrderStatus.SUBMIT_FAILED, ft.OrderStatus.TIMEOUT, ft.OrderStatus.FILLED_ALL, ft.OrderStatus.FAILED, ft.OrderStatus.DELETED]: break print("cancel order...") ret_code, ret_data = trade_ctx.modify_order(modify_order_op=ft.ModifyOrderOp.CANCEL, order_id=order_id, price=price, qty=qty, adjust_limit=0, trd_env=trade_env, acc_id=acc_id) print("撤单ret={} data={}".format(ret_code, ret_data)) if ret_code == ft.RET_OK: break else: sleep(2) # destroy object quote_ctx.close() trade_ctx.close()
def function[make_order_and_cancel, parameter[api_svr_ip, api_svr_port, unlock_password, test_code, trade_env, acc_id]]: constant[ 使用请先配置正确参数: :param api_svr_ip: (string) ip :param api_svr_port: (string) ip :param unlock_password: (string) 交易解锁密码, 必需修改! :param test_code: (string) 股票 :param trade_env: 参见 ft.TrdEnv的定义 :param acc_id: 交易子账号id ] if compare[name[unlock_password] equal[==] constant[]] begin[:] <ast.Raise object at 0x7da18f09fbe0> variable[quote_ctx] assign[=] call[name[ft].OpenQuoteContext, parameter[]] call[name[quote_ctx].subscribe, parameter[name[test_code], name[ft].SubType.ORDER_BOOK]] variable[is_hk_trade] assign[=] compare[constant[HK.] in name[test_code]] if name[is_hk_trade] begin[:] variable[trade_ctx] assign[=] call[name[ft].OpenHKTradeContext, parameter[]] variable[lot_size] assign[=] constant[0] variable[is_unlock_trade] assign[=] constant[False] variable[is_fire_trade] assign[=] constant[False] while <ast.UnaryOp object at 0x7da18f09e4d0> begin[:] call[name[sleep], parameter[constant[2]]] if <ast.BoolOp object at 0x7da18f09e410> begin[:] call[name[print], parameter[constant[unlocking trade...]]] <ast.Tuple object at 0x7da18f09cf70> assign[=] call[name[trade_ctx].unlock_trade, parameter[name[unlock_password]]] variable[is_unlock_trade] assign[=] compare[name[ret_code] equal[==] name[ft].RET_OK] if <ast.UnaryOp object at 0x7da18f09ebf0> begin[:] call[name[print], parameter[call[constant[请求交易解锁失败:{}].format, parameter[name[ret_data]]]]] break if compare[name[lot_size] equal[==] constant[0]] begin[:] call[name[print], parameter[constant[get lotsize...]]] <ast.Tuple object at 0x7da18f09fe20> assign[=] call[name[quote_ctx].get_market_snapshot, parameter[name[test_code]]] variable[lot_size] assign[=] <ast.IfExp object at 0x7da18f09e2c0> if compare[name[ret] not_equal[!=] name[ft].RET_OK] begin[:] call[name[print], parameter[call[constant[取不到每手信息,重试中: {}].format, parameter[name[data]]]]] continue call[name[print], parameter[constant[get order 
book...]]] <ast.Tuple object at 0x7da18f00eef0> assign[=] call[name[quote_ctx].get_order_book, parameter[name[test_code]]] if compare[name[ret] not_equal[!=] name[ft].RET_OK] begin[:] continue variable[bid_order_arr] assign[=] call[name[data]][constant[Bid]] if name[is_hk_trade] begin[:] if compare[call[name[len], parameter[name[bid_order_arr]]] not_equal[!=] constant[10]] begin[:] continue <ast.Tuple object at 0x7da18f00dc90> assign[=] call[name[bid_order_arr]][constant[9]] variable[qty] assign[=] name[lot_size] if <ast.BoolOp object at 0x7da18f00ce20> begin[:] continue variable[order_id] assign[=] constant[0] call[name[print], parameter[call[constant[place order : price={} qty={} code={}].format, parameter[name[price], name[qty], name[test_code]]]]] <ast.Tuple object at 0x7da18f00f910> assign[=] call[name[trade_ctx].place_order, parameter[]] variable[is_fire_trade] assign[=] constant[True] call[name[print], parameter[call[constant[下单ret={} data={}].format, parameter[name[ret_code], name[ret_data]]]]] if compare[name[ret_code] equal[==] name[ft].RET_OK] begin[:] variable[row] assign[=] call[name[ret_data].iloc][constant[0]] variable[order_id] assign[=] call[name[row]][constant[order_id]] call[name[sleep], parameter[constant[2]]] if name[order_id] begin[:] while constant[True] begin[:] <ast.Tuple object at 0x7da18f00e3b0> assign[=] call[name[trade_ctx].order_list_query, parameter[]] if compare[name[ret_code] not_equal[!=] name[ft].RET_OK] begin[:] call[name[sleep], parameter[constant[2]]] continue variable[order_status] assign[=] call[call[name[ret_data].iloc][constant[0]]][constant[order_status]] if compare[name[order_status] in list[[<ast.Attribute object at 0x7da18f00e410>, <ast.Attribute object at 0x7da18f00c190>, <ast.Attribute object at 0x7da18f00d3f0>, <ast.Attribute object at 0x7da18f00fca0>, <ast.Attribute object at 0x7da18f00f370>]]] begin[:] break call[name[print], parameter[constant[cancel order...]]] <ast.Tuple object at 0x7da18f00fa30> assign[=] 
call[name[trade_ctx].modify_order, parameter[]] call[name[print], parameter[call[constant[撤单ret={} data={}].format, parameter[name[ret_code], name[ret_data]]]]] if compare[name[ret_code] equal[==] name[ft].RET_OK] begin[:] break call[name[quote_ctx].close, parameter[]] call[name[trade_ctx].close, parameter[]]
keyword[def] identifier[make_order_and_cancel] ( identifier[api_svr_ip] , identifier[api_svr_port] , identifier[unlock_password] , identifier[test_code] , identifier[trade_env] , identifier[acc_id] ): literal[string] keyword[if] identifier[unlock_password] == literal[string] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[quote_ctx] = identifier[ft] . identifier[OpenQuoteContext] ( identifier[host] = identifier[api_svr_ip] , identifier[port] = identifier[api_svr_port] ) identifier[quote_ctx] . identifier[subscribe] ( identifier[test_code] , identifier[ft] . identifier[SubType] . identifier[ORDER_BOOK] ) identifier[is_hk_trade] = literal[string] keyword[in] identifier[test_code] keyword[if] identifier[is_hk_trade] : identifier[trade_ctx] = identifier[ft] . identifier[OpenHKTradeContext] ( identifier[host] = identifier[api_svr_ip] , identifier[port] = identifier[api_svr_port] ) keyword[else] : identifier[trade_ctx] = identifier[ft] . identifier[OpenUSTradeContext] ( identifier[host] = identifier[api_svr_ip] , identifier[port] = identifier[api_svr_port] ) identifier[lot_size] = literal[int] identifier[is_unlock_trade] = keyword[False] identifier[is_fire_trade] = keyword[False] keyword[while] keyword[not] identifier[is_fire_trade] : identifier[sleep] ( literal[int] ) keyword[if] keyword[not] identifier[is_unlock_trade] keyword[and] identifier[trade_env] == identifier[ft] . identifier[TrdEnv] . identifier[REAL] : identifier[print] ( literal[string] ) identifier[ret_code] , identifier[ret_data] = identifier[trade_ctx] . identifier[unlock_trade] ( identifier[unlock_password] ) identifier[is_unlock_trade] =( identifier[ret_code] == identifier[ft] . identifier[RET_OK] ) keyword[if] keyword[not] identifier[is_unlock_trade] : identifier[print] ( literal[string] . 
identifier[format] ( identifier[ret_data] )) keyword[break] keyword[if] identifier[lot_size] == literal[int] : identifier[print] ( literal[string] ) identifier[ret] , identifier[data] = identifier[quote_ctx] . identifier[get_market_snapshot] ( identifier[test_code] ) identifier[lot_size] = identifier[data] . identifier[iloc] [ literal[int] ][ literal[string] ] keyword[if] identifier[ret] == identifier[ft] . identifier[RET_OK] keyword[else] literal[int] keyword[if] identifier[ret] != identifier[ft] . identifier[RET_OK] : identifier[print] ( literal[string] . identifier[format] ( identifier[data] )) keyword[continue] keyword[elif] identifier[lot_size] <= literal[int] : keyword[raise] identifier[BaseException] ( literal[string] . identifier[format] ( identifier[test_code] )) identifier[print] ( literal[string] ) identifier[ret] , identifier[data] = identifier[quote_ctx] . identifier[get_order_book] ( identifier[test_code] ) keyword[if] identifier[ret] != identifier[ft] . identifier[RET_OK] : keyword[continue] identifier[bid_order_arr] = identifier[data] [ literal[string] ] keyword[if] identifier[is_hk_trade] : keyword[if] identifier[len] ( identifier[bid_order_arr] )!= literal[int] : keyword[continue] identifier[price] , identifier[_] , identifier[_] = identifier[bid_order_arr] [ literal[int] ] keyword[else] : keyword[if] identifier[len] ( identifier[bid_order_arr] )== literal[int] : keyword[continue] identifier[price] , identifier[_] , identifier[_] = identifier[bid_order_arr] [ literal[int] ] identifier[price] = identifier[round] ( identifier[price] * literal[int] , literal[int] ) identifier[qty] = identifier[lot_size] keyword[if] identifier[qty] == literal[int] keyword[or] identifier[price] == literal[int] : keyword[continue] identifier[order_id] = literal[int] identifier[print] ( literal[string] . identifier[format] ( identifier[price] , identifier[qty] , identifier[test_code] )) identifier[ret_code] , identifier[ret_data] = identifier[trade_ctx] . 
identifier[place_order] ( identifier[price] = identifier[price] , identifier[qty] = identifier[qty] , identifier[code] = identifier[test_code] , identifier[trd_side] = identifier[ft] . identifier[TrdSide] . identifier[BUY] , identifier[order_type] = identifier[ft] . identifier[OrderType] . identifier[NORMAL] , identifier[trd_env] = identifier[trade_env] , identifier[acc_id] = identifier[acc_id] ) identifier[is_fire_trade] = keyword[True] identifier[print] ( literal[string] . identifier[format] ( identifier[ret_code] , identifier[ret_data] )) keyword[if] identifier[ret_code] == identifier[ft] . identifier[RET_OK] : identifier[row] = identifier[ret_data] . identifier[iloc] [ literal[int] ] identifier[order_id] = identifier[row] [ literal[string] ] identifier[sleep] ( literal[int] ) keyword[if] identifier[order_id] : keyword[while] keyword[True] : identifier[ret_code] , identifier[ret_data] = identifier[trade_ctx] . identifier[order_list_query] ( identifier[order_id] = identifier[order_id] , identifier[status_filter_list] =[], identifier[code] = literal[string] , identifier[start] = literal[string] , identifier[end] = literal[string] , identifier[trd_env] = identifier[trade_env] , identifier[acc_id] = identifier[acc_id] ) keyword[if] identifier[ret_code] != identifier[ft] . identifier[RET_OK] : identifier[sleep] ( literal[int] ) keyword[continue] identifier[order_status] = identifier[ret_data] . identifier[iloc] [ literal[int] ][ literal[string] ] keyword[if] identifier[order_status] keyword[in] [ identifier[ft] . identifier[OrderStatus] . identifier[SUBMIT_FAILED] , identifier[ft] . identifier[OrderStatus] . identifier[TIMEOUT] , identifier[ft] . identifier[OrderStatus] . identifier[FILLED_ALL] , identifier[ft] . identifier[OrderStatus] . identifier[FAILED] , identifier[ft] . identifier[OrderStatus] . identifier[DELETED] ]: keyword[break] identifier[print] ( literal[string] ) identifier[ret_code] , identifier[ret_data] = identifier[trade_ctx] . 
identifier[modify_order] ( identifier[modify_order_op] = identifier[ft] . identifier[ModifyOrderOp] . identifier[CANCEL] , identifier[order_id] = identifier[order_id] , identifier[price] = identifier[price] , identifier[qty] = identifier[qty] , identifier[adjust_limit] = literal[int] , identifier[trd_env] = identifier[trade_env] , identifier[acc_id] = identifier[acc_id] ) identifier[print] ( literal[string] . identifier[format] ( identifier[ret_code] , identifier[ret_data] )) keyword[if] identifier[ret_code] == identifier[ft] . identifier[RET_OK] : keyword[break] keyword[else] : identifier[sleep] ( literal[int] ) identifier[quote_ctx] . identifier[close] () identifier[trade_ctx] . identifier[close] ()
def make_order_and_cancel(api_svr_ip, api_svr_port, unlock_password, test_code, trade_env, acc_id): """ 使用请先配置正确参数: :param api_svr_ip: (string) ip :param api_svr_port: (string) ip :param unlock_password: (string) 交易解锁密码, 必需修改! :param test_code: (string) 股票 :param trade_env: 参见 ft.TrdEnv的定义 :param acc_id: 交易子账号id """ if unlock_password == '': raise Exception('请先配置交易解锁密码!') # depends on [control=['if'], data=[]] quote_ctx = ft.OpenQuoteContext(host=api_svr_ip, port=api_svr_port) # 创建行情api quote_ctx.subscribe(test_code, ft.SubType.ORDER_BOOK) # 定阅摆盘 # 创建交易api is_hk_trade = 'HK.' in test_code if is_hk_trade: trade_ctx = ft.OpenHKTradeContext(host=api_svr_ip, port=api_svr_port) # depends on [control=['if'], data=[]] else: trade_ctx = ft.OpenUSTradeContext(host=api_svr_ip, port=api_svr_port) # 每手股数 lot_size = 0 is_unlock_trade = False is_fire_trade = False while not is_fire_trade: sleep(2) # 解锁交易 if not is_unlock_trade and trade_env == ft.TrdEnv.REAL: print('unlocking trade...') (ret_code, ret_data) = trade_ctx.unlock_trade(unlock_password) is_unlock_trade = ret_code == ft.RET_OK if not is_unlock_trade: print('请求交易解锁失败:{}'.format(ret_data)) break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if lot_size == 0: print('get lotsize...') (ret, data) = quote_ctx.get_market_snapshot(test_code) lot_size = data.iloc[0]['lot_size'] if ret == ft.RET_OK else 0 if ret != ft.RET_OK: print('取不到每手信息,重试中: {}'.format(data)) continue # depends on [control=['if'], data=[]] elif lot_size <= 0: raise BaseException('该股票每手信息错误,可能不支持交易 code ={}'.format(test_code)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['lot_size']] print('get order book...') (ret, data) = quote_ctx.get_order_book(test_code) # 得到第十档数据 if ret != ft.RET_OK: continue # depends on [control=['if'], data=[]] # 计算交易价格 bid_order_arr = data['Bid'] if is_hk_trade: if len(bid_order_arr) != 10: continue # depends on [control=['if'], data=[]] # 港股下单: 价格定为第十档 (price, _, _) = 
bid_order_arr[9] # depends on [control=['if'], data=[]] else: if len(bid_order_arr) == 0: continue # depends on [control=['if'], data=[]] # 美股下单: 价格定为一档降10% (price, _, _) = bid_order_arr[0] price = round(price * 0.9, 2) qty = lot_size # 价格和数量判断 if qty == 0 or price == 0.0: continue # depends on [control=['if'], data=[]] # 下单 order_id = 0 print('place order : price={} qty={} code={}'.format(price, qty, test_code)) (ret_code, ret_data) = trade_ctx.place_order(price=price, qty=qty, code=test_code, trd_side=ft.TrdSide.BUY, order_type=ft.OrderType.NORMAL, trd_env=trade_env, acc_id=acc_id) is_fire_trade = True print('下单ret={} data={}'.format(ret_code, ret_data)) if ret_code == ft.RET_OK: row = ret_data.iloc[0] order_id = row['order_id'] # depends on [control=['if'], data=[]] # 循环撤单 sleep(2) if order_id: while True: (ret_code, ret_data) = trade_ctx.order_list_query(order_id=order_id, status_filter_list=[], code='', start='', end='', trd_env=trade_env, acc_id=acc_id) if ret_code != ft.RET_OK: sleep(2) continue # depends on [control=['if'], data=[]] order_status = ret_data.iloc[0]['order_status'] if order_status in [ft.OrderStatus.SUBMIT_FAILED, ft.OrderStatus.TIMEOUT, ft.OrderStatus.FILLED_ALL, ft.OrderStatus.FAILED, ft.OrderStatus.DELETED]: break # depends on [control=['if'], data=[]] print('cancel order...') (ret_code, ret_data) = trade_ctx.modify_order(modify_order_op=ft.ModifyOrderOp.CANCEL, order_id=order_id, price=price, qty=qty, adjust_limit=0, trd_env=trade_env, acc_id=acc_id) print('撤单ret={} data={}'.format(ret_code, ret_data)) if ret_code == ft.RET_OK: break # depends on [control=['if'], data=[]] else: sleep(2) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # destroy object quote_ctx.close() trade_ctx.close()
def getPrivilegeForRole(self, rolename): """ Returns the privilege associated with a role. Input: rolename - name of the role Output: JSON Messages """ params = { "f" : "json", "rolename" : rolename } pURL = self._url + "/roles/getPrivilege" return self._post(url=pURL, param_dict=params, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port)
def function[getPrivilegeForRole, parameter[self, rolename]]: constant[ Returns the privilege associated with a role. Input: rolename - name of the role Output: JSON Messages ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b1248700>, <ast.Constant object at 0x7da1b124aaa0>], [<ast.Constant object at 0x7da1b1248880>, <ast.Name object at 0x7da1b1249b70>]] variable[pURL] assign[=] binary_operation[name[self]._url + constant[/roles/getPrivilege]] return[call[name[self]._post, parameter[]]]
keyword[def] identifier[getPrivilegeForRole] ( identifier[self] , identifier[rolename] ): literal[string] identifier[params] ={ literal[string] : literal[string] , literal[string] : identifier[rolename] } identifier[pURL] = identifier[self] . identifier[_url] + literal[string] keyword[return] identifier[self] . identifier[_post] ( identifier[url] = identifier[pURL] , identifier[param_dict] = identifier[params] , identifier[securityHandler] = identifier[self] . identifier[_securityHandler] , identifier[proxy_url] = identifier[self] . identifier[_proxy_url] , identifier[proxy_port] = identifier[self] . identifier[_proxy_port] )
def getPrivilegeForRole(self, rolename): """ Returns the privilege associated with a role. Input: rolename - name of the role Output: JSON Messages """ params = {'f': 'json', 'rolename': rolename} pURL = self._url + '/roles/getPrivilege' return self._post(url=pURL, param_dict=params, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port)
def deactivate(name): """Deactivate plugin. Parameters ---------- name : str Plugin name. """ if name in plugins: plugins[name].deactivate() else: raise Exception("plugin {} not found".format(name))
def function[deactivate, parameter[name]]: constant[Deactivate plugin. Parameters ---------- name : str Plugin name. ] if compare[name[name] in name[plugins]] begin[:] call[call[name[plugins]][name[name]].deactivate, parameter[]]
keyword[def] identifier[deactivate] ( identifier[name] ): literal[string] keyword[if] identifier[name] keyword[in] identifier[plugins] : identifier[plugins] [ identifier[name] ]. identifier[deactivate] () keyword[else] : keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[name] ))
def deactivate(name): """Deactivate plugin. Parameters ---------- name : str Plugin name. """ if name in plugins: plugins[name].deactivate() # depends on [control=['if'], data=['name', 'plugins']] else: raise Exception('plugin {} not found'.format(name))
def general_rotation_matrix(theta, phi, alpha): """ Rotation around vector u = (sin(theta) cos(phi), sin(theta) sin(phi), cos(theta)) by an angle alpha Ref: http://ksuweb.kennesaw.edu/~plaval//math4490/rotgen.pdf :parameter float theta: :parameter float phi: :parameter float alpha: rotation angle :return: 3x3 matrix of floats """ C = cos(alpha) S = sin(alpha) t = 1 - C ux = sin(theta)*cos(phi) uy = sin(theta)*sin(phi) uz = cos(theta) return np.array([ [t*ux**2 + C, t*ux*uy - S*uz, t*ux*uz + S*uy], [t*ux*uy + S*uz, t*uy**2 + C, t*uy*uz - S*ux], [t*ux*uz - S*uy, t*uy*uz + S*ux, t*uz**2 + C] ])
def function[general_rotation_matrix, parameter[theta, phi, alpha]]: constant[ Rotation around vector u = (sin(theta) cos(phi), sin(theta) sin(phi), cos(theta)) by an angle alpha Ref: http://ksuweb.kennesaw.edu/~plaval//math4490/rotgen.pdf :parameter float theta: :parameter float phi: :parameter float alpha: rotation angle :return: 3x3 matrix of floats ] variable[C] assign[=] call[name[cos], parameter[name[alpha]]] variable[S] assign[=] call[name[sin], parameter[name[alpha]]] variable[t] assign[=] binary_operation[constant[1] - name[C]] variable[ux] assign[=] binary_operation[call[name[sin], parameter[name[theta]]] * call[name[cos], parameter[name[phi]]]] variable[uy] assign[=] binary_operation[call[name[sin], parameter[name[theta]]] * call[name[sin], parameter[name[phi]]]] variable[uz] assign[=] call[name[cos], parameter[name[theta]]] return[call[name[np].array, parameter[list[[<ast.List object at 0x7da20e954ee0>, <ast.List object at 0x7da20e957220>, <ast.List object at 0x7da20e957460>]]]]]
keyword[def] identifier[general_rotation_matrix] ( identifier[theta] , identifier[phi] , identifier[alpha] ): literal[string] identifier[C] = identifier[cos] ( identifier[alpha] ) identifier[S] = identifier[sin] ( identifier[alpha] ) identifier[t] = literal[int] - identifier[C] identifier[ux] = identifier[sin] ( identifier[theta] )* identifier[cos] ( identifier[phi] ) identifier[uy] = identifier[sin] ( identifier[theta] )* identifier[sin] ( identifier[phi] ) identifier[uz] = identifier[cos] ( identifier[theta] ) keyword[return] identifier[np] . identifier[array] ([ [ identifier[t] * identifier[ux] ** literal[int] + identifier[C] , identifier[t] * identifier[ux] * identifier[uy] - identifier[S] * identifier[uz] , identifier[t] * identifier[ux] * identifier[uz] + identifier[S] * identifier[uy] ], [ identifier[t] * identifier[ux] * identifier[uy] + identifier[S] * identifier[uz] , identifier[t] * identifier[uy] ** literal[int] + identifier[C] , identifier[t] * identifier[uy] * identifier[uz] - identifier[S] * identifier[ux] ], [ identifier[t] * identifier[ux] * identifier[uz] - identifier[S] * identifier[uy] , identifier[t] * identifier[uy] * identifier[uz] + identifier[S] * identifier[ux] , identifier[t] * identifier[uz] ** literal[int] + identifier[C] ] ])
def general_rotation_matrix(theta, phi, alpha): """ Rotation around vector u = (sin(theta) cos(phi), sin(theta) sin(phi), cos(theta)) by an angle alpha Ref: http://ksuweb.kennesaw.edu/~plaval//math4490/rotgen.pdf :parameter float theta: :parameter float phi: :parameter float alpha: rotation angle :return: 3x3 matrix of floats """ C = cos(alpha) S = sin(alpha) t = 1 - C ux = sin(theta) * cos(phi) uy = sin(theta) * sin(phi) uz = cos(theta) return np.array([[t * ux ** 2 + C, t * ux * uy - S * uz, t * ux * uz + S * uy], [t * ux * uy + S * uz, t * uy ** 2 + C, t * uy * uz - S * ux], [t * ux * uz - S * uy, t * uy * uz + S * ux, t * uz ** 2 + C]])
def get_unique_document_id(query_str): # type: (str) -> str """Get a unique id given a query_string""" assert isinstance(query_str, string_types), ( "Must receive a string as query_str. Received {}" ).format(repr(query_str)) if query_str not in _cached_queries: _cached_queries[query_str] = sha1(str(query_str).encode("utf-8")).hexdigest() return _cached_queries[query_str]
def function[get_unique_document_id, parameter[query_str]]: constant[Get a unique id given a query_string] assert[call[name[isinstance], parameter[name[query_str], name[string_types]]]] if compare[name[query_str] <ast.NotIn object at 0x7da2590d7190> name[_cached_queries]] begin[:] call[name[_cached_queries]][name[query_str]] assign[=] call[call[name[sha1], parameter[call[call[name[str], parameter[name[query_str]]].encode, parameter[constant[utf-8]]]]].hexdigest, parameter[]] return[call[name[_cached_queries]][name[query_str]]]
keyword[def] identifier[get_unique_document_id] ( identifier[query_str] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[query_str] , identifier[string_types] ),( literal[string] ). identifier[format] ( identifier[repr] ( identifier[query_str] )) keyword[if] identifier[query_str] keyword[not] keyword[in] identifier[_cached_queries] : identifier[_cached_queries] [ identifier[query_str] ]= identifier[sha1] ( identifier[str] ( identifier[query_str] ). identifier[encode] ( literal[string] )). identifier[hexdigest] () keyword[return] identifier[_cached_queries] [ identifier[query_str] ]
def get_unique_document_id(query_str): # type: (str) -> str 'Get a unique id given a query_string' assert isinstance(query_str, string_types), 'Must receive a string as query_str. Received {}'.format(repr(query_str)) if query_str not in _cached_queries: _cached_queries[query_str] = sha1(str(query_str).encode('utf-8')).hexdigest() # depends on [control=['if'], data=['query_str', '_cached_queries']] return _cached_queries[query_str]
def get_final_freq(approx, m1, m2, s1z, s2z): """ Returns the LALSimulation function which evaluates the final (highest) frequency for a given approximant using given template parameters. NOTE: TaylorTx and TaylorFx are currently all given an ISCO cutoff !! Parameters ---------- approx : string Name of the approximant e.g. 'EOBNRv2' m1 : float or numpy.array First component mass in solar masses m2 : float or numpy.array Second component mass in solar masses s1z : float or numpy.array First component dimensionless spin S_1/m_1^2 projected onto L s2z : float or numpy.array Second component dimensionless spin S_2/m_2^2 projected onto L Returns ------- f : float or numpy.array Frequency in Hz """ lalsim_approx = lalsimulation.GetApproximantFromString(approx) return _vec_get_final_freq(lalsim_approx, m1, m2, s1z, s2z)
def function[get_final_freq, parameter[approx, m1, m2, s1z, s2z]]: constant[ Returns the LALSimulation function which evaluates the final (highest) frequency for a given approximant using given template parameters. NOTE: TaylorTx and TaylorFx are currently all given an ISCO cutoff !! Parameters ---------- approx : string Name of the approximant e.g. 'EOBNRv2' m1 : float or numpy.array First component mass in solar masses m2 : float or numpy.array Second component mass in solar masses s1z : float or numpy.array First component dimensionless spin S_1/m_1^2 projected onto L s2z : float or numpy.array Second component dimensionless spin S_2/m_2^2 projected onto L Returns ------- f : float or numpy.array Frequency in Hz ] variable[lalsim_approx] assign[=] call[name[lalsimulation].GetApproximantFromString, parameter[name[approx]]] return[call[name[_vec_get_final_freq], parameter[name[lalsim_approx], name[m1], name[m2], name[s1z], name[s2z]]]]
keyword[def] identifier[get_final_freq] ( identifier[approx] , identifier[m1] , identifier[m2] , identifier[s1z] , identifier[s2z] ): literal[string] identifier[lalsim_approx] = identifier[lalsimulation] . identifier[GetApproximantFromString] ( identifier[approx] ) keyword[return] identifier[_vec_get_final_freq] ( identifier[lalsim_approx] , identifier[m1] , identifier[m2] , identifier[s1z] , identifier[s2z] )
def get_final_freq(approx, m1, m2, s1z, s2z): """ Returns the LALSimulation function which evaluates the final (highest) frequency for a given approximant using given template parameters. NOTE: TaylorTx and TaylorFx are currently all given an ISCO cutoff !! Parameters ---------- approx : string Name of the approximant e.g. 'EOBNRv2' m1 : float or numpy.array First component mass in solar masses m2 : float or numpy.array Second component mass in solar masses s1z : float or numpy.array First component dimensionless spin S_1/m_1^2 projected onto L s2z : float or numpy.array Second component dimensionless spin S_2/m_2^2 projected onto L Returns ------- f : float or numpy.array Frequency in Hz """ lalsim_approx = lalsimulation.GetApproximantFromString(approx) return _vec_get_final_freq(lalsim_approx, m1, m2, s1z, s2z)
def reasons_to_paths(reasons):
    """Calculate the dependency paths to the reasons of the blockers.

    Paths will be in reverse-dependency order (i.e. parent projects are in
    ascending order).
    """
    # A "blocker" is a project that nothing else depends on: it appears as
    # a key but never as anyone's parent (value).
    all_parents = set(reasons.values())
    paths = set()
    for blocker in reasons:
        if blocker in all_parents:
            continue
        chain = [blocker]
        current = reasons[blocker]
        while current:
            if current in chain:
                # Revisiting a project already on this chain means the
                # dependency graph contains a cycle.
                raise CircularDependencyError(dict(parent=current,
                                                   blocker=blocker,
                                                   path=chain))
            chain.append(current)
            current = reasons.get(current)
        paths.add(tuple(chain))
    return paths
def function[reasons_to_paths, parameter[reasons]]: constant[Calculate the dependency paths to the reasons of the blockers. Paths will be in reverse-dependency order (i.e. parent projects are in ascending order). ] variable[blockers] assign[=] binary_operation[call[name[set], parameter[call[name[reasons].keys, parameter[]]]] - call[name[set], parameter[call[name[reasons].values, parameter[]]]]] variable[paths] assign[=] call[name[set], parameter[]] for taget[name[blocker]] in starred[name[blockers]] begin[:] variable[path] assign[=] list[[<ast.Name object at 0x7da1b0797310>]] variable[parent] assign[=] call[name[reasons]][name[blocker]] while name[parent] begin[:] if compare[name[parent] in name[path]] begin[:] <ast.Raise object at 0x7da1b07948b0> call[name[path].append, parameter[name[parent]]] variable[parent] assign[=] call[name[reasons].get, parameter[name[parent]]] call[name[paths].add, parameter[call[name[tuple], parameter[name[path]]]]] return[name[paths]]
keyword[def] identifier[reasons_to_paths] ( identifier[reasons] ): literal[string] identifier[blockers] = identifier[set] ( identifier[reasons] . identifier[keys] ())- identifier[set] ( identifier[reasons] . identifier[values] ()) identifier[paths] = identifier[set] () keyword[for] identifier[blocker] keyword[in] identifier[blockers] : identifier[path] =[ identifier[blocker] ] identifier[parent] = identifier[reasons] [ identifier[blocker] ] keyword[while] identifier[parent] : keyword[if] identifier[parent] keyword[in] identifier[path] : keyword[raise] identifier[CircularDependencyError] ( identifier[dict] ( identifier[parent] = identifier[parent] , identifier[blocker] = identifier[blocker] , identifier[path] = identifier[path] )) identifier[path] . identifier[append] ( identifier[parent] ) identifier[parent] = identifier[reasons] . identifier[get] ( identifier[parent] ) identifier[paths] . identifier[add] ( identifier[tuple] ( identifier[path] )) keyword[return] identifier[paths]
def reasons_to_paths(reasons): """Calculate the dependency paths to the reasons of the blockers. Paths will be in reverse-dependency order (i.e. parent projects are in ascending order). """ blockers = set(reasons.keys()) - set(reasons.values()) paths = set() for blocker in blockers: path = [blocker] parent = reasons[blocker] while parent: if parent in path: raise CircularDependencyError(dict(parent=parent, blocker=blocker, path=path)) # depends on [control=['if'], data=['parent', 'path']] path.append(parent) parent = reasons.get(parent) # depends on [control=['while'], data=[]] paths.add(tuple(path)) # depends on [control=['for'], data=['blocker']] return paths
def generate_parallel(samples, run_parallel):
    """Provide parallel preparation of summary information for alignment
    and variant calling.
    """
    to_analyze, extras = _split_samples_by_qc(samples)
    qced = run_parallel("pipeline_summary", to_analyze)
    samples = _combine_qc_samples(qced) + extras
    qsign_info = run_parallel("qsignature_summary", [samples])
    metadata_file = _merge_metadata([samples])
    summary_file = write_project_summary(samples, qsign_info)
    updated = []
    for data in samples:
        # Attach project-level summary outputs to every sample.
        summary = data[0].setdefault("summary", {})
        summary["project"] = summary_file
        summary["metadata"] = metadata_file
        if qsign_info:
            summary["mixup_check"] = qsign_info[0]["out_dir"]
        updated.append(data)
    updated = _add_researcher_summary(updated, summary_file)
    # MultiQC must be run after all file outputs are set:
    return [[utils.to_single_data(d)]
            for d in run_parallel("multiqc_summary", [updated])]
def function[generate_parallel, parameter[samples, run_parallel]]: constant[Provide parallel preparation of summary information for alignment and variant calling. ] <ast.Tuple object at 0x7da1b1867c40> assign[=] call[name[_split_samples_by_qc], parameter[name[samples]]] variable[qced] assign[=] call[name[run_parallel], parameter[constant[pipeline_summary], name[to_analyze]]] variable[samples] assign[=] binary_operation[call[name[_combine_qc_samples], parameter[name[qced]]] + name[extras]] variable[qsign_info] assign[=] call[name[run_parallel], parameter[constant[qsignature_summary], list[[<ast.Name object at 0x7da1b18d2cb0>]]]] variable[metadata_file] assign[=] call[name[_merge_metadata], parameter[list[[<ast.Name object at 0x7da1b1846590>]]]] variable[summary_file] assign[=] call[name[write_project_summary], parameter[name[samples], name[qsign_info]]] variable[out] assign[=] list[[]] for taget[name[data]] in starred[name[samples]] begin[:] if compare[constant[summary] <ast.NotIn object at 0x7da2590d7190> call[name[data]][constant[0]]] begin[:] call[call[name[data]][constant[0]]][constant[summary]] assign[=] dictionary[[], []] call[call[call[name[data]][constant[0]]][constant[summary]]][constant[project]] assign[=] name[summary_file] call[call[call[name[data]][constant[0]]][constant[summary]]][constant[metadata]] assign[=] name[metadata_file] if name[qsign_info] begin[:] call[call[call[name[data]][constant[0]]][constant[summary]]][constant[mixup_check]] assign[=] call[call[name[qsign_info]][constant[0]]][constant[out_dir]] call[name[out].append, parameter[name[data]]] variable[out] assign[=] call[name[_add_researcher_summary], parameter[name[out], name[summary_file]]] return[<ast.ListComp object at 0x7da1b1844d90>]
keyword[def] identifier[generate_parallel] ( identifier[samples] , identifier[run_parallel] ): literal[string] identifier[to_analyze] , identifier[extras] = identifier[_split_samples_by_qc] ( identifier[samples] ) identifier[qced] = identifier[run_parallel] ( literal[string] , identifier[to_analyze] ) identifier[samples] = identifier[_combine_qc_samples] ( identifier[qced] )+ identifier[extras] identifier[qsign_info] = identifier[run_parallel] ( literal[string] ,[ identifier[samples] ]) identifier[metadata_file] = identifier[_merge_metadata] ([ identifier[samples] ]) identifier[summary_file] = identifier[write_project_summary] ( identifier[samples] , identifier[qsign_info] ) identifier[out] =[] keyword[for] identifier[data] keyword[in] identifier[samples] : keyword[if] literal[string] keyword[not] keyword[in] identifier[data] [ literal[int] ]: identifier[data] [ literal[int] ][ literal[string] ]={} identifier[data] [ literal[int] ][ literal[string] ][ literal[string] ]= identifier[summary_file] identifier[data] [ literal[int] ][ literal[string] ][ literal[string] ]= identifier[metadata_file] keyword[if] identifier[qsign_info] : identifier[data] [ literal[int] ][ literal[string] ][ literal[string] ]= identifier[qsign_info] [ literal[int] ][ literal[string] ] identifier[out] . identifier[append] ( identifier[data] ) identifier[out] = identifier[_add_researcher_summary] ( identifier[out] , identifier[summary_file] ) keyword[return] [[ identifier[utils] . identifier[to_single_data] ( identifier[d] )] keyword[for] identifier[d] keyword[in] identifier[run_parallel] ( literal[string] ,[ identifier[out] ])]
def generate_parallel(samples, run_parallel): """Provide parallel preparation of summary information for alignment and variant calling. """ (to_analyze, extras) = _split_samples_by_qc(samples) qced = run_parallel('pipeline_summary', to_analyze) samples = _combine_qc_samples(qced) + extras qsign_info = run_parallel('qsignature_summary', [samples]) metadata_file = _merge_metadata([samples]) summary_file = write_project_summary(samples, qsign_info) out = [] for data in samples: if 'summary' not in data[0]: data[0]['summary'] = {} # depends on [control=['if'], data=[]] data[0]['summary']['project'] = summary_file data[0]['summary']['metadata'] = metadata_file if qsign_info: data[0]['summary']['mixup_check'] = qsign_info[0]['out_dir'] # depends on [control=['if'], data=[]] out.append(data) # depends on [control=['for'], data=['data']] out = _add_researcher_summary(out, summary_file) # MultiQC must be run after all file outputs are set: return [[utils.to_single_data(d)] for d in run_parallel('multiqc_summary', [out])]
def clean_perms(self):
    """Remove faulty permission-view rows left behind by FAB.

    Deletes every ``PermissionView`` whose ``permission`` or ``view_menu``
    reference is NULL, commits the deletion, and logs how many rows were
    removed (only when at least one was deleted).
    """
    self.log.debug('Cleaning faulty perms')
    sesh = self.get_session
    # Use SQLAlchemy's .is_(None) instead of `== None`: it emits the same
    # `IS NULL` SQL but is the idiomatic form and needs no NOQA suppression.
    pvms = (
        sesh.query(sqla_models.PermissionView)
        .filter(or_(
            sqla_models.PermissionView.permission.is_(None),
            sqla_models.PermissionView.view_menu.is_(None),
        ))
    )
    deleted_count = pvms.delete()
    sesh.commit()
    if deleted_count:
        self.log.info('Deleted %s faulty permissions', deleted_count)
def function[clean_perms, parameter[self]]: constant[ FAB leaves faulty permissions that need to be cleaned up ] call[name[self].log.debug, parameter[constant[Cleaning faulty perms]]] variable[sesh] assign[=] name[self].get_session variable[pvms] assign[=] call[call[name[sesh].query, parameter[name[sqla_models].PermissionView]].filter, parameter[call[name[or_], parameter[compare[name[sqla_models].PermissionView.permission equal[==] constant[None]], compare[name[sqla_models].PermissionView.view_menu equal[==] constant[None]]]]]] variable[deleted_count] assign[=] call[name[pvms].delete, parameter[]] call[name[sesh].commit, parameter[]] if name[deleted_count] begin[:] call[name[self].log.info, parameter[constant[Deleted %s faulty permissions], name[deleted_count]]]
keyword[def] identifier[clean_perms] ( identifier[self] ): literal[string] identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) identifier[sesh] = identifier[self] . identifier[get_session] identifier[pvms] =( identifier[sesh] . identifier[query] ( identifier[sqla_models] . identifier[PermissionView] ) . identifier[filter] ( identifier[or_] ( identifier[sqla_models] . identifier[PermissionView] . identifier[permission] == keyword[None] , identifier[sqla_models] . identifier[PermissionView] . identifier[view_menu] == keyword[None] , )) ) identifier[deleted_count] = identifier[pvms] . identifier[delete] () identifier[sesh] . identifier[commit] () keyword[if] identifier[deleted_count] : identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[deleted_count] )
def clean_perms(self): """ FAB leaves faulty permissions that need to be cleaned up """ self.log.debug('Cleaning faulty perms') sesh = self.get_session # NOQA # NOQA pvms = sesh.query(sqla_models.PermissionView).filter(or_(sqla_models.PermissionView.permission == None, sqla_models.PermissionView.view_menu == None)) deleted_count = pvms.delete() sesh.commit() if deleted_count: self.log.info('Deleted %s faulty permissions', deleted_count) # depends on [control=['if'], data=[]]
def update_experiment():
    '''Update the experiment status in config file'''
    experiment_config = Experiments()
    all_experiments = experiment_config.get_all_experiments()
    if not all_experiments:
        return None
    for expt_id, record in all_experiments.items():
        # Skip malformed entries and experiments already marked stopped.
        if not isinstance(record, dict) or record.get('status') == 'STOPPED':
            continue
        nni_config = Config(record['fileName'])
        rest_pid = nni_config.get_config('restServerPid')
        if not detect_process(rest_pid):
            # REST server process is gone: flag the experiment as stopped.
            experiment_config.update_experiment(expt_id, 'status', 'STOPPED')
            continue
        rest_port = nni_config.get_config('restServerPort')
        start_time, end_time = get_experiment_time(rest_port)
        if start_time:
            experiment_config.update_experiment(expt_id, 'startTime', start_time)
        if end_time:
            experiment_config.update_experiment(expt_id, 'endTime', end_time)
        status = get_experiment_status(rest_port)
        if status:
            experiment_config.update_experiment(expt_id, 'status', status)
def function[update_experiment, parameter[]]: constant[Update the experiment status in config file] variable[experiment_config] assign[=] call[name[Experiments], parameter[]] variable[experiment_dict] assign[=] call[name[experiment_config].get_all_experiments, parameter[]] if <ast.UnaryOp object at 0x7da18f58fa00> begin[:] return[constant[None]] for taget[name[key]] in starred[call[name[experiment_dict].keys, parameter[]]] begin[:] if call[name[isinstance], parameter[call[name[experiment_dict]][name[key]], name[dict]]] begin[:] if compare[call[call[name[experiment_dict]][name[key]].get, parameter[constant[status]]] not_equal[!=] constant[STOPPED]] begin[:] variable[nni_config] assign[=] call[name[Config], parameter[call[call[name[experiment_dict]][name[key]]][constant[fileName]]]] variable[rest_pid] assign[=] call[name[nni_config].get_config, parameter[constant[restServerPid]]] if <ast.UnaryOp object at 0x7da20cabee90> begin[:] call[name[experiment_config].update_experiment, parameter[name[key], constant[status], constant[STOPPED]]] continue variable[rest_port] assign[=] call[name[nni_config].get_config, parameter[constant[restServerPort]]] <ast.Tuple object at 0x7da20cabfc70> assign[=] call[name[get_experiment_time], parameter[name[rest_port]]] if name[startTime] begin[:] call[name[experiment_config].update_experiment, parameter[name[key], constant[startTime], name[startTime]]] if name[endTime] begin[:] call[name[experiment_config].update_experiment, parameter[name[key], constant[endTime], name[endTime]]] variable[status] assign[=] call[name[get_experiment_status], parameter[name[rest_port]]] if name[status] begin[:] call[name[experiment_config].update_experiment, parameter[name[key], constant[status], name[status]]]
keyword[def] identifier[update_experiment] (): literal[string] identifier[experiment_config] = identifier[Experiments] () identifier[experiment_dict] = identifier[experiment_config] . identifier[get_all_experiments] () keyword[if] keyword[not] identifier[experiment_dict] : keyword[return] keyword[None] keyword[for] identifier[key] keyword[in] identifier[experiment_dict] . identifier[keys] (): keyword[if] identifier[isinstance] ( identifier[experiment_dict] [ identifier[key] ], identifier[dict] ): keyword[if] identifier[experiment_dict] [ identifier[key] ]. identifier[get] ( literal[string] )!= literal[string] : identifier[nni_config] = identifier[Config] ( identifier[experiment_dict] [ identifier[key] ][ literal[string] ]) identifier[rest_pid] = identifier[nni_config] . identifier[get_config] ( literal[string] ) keyword[if] keyword[not] identifier[detect_process] ( identifier[rest_pid] ): identifier[experiment_config] . identifier[update_experiment] ( identifier[key] , literal[string] , literal[string] ) keyword[continue] identifier[rest_port] = identifier[nni_config] . identifier[get_config] ( literal[string] ) identifier[startTime] , identifier[endTime] = identifier[get_experiment_time] ( identifier[rest_port] ) keyword[if] identifier[startTime] : identifier[experiment_config] . identifier[update_experiment] ( identifier[key] , literal[string] , identifier[startTime] ) keyword[if] identifier[endTime] : identifier[experiment_config] . identifier[update_experiment] ( identifier[key] , literal[string] , identifier[endTime] ) identifier[status] = identifier[get_experiment_status] ( identifier[rest_port] ) keyword[if] identifier[status] : identifier[experiment_config] . identifier[update_experiment] ( identifier[key] , literal[string] , identifier[status] )
def update_experiment(): """Update the experiment status in config file""" experiment_config = Experiments() experiment_dict = experiment_config.get_all_experiments() if not experiment_dict: return None # depends on [control=['if'], data=[]] for key in experiment_dict.keys(): if isinstance(experiment_dict[key], dict): if experiment_dict[key].get('status') != 'STOPPED': nni_config = Config(experiment_dict[key]['fileName']) rest_pid = nni_config.get_config('restServerPid') if not detect_process(rest_pid): experiment_config.update_experiment(key, 'status', 'STOPPED') continue # depends on [control=['if'], data=[]] rest_port = nni_config.get_config('restServerPort') (startTime, endTime) = get_experiment_time(rest_port) if startTime: experiment_config.update_experiment(key, 'startTime', startTime) # depends on [control=['if'], data=[]] if endTime: experiment_config.update_experiment(key, 'endTime', endTime) # depends on [control=['if'], data=[]] status = get_experiment_status(rest_port) if status: experiment_config.update_experiment(key, 'status', status) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
def ColorfullyWriteLine(log: str, consoleColor: int = -1, writeToFile: bool = True, printToStdout: bool = True, logFile: str = None) -> None:
    """Write *log* followed by a newline via Logger.ColorfullyWrite.

    log: str.
    consoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`.
    writeToFile: bool.
    printToStdout: bool.
    logFile: str, log file path.
    ColorfullyWriteLine('Hello <Color=Green>Green</Color> !!!'), color name must be in Logger.ColorNames.
    """
    line = log + '\n'
    Logger.ColorfullyWrite(line, consoleColor, writeToFile, printToStdout, logFile)
def function[ColorfullyWriteLine, parameter[log, consoleColor, writeToFile, printToStdout, logFile]]: constant[ log: str. consoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`. writeToFile: bool. printToStdout: bool. logFile: str, log file path. ColorfullyWriteLine('Hello <Color=Green>Green</Color> !!!'), color name must be in Logger.ColorNames. ] call[name[Logger].ColorfullyWrite, parameter[binary_operation[name[log] + constant[ ]], name[consoleColor], name[writeToFile], name[printToStdout], name[logFile]]]
keyword[def] identifier[ColorfullyWriteLine] ( identifier[log] : identifier[str] , identifier[consoleColor] : identifier[int] =- literal[int] , identifier[writeToFile] : identifier[bool] = keyword[True] , identifier[printToStdout] : identifier[bool] = keyword[True] , identifier[logFile] : identifier[str] = keyword[None] )-> keyword[None] : literal[string] identifier[Logger] . identifier[ColorfullyWrite] ( identifier[log] + literal[string] , identifier[consoleColor] , identifier[writeToFile] , identifier[printToStdout] , identifier[logFile] )
def ColorfullyWriteLine(log: str, consoleColor: int=-1, writeToFile: bool=True, printToStdout: bool=True, logFile: str=None) -> None: """ log: str. consoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`. writeToFile: bool. printToStdout: bool. logFile: str, log file path. ColorfullyWriteLine('Hello <Color=Green>Green</Color> !!!'), color name must be in Logger.ColorNames. """ Logger.ColorfullyWrite(log + '\n', consoleColor, writeToFile, printToStdout, logFile)
def add_context(self, isc_name, isc_policy_id, isc_traffic_tag):
    """
    Create the VSS Context within the VSSContainer

    :param str isc_name: ISC name, possibly append policy name??
    :param str isc_policy_id: Policy ID in SMC (the 'key' attribute)
    :param str isc_traffic_tag: NSX groupId (serviceprofile-145)
    :raises CreateElementFailed: failed to create
    :return: VSSContext
    """
    if 'add_context' not in self.data.links:
        # SMC < 6.5: no dedicated add_context link, use the legacy factory.
        element = VSSContext.create(
            isc_name=isc_name,
            isc_policy_id=isc_policy_id,
            isc_traffic_tag=isc_traffic_tag,
            vss_container=self)
    else:
        # SMC >= 6.5: POST directly against the container's add_context link.
        payload = {
            'name': isc_name,
            'vc_isc': {
                'isc_name': isc_name,
                'isc_policy_id': isc_policy_id,
                'isc_traffic_tag': isc_traffic_tag
            }
        }
        element = ElementCreator(
            VSSContext,
            href=self.get_relation('add_context'),
            json=payload)
    # Drop the cached engine json: the virtualResources node hangs off it
    # and is now stale.
    self._del_cache()
    return element
def function[add_context, parameter[self, isc_name, isc_policy_id, isc_traffic_tag]]: constant[ Create the VSS Context within the VSSContainer :param str isc_name: ISC name, possibly append policy name?? :param str isc_policy_id: Policy ID in SMC (the 'key' attribute) :param str isc_traffic_tag: NSX groupId (serviceprofile-145) :raises CreateElementFailed: failed to create :return: VSSContext ] if compare[constant[add_context] in name[self].data.links] begin[:] variable[element] assign[=] call[name[ElementCreator], parameter[name[VSSContext]]] call[name[self]._del_cache, parameter[]] return[name[element]]
keyword[def] identifier[add_context] ( identifier[self] , identifier[isc_name] , identifier[isc_policy_id] , identifier[isc_traffic_tag] ): literal[string] keyword[if] literal[string] keyword[in] identifier[self] . identifier[data] . identifier[links] : identifier[element] = identifier[ElementCreator] ( identifier[VSSContext] , identifier[href] = identifier[self] . identifier[get_relation] ( literal[string] ), identifier[json] ={ literal[string] : identifier[isc_name] , literal[string] :{ literal[string] : identifier[isc_name] , literal[string] : identifier[isc_policy_id] , literal[string] : identifier[isc_traffic_tag] } }) keyword[else] : identifier[element] = identifier[VSSContext] . identifier[create] ( identifier[isc_name] = identifier[isc_name] , identifier[isc_policy_id] = identifier[isc_policy_id] , identifier[isc_traffic_tag] = identifier[isc_traffic_tag] , identifier[vss_container] = identifier[self] ) identifier[self] . identifier[_del_cache] () keyword[return] identifier[element]
def add_context(self, isc_name, isc_policy_id, isc_traffic_tag): """ Create the VSS Context within the VSSContainer :param str isc_name: ISC name, possibly append policy name?? :param str isc_policy_id: Policy ID in SMC (the 'key' attribute) :param str isc_traffic_tag: NSX groupId (serviceprofile-145) :raises CreateElementFailed: failed to create :return: VSSContext """ if 'add_context' in self.data.links: # SMC >=6.5 element = ElementCreator(VSSContext, href=self.get_relation('add_context'), json={'name': isc_name, 'vc_isc': {'isc_name': isc_name, 'isc_policy_id': isc_policy_id, 'isc_traffic_tag': isc_traffic_tag}}) # depends on [control=['if'], data=[]] else: # SMC < 6.5 element = VSSContext.create(isc_name=isc_name, isc_policy_id=isc_policy_id, isc_traffic_tag=isc_traffic_tag, vss_container=self) # Delete cache since the virtualResources node is attached to # the engine json self._del_cache() return element
def where(cls, condition, istrue, isfalse, dtype=None):
    """
    Selects elements from either istrue or isfalse depending on the value
    of the condition SArray.

    Parameters
    ----------
    condition : SArray
        An SArray of values such that for each value, if non-zero, yields a
        value from istrue, otherwise from isfalse.

    istrue : SArray or constant
        The elements selected if condition is true. If istrue is an SArray,
        this must be of the same length as condition.

    isfalse : SArray or constant
        The elements selected if condition is false. If istrue is an SArray,
        this must be of the same length as condition.

    dtype : type
        The type of result SArray. This is required if both istrue and isfalse
        are constants of ambiguous types.

    Examples
    --------

    Returns an SArray with the same values as g with values above 10 clipped
    to 10

    >>> g = SArray([6,7,8,9,10,11,12,13])
    >>> SArray.where(g > 10, 10, g)
    dtype: int
    Rows: 8
    [6, 7, 8, 9, 10, 10, 10, 10]

    Returns an SArray with the same values as g with values below 10 clipped
    to 10

    >>> SArray.where(g > 10, g, 10)
    dtype: int
    Rows: 8
    [10, 10, 10, 10, 10, 11, 12, 13]

    Returns an SArray with the same values of g with all values == 1 replaced
    by None

    >>> g = SArray([1,2,3,4,1,2,3,4])
    >>> SArray.where(g == 1, None, g)
    dtype: int
    Rows: 8
    [None, 2, 3, 4, None, 2, 3, 4]

    Returns an SArray with the same values of g, but with each missing value
    replaced by its corresponding element in replace_none

    >>> g = SArray([1,2,None,None])
    >>> replace_none = SArray([3,3,2,2])
    >>> SArray.where(g != None, g, replace_none)
    dtype: int
    Rows: 4
    [1, 2, 2, 2]
    """
    true_is_sarray = isinstance(istrue, SArray)
    false_is_sarray = isinstance(isfalse, SArray)
    # Promote a constant branch to a constant SArray, borrowing the dtype
    # of the SArray branch.
    if not true_is_sarray and false_is_sarray:
        istrue = cls(_proxy=condition.__proxy__.to_const(istrue, isfalse.dtype))
    if true_is_sarray and not false_is_sarray:
        isfalse = cls(_proxy=condition.__proxy__.to_const(isfalse, istrue.dtype))
    if not true_is_sarray and not false_is_sarray:
        if dtype is None:
            # BUG FIX: previously, when both branches were None the first
            # branch inferred dtype = type(None) (NoneType), which made the
            # intended "Both true and false are None" TypeError unreachable.
            # Check the both-None case explicitly first.
            if istrue is None and isfalse is None:
                raise TypeError("Both true and false are None. Resultant type cannot be inferred.")
            elif istrue is None:
                dtype = type(isfalse)
            elif isfalse is None:
                dtype = type(istrue)
            elif type(istrue) != type(isfalse):
                raise TypeError("true and false inputs are of different types")
            else:
                dtype = type(istrue)
        istrue = cls(_proxy=condition.__proxy__.to_const(istrue, dtype))
        isfalse = cls(_proxy=condition.__proxy__.to_const(isfalse, dtype))
    # Element-wise select driven by the (non-)zero-ness of condition.
    return cls(_proxy=condition.__proxy__.ternary_operator(istrue.__proxy__,
                                                           isfalse.__proxy__))
def function[where, parameter[cls, condition, istrue, isfalse, dtype]]: constant[ Selects elements from either istrue or isfalse depending on the value of the condition SArray. Parameters ---------- condition : SArray An SArray of values such that for each value, if non-zero, yields a value from istrue, otherwise from isfalse. istrue : SArray or constant The elements selected if condition is true. If istrue is an SArray, this must be of the same length as condition. isfalse : SArray or constant The elements selected if condition is false. If istrue is an SArray, this must be of the same length as condition. dtype : type The type of result SArray. This is required if both istrue and isfalse are constants of ambiguous types. Examples -------- Returns an SArray with the same values as g with values above 10 clipped to 10 >>> g = SArray([6,7,8,9,10,11,12,13]) >>> SArray.where(g > 10, 10, g) dtype: int Rows: 8 [6, 7, 8, 9, 10, 10, 10, 10] Returns an SArray with the same values as g with values below 10 clipped to 10 >>> SArray.where(g > 10, g, 10) dtype: int Rows: 8 [10, 10, 10, 10, 10, 11, 12, 13] Returns an SArray with the same values of g with all values == 1 replaced by None >>> g = SArray([1,2,3,4,1,2,3,4]) >>> SArray.where(g == 1, None, g) dtype: int Rows: 8 [None, 2, 3, 4, None, 2, 3, 4] Returns an SArray with the same values of g, but with each missing value replaced by its corresponding element in replace_none >>> g = SArray([1,2,None,None]) >>> replace_none = SArray([3,3,2,2]) >>> SArray.where(g != None, g, replace_none) dtype: int Rows: 4 [1, 2, 2, 2] ] variable[true_is_sarray] assign[=] call[name[isinstance], parameter[name[istrue], name[SArray]]] variable[false_is_sarray] assign[=] call[name[isinstance], parameter[name[isfalse], name[SArray]]] if <ast.BoolOp object at 0x7da1b1f65210> begin[:] variable[istrue] assign[=] call[name[cls], parameter[]] if <ast.BoolOp object at 0x7da1b1f670d0> begin[:] variable[isfalse] assign[=] call[name[cls], parameter[]] if 
<ast.BoolOp object at 0x7da1b1f8f130> begin[:] if compare[name[dtype] is constant[None]] begin[:] if compare[name[istrue] is constant[None]] begin[:] variable[dtype] assign[=] call[name[type], parameter[name[isfalse]]] if compare[name[dtype] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1f8c0d0> variable[istrue] assign[=] call[name[cls], parameter[]] variable[isfalse] assign[=] call[name[cls], parameter[]] return[call[name[cls], parameter[]]]
keyword[def] identifier[where] ( identifier[cls] , identifier[condition] , identifier[istrue] , identifier[isfalse] , identifier[dtype] = keyword[None] ): literal[string] identifier[true_is_sarray] = identifier[isinstance] ( identifier[istrue] , identifier[SArray] ) identifier[false_is_sarray] = identifier[isinstance] ( identifier[isfalse] , identifier[SArray] ) keyword[if] keyword[not] identifier[true_is_sarray] keyword[and] identifier[false_is_sarray] : identifier[istrue] = identifier[cls] ( identifier[_proxy] = identifier[condition] . identifier[__proxy__] . identifier[to_const] ( identifier[istrue] , identifier[isfalse] . identifier[dtype] )) keyword[if] identifier[true_is_sarray] keyword[and] keyword[not] identifier[false_is_sarray] : identifier[isfalse] = identifier[cls] ( identifier[_proxy] = identifier[condition] . identifier[__proxy__] . identifier[to_const] ( identifier[isfalse] , identifier[istrue] . identifier[dtype] )) keyword[if] keyword[not] identifier[true_is_sarray] keyword[and] keyword[not] identifier[false_is_sarray] : keyword[if] identifier[dtype] keyword[is] keyword[None] : keyword[if] identifier[istrue] keyword[is] keyword[None] : identifier[dtype] = identifier[type] ( identifier[isfalse] ) keyword[elif] identifier[isfalse] keyword[is] keyword[None] : identifier[dtype] = identifier[type] ( identifier[istrue] ) keyword[elif] identifier[type] ( identifier[istrue] )!= identifier[type] ( identifier[isfalse] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[elif] identifier[type] ( identifier[istrue] )== identifier[type] ( identifier[isfalse] ): identifier[dtype] = identifier[type] ( identifier[istrue] ) keyword[if] identifier[dtype] keyword[is] keyword[None] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[istrue] = identifier[cls] ( identifier[_proxy] = identifier[condition] . identifier[__proxy__] . 
identifier[to_const] ( identifier[istrue] , identifier[dtype] )) identifier[isfalse] = identifier[cls] ( identifier[_proxy] = identifier[condition] . identifier[__proxy__] . identifier[to_const] ( identifier[isfalse] , identifier[dtype] )) keyword[return] identifier[cls] ( identifier[_proxy] = identifier[condition] . identifier[__proxy__] . identifier[ternary_operator] ( identifier[istrue] . identifier[__proxy__] , identifier[isfalse] . identifier[__proxy__] ))
def where(cls, condition, istrue, isfalse, dtype=None): """ Selects elements from either istrue or isfalse depending on the value of the condition SArray. Parameters ---------- condition : SArray An SArray of values such that for each value, if non-zero, yields a value from istrue, otherwise from isfalse. istrue : SArray or constant The elements selected if condition is true. If istrue is an SArray, this must be of the same length as condition. isfalse : SArray or constant The elements selected if condition is false. If istrue is an SArray, this must be of the same length as condition. dtype : type The type of result SArray. This is required if both istrue and isfalse are constants of ambiguous types. Examples -------- Returns an SArray with the same values as g with values above 10 clipped to 10 >>> g = SArray([6,7,8,9,10,11,12,13]) >>> SArray.where(g > 10, 10, g) dtype: int Rows: 8 [6, 7, 8, 9, 10, 10, 10, 10] Returns an SArray with the same values as g with values below 10 clipped to 10 >>> SArray.where(g > 10, g, 10) dtype: int Rows: 8 [10, 10, 10, 10, 10, 11, 12, 13] Returns an SArray with the same values of g with all values == 1 replaced by None >>> g = SArray([1,2,3,4,1,2,3,4]) >>> SArray.where(g == 1, None, g) dtype: int Rows: 8 [None, 2, 3, 4, None, 2, 3, 4] Returns an SArray with the same values of g, but with each missing value replaced by its corresponding element in replace_none >>> g = SArray([1,2,None,None]) >>> replace_none = SArray([3,3,2,2]) >>> SArray.where(g != None, g, replace_none) dtype: int Rows: 4 [1, 2, 2, 2] """ true_is_sarray = isinstance(istrue, SArray) false_is_sarray = isinstance(isfalse, SArray) if not true_is_sarray and false_is_sarray: istrue = cls(_proxy=condition.__proxy__.to_const(istrue, isfalse.dtype)) # depends on [control=['if'], data=[]] if true_is_sarray and (not false_is_sarray): isfalse = cls(_proxy=condition.__proxy__.to_const(isfalse, istrue.dtype)) # depends on [control=['if'], data=[]] if not true_is_sarray and (not 
false_is_sarray): if dtype is None: if istrue is None: dtype = type(isfalse) # depends on [control=['if'], data=[]] elif isfalse is None: dtype = type(istrue) # depends on [control=['if'], data=[]] elif type(istrue) != type(isfalse): raise TypeError('true and false inputs are of different types') # depends on [control=['if'], data=[]] elif type(istrue) == type(isfalse): dtype = type(istrue) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['dtype']] if dtype is None: raise TypeError('Both true and false are None. Resultant type cannot be inferred.') # depends on [control=['if'], data=[]] istrue = cls(_proxy=condition.__proxy__.to_const(istrue, dtype)) isfalse = cls(_proxy=condition.__proxy__.to_const(isfalse, dtype)) # depends on [control=['if'], data=[]] return cls(_proxy=condition.__proxy__.ternary_operator(istrue.__proxy__, isfalse.__proxy__))
def _get_blocks_containing_index(self, axis, index):
    """Convert a global index to a block index and local index.

    Note: This method is primarily used to convert a global index into a
        partition index (along the axis provided) and local index (useful
        for `iloc` or similar operations.

    Args:
        axis: The axis along which to get the indices
            (0 - columns, 1 - rows)
        index: The global index to convert.

    Returns:
        A tuple containing (block index and internal index).
    """
    # Both axes use the same digitize-and-clamp logic; only the block
    # dimension list differs.
    if not axis:
        ErrorMessage.catch_bugs_and_request_email(index > sum(self.block_widths))
        dims = self.block_widths
    else:
        ErrorMessage.catch_bugs_and_request_email(index > sum(self.block_lengths))
        dims = self.block_lengths
    cumulative = np.array(dims).cumsum()
    block_idx = int(np.digitize(index, cumulative))
    # FIX: the original only clamped the out-of-range digitize result on
    # the column axis; an index equal to the total length on the row axis
    # produced a block index one past the end. Clamp on both axes.
    if block_idx == len(cumulative):
        block_idx -= 1
    # Compute the internal index based on the previous lengths. This
    # is a global index, so we must subtract the lengths first.
    internal_idx = (
        index if not block_idx else index - cumulative[block_idx - 1]
    )
    return block_idx, internal_idx
def function[_get_blocks_containing_index, parameter[self, axis, index]]: constant[Convert a global index to a block index and local index. Note: This method is primarily used to convert a global index into a partition index (along the axis provided) and local index (useful for `iloc` or similar operations. Args: axis: The axis along which to get the indices (0 - columns, 1 - rows) index: The global index to convert. Returns: A tuple containing (block index and internal index). ] if <ast.UnaryOp object at 0x7da20e74ad40> begin[:] call[name[ErrorMessage].catch_bugs_and_request_email, parameter[compare[name[index] greater[>] call[name[sum], parameter[name[self].block_widths]]]]] variable[cumulative_column_widths] assign[=] call[call[name[np].array, parameter[name[self].block_widths]].cumsum, parameter[]] variable[block_idx] assign[=] call[name[int], parameter[call[name[np].digitize, parameter[name[index], name[cumulative_column_widths]]]]] if compare[name[block_idx] equal[==] call[name[len], parameter[name[cumulative_column_widths]]]] begin[:] <ast.AugAssign object at 0x7da18f723340> variable[internal_idx] assign[=] <ast.IfExp object at 0x7da18f720400> return[tuple[[<ast.Name object at 0x7da18f721000>, <ast.Name object at 0x7da18f722cb0>]]]
keyword[def] identifier[_get_blocks_containing_index] ( identifier[self] , identifier[axis] , identifier[index] ): literal[string] keyword[if] keyword[not] identifier[axis] : identifier[ErrorMessage] . identifier[catch_bugs_and_request_email] ( identifier[index] > identifier[sum] ( identifier[self] . identifier[block_widths] )) identifier[cumulative_column_widths] = identifier[np] . identifier[array] ( identifier[self] . identifier[block_widths] ). identifier[cumsum] () identifier[block_idx] = identifier[int] ( identifier[np] . identifier[digitize] ( identifier[index] , identifier[cumulative_column_widths] )) keyword[if] identifier[block_idx] == identifier[len] ( identifier[cumulative_column_widths] ): identifier[block_idx] -= literal[int] identifier[internal_idx] =( identifier[index] keyword[if] keyword[not] identifier[block_idx] keyword[else] identifier[index] - identifier[cumulative_column_widths] [ identifier[block_idx] - literal[int] ] ) keyword[else] : identifier[ErrorMessage] . identifier[catch_bugs_and_request_email] ( identifier[index] > identifier[sum] ( identifier[self] . identifier[block_lengths] )) identifier[cumulative_row_lengths] = identifier[np] . identifier[array] ( identifier[self] . identifier[block_lengths] ). identifier[cumsum] () identifier[block_idx] = identifier[int] ( identifier[np] . identifier[digitize] ( identifier[index] , identifier[cumulative_row_lengths] )) identifier[internal_idx] =( identifier[index] keyword[if] keyword[not] identifier[block_idx] keyword[else] identifier[index] - identifier[cumulative_row_lengths] [ identifier[block_idx] - literal[int] ] ) keyword[return] identifier[block_idx] , identifier[internal_idx]
def _get_blocks_containing_index(self, axis, index): """Convert a global index to a block index and local index. Note: This method is primarily used to convert a global index into a partition index (along the axis provided) and local index (useful for `iloc` or similar operations. Args: axis: The axis along which to get the indices (0 - columns, 1 - rows) index: The global index to convert. Returns: A tuple containing (block index and internal index). """ if not axis: ErrorMessage.catch_bugs_and_request_email(index > sum(self.block_widths)) cumulative_column_widths = np.array(self.block_widths).cumsum() block_idx = int(np.digitize(index, cumulative_column_widths)) if block_idx == len(cumulative_column_widths): block_idx -= 1 # depends on [control=['if'], data=['block_idx']] # Compute the internal index based on the previous lengths. This # is a global index, so we must subtract the lengths first. internal_idx = index if not block_idx else index - cumulative_column_widths[block_idx - 1] # depends on [control=['if'], data=[]] else: ErrorMessage.catch_bugs_and_request_email(index > sum(self.block_lengths)) cumulative_row_lengths = np.array(self.block_lengths).cumsum() block_idx = int(np.digitize(index, cumulative_row_lengths)) # See note above about internal index internal_idx = index if not block_idx else index - cumulative_row_lengths[block_idx - 1] return (block_idx, internal_idx)
def snyder_opt(self, structure):
    """
    Calculates Snyder's optical sound velocity (in SI units)

    Args:
        structure: pymatgen structure object

    Returns:
        Snyder's optical sound velocity (in SI units)
    """
    site_count = structure.num_sites
    # Number density in atoms per m^3 (structure volume is in A^3,
    # hence the 1e30 conversion factor).
    number_density = 1e30 * site_count / structure.volume
    # Average of one longitudinal and two transverse branches.
    mean_velocity = (self.long_v(structure) + 2. * self.trans_v(structure)) / 3.
    return (
        1.66914e-23
        * mean_velocity
        / number_density ** (-2. / 3.)
        * (1 - site_count ** (-1. / 3.))
    )
def function[snyder_opt, parameter[self, structure]]: constant[ Calculates Snyder's optical sound velocity (in SI units) Args: structure: pymatgen structure object Returns: Snyder's optical sound velocity (in SI units) ] variable[nsites] assign[=] name[structure].num_sites variable[volume] assign[=] name[structure].volume variable[num_density] assign[=] binary_operation[binary_operation[constant[1e+30] * name[nsites]] / name[volume]] return[binary_operation[binary_operation[binary_operation[binary_operation[constant[1.66914e-23] * binary_operation[call[name[self].long_v, parameter[name[structure]]] + binary_operation[constant[2.0] * call[name[self].trans_v, parameter[name[structure]]]]]] / constant[3.0]] / binary_operation[name[num_density] ** binary_operation[<ast.UnaryOp object at 0x7da18f721630> / constant[3.0]]]] * binary_operation[constant[1] - binary_operation[name[nsites] ** binary_operation[<ast.UnaryOp object at 0x7da18f720cd0> / constant[3.0]]]]]]
keyword[def] identifier[snyder_opt] ( identifier[self] , identifier[structure] ): literal[string] identifier[nsites] = identifier[structure] . identifier[num_sites] identifier[volume] = identifier[structure] . identifier[volume] identifier[num_density] = literal[int] * identifier[nsites] / identifier[volume] keyword[return] literal[int] *( identifier[self] . identifier[long_v] ( identifier[structure] )+ literal[int] * identifier[self] . identifier[trans_v] ( identifier[structure] ))/ literal[int] / identifier[num_density] **(- literal[int] / literal[int] )*( literal[int] - identifier[nsites] **(- literal[int] / literal[int] ))
def snyder_opt(self, structure): """ Calculates Snyder's optical sound velocity (in SI units) Args: structure: pymatgen structure object Returns: Snyder's optical sound velocity (in SI units) """ nsites = structure.num_sites volume = structure.volume num_density = 1e+30 * nsites / volume return 1.66914e-23 * (self.long_v(structure) + 2.0 * self.trans_v(structure)) / 3.0 / num_density ** (-2.0 / 3.0) * (1 - nsites ** (-1.0 / 3.0))
def list_local_files(self):
    """Returns list of all stored local files.

    Each element of this list is of
    :class:`DataStore.FileInfoEntry` type. The list is empty when no
    local store is configured.
    """
    if not self.local_store:
        return []
    return list(self.local_store.list_files())
def function[list_local_files, parameter[self]]: constant[Returns list of all stored local files. Each element of this list is of :class:`DataStore.FileInfoEntry` type. ] variable[result] assign[=] list[[]] if name[self].local_store begin[:] call[name[result].extend, parameter[call[name[self].local_store.list_files, parameter[]]]] return[name[result]]
keyword[def] identifier[list_local_files] ( identifier[self] ): literal[string] identifier[result] =[] keyword[if] identifier[self] . identifier[local_store] : identifier[result] . identifier[extend] ( identifier[self] . identifier[local_store] . identifier[list_files] ()) keyword[return] identifier[result]
def list_local_files(self): """Returns list of all stored local files. Each element of this list is of :class:`DataStore.FileInfoEntry` type. """ result = [] if self.local_store: result.extend(self.local_store.list_files()) # depends on [control=['if'], data=[]] return result
def normalize_dictionary_values(dictionary):
    """
    Normalizes the values in a dictionary recursively, in place.

    Nested dictionaries are normalized recursively, lists are replaced by
    shallow copies, and every other value is passed through
    ``normalize_value``. The input dictionary is mutated and also returned
    for convenience.
    """
    # FIX: ``items()`` instead of the Python-2-only ``iteritems()`` so the
    # function works on both Python 2 and Python 3. Only existing keys are
    # reassigned during iteration, so in-place iteration is safe.
    for key, val in dictionary.items():
        if isinstance(val, dict):
            dictionary[key] = normalize_dictionary_values(val)
        elif isinstance(val, list):
            dictionary[key] = list(val)
        else:
            dictionary[key] = normalize_value(val)
    return dictionary
def function[normalize_dictionary_values, parameter[dictionary]]: constant[ Normalizes the values in a dictionary recursivly. ] for taget[tuple[[<ast.Name object at 0x7da1b2345ab0>, <ast.Name object at 0x7da1b2345f30>]]] in starred[call[name[dictionary].iteritems, parameter[]]] begin[:] if call[name[isinstance], parameter[name[val], name[dict]]] begin[:] call[name[dictionary]][name[key]] assign[=] call[name[normalize_dictionary_values], parameter[name[val]]] return[name[dictionary]]
keyword[def] identifier[normalize_dictionary_values] ( identifier[dictionary] ): literal[string] keyword[for] identifier[key] , identifier[val] keyword[in] identifier[dictionary] . identifier[iteritems] (): keyword[if] identifier[isinstance] ( identifier[val] , identifier[dict] ): identifier[dictionary] [ identifier[key] ]= identifier[normalize_dictionary_values] ( identifier[val] ) keyword[elif] identifier[isinstance] ( identifier[val] , identifier[list] ): identifier[dictionary] [ identifier[key] ]= identifier[list] ( identifier[val] ) keyword[else] : identifier[dictionary] [ identifier[key] ]= identifier[normalize_value] ( identifier[val] ) keyword[return] identifier[dictionary]
def normalize_dictionary_values(dictionary): """ Normalizes the values in a dictionary recursivly. """ for (key, val) in dictionary.iteritems(): if isinstance(val, dict): dictionary[key] = normalize_dictionary_values(val) # depends on [control=['if'], data=[]] elif isinstance(val, list): dictionary[key] = list(val) # depends on [control=['if'], data=[]] else: dictionary[key] = normalize_value(val) # depends on [control=['for'], data=[]] return dictionary
def analyze_feature(raw_datasets, feature, basename="aspect_ratios"):
    """
    Apply ``feature`` to all recordings in ``raw_datasets``. Store the
    results in two files. One file stores the raw result, the other one
    groups the results by symbols and stores the mean, standard deviation
    and the name of the symbol as a csv file.

    Parameters
    ----------
    raw_datasets : List of dictionaries
        Each dictionary is a raw_dataset.
    feature : An instance of the feature class type
        The `feature` which gets analyzed on `raw_datasets`.
    basename : string
        Name for the file in which the data gets written.
    """
    # Prepare files
    csv_path = dam.prepare_file(basename + '.csv')
    raw_path = dam.prepare_file(basename + '.raw')

    print_data = []
    # FIX: use context managers so both files are always closed; the
    # original leaked the .raw file handle (it was never closed).
    with open(csv_path, 'a') as csv_file, open(raw_path, 'a') as raw_file:
        csv_file.write("label,mean,std\n")  # Write header
        raw_file.write("latex,raw_data_id,value\n")  # Write header

        for _, datasets in dam.sort_by_formula_id(raw_datasets).items():
            values = []
            for data in datasets:
                # feature() returns a sequence; only its first element is
                # analyzed here.
                value = feature(data)[0]
                values.append(value)
                raw_file.write("%s,%i,%0.2f\n" % (datasets[0].formula_in_latex,
                                                  data.raw_data_id,
                                                  value))
            label = filter_label(datasets[0].formula_in_latex)
            print_data.append((label, numpy.mean(values), numpy.std(values)))

        # Sort the data by highest mean, descending
        print_data = sorted(print_data, key=lambda n: n[1], reverse=True)

        # Write data to file
        for label, mean, std in print_data:
            csv_file.write("%s,%0.2f,%0.2f\n" % (label, mean, std))
def function[analyze_feature, parameter[raw_datasets, feature, basename]]: constant[ Apply ``feature`` to all recordings in ``raw_datasets``. Store the results in two files. One file stores the raw result, the other one groups the results by symbols and stores the mean, standard deviation and the name of the symbol as a csv file. Parameters ---------- raw_datasets : List of dictionaries Each dictionary is a raw_dataset. feature : An instance of the feature class type The `feature` which gets analyzed on `raw_datasets`. basename : string Name for the file in which the data gets written. ] variable[csv_file] assign[=] call[name[dam].prepare_file, parameter[binary_operation[name[basename] + constant[.csv]]]] variable[raw_file] assign[=] call[name[dam].prepare_file, parameter[binary_operation[name[basename] + constant[.raw]]]] variable[csv_file] assign[=] call[name[open], parameter[name[csv_file], constant[a]]] variable[raw_file] assign[=] call[name[open], parameter[name[raw_file], constant[a]]] call[name[csv_file].write, parameter[constant[label,mean,std ]]] call[name[raw_file].write, parameter[constant[latex,raw_data_id,value ]]] variable[print_data] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b28f04f0>, <ast.Name object at 0x7da1b28f2bf0>]]] in starred[call[call[name[dam].sort_by_formula_id, parameter[name[raw_datasets]]].items, parameter[]]] begin[:] variable[values] assign[=] list[[]] for taget[name[data]] in starred[name[datasets]] begin[:] variable[value] assign[=] call[call[name[feature], parameter[name[data]]]][constant[0]] call[name[values].append, parameter[name[value]]] call[name[raw_file].write, parameter[binary_operation[constant[%s,%i,%0.2f ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b28f31c0>, <ast.Attribute object at 0x7da1b28f1cf0>, <ast.Name object at 0x7da1b28f1f60>]]]]] variable[label] assign[=] call[name[filter_label], parameter[call[name[datasets]][constant[0]].formula_in_latex]] 
call[name[print_data].append, parameter[tuple[[<ast.Name object at 0x7da1b28f1360>, <ast.Call object at 0x7da1b28f0130>, <ast.Call object at 0x7da1b28ae2c0>]]]] variable[print_data] assign[=] call[name[sorted], parameter[name[print_data]]] for taget[tuple[[<ast.Name object at 0x7da1b28c67d0>, <ast.Name object at 0x7da1b28c6200>, <ast.Name object at 0x7da1b28c5fc0>]]] in starred[name[print_data]] begin[:] call[name[csv_file].write, parameter[binary_operation[constant[%s,%0.2f,%0.2f ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b28c6140>, <ast.Name object at 0x7da1b28c4df0>, <ast.Name object at 0x7da1b28c64d0>]]]]] call[name[csv_file].close, parameter[]]
keyword[def] identifier[analyze_feature] ( identifier[raw_datasets] , identifier[feature] , identifier[basename] = literal[string] ): literal[string] identifier[csv_file] = identifier[dam] . identifier[prepare_file] ( identifier[basename] + literal[string] ) identifier[raw_file] = identifier[dam] . identifier[prepare_file] ( identifier[basename] + literal[string] ) identifier[csv_file] = identifier[open] ( identifier[csv_file] , literal[string] ) identifier[raw_file] = identifier[open] ( identifier[raw_file] , literal[string] ) identifier[csv_file] . identifier[write] ( literal[string] ) identifier[raw_file] . identifier[write] ( literal[string] ) identifier[print_data] =[] keyword[for] identifier[_] , identifier[datasets] keyword[in] identifier[dam] . identifier[sort_by_formula_id] ( identifier[raw_datasets] ). identifier[items] (): identifier[values] =[] keyword[for] identifier[data] keyword[in] identifier[datasets] : identifier[value] = identifier[feature] ( identifier[data] )[ literal[int] ] identifier[values] . identifier[append] ( identifier[value] ) identifier[raw_file] . identifier[write] ( literal[string] %( identifier[datasets] [ literal[int] ]. identifier[formula_in_latex] , identifier[data] . identifier[raw_data_id] , identifier[value] )) identifier[label] = identifier[filter_label] ( identifier[datasets] [ literal[int] ]. identifier[formula_in_latex] ) identifier[print_data] . identifier[append] (( identifier[label] , identifier[numpy] . identifier[mean] ( identifier[values] ), identifier[numpy] . identifier[std] ( identifier[values] ))) identifier[print_data] = identifier[sorted] ( identifier[print_data] , identifier[key] = keyword[lambda] identifier[n] : identifier[n] [ literal[int] ], identifier[reverse] = keyword[True] ) keyword[for] identifier[label] , identifier[mean] , identifier[std] keyword[in] identifier[print_data] : identifier[csv_file] . 
identifier[write] ( literal[string] %( identifier[label] , identifier[mean] , identifier[std] )) identifier[csv_file] . identifier[close] ()
def analyze_feature(raw_datasets, feature, basename='aspect_ratios'): """ Apply ``feature`` to all recordings in ``raw_datasets``. Store the results in two files. One file stores the raw result, the other one groups the results by symbols and stores the mean, standard deviation and the name of the symbol as a csv file. Parameters ---------- raw_datasets : List of dictionaries Each dictionary is a raw_dataset. feature : An instance of the feature class type The `feature` which gets analyzed on `raw_datasets`. basename : string Name for the file in which the data gets written. """ # Prepare files csv_file = dam.prepare_file(basename + '.csv') raw_file = dam.prepare_file(basename + '.raw') csv_file = open(csv_file, 'a') raw_file = open(raw_file, 'a') csv_file.write('label,mean,std\n') # Write header raw_file.write('latex,raw_data_id,value\n') # Write header print_data = [] for (_, datasets) in dam.sort_by_formula_id(raw_datasets).items(): values = [] for data in datasets: value = feature(data)[0] values.append(value) raw_file.write('%s,%i,%0.2f\n' % (datasets[0].formula_in_latex, data.raw_data_id, value)) # depends on [control=['for'], data=['data']] label = filter_label(datasets[0].formula_in_latex) print_data.append((label, numpy.mean(values), numpy.std(values))) # depends on [control=['for'], data=[]] # Sort the data by highest mean, descending print_data = sorted(print_data, key=lambda n: n[1], reverse=True) # Write data to file for (label, mean, std) in print_data: csv_file.write('%s,%0.2f,%0.2f\n' % (label, mean, std)) # depends on [control=['for'], data=[]] csv_file.close()
def add(self, items):
    '''add a submenu'''
    # Accept either a single item or a list of items.
    menu_items = items if isinstance(items, list) else [items]
    for candidate in menu_items:
        replaced = False
        # Replace every existing entry sharing the candidate's name
        # (no early break, matching the multi-replace behavior).
        for idx, existing in enumerate(self.items):
            if existing.name == candidate.name:
                self.items[idx] = candidate
                replaced = True
        if not replaced:
            self.items.append(candidate)
def function[add, parameter[self, items]]: constant[add a submenu] if <ast.UnaryOp object at 0x7da1b17de1d0> begin[:] variable[items] assign[=] list[[<ast.Name object at 0x7da1b17dd870>]] for taget[name[m]] in starred[name[items]] begin[:] variable[updated] assign[=] constant[False] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].items]]]]] begin[:] if compare[call[name[self].items][name[i]].name equal[==] name[m].name] begin[:] call[name[self].items][name[i]] assign[=] name[m] variable[updated] assign[=] constant[True] if <ast.UnaryOp object at 0x7da1b17dde70> begin[:] call[name[self].items.append, parameter[name[m]]]
keyword[def] identifier[add] ( identifier[self] , identifier[items] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[items] , identifier[list] ): identifier[items] =[ identifier[items] ] keyword[for] identifier[m] keyword[in] identifier[items] : identifier[updated] = keyword[False] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[items] )): keyword[if] identifier[self] . identifier[items] [ identifier[i] ]. identifier[name] == identifier[m] . identifier[name] : identifier[self] . identifier[items] [ identifier[i] ]= identifier[m] identifier[updated] = keyword[True] keyword[if] keyword[not] identifier[updated] : identifier[self] . identifier[items] . identifier[append] ( identifier[m] )
def add(self, items): """add a submenu""" if not isinstance(items, list): items = [items] # depends on [control=['if'], data=[]] for m in items: updated = False for i in range(len(self.items)): if self.items[i].name == m.name: self.items[i] = m updated = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] if not updated: self.items.append(m) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']]
def RIBVRFRouteLimitExceeded_originator_switch_info_switchIdentifier(self, **kwargs):
    """Auto Generated Code
    """
    # Build the XML tree:
    #   config -> RIBVRFRouteLimitExceeded -> originator-switch-info
    #          -> switchIdentifier (leaf populated from kwargs)
    config = ET.Element("config")
    notification = ET.SubElement(
        config,
        "RIBVRFRouteLimitExceeded",
        xmlns="http://brocade.com/ns/brocade-notification-stream",
    )
    switch_info = ET.SubElement(notification, "originator-switch-info")
    identifier_leaf = ET.SubElement(switch_info, "switchIdentifier")
    identifier_leaf.text = kwargs.pop('switchIdentifier')

    # Dispatch through the caller-supplied callback, defaulting to the
    # instance's configured one.
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def function[RIBVRFRouteLimitExceeded_originator_switch_info_switchIdentifier, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[RIBVRFRouteLimitExceeded] assign[=] call[name[ET].SubElement, parameter[name[config], constant[RIBVRFRouteLimitExceeded]]] variable[originator_switch_info] assign[=] call[name[ET].SubElement, parameter[name[RIBVRFRouteLimitExceeded], constant[originator-switch-info]]] variable[switchIdentifier] assign[=] call[name[ET].SubElement, parameter[name[originator_switch_info], constant[switchIdentifier]]] name[switchIdentifier].text assign[=] call[name[kwargs].pop, parameter[constant[switchIdentifier]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[RIBVRFRouteLimitExceeded_originator_switch_info_switchIdentifier] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[RIBVRFRouteLimitExceeded] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[originator_switch_info] = identifier[ET] . identifier[SubElement] ( identifier[RIBVRFRouteLimitExceeded] , literal[string] ) identifier[switchIdentifier] = identifier[ET] . identifier[SubElement] ( identifier[originator_switch_info] , literal[string] ) identifier[switchIdentifier] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def RIBVRFRouteLimitExceeded_originator_switch_info_switchIdentifier(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') RIBVRFRouteLimitExceeded = ET.SubElement(config, 'RIBVRFRouteLimitExceeded', xmlns='http://brocade.com/ns/brocade-notification-stream') originator_switch_info = ET.SubElement(RIBVRFRouteLimitExceeded, 'originator-switch-info') switchIdentifier = ET.SubElement(originator_switch_info, 'switchIdentifier') switchIdentifier.text = kwargs.pop('switchIdentifier') callback = kwargs.pop('callback', self._callback) return callback(config)
def update_soil_moisture(self):
    """Digitize the analog soil-moisture sensor and store the reading.

    Powers the analog sensor, waits briefly for its output voltage to
    settle, routes the I2C mux to the ADC channel, digitizes the moisture
    channel and scales the raw value (the scaling factor accounts for
    voltage-division/signal loss; it may need adjusting for a different
    sensor). The mux and sensor are powered down again before the result
    is stored on ``self.soil_moisture``.

    Raises:
        SensorError: if the digitized value is negative, which indicates
            a mis-configured moisture meter.

    Returns:
        The status returned when deselecting the mux channel.
    """
    # Power up the sensor and let the analog output settle.
    SensorCluster.analog_sensor_power(SensorCluster.bus, "on")
    sleep(.2)
    # Route the mux to the ADC module and digitize the moisture channel.
    TCA_select(SensorCluster.bus, self.mux_addr, SensorCluster.adc_chan)
    raw_reading = get_ADC_value(
        SensorCluster.bus,
        SensorCluster.adc_addr,
        SensorCluster.moisture_chan)
    # Deselect the mux, then power the sensor back down.
    status = TCA_select(SensorCluster.bus, self.mux_addr, "off")
    SensorCluster.analog_sensor_power(SensorCluster.bus, "off")
    if raw_reading < 0:
        raise SensorError(
            "The soil moisture meter is not configured correctly.")
    # Scale to a percentage value.
    self.soil_moisture = round(raw_reading / 2.048, 3)
    return status
def function[update_soil_moisture, parameter[self]]: constant[ Method will select the ADC module, turn on the analog sensor, wait for voltage settle, and then digitize the sensor voltage. Voltage division/signal loss is accounted for by scaling up the sensor output. This may need to be adjusted if a different sensor is used ] call[name[SensorCluster].analog_sensor_power, parameter[name[SensorCluster].bus, constant[on]]] call[name[sleep], parameter[constant[0.2]]] call[name[TCA_select], parameter[name[SensorCluster].bus, name[self].mux_addr, name[SensorCluster].adc_chan]] variable[moisture] assign[=] call[name[get_ADC_value], parameter[name[SensorCluster].bus, name[SensorCluster].adc_addr, name[SensorCluster].moisture_chan]] variable[status] assign[=] call[name[TCA_select], parameter[name[SensorCluster].bus, name[self].mux_addr, constant[off]]] call[name[SensorCluster].analog_sensor_power, parameter[name[SensorCluster].bus, constant[off]]] if compare[name[moisture] greater_or_equal[>=] constant[0]] begin[:] variable[soil_moisture] assign[=] binary_operation[name[moisture] / constant[2.048]] name[self].soil_moisture assign[=] call[name[round], parameter[name[soil_moisture], constant[3]]] return[name[status]]
keyword[def] identifier[update_soil_moisture] ( identifier[self] ): literal[string] identifier[SensorCluster] . identifier[analog_sensor_power] ( identifier[SensorCluster] . identifier[bus] , literal[string] ) identifier[sleep] ( literal[int] ) identifier[TCA_select] ( identifier[SensorCluster] . identifier[bus] , identifier[self] . identifier[mux_addr] , identifier[SensorCluster] . identifier[adc_chan] ) identifier[moisture] = identifier[get_ADC_value] ( identifier[SensorCluster] . identifier[bus] , identifier[SensorCluster] . identifier[adc_addr] , identifier[SensorCluster] . identifier[moisture_chan] ) identifier[status] = identifier[TCA_select] ( identifier[SensorCluster] . identifier[bus] , identifier[self] . identifier[mux_addr] , literal[string] ) identifier[SensorCluster] . identifier[analog_sensor_power] ( identifier[SensorCluster] . identifier[bus] , literal[string] ) keyword[if] ( identifier[moisture] >= literal[int] ): identifier[soil_moisture] = identifier[moisture] / literal[int] identifier[self] . identifier[soil_moisture] = identifier[round] ( identifier[soil_moisture] , literal[int] ) keyword[else] : keyword[raise] identifier[SensorError] ( literal[string] ) keyword[return] identifier[status]
def update_soil_moisture(self): """ Method will select the ADC module, turn on the analog sensor, wait for voltage settle, and then digitize the sensor voltage. Voltage division/signal loss is accounted for by scaling up the sensor output. This may need to be adjusted if a different sensor is used """ SensorCluster.analog_sensor_power(SensorCluster.bus, 'on') # turn on sensor sleep(0.2) TCA_select(SensorCluster.bus, self.mux_addr, SensorCluster.adc_chan) moisture = get_ADC_value(SensorCluster.bus, SensorCluster.adc_addr, SensorCluster.moisture_chan) status = TCA_select(SensorCluster.bus, self.mux_addr, 'off') # Turn off mux. SensorCluster.analog_sensor_power(SensorCluster.bus, 'off') # turn off sensor if moisture >= 0: soil_moisture = moisture / 2.048 # Scale to a percentage value self.soil_moisture = round(soil_moisture, 3) # depends on [control=['if'], data=['moisture']] else: raise SensorError('The soil moisture meter is not configured correctly.') return status
def rewrite_kwargs(conn_type, kwargs, module_name=None):
    """
    Manipulate connection keywords.

    Modifies keywords based on connection type. There is an assumption
    here that the client has already been created and that these keywords
    are being passed into methods for interacting with various services.

    Current modifications (as implemented):

    - if conn_type is not cloud and module is NOT 'compute', rewrite
      'project' as 'name' ('projects/<project>').
    - if conn_type is cloud and module is 'storage', remove 'project'
      from the dict.

    NOTE(review): the original docstring claimed the project->name rewrite
    applied when the module *is* 'compute', but the code applies it when
    the module is *not* 'compute'. Behavior is kept exactly as coded;
    confirm the intended condition against callers.

    :param conn_type: E.g. 'cloud' or 'general'
    :type conn_type: ``str``

    :param kwargs: Dictionary of keywords sent in by user.
    :type kwargs: ``dict``

    :param module_name: Name of specific module that will be loaded.
        Default is None.
    :type module_name: ``str`` or None

    :returns: kwargs with client and module specific changes
    :rtype: ``dict``
    """
    general_rewrite = conn_type != 'cloud' and module_name != 'compute'
    if general_rewrite and 'project' in kwargs:
        kwargs['name'] = 'projects/%s' % kwargs.pop('project')
    cloud_storage = conn_type == 'cloud' and module_name == 'storage'
    if cloud_storage and 'project' in kwargs:
        del kwargs['project']
    return kwargs
def function[rewrite_kwargs, parameter[conn_type, kwargs, module_name]]: constant[ Manipulate connection keywords. Modifieds keywords based on connection type. There is an assumption here that the client has already been created and that these keywords are being passed into methods for interacting with various services. Current modifications: - if conn_type is not cloud and module is 'compute', then rewrite project as name. - if conn_type is cloud and module is 'storage', then remove 'project' from dict. :param conn_type: E.g. 'cloud' or 'general' :type conn_type: ``str`` :param kwargs: Dictionary of keywords sent in by user. :type kwargs: ``dict`` :param module_name: Name of specific module that will be loaded. Default is None. :type conn_type: ``str`` or None :returns kwargs with client and module specific changes :rtype: ``dict`` ] if <ast.BoolOp object at 0x7da1b0193310> begin[:] if compare[constant[project] in name[kwargs]] begin[:] call[name[kwargs]][constant[name]] assign[=] binary_operation[constant[projects/%s] <ast.Mod object at 0x7da2590d6920> call[name[kwargs].pop, parameter[constant[project]]]] if <ast.BoolOp object at 0x7da1b0150550> begin[:] if compare[constant[project] in name[kwargs]] begin[:] <ast.Delete object at 0x7da1b0153f40> return[name[kwargs]]
keyword[def] identifier[rewrite_kwargs] ( identifier[conn_type] , identifier[kwargs] , identifier[module_name] = keyword[None] ): literal[string] keyword[if] identifier[conn_type] != literal[string] keyword[and] identifier[module_name] != literal[string] : keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[kwargs] [ literal[string] ]= literal[string] % identifier[kwargs] . identifier[pop] ( literal[string] ) keyword[if] identifier[conn_type] == literal[string] keyword[and] identifier[module_name] == literal[string] : keyword[if] literal[string] keyword[in] identifier[kwargs] : keyword[del] identifier[kwargs] [ literal[string] ] keyword[return] identifier[kwargs]
def rewrite_kwargs(conn_type, kwargs, module_name=None): """ Manipulate connection keywords. Modifieds keywords based on connection type. There is an assumption here that the client has already been created and that these keywords are being passed into methods for interacting with various services. Current modifications: - if conn_type is not cloud and module is 'compute', then rewrite project as name. - if conn_type is cloud and module is 'storage', then remove 'project' from dict. :param conn_type: E.g. 'cloud' or 'general' :type conn_type: ``str`` :param kwargs: Dictionary of keywords sent in by user. :type kwargs: ``dict`` :param module_name: Name of specific module that will be loaded. Default is None. :type conn_type: ``str`` or None :returns kwargs with client and module specific changes :rtype: ``dict`` """ if conn_type != 'cloud' and module_name != 'compute': if 'project' in kwargs: kwargs['name'] = 'projects/%s' % kwargs.pop('project') # depends on [control=['if'], data=['kwargs']] # depends on [control=['if'], data=[]] if conn_type == 'cloud' and module_name == 'storage': if 'project' in kwargs: del kwargs['project'] # depends on [control=['if'], data=['kwargs']] # depends on [control=['if'], data=[]] return kwargs
def delete(self, request, *args, **kwargs): """ Delete the user's avatar. We set `user.avatar = None` instead of calling `user.avatar.delete()` to avoid test errors with `django.inmemorystorage`. """ user = self.get_object() user.avatar = None user.save() return response.Response(status=HTTP_204_NO_CONTENT)
def function[delete, parameter[self, request]]: constant[ Delete the user's avatar. We set `user.avatar = None` instead of calling `user.avatar.delete()` to avoid test errors with `django.inmemorystorage`. ] variable[user] assign[=] call[name[self].get_object, parameter[]] name[user].avatar assign[=] constant[None] call[name[user].save, parameter[]] return[call[name[response].Response, parameter[]]]
keyword[def] identifier[delete] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[user] = identifier[self] . identifier[get_object] () identifier[user] . identifier[avatar] = keyword[None] identifier[user] . identifier[save] () keyword[return] identifier[response] . identifier[Response] ( identifier[status] = identifier[HTTP_204_NO_CONTENT] )
def delete(self, request, *args, **kwargs): """ Delete the user's avatar. We set `user.avatar = None` instead of calling `user.avatar.delete()` to avoid test errors with `django.inmemorystorage`. """ user = self.get_object() user.avatar = None user.save() return response.Response(status=HTTP_204_NO_CONTENT)
def get_zero_position(self): """ Returns programmed zero position in OTP memory. """ LSB = self.bus.read_byte_data(self.address, self.zero_position_MSB) MSB = self.bus.read_byte_data(self.address, self.zero_position_LSB) DATA = (MSB << 6) + LSB return DATA
def function[get_zero_position, parameter[self]]: constant[ Returns programmed zero position in OTP memory. ] variable[LSB] assign[=] call[name[self].bus.read_byte_data, parameter[name[self].address, name[self].zero_position_MSB]] variable[MSB] assign[=] call[name[self].bus.read_byte_data, parameter[name[self].address, name[self].zero_position_LSB]] variable[DATA] assign[=] binary_operation[binary_operation[name[MSB] <ast.LShift object at 0x7da2590d69e0> constant[6]] + name[LSB]] return[name[DATA]]
keyword[def] identifier[get_zero_position] ( identifier[self] ): literal[string] identifier[LSB] = identifier[self] . identifier[bus] . identifier[read_byte_data] ( identifier[self] . identifier[address] , identifier[self] . identifier[zero_position_MSB] ) identifier[MSB] = identifier[self] . identifier[bus] . identifier[read_byte_data] ( identifier[self] . identifier[address] , identifier[self] . identifier[zero_position_LSB] ) identifier[DATA] =( identifier[MSB] << literal[int] )+ identifier[LSB] keyword[return] identifier[DATA]
def get_zero_position(self): """ Returns programmed zero position in OTP memory. """ LSB = self.bus.read_byte_data(self.address, self.zero_position_MSB) MSB = self.bus.read_byte_data(self.address, self.zero_position_LSB) DATA = (MSB << 6) + LSB return DATA
def start_update(self, layer_id): """ A shortcut to create a new version and start importing it. Effectively the same as :py:meth:`koordinates.layers.LayerManager.create_draft` followed by :py:meth:`koordinates.layers.LayerManager.start_import`. """ target_url = self.client.get_url('LAYER', 'POST', 'update', {'layer_id': layer_id}) r = self.client.request('POST', target_url, json={}) return self.parent.create_from_result(r.json())
def function[start_update, parameter[self, layer_id]]: constant[ A shortcut to create a new version and start importing it. Effectively the same as :py:meth:`koordinates.layers.LayerManager.create_draft` followed by :py:meth:`koordinates.layers.LayerManager.start_import`. ] variable[target_url] assign[=] call[name[self].client.get_url, parameter[constant[LAYER], constant[POST], constant[update], dictionary[[<ast.Constant object at 0x7da1b1020a00>], [<ast.Name object at 0x7da1b10210f0>]]]] variable[r] assign[=] call[name[self].client.request, parameter[constant[POST], name[target_url]]] return[call[name[self].parent.create_from_result, parameter[call[name[r].json, parameter[]]]]]
keyword[def] identifier[start_update] ( identifier[self] , identifier[layer_id] ): literal[string] identifier[target_url] = identifier[self] . identifier[client] . identifier[get_url] ( literal[string] , literal[string] , literal[string] ,{ literal[string] : identifier[layer_id] }) identifier[r] = identifier[self] . identifier[client] . identifier[request] ( literal[string] , identifier[target_url] , identifier[json] ={}) keyword[return] identifier[self] . identifier[parent] . identifier[create_from_result] ( identifier[r] . identifier[json] ())
def start_update(self, layer_id): """ A shortcut to create a new version and start importing it. Effectively the same as :py:meth:`koordinates.layers.LayerManager.create_draft` followed by :py:meth:`koordinates.layers.LayerManager.start_import`. """ target_url = self.client.get_url('LAYER', 'POST', 'update', {'layer_id': layer_id}) r = self.client.request('POST', target_url, json={}) return self.parent.create_from_result(r.json())
def ParseMetadataFile( self, parser_mediator, file_entry, data_stream_name): """Parses a metadata file. Args: parser_mediator (ParserMediator): parser mediator. file_entry (dfvfs.FileEntry): file entry. data_stream_name (str): data stream name. """ parent_path_spec = getattr(file_entry.path_spec, 'parent', None) filename_upper = file_entry.name.upper() if (self._mft_parser and parent_path_spec and filename_upper in ('$MFT', '$MFTMIRR') and not data_stream_name): self._ParseDataStreamWithParser( parser_mediator, self._mft_parser, file_entry, '') elif (self._usnjrnl_parser and parent_path_spec and filename_upper == '$USNJRNL' and data_stream_name == '$J'): # To be able to ignore the sparse data ranges the UsnJrnl parser # needs to read directly from the volume. volume_file_object = path_spec_resolver.Resolver.OpenFileObject( parent_path_spec, resolver_context=parser_mediator.resolver_context) try: self._ParseFileEntryWithParser( parser_mediator, self._usnjrnl_parser, file_entry, file_object=volume_file_object) finally: volume_file_object.close()
def function[ParseMetadataFile, parameter[self, parser_mediator, file_entry, data_stream_name]]: constant[Parses a metadata file. Args: parser_mediator (ParserMediator): parser mediator. file_entry (dfvfs.FileEntry): file entry. data_stream_name (str): data stream name. ] variable[parent_path_spec] assign[=] call[name[getattr], parameter[name[file_entry].path_spec, constant[parent], constant[None]]] variable[filename_upper] assign[=] call[name[file_entry].name.upper, parameter[]] if <ast.BoolOp object at 0x7da20cabc550> begin[:] call[name[self]._ParseDataStreamWithParser, parameter[name[parser_mediator], name[self]._mft_parser, name[file_entry], constant[]]]
keyword[def] identifier[ParseMetadataFile] ( identifier[self] , identifier[parser_mediator] , identifier[file_entry] , identifier[data_stream_name] ): literal[string] identifier[parent_path_spec] = identifier[getattr] ( identifier[file_entry] . identifier[path_spec] , literal[string] , keyword[None] ) identifier[filename_upper] = identifier[file_entry] . identifier[name] . identifier[upper] () keyword[if] ( identifier[self] . identifier[_mft_parser] keyword[and] identifier[parent_path_spec] keyword[and] identifier[filename_upper] keyword[in] ( literal[string] , literal[string] ) keyword[and] keyword[not] identifier[data_stream_name] ): identifier[self] . identifier[_ParseDataStreamWithParser] ( identifier[parser_mediator] , identifier[self] . identifier[_mft_parser] , identifier[file_entry] , literal[string] ) keyword[elif] ( identifier[self] . identifier[_usnjrnl_parser] keyword[and] identifier[parent_path_spec] keyword[and] identifier[filename_upper] == literal[string] keyword[and] identifier[data_stream_name] == literal[string] ): identifier[volume_file_object] = identifier[path_spec_resolver] . identifier[Resolver] . identifier[OpenFileObject] ( identifier[parent_path_spec] , identifier[resolver_context] = identifier[parser_mediator] . identifier[resolver_context] ) keyword[try] : identifier[self] . identifier[_ParseFileEntryWithParser] ( identifier[parser_mediator] , identifier[self] . identifier[_usnjrnl_parser] , identifier[file_entry] , identifier[file_object] = identifier[volume_file_object] ) keyword[finally] : identifier[volume_file_object] . identifier[close] ()
def ParseMetadataFile(self, parser_mediator, file_entry, data_stream_name): """Parses a metadata file. Args: parser_mediator (ParserMediator): parser mediator. file_entry (dfvfs.FileEntry): file entry. data_stream_name (str): data stream name. """ parent_path_spec = getattr(file_entry.path_spec, 'parent', None) filename_upper = file_entry.name.upper() if self._mft_parser and parent_path_spec and (filename_upper in ('$MFT', '$MFTMIRR')) and (not data_stream_name): self._ParseDataStreamWithParser(parser_mediator, self._mft_parser, file_entry, '') # depends on [control=['if'], data=[]] elif self._usnjrnl_parser and parent_path_spec and (filename_upper == '$USNJRNL') and (data_stream_name == '$J'): # To be able to ignore the sparse data ranges the UsnJrnl parser # needs to read directly from the volume. volume_file_object = path_spec_resolver.Resolver.OpenFileObject(parent_path_spec, resolver_context=parser_mediator.resolver_context) try: self._ParseFileEntryWithParser(parser_mediator, self._usnjrnl_parser, file_entry, file_object=volume_file_object) # depends on [control=['try'], data=[]] finally: volume_file_object.close() # depends on [control=['if'], data=[]]
def extract_surface(self, pass_pointid=True, pass_cellid=True, inplace=False): """ Extract surface mesh of the grid Parameters ---------- pass_pointid : bool, optional Adds a point scalar "vtkOriginalPointIds" that idenfities which original points these surface points correspond to pass_cellid : bool, optional Adds a cell scalar "vtkOriginalPointIds" that idenfities which original cells these surface cells correspond to inplace : bool, optional Return new mesh or overwrite input. Returns ------- extsurf : vtki.PolyData Surface mesh of the grid """ surf_filter = vtk.vtkDataSetSurfaceFilter() surf_filter.SetInputData(self) if pass_pointid: surf_filter.PassThroughCellIdsOn() if pass_cellid: surf_filter.PassThroughPointIdsOn() surf_filter.Update() mesh = _get_output(surf_filter) if inplace: self.overwrite(mesh) else: return mesh
def function[extract_surface, parameter[self, pass_pointid, pass_cellid, inplace]]: constant[ Extract surface mesh of the grid Parameters ---------- pass_pointid : bool, optional Adds a point scalar "vtkOriginalPointIds" that idenfities which original points these surface points correspond to pass_cellid : bool, optional Adds a cell scalar "vtkOriginalPointIds" that idenfities which original cells these surface cells correspond to inplace : bool, optional Return new mesh or overwrite input. Returns ------- extsurf : vtki.PolyData Surface mesh of the grid ] variable[surf_filter] assign[=] call[name[vtk].vtkDataSetSurfaceFilter, parameter[]] call[name[surf_filter].SetInputData, parameter[name[self]]] if name[pass_pointid] begin[:] call[name[surf_filter].PassThroughCellIdsOn, parameter[]] if name[pass_cellid] begin[:] call[name[surf_filter].PassThroughPointIdsOn, parameter[]] call[name[surf_filter].Update, parameter[]] variable[mesh] assign[=] call[name[_get_output], parameter[name[surf_filter]]] if name[inplace] begin[:] call[name[self].overwrite, parameter[name[mesh]]]
keyword[def] identifier[extract_surface] ( identifier[self] , identifier[pass_pointid] = keyword[True] , identifier[pass_cellid] = keyword[True] , identifier[inplace] = keyword[False] ): literal[string] identifier[surf_filter] = identifier[vtk] . identifier[vtkDataSetSurfaceFilter] () identifier[surf_filter] . identifier[SetInputData] ( identifier[self] ) keyword[if] identifier[pass_pointid] : identifier[surf_filter] . identifier[PassThroughCellIdsOn] () keyword[if] identifier[pass_cellid] : identifier[surf_filter] . identifier[PassThroughPointIdsOn] () identifier[surf_filter] . identifier[Update] () identifier[mesh] = identifier[_get_output] ( identifier[surf_filter] ) keyword[if] identifier[inplace] : identifier[self] . identifier[overwrite] ( identifier[mesh] ) keyword[else] : keyword[return] identifier[mesh]
def extract_surface(self, pass_pointid=True, pass_cellid=True, inplace=False): """ Extract surface mesh of the grid Parameters ---------- pass_pointid : bool, optional Adds a point scalar "vtkOriginalPointIds" that idenfities which original points these surface points correspond to pass_cellid : bool, optional Adds a cell scalar "vtkOriginalPointIds" that idenfities which original cells these surface cells correspond to inplace : bool, optional Return new mesh or overwrite input. Returns ------- extsurf : vtki.PolyData Surface mesh of the grid """ surf_filter = vtk.vtkDataSetSurfaceFilter() surf_filter.SetInputData(self) if pass_pointid: surf_filter.PassThroughCellIdsOn() # depends on [control=['if'], data=[]] if pass_cellid: surf_filter.PassThroughPointIdsOn() # depends on [control=['if'], data=[]] surf_filter.Update() mesh = _get_output(surf_filter) if inplace: self.overwrite(mesh) # depends on [control=['if'], data=[]] else: return mesh
def decode_msg(msg, enc='utf-8'): """ Decodes a message fragment. Args: msg - A Message object representing the fragment enc - The encoding to use for decoding the message """ # We avoid the get_payload decoding machinery for raw # content-transfer-encodings potentially containing non-ascii characters, # such as 8bit or binary, as these are encoded using raw-unicode-escape which # seems to prevent subsequent utf-8 decoding. cte = str(msg.get('content-transfer-encoding', '')).lower() decode = cte not in ("8bit", "7bit", "binary") res = msg.get_payload(decode=decode) return decode_bytes(res, enc)
def function[decode_msg, parameter[msg, enc]]: constant[ Decodes a message fragment. Args: msg - A Message object representing the fragment enc - The encoding to use for decoding the message ] variable[cte] assign[=] call[call[name[str], parameter[call[name[msg].get, parameter[constant[content-transfer-encoding], constant[]]]]].lower, parameter[]] variable[decode] assign[=] compare[name[cte] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da20c6c7e50>, <ast.Constant object at 0x7da20c6c5f90>, <ast.Constant object at 0x7da20c6c4c40>]]] variable[res] assign[=] call[name[msg].get_payload, parameter[]] return[call[name[decode_bytes], parameter[name[res], name[enc]]]]
keyword[def] identifier[decode_msg] ( identifier[msg] , identifier[enc] = literal[string] ): literal[string] identifier[cte] = identifier[str] ( identifier[msg] . identifier[get] ( literal[string] , literal[string] )). identifier[lower] () identifier[decode] = identifier[cte] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] ) identifier[res] = identifier[msg] . identifier[get_payload] ( identifier[decode] = identifier[decode] ) keyword[return] identifier[decode_bytes] ( identifier[res] , identifier[enc] )
def decode_msg(msg, enc='utf-8'): """ Decodes a message fragment. Args: msg - A Message object representing the fragment enc - The encoding to use for decoding the message """ # We avoid the get_payload decoding machinery for raw # content-transfer-encodings potentially containing non-ascii characters, # such as 8bit or binary, as these are encoded using raw-unicode-escape which # seems to prevent subsequent utf-8 decoding. cte = str(msg.get('content-transfer-encoding', '')).lower() decode = cte not in ('8bit', '7bit', 'binary') res = msg.get_payload(decode=decode) return decode_bytes(res, enc)
def eventFilter( self, object, event ): """ Processes events for the menu tree. :param event | <QEvent> """ if ( not event.type() in (QEvent.DragEnter, QEvent.DragMove, QEvent.Drop) ): return False # support dragging and dropping if ( event.type() in (event.DragEnter, event.DragMove) ): data = event.mimeData() if ( data.hasFormat('application/x-actions') ): event.acceptProposedAction() return True # handle creation of new items if ( event.type() == QEvent.Drop ): data = event.mimeData() actions = nativestring(data.data('application/x-actions')) # determine the drop item pos = event.pos() pos.setY(pos.y() - 20) drop_item = self.uiMenuTREE.itemAt(pos) tree = self.uiMenuTREE for key in actions.split(','): if ( not key ): continue item = self.createActionItem(key) self.addMenuItem(item, drop_item) drop_item = item return True return False
def function[eventFilter, parameter[self, object, event]]: constant[ Processes events for the menu tree. :param event | <QEvent> ] if <ast.UnaryOp object at 0x7da1b253b310> begin[:] return[constant[False]] if compare[call[name[event].type, parameter[]] in tuple[[<ast.Attribute object at 0x7da1b253aa40>, <ast.Attribute object at 0x7da1b2538f40>]]] begin[:] variable[data] assign[=] call[name[event].mimeData, parameter[]] if call[name[data].hasFormat, parameter[constant[application/x-actions]]] begin[:] call[name[event].acceptProposedAction, parameter[]] return[constant[True]] if compare[call[name[event].type, parameter[]] equal[==] name[QEvent].Drop] begin[:] variable[data] assign[=] call[name[event].mimeData, parameter[]] variable[actions] assign[=] call[name[nativestring], parameter[call[name[data].data, parameter[constant[application/x-actions]]]]] variable[pos] assign[=] call[name[event].pos, parameter[]] call[name[pos].setY, parameter[binary_operation[call[name[pos].y, parameter[]] - constant[20]]]] variable[drop_item] assign[=] call[name[self].uiMenuTREE.itemAt, parameter[name[pos]]] variable[tree] assign[=] name[self].uiMenuTREE for taget[name[key]] in starred[call[name[actions].split, parameter[constant[,]]]] begin[:] if <ast.UnaryOp object at 0x7da18f09f070> begin[:] continue variable[item] assign[=] call[name[self].createActionItem, parameter[name[key]]] call[name[self].addMenuItem, parameter[name[item], name[drop_item]]] variable[drop_item] assign[=] name[item] return[constant[True]] return[constant[False]]
keyword[def] identifier[eventFilter] ( identifier[self] , identifier[object] , identifier[event] ): literal[string] keyword[if] ( keyword[not] identifier[event] . identifier[type] () keyword[in] ( identifier[QEvent] . identifier[DragEnter] , identifier[QEvent] . identifier[DragMove] , identifier[QEvent] . identifier[Drop] )): keyword[return] keyword[False] keyword[if] ( identifier[event] . identifier[type] () keyword[in] ( identifier[event] . identifier[DragEnter] , identifier[event] . identifier[DragMove] )): identifier[data] = identifier[event] . identifier[mimeData] () keyword[if] ( identifier[data] . identifier[hasFormat] ( literal[string] )): identifier[event] . identifier[acceptProposedAction] () keyword[return] keyword[True] keyword[if] ( identifier[event] . identifier[type] ()== identifier[QEvent] . identifier[Drop] ): identifier[data] = identifier[event] . identifier[mimeData] () identifier[actions] = identifier[nativestring] ( identifier[data] . identifier[data] ( literal[string] )) identifier[pos] = identifier[event] . identifier[pos] () identifier[pos] . identifier[setY] ( identifier[pos] . identifier[y] ()- literal[int] ) identifier[drop_item] = identifier[self] . identifier[uiMenuTREE] . identifier[itemAt] ( identifier[pos] ) identifier[tree] = identifier[self] . identifier[uiMenuTREE] keyword[for] identifier[key] keyword[in] identifier[actions] . identifier[split] ( literal[string] ): keyword[if] ( keyword[not] identifier[key] ): keyword[continue] identifier[item] = identifier[self] . identifier[createActionItem] ( identifier[key] ) identifier[self] . identifier[addMenuItem] ( identifier[item] , identifier[drop_item] ) identifier[drop_item] = identifier[item] keyword[return] keyword[True] keyword[return] keyword[False]
def eventFilter(self, object, event): """ Processes events for the menu tree. :param event | <QEvent> """ if not event.type() in (QEvent.DragEnter, QEvent.DragMove, QEvent.Drop): return False # depends on [control=['if'], data=[]] # support dragging and dropping if event.type() in (event.DragEnter, event.DragMove): data = event.mimeData() if data.hasFormat('application/x-actions'): event.acceptProposedAction() # depends on [control=['if'], data=[]] return True # depends on [control=['if'], data=[]] # handle creation of new items if event.type() == QEvent.Drop: data = event.mimeData() actions = nativestring(data.data('application/x-actions')) # determine the drop item pos = event.pos() pos.setY(pos.y() - 20) drop_item = self.uiMenuTREE.itemAt(pos) tree = self.uiMenuTREE for key in actions.split(','): if not key: continue # depends on [control=['if'], data=[]] item = self.createActionItem(key) self.addMenuItem(item, drop_item) drop_item = item # depends on [control=['for'], data=['key']] return True # depends on [control=['if'], data=[]] return False
def color_range(startcolor, goalcolor, steps): """ wrapper for interpolate_tuple that accepts colors as html ("#CCCCC" and such) """ start_tuple = make_color_tuple(startcolor) goal_tuple = make_color_tuple(goalcolor) return interpolate_tuple(start_tuple, goal_tuple, steps)
def function[color_range, parameter[startcolor, goalcolor, steps]]: constant[ wrapper for interpolate_tuple that accepts colors as html ("#CCCCC" and such) ] variable[start_tuple] assign[=] call[name[make_color_tuple], parameter[name[startcolor]]] variable[goal_tuple] assign[=] call[name[make_color_tuple], parameter[name[goalcolor]]] return[call[name[interpolate_tuple], parameter[name[start_tuple], name[goal_tuple], name[steps]]]]
keyword[def] identifier[color_range] ( identifier[startcolor] , identifier[goalcolor] , identifier[steps] ): literal[string] identifier[start_tuple] = identifier[make_color_tuple] ( identifier[startcolor] ) identifier[goal_tuple] = identifier[make_color_tuple] ( identifier[goalcolor] ) keyword[return] identifier[interpolate_tuple] ( identifier[start_tuple] , identifier[goal_tuple] , identifier[steps] )
def color_range(startcolor, goalcolor, steps): """ wrapper for interpolate_tuple that accepts colors as html ("#CCCCC" and such) """ start_tuple = make_color_tuple(startcolor) goal_tuple = make_color_tuple(goalcolor) return interpolate_tuple(start_tuple, goal_tuple, steps)
def maf(genotypes): """Computes the MAF and returns a boolean indicating if the minor allele is currently the coded allele. """ warnings.warn("deprecated: use 'Genotypes.maf'", DeprecationWarning) g = genotypes.genotypes maf = np.nansum(g) / (2 * np.sum(~np.isnan(g))) if maf > 0.5: maf = 1 - maf return maf, False return maf, True
def function[maf, parameter[genotypes]]: constant[Computes the MAF and returns a boolean indicating if the minor allele is currently the coded allele. ] call[name[warnings].warn, parameter[constant[deprecated: use 'Genotypes.maf'], name[DeprecationWarning]]] variable[g] assign[=] name[genotypes].genotypes variable[maf] assign[=] binary_operation[call[name[np].nansum, parameter[name[g]]] / binary_operation[constant[2] * call[name[np].sum, parameter[<ast.UnaryOp object at 0x7da1b2240640>]]]] if compare[name[maf] greater[>] constant[0.5]] begin[:] variable[maf] assign[=] binary_operation[constant[1] - name[maf]] return[tuple[[<ast.Name object at 0x7da1b2243280>, <ast.Constant object at 0x7da1b22423b0>]]] return[tuple[[<ast.Name object at 0x7da1b22437f0>, <ast.Constant object at 0x7da1b2243790>]]]
keyword[def] identifier[maf] ( identifier[genotypes] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] , identifier[DeprecationWarning] ) identifier[g] = identifier[genotypes] . identifier[genotypes] identifier[maf] = identifier[np] . identifier[nansum] ( identifier[g] )/( literal[int] * identifier[np] . identifier[sum] (~ identifier[np] . identifier[isnan] ( identifier[g] ))) keyword[if] identifier[maf] > literal[int] : identifier[maf] = literal[int] - identifier[maf] keyword[return] identifier[maf] , keyword[False] keyword[return] identifier[maf] , keyword[True]
def maf(genotypes): """Computes the MAF and returns a boolean indicating if the minor allele is currently the coded allele. """ warnings.warn("deprecated: use 'Genotypes.maf'", DeprecationWarning) g = genotypes.genotypes maf = np.nansum(g) / (2 * np.sum(~np.isnan(g))) if maf > 0.5: maf = 1 - maf return (maf, False) # depends on [control=['if'], data=['maf']] return (maf, True)
def get_labels(cls, path=None): """Get all server configuration labels. :param path: A string. The configuration file to be manipulated. Defaults to what is returned by :func:`nailgun.config._get_config_file_path`. :returns: Server configuration labels, where each label is a string. """ if path is None: path = _get_config_file_path( cls._xdg_config_dir, cls._xdg_config_file ) with open(path) as config_file: # keys() returns a list in Python 2 and a view in Python 3. return tuple(json.load(config_file).keys())
def function[get_labels, parameter[cls, path]]: constant[Get all server configuration labels. :param path: A string. The configuration file to be manipulated. Defaults to what is returned by :func:`nailgun.config._get_config_file_path`. :returns: Server configuration labels, where each label is a string. ] if compare[name[path] is constant[None]] begin[:] variable[path] assign[=] call[name[_get_config_file_path], parameter[name[cls]._xdg_config_dir, name[cls]._xdg_config_file]] with call[name[open], parameter[name[path]]] begin[:] return[call[name[tuple], parameter[call[call[name[json].load, parameter[name[config_file]]].keys, parameter[]]]]]
keyword[def] identifier[get_labels] ( identifier[cls] , identifier[path] = keyword[None] ): literal[string] keyword[if] identifier[path] keyword[is] keyword[None] : identifier[path] = identifier[_get_config_file_path] ( identifier[cls] . identifier[_xdg_config_dir] , identifier[cls] . identifier[_xdg_config_file] ) keyword[with] identifier[open] ( identifier[path] ) keyword[as] identifier[config_file] : keyword[return] identifier[tuple] ( identifier[json] . identifier[load] ( identifier[config_file] ). identifier[keys] ())
def get_labels(cls, path=None): """Get all server configuration labels. :param path: A string. The configuration file to be manipulated. Defaults to what is returned by :func:`nailgun.config._get_config_file_path`. :returns: Server configuration labels, where each label is a string. """ if path is None: path = _get_config_file_path(cls._xdg_config_dir, cls._xdg_config_file) # depends on [control=['if'], data=['path']] with open(path) as config_file: # keys() returns a list in Python 2 and a view in Python 3. return tuple(json.load(config_file).keys()) # depends on [control=['with'], data=['config_file']]
def post(method, hmc, uri, uri_parms, body, logon_required, wait_for_completion): """Operation: Create Hipersocket (requires DPM mode).""" assert wait_for_completion is True cpc_oid = uri_parms[0] try: cpc = hmc.cpcs.lookup_by_oid(cpc_oid) except KeyError: raise InvalidResourceError(method, uri) if not cpc.dpm_enabled: raise CpcNotInDpmError(method, uri, cpc) check_required_fields(method, uri, body, ['name']) # We need to emulate the behavior of this POST to always create a # hipersocket, but the add() method is used for adding all kinds of # faked adapters to the faked HMC. So we need to specify the adapter # type, but because the behavior of the Adapter resource object is # that it only has its input properties set, we add the 'type' # property on a copy of the input properties. body2 = body.copy() body2['type'] = 'hipersockets' try: new_adapter = cpc.adapters.add(body2) except InputError as exc: raise BadRequestError(method, uri, reason=5, message=str(exc)) return {'object-uri': new_adapter.uri}
def function[post, parameter[method, hmc, uri, uri_parms, body, logon_required, wait_for_completion]]: constant[Operation: Create Hipersocket (requires DPM mode).] assert[compare[name[wait_for_completion] is constant[True]]] variable[cpc_oid] assign[=] call[name[uri_parms]][constant[0]] <ast.Try object at 0x7da18fe92800> if <ast.UnaryOp object at 0x7da18fe92da0> begin[:] <ast.Raise object at 0x7da18fe90040> call[name[check_required_fields], parameter[name[method], name[uri], name[body], list[[<ast.Constant object at 0x7da18fe91d20>]]]] variable[body2] assign[=] call[name[body].copy, parameter[]] call[name[body2]][constant[type]] assign[=] constant[hipersockets] <ast.Try object at 0x7da18fe93280> return[dictionary[[<ast.Constant object at 0x7da18fe91ba0>], [<ast.Attribute object at 0x7da18fe91570>]]]
keyword[def] identifier[post] ( identifier[method] , identifier[hmc] , identifier[uri] , identifier[uri_parms] , identifier[body] , identifier[logon_required] , identifier[wait_for_completion] ): literal[string] keyword[assert] identifier[wait_for_completion] keyword[is] keyword[True] identifier[cpc_oid] = identifier[uri_parms] [ literal[int] ] keyword[try] : identifier[cpc] = identifier[hmc] . identifier[cpcs] . identifier[lookup_by_oid] ( identifier[cpc_oid] ) keyword[except] identifier[KeyError] : keyword[raise] identifier[InvalidResourceError] ( identifier[method] , identifier[uri] ) keyword[if] keyword[not] identifier[cpc] . identifier[dpm_enabled] : keyword[raise] identifier[CpcNotInDpmError] ( identifier[method] , identifier[uri] , identifier[cpc] ) identifier[check_required_fields] ( identifier[method] , identifier[uri] , identifier[body] ,[ literal[string] ]) identifier[body2] = identifier[body] . identifier[copy] () identifier[body2] [ literal[string] ]= literal[string] keyword[try] : identifier[new_adapter] = identifier[cpc] . identifier[adapters] . identifier[add] ( identifier[body2] ) keyword[except] identifier[InputError] keyword[as] identifier[exc] : keyword[raise] identifier[BadRequestError] ( identifier[method] , identifier[uri] , identifier[reason] = literal[int] , identifier[message] = identifier[str] ( identifier[exc] )) keyword[return] { literal[string] : identifier[new_adapter] . identifier[uri] }
def post(method, hmc, uri, uri_parms, body, logon_required, wait_for_completion): """Operation: Create Hipersocket (requires DPM mode).""" assert wait_for_completion is True cpc_oid = uri_parms[0] try: cpc = hmc.cpcs.lookup_by_oid(cpc_oid) # depends on [control=['try'], data=[]] except KeyError: raise InvalidResourceError(method, uri) # depends on [control=['except'], data=[]] if not cpc.dpm_enabled: raise CpcNotInDpmError(method, uri, cpc) # depends on [control=['if'], data=[]] check_required_fields(method, uri, body, ['name']) # We need to emulate the behavior of this POST to always create a # hipersocket, but the add() method is used for adding all kinds of # faked adapters to the faked HMC. So we need to specify the adapter # type, but because the behavior of the Adapter resource object is # that it only has its input properties set, we add the 'type' # property on a copy of the input properties. body2 = body.copy() body2['type'] = 'hipersockets' try: new_adapter = cpc.adapters.add(body2) # depends on [control=['try'], data=[]] except InputError as exc: raise BadRequestError(method, uri, reason=5, message=str(exc)) # depends on [control=['except'], data=['exc']] return {'object-uri': new_adapter.uri}
def delete(self): """Delete this table. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_delete_table] :end-before: [END bigtable_delete_table] """ table_client = self._instance._client.table_admin_client table_client.delete_table(name=self.name)
def function[delete, parameter[self]]: constant[Delete this table. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_delete_table] :end-before: [END bigtable_delete_table] ] variable[table_client] assign[=] name[self]._instance._client.table_admin_client call[name[table_client].delete_table, parameter[]]
keyword[def] identifier[delete] ( identifier[self] ): literal[string] identifier[table_client] = identifier[self] . identifier[_instance] . identifier[_client] . identifier[table_admin_client] identifier[table_client] . identifier[delete_table] ( identifier[name] = identifier[self] . identifier[name] )
def delete(self): """Delete this table. For example: .. literalinclude:: snippets_table.py :start-after: [START bigtable_delete_table] :end-before: [END bigtable_delete_table] """ table_client = self._instance._client.table_admin_client table_client.delete_table(name=self.name)
async def getLiftRows(self, lops): ''' Yield row tuples from a series of lift operations. Row tuples only requirement is that the first element be the binary id of a node. Args: lops (list): A list of lift operations. Yields: (tuple): (layer_indx, (buid, ...)) rows. ''' for layeridx, layr in enumerate(self.layers): async for x in layr.getLiftRows(lops): yield layeridx, x
<ast.AsyncFunctionDef object at 0x7da1b2309f60>
keyword[async] keyword[def] identifier[getLiftRows] ( identifier[self] , identifier[lops] ): literal[string] keyword[for] identifier[layeridx] , identifier[layr] keyword[in] identifier[enumerate] ( identifier[self] . identifier[layers] ): keyword[async] keyword[for] identifier[x] keyword[in] identifier[layr] . identifier[getLiftRows] ( identifier[lops] ): keyword[yield] identifier[layeridx] , identifier[x]
async def getLiftRows(self, lops): """ Yield row tuples from a series of lift operations. Row tuples only requirement is that the first element be the binary id of a node. Args: lops (list): A list of lift operations. Yields: (tuple): (layer_indx, (buid, ...)) rows. """ for (layeridx, layr) in enumerate(self.layers): async for x in layr.getLiftRows(lops): yield (layeridx, x) # depends on [control=['for'], data=[]]
def _naturalize_numbers(self, string): """ Makes any integers into very zero-padded numbers. e.g. '1' becomes '00000001'. """ def naturalize_int_match(match): return '%08d' % (int(match.group(0)),) string = re.sub(r'\d+', naturalize_int_match, string) return string
def function[_naturalize_numbers, parameter[self, string]]: constant[ Makes any integers into very zero-padded numbers. e.g. '1' becomes '00000001'. ] def function[naturalize_int_match, parameter[match]]: return[binary_operation[constant[%08d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da207f9a4a0>]]]] variable[string] assign[=] call[name[re].sub, parameter[constant[\d+], name[naturalize_int_match], name[string]]] return[name[string]]
keyword[def] identifier[_naturalize_numbers] ( identifier[self] , identifier[string] ): literal[string] keyword[def] identifier[naturalize_int_match] ( identifier[match] ): keyword[return] literal[string] %( identifier[int] ( identifier[match] . identifier[group] ( literal[int] )),) identifier[string] = identifier[re] . identifier[sub] ( literal[string] , identifier[naturalize_int_match] , identifier[string] ) keyword[return] identifier[string]
def _naturalize_numbers(self, string): """ Makes any integers into very zero-padded numbers. e.g. '1' becomes '00000001'. """ def naturalize_int_match(match): return '%08d' % (int(match.group(0)),) string = re.sub('\\d+', naturalize_int_match, string) return string
def _is_valid_edition(self): """ Return True if the input value of attribute "edition" is valid, and otherwise False. :returns: True if value is valid, False otherwise :rtype: boolean """ comp_str = self._standard_value[0] packed = [] packed.append("(") packed.append(CPEComponent2_3_URI.SEPARATOR_PACKED_EDITION) packed.append(CPEComponent2_3_URI._string) packed.append("){5}") value_pattern = [] value_pattern.append("^(") value_pattern.append(CPEComponent2_3_URI._string) value_pattern.append("|") value_pattern.append("".join(packed)) value_pattern.append(")$") value_rxc = re.compile("".join(value_pattern)) return value_rxc.match(comp_str) is not None
def function[_is_valid_edition, parameter[self]]: constant[ Return True if the input value of attribute "edition" is valid, and otherwise False. :returns: True if value is valid, False otherwise :rtype: boolean ] variable[comp_str] assign[=] call[name[self]._standard_value][constant[0]] variable[packed] assign[=] list[[]] call[name[packed].append, parameter[constant[(]]] call[name[packed].append, parameter[name[CPEComponent2_3_URI].SEPARATOR_PACKED_EDITION]] call[name[packed].append, parameter[name[CPEComponent2_3_URI]._string]] call[name[packed].append, parameter[constant[){5}]]] variable[value_pattern] assign[=] list[[]] call[name[value_pattern].append, parameter[constant[^(]]] call[name[value_pattern].append, parameter[name[CPEComponent2_3_URI]._string]] call[name[value_pattern].append, parameter[constant[|]]] call[name[value_pattern].append, parameter[call[constant[].join, parameter[name[packed]]]]] call[name[value_pattern].append, parameter[constant[)$]]] variable[value_rxc] assign[=] call[name[re].compile, parameter[call[constant[].join, parameter[name[value_pattern]]]]] return[compare[call[name[value_rxc].match, parameter[name[comp_str]]] is_not constant[None]]]
keyword[def] identifier[_is_valid_edition] ( identifier[self] ): literal[string] identifier[comp_str] = identifier[self] . identifier[_standard_value] [ literal[int] ] identifier[packed] =[] identifier[packed] . identifier[append] ( literal[string] ) identifier[packed] . identifier[append] ( identifier[CPEComponent2_3_URI] . identifier[SEPARATOR_PACKED_EDITION] ) identifier[packed] . identifier[append] ( identifier[CPEComponent2_3_URI] . identifier[_string] ) identifier[packed] . identifier[append] ( literal[string] ) identifier[value_pattern] =[] identifier[value_pattern] . identifier[append] ( literal[string] ) identifier[value_pattern] . identifier[append] ( identifier[CPEComponent2_3_URI] . identifier[_string] ) identifier[value_pattern] . identifier[append] ( literal[string] ) identifier[value_pattern] . identifier[append] ( literal[string] . identifier[join] ( identifier[packed] )) identifier[value_pattern] . identifier[append] ( literal[string] ) identifier[value_rxc] = identifier[re] . identifier[compile] ( literal[string] . identifier[join] ( identifier[value_pattern] )) keyword[return] identifier[value_rxc] . identifier[match] ( identifier[comp_str] ) keyword[is] keyword[not] keyword[None]
def _is_valid_edition(self): """ Return True if the input value of attribute "edition" is valid, and otherwise False. :returns: True if value is valid, False otherwise :rtype: boolean """ comp_str = self._standard_value[0] packed = [] packed.append('(') packed.append(CPEComponent2_3_URI.SEPARATOR_PACKED_EDITION) packed.append(CPEComponent2_3_URI._string) packed.append('){5}') value_pattern = [] value_pattern.append('^(') value_pattern.append(CPEComponent2_3_URI._string) value_pattern.append('|') value_pattern.append(''.join(packed)) value_pattern.append(')$') value_rxc = re.compile(''.join(value_pattern)) return value_rxc.match(comp_str) is not None
def set_position(x, y, stream=STD_OUTPUT_HANDLE): ''' Sets current position of the cursor. ''' stream = kernel32.GetStdHandle(stream) value = x + (y << 16) kernel32.SetConsoleCursorPosition(stream, c_long(value))
def function[set_position, parameter[x, y, stream]]: constant[ Sets current position of the cursor. ] variable[stream] assign[=] call[name[kernel32].GetStdHandle, parameter[name[stream]]] variable[value] assign[=] binary_operation[name[x] + binary_operation[name[y] <ast.LShift object at 0x7da2590d69e0> constant[16]]] call[name[kernel32].SetConsoleCursorPosition, parameter[name[stream], call[name[c_long], parameter[name[value]]]]]
keyword[def] identifier[set_position] ( identifier[x] , identifier[y] , identifier[stream] = identifier[STD_OUTPUT_HANDLE] ): literal[string] identifier[stream] = identifier[kernel32] . identifier[GetStdHandle] ( identifier[stream] ) identifier[value] = identifier[x] +( identifier[y] << literal[int] ) identifier[kernel32] . identifier[SetConsoleCursorPosition] ( identifier[stream] , identifier[c_long] ( identifier[value] ))
def set_position(x, y, stream=STD_OUTPUT_HANDLE): """ Sets current position of the cursor. """ stream = kernel32.GetStdHandle(stream) value = x + (y << 16) kernel32.SetConsoleCursorPosition(stream, c_long(value))
def get_current_element(self, ): """Return the currently open Shot or Asset :returns: the currently open element :rtype: :class:`jukeboxcore.djadapter.models.Asset` | :class:`jukeboxcore.djadapter.models.Shot` | None :raises: :class:`djadapter.models.TaskFile.DoesNotExist` """ n = jbscene.get_current_scene_node() if not n: return None tfid = cmds.getAttr("%s.taskfile_id" % n) try: tf = djadapter.taskfiles.get(pk=tfid) return tf.task.element except djadapter.models.TaskFile.DoesNotExist: raise djadapter.models.TaskFile.DoesNotExist("Could not find the taskfile that was set on the scene node. Id was %s" % tfid)
def function[get_current_element, parameter[self]]: constant[Return the currently open Shot or Asset :returns: the currently open element :rtype: :class:`jukeboxcore.djadapter.models.Asset` | :class:`jukeboxcore.djadapter.models.Shot` | None :raises: :class:`djadapter.models.TaskFile.DoesNotExist` ] variable[n] assign[=] call[name[jbscene].get_current_scene_node, parameter[]] if <ast.UnaryOp object at 0x7da1b1da4220> begin[:] return[constant[None]] variable[tfid] assign[=] call[name[cmds].getAttr, parameter[binary_operation[constant[%s.taskfile_id] <ast.Mod object at 0x7da2590d6920> name[n]]]] <ast.Try object at 0x7da1b1da4340>
keyword[def] identifier[get_current_element] ( identifier[self] ,): literal[string] identifier[n] = identifier[jbscene] . identifier[get_current_scene_node] () keyword[if] keyword[not] identifier[n] : keyword[return] keyword[None] identifier[tfid] = identifier[cmds] . identifier[getAttr] ( literal[string] % identifier[n] ) keyword[try] : identifier[tf] = identifier[djadapter] . identifier[taskfiles] . identifier[get] ( identifier[pk] = identifier[tfid] ) keyword[return] identifier[tf] . identifier[task] . identifier[element] keyword[except] identifier[djadapter] . identifier[models] . identifier[TaskFile] . identifier[DoesNotExist] : keyword[raise] identifier[djadapter] . identifier[models] . identifier[TaskFile] . identifier[DoesNotExist] ( literal[string] % identifier[tfid] )
def get_current_element(self): """Return the currently open Shot or Asset :returns: the currently open element :rtype: :class:`jukeboxcore.djadapter.models.Asset` | :class:`jukeboxcore.djadapter.models.Shot` | None :raises: :class:`djadapter.models.TaskFile.DoesNotExist` """ n = jbscene.get_current_scene_node() if not n: return None # depends on [control=['if'], data=[]] tfid = cmds.getAttr('%s.taskfile_id' % n) try: tf = djadapter.taskfiles.get(pk=tfid) return tf.task.element # depends on [control=['try'], data=[]] except djadapter.models.TaskFile.DoesNotExist: raise djadapter.models.TaskFile.DoesNotExist('Could not find the taskfile that was set on the scene node. Id was %s' % tfid) # depends on [control=['except'], data=[]]
def _Rphideriv(self,R,phi=0.,t=0.): """ NAME: _Rphideriv PURPOSE: evaluate the mixed radial-azimuthal derivative INPUT: R phi t OUTPUT: d2phi/dRdphi HISTORY: 2016-06-02 - Written - Bovy (UofT) """ return self._Pot.Rphideriv(R,0.,phi=phi,t=t,use_physical=False)
def function[_Rphideriv, parameter[self, R, phi, t]]: constant[ NAME: _Rphideriv PURPOSE: evaluate the mixed radial-azimuthal derivative INPUT: R phi t OUTPUT: d2phi/dRdphi HISTORY: 2016-06-02 - Written - Bovy (UofT) ] return[call[name[self]._Pot.Rphideriv, parameter[name[R], constant[0.0]]]]
keyword[def] identifier[_Rphideriv] ( identifier[self] , identifier[R] , identifier[phi] = literal[int] , identifier[t] = literal[int] ): literal[string] keyword[return] identifier[self] . identifier[_Pot] . identifier[Rphideriv] ( identifier[R] , literal[int] , identifier[phi] = identifier[phi] , identifier[t] = identifier[t] , identifier[use_physical] = keyword[False] )
def _Rphideriv(self, R, phi=0.0, t=0.0): """ NAME: _Rphideriv PURPOSE: evaluate the mixed radial-azimuthal derivative INPUT: R phi t OUTPUT: d2phi/dRdphi HISTORY: 2016-06-02 - Written - Bovy (UofT) """ return self._Pot.Rphideriv(R, 0.0, phi=phi, t=t, use_physical=False)
def to_cfn_resource_name(name): # type: (str) -> str """Transform a name to a valid cfn name. This will convert the provided name to a CamelCase name. It's possible that the conversion to a CFN resource name can result in name collisions. It's up to the caller to handle name collisions appropriately. """ if not name: raise ValueError("Invalid name: %r" % name) word_separators = ['-', '_'] for word_separator in word_separators: word_parts = [p for p in name.split(word_separator) if p] name = ''.join([w[0].upper() + w[1:] for w in word_parts]) return re.sub(r'[^A-Za-z0-9]+', '', name)
def function[to_cfn_resource_name, parameter[name]]: constant[Transform a name to a valid cfn name. This will convert the provided name to a CamelCase name. It's possible that the conversion to a CFN resource name can result in name collisions. It's up to the caller to handle name collisions appropriately. ] if <ast.UnaryOp object at 0x7da1b1fca8c0> begin[:] <ast.Raise object at 0x7da1b1fca620> variable[word_separators] assign[=] list[[<ast.Constant object at 0x7da1b1fcbdc0>, <ast.Constant object at 0x7da1b1fc9840>]] for taget[name[word_separator]] in starred[name[word_separators]] begin[:] variable[word_parts] assign[=] <ast.ListComp object at 0x7da1b1fc8520> variable[name] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da1b1fc9b70>]] return[call[name[re].sub, parameter[constant[[^A-Za-z0-9]+], constant[], name[name]]]]
keyword[def] identifier[to_cfn_resource_name] ( identifier[name] ): literal[string] keyword[if] keyword[not] identifier[name] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[name] ) identifier[word_separators] =[ literal[string] , literal[string] ] keyword[for] identifier[word_separator] keyword[in] identifier[word_separators] : identifier[word_parts] =[ identifier[p] keyword[for] identifier[p] keyword[in] identifier[name] . identifier[split] ( identifier[word_separator] ) keyword[if] identifier[p] ] identifier[name] = literal[string] . identifier[join] ([ identifier[w] [ literal[int] ]. identifier[upper] ()+ identifier[w] [ literal[int] :] keyword[for] identifier[w] keyword[in] identifier[word_parts] ]) keyword[return] identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[name] )
def to_cfn_resource_name(name): # type: (str) -> str "Transform a name to a valid cfn name.\n\n This will convert the provided name to a CamelCase name.\n It's possible that the conversion to a CFN resource name\n can result in name collisions. It's up to the caller\n to handle name collisions appropriately.\n\n " if not name: raise ValueError('Invalid name: %r' % name) # depends on [control=['if'], data=[]] word_separators = ['-', '_'] for word_separator in word_separators: word_parts = [p for p in name.split(word_separator) if p] name = ''.join([w[0].upper() + w[1:] for w in word_parts]) # depends on [control=['for'], data=['word_separator']] return re.sub('[^A-Za-z0-9]+', '', name)
def build_score_matrix(a, b, score_func, gap_penalty): """Calculate the score and traceback matrices for two input sequences and scoring functions. Returns: A tuple of (score-matrix, traceback-matrix). Each entry in the score-matrix is a numeric score. Each entry in the traceback-matrix is a logical ORing of the direction bitfields. """ score_matrix = Matrix(rows=len(a) + 1, cols=len(b) + 1) traceback_matrix = Matrix(rows=len(a) + 1, cols=len(b) + 1, type_code='B') for row in range(1, score_matrix.rows): for col in range(1, score_matrix.cols): match_score = score_func(a[row - 1], b[col - 1]) scores = sorted( ((score_matrix[(row - 1, col - 1)] + match_score, Direction.DIAG), (score_matrix[(row - 1, col)] - gap_penalty(1), Direction.UP), (score_matrix[(row, col - 1)] - gap_penalty(1), Direction.LEFT), (0, Direction.NONE)), key=lambda x: x[0], reverse=True) max_score = scores[0][0] scores = itertools.takewhile( lambda x: x[0] == max_score, scores) score_matrix[row, col] = max_score for _, direction in scores: traceback_matrix[row, col] = traceback_matrix[row, col] | direction.value return score_matrix, traceback_matrix
def function[build_score_matrix, parameter[a, b, score_func, gap_penalty]]: constant[Calculate the score and traceback matrices for two input sequences and scoring functions. Returns: A tuple of (score-matrix, traceback-matrix). Each entry in the score-matrix is a numeric score. Each entry in the traceback-matrix is a logical ORing of the direction bitfields. ] variable[score_matrix] assign[=] call[name[Matrix], parameter[]] variable[traceback_matrix] assign[=] call[name[Matrix], parameter[]] for taget[name[row]] in starred[call[name[range], parameter[constant[1], name[score_matrix].rows]]] begin[:] for taget[name[col]] in starred[call[name[range], parameter[constant[1], name[score_matrix].cols]]] begin[:] variable[match_score] assign[=] call[name[score_func], parameter[call[name[a]][binary_operation[name[row] - constant[1]]], call[name[b]][binary_operation[name[col] - constant[1]]]]] variable[scores] assign[=] call[name[sorted], parameter[tuple[[<ast.Tuple object at 0x7da1b092d2a0>, <ast.Tuple object at 0x7da1b092d0c0>, <ast.Tuple object at 0x7da1b092da20>, <ast.Tuple object at 0x7da1b092c3d0>]]]] variable[max_score] assign[=] call[call[name[scores]][constant[0]]][constant[0]] variable[scores] assign[=] call[name[itertools].takewhile, parameter[<ast.Lambda object at 0x7da1b092fbe0>, name[scores]]] call[name[score_matrix]][tuple[[<ast.Name object at 0x7da1b092ded0>, <ast.Name object at 0x7da1b092f280>]]] assign[=] name[max_score] for taget[tuple[[<ast.Name object at 0x7da1b092e410>, <ast.Name object at 0x7da1b092f730>]]] in starred[name[scores]] begin[:] call[name[traceback_matrix]][tuple[[<ast.Name object at 0x7da1b092d9f0>, <ast.Name object at 0x7da1b092fa30>]]] assign[=] binary_operation[call[name[traceback_matrix]][tuple[[<ast.Name object at 0x7da1b092fc40>, <ast.Name object at 0x7da1b092eef0>]]] <ast.BitOr object at 0x7da2590d6aa0> name[direction].value] return[tuple[[<ast.Name object at 0x7da1b092c670>, <ast.Name object at 0x7da1b092c8b0>]]]
keyword[def] identifier[build_score_matrix] ( identifier[a] , identifier[b] , identifier[score_func] , identifier[gap_penalty] ): literal[string] identifier[score_matrix] = identifier[Matrix] ( identifier[rows] = identifier[len] ( identifier[a] )+ literal[int] , identifier[cols] = identifier[len] ( identifier[b] )+ literal[int] ) identifier[traceback_matrix] = identifier[Matrix] ( identifier[rows] = identifier[len] ( identifier[a] )+ literal[int] , identifier[cols] = identifier[len] ( identifier[b] )+ literal[int] , identifier[type_code] = literal[string] ) keyword[for] identifier[row] keyword[in] identifier[range] ( literal[int] , identifier[score_matrix] . identifier[rows] ): keyword[for] identifier[col] keyword[in] identifier[range] ( literal[int] , identifier[score_matrix] . identifier[cols] ): identifier[match_score] = identifier[score_func] ( identifier[a] [ identifier[row] - literal[int] ], identifier[b] [ identifier[col] - literal[int] ]) identifier[scores] = identifier[sorted] ( (( identifier[score_matrix] [( identifier[row] - literal[int] , identifier[col] - literal[int] )]+ identifier[match_score] , identifier[Direction] . identifier[DIAG] ), ( identifier[score_matrix] [( identifier[row] - literal[int] , identifier[col] )]- identifier[gap_penalty] ( literal[int] ), identifier[Direction] . identifier[UP] ), ( identifier[score_matrix] [( identifier[row] , identifier[col] - literal[int] )]- identifier[gap_penalty] ( literal[int] ), identifier[Direction] . identifier[LEFT] ), ( literal[int] , identifier[Direction] . identifier[NONE] )), identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ], identifier[reverse] = keyword[True] ) identifier[max_score] = identifier[scores] [ literal[int] ][ literal[int] ] identifier[scores] = identifier[itertools] . 
identifier[takewhile] ( keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]== identifier[max_score] , identifier[scores] ) identifier[score_matrix] [ identifier[row] , identifier[col] ]= identifier[max_score] keyword[for] identifier[_] , identifier[direction] keyword[in] identifier[scores] : identifier[traceback_matrix] [ identifier[row] , identifier[col] ]= identifier[traceback_matrix] [ identifier[row] , identifier[col] ]| identifier[direction] . identifier[value] keyword[return] identifier[score_matrix] , identifier[traceback_matrix]
def build_score_matrix(a, b, score_func, gap_penalty): """Calculate the score and traceback matrices for two input sequences and scoring functions. Returns: A tuple of (score-matrix, traceback-matrix). Each entry in the score-matrix is a numeric score. Each entry in the traceback-matrix is a logical ORing of the direction bitfields. """ score_matrix = Matrix(rows=len(a) + 1, cols=len(b) + 1) traceback_matrix = Matrix(rows=len(a) + 1, cols=len(b) + 1, type_code='B') for row in range(1, score_matrix.rows): for col in range(1, score_matrix.cols): match_score = score_func(a[row - 1], b[col - 1]) scores = sorted(((score_matrix[row - 1, col - 1] + match_score, Direction.DIAG), (score_matrix[row - 1, col] - gap_penalty(1), Direction.UP), (score_matrix[row, col - 1] - gap_penalty(1), Direction.LEFT), (0, Direction.NONE)), key=lambda x: x[0], reverse=True) max_score = scores[0][0] scores = itertools.takewhile(lambda x: x[0] == max_score, scores) score_matrix[row, col] = max_score for (_, direction) in scores: traceback_matrix[row, col] = traceback_matrix[row, col] | direction.value # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['col']] # depends on [control=['for'], data=['row']] return (score_matrix, traceback_matrix)
def _cache_from_source(path: str) -> str: """Return the path to the cached file for the given path. The original path does not have to exist.""" cache_path, cache_file = os.path.split(importlib.util.cache_from_source(path)) filename, _ = os.path.splitext(cache_file) return os.path.join(cache_path, filename + ".lpyc")
def function[_cache_from_source, parameter[path]]: constant[Return the path to the cached file for the given path. The original path does not have to exist.] <ast.Tuple object at 0x7da1b023eec0> assign[=] call[name[os].path.split, parameter[call[name[importlib].util.cache_from_source, parameter[name[path]]]]] <ast.Tuple object at 0x7da1b03b9a50> assign[=] call[name[os].path.splitext, parameter[name[cache_file]]] return[call[name[os].path.join, parameter[name[cache_path], binary_operation[name[filename] + constant[.lpyc]]]]]
keyword[def] identifier[_cache_from_source] ( identifier[path] : identifier[str] )-> identifier[str] : literal[string] identifier[cache_path] , identifier[cache_file] = identifier[os] . identifier[path] . identifier[split] ( identifier[importlib] . identifier[util] . identifier[cache_from_source] ( identifier[path] )) identifier[filename] , identifier[_] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[cache_file] ) keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[cache_path] , identifier[filename] + literal[string] )
def _cache_from_source(path: str) -> str: """Return the path to the cached file for the given path. The original path does not have to exist.""" (cache_path, cache_file) = os.path.split(importlib.util.cache_from_source(path)) (filename, _) = os.path.splitext(cache_file) return os.path.join(cache_path, filename + '.lpyc')
def get_checksum(self): """Return the md5 checksum of the delta file.""" with open(self.file, 'rb') as f: cs = md5(f.read()).hexdigest() return cs
def function[get_checksum, parameter[self]]: constant[Return the md5 checksum of the delta file.] with call[name[open], parameter[name[self].file, constant[rb]]] begin[:] variable[cs] assign[=] call[call[name[md5], parameter[call[name[f].read, parameter[]]]].hexdigest, parameter[]] return[name[cs]]
keyword[def] identifier[get_checksum] ( identifier[self] ): literal[string] keyword[with] identifier[open] ( identifier[self] . identifier[file] , literal[string] ) keyword[as] identifier[f] : identifier[cs] = identifier[md5] ( identifier[f] . identifier[read] ()). identifier[hexdigest] () keyword[return] identifier[cs]
def get_checksum(self): """Return the md5 checksum of the delta file.""" with open(self.file, 'rb') as f: cs = md5(f.read()).hexdigest() # depends on [control=['with'], data=['f']] return cs
def expand_cmd_labels(self): """Expand make-style variables in cmd parameters. Currently: $(location <foo>) Location of one dependency or output file. $(locations <foo>) Space-delimited list of foo's output files. $(SRCS) Space-delimited list of this rule's source files. $(OUTS) Space-delimited list of this rule's output files. $(@D) Full path to the output directory for this rule. $@ Path to the output (single) file for this rule. """ cmd = self.cmd def _expand_onesrc(): """Expand $@ or $(@) to one output file.""" outs = self.rule.params['outs'] or [] if len(outs) != 1: raise error.TargetBuildFailed( self.address, '$@ substitution requires exactly one output file, but ' 'this rule has %s of them: %s' % (len(outs), outs)) else: return os.path.join(self.buildroot, self.path_to_this_rule, outs[0]) # TODO: this function is dumb and way too long def _expand_makevar(re_match): """Expands one substitution symbol.""" # Expand $(location foo) and $(locations foo): label = None tagstr = re_match.groups()[0] tag_location = re.match( r'\s*location\s+([A-Za-z0-9/\-_:\.]+)\s*', tagstr) tag_locations = re.match( r'\s*locations\s+([A-Za-z0-9/\-_:\.]+)\s*', tagstr) if tag_location: label = tag_location.groups()[0] elif tag_locations: label = tag_locations.groups()[0] if label: # Is it a filename found in the outputs of this rule? if label in self.rule.params['outs']: return os.path.join(self.buildroot, self.address.repo, self.address.path, label) # Is it an address found in the deps of this rule? addr = self.rule.makeaddress(label) if addr not in self.rule.composed_deps(): raise error.TargetBuildFailed( self.address, '%s is referenced in cmd but is neither an output ' 'file from this rule nor a dependency of this rule.' 
% label) else: paths = [x for x in self.rulefor(addr).output_files] if len(paths) is 0: raise error.TargetBuildFailed( self.address, 'cmd refers to %s, but it has no output files.') elif len(paths) > 1 and tag_location: raise error.TargetBuildFailed( self.address, 'Bad substitution in cmd: Expected exactly one ' 'file, but %s expands to %s files.' % ( addr, len(paths))) else: return ' '.join( [os.path.join(self.buildroot, x) for x in paths]) # Expand $(OUTS): elif re.match(r'OUTS', tagstr): return ' '.join( [os.path.join(self.buildroot, x) for x in self.rule.output_files]) # Expand $(SRCS): elif re.match(r'SRCS', tagstr): return ' '.join(os.path.join(self.path_to_this_rule, x) for x in self.rule.params['srcs'] or []) # Expand $(@D): elif re.match(r'\s*@D\s*', tagstr): ruledir = os.path.join(self.buildroot, self.path_to_this_rule) return ruledir # Expand $(@), $@: elif re.match(r'\s*@\s*', tagstr): return _expand_onesrc() else: raise error.TargetBuildFailed( self.address, '[%s] Unrecognized substitution in cmd: %s' % ( self.address, re_match.group())) cmd, _ = re.subn(self.paren_tag_re, _expand_makevar, cmd) # Match tags starting with $ without parens. Will also catch parens, so # this goes after the tag_re substitutions. cmd, _ = re.subn(self.noparen_tag_re, _expand_makevar, cmd) # Now that we're done looking for $(blabla) and $bla parameters, clean # up any $$ escaping: cmd, _ = re.subn(r'\$\$', '$', cmd) # Maybe try heuristic label expansion? Actually on second thought # that's a terrible idea. Use the explicit syntax, you lazy slobs. ;-) # TODO: Maybe consider other expansions from the gnu make manual? # $^ might be useful. # http://www.gnu.org/software/make/manual/html_node/Automatic-Variables.html#Automatic-Variables self.cmd = cmd
def function[expand_cmd_labels, parameter[self]]: constant[Expand make-style variables in cmd parameters. Currently: $(location <foo>) Location of one dependency or output file. $(locations <foo>) Space-delimited list of foo's output files. $(SRCS) Space-delimited list of this rule's source files. $(OUTS) Space-delimited list of this rule's output files. $(@D) Full path to the output directory for this rule. $@ Path to the output (single) file for this rule. ] variable[cmd] assign[=] name[self].cmd def function[_expand_onesrc, parameter[]]: constant[Expand $@ or $(@) to one output file.] variable[outs] assign[=] <ast.BoolOp object at 0x7da20e962050> if compare[call[name[len], parameter[name[outs]]] not_equal[!=] constant[1]] begin[:] <ast.Raise object at 0x7da20e960130> def function[_expand_makevar, parameter[re_match]]: constant[Expands one substitution symbol.] variable[label] assign[=] constant[None] variable[tagstr] assign[=] call[call[name[re_match].groups, parameter[]]][constant[0]] variable[tag_location] assign[=] call[name[re].match, parameter[constant[\s*location\s+([A-Za-z0-9/\-_:\.]+)\s*], name[tagstr]]] variable[tag_locations] assign[=] call[name[re].match, parameter[constant[\s*locations\s+([A-Za-z0-9/\-_:\.]+)\s*], name[tagstr]]] if name[tag_location] begin[:] variable[label] assign[=] call[call[name[tag_location].groups, parameter[]]][constant[0]] if name[label] begin[:] if compare[name[label] in call[name[self].rule.params][constant[outs]]] begin[:] return[call[name[os].path.join, parameter[name[self].buildroot, name[self].address.repo, name[self].address.path, name[label]]]] variable[addr] assign[=] call[name[self].rule.makeaddress, parameter[name[label]]] if compare[name[addr] <ast.NotIn object at 0x7da2590d7190> call[name[self].rule.composed_deps, parameter[]]] begin[:] <ast.Raise object at 0x7da204620130> <ast.Tuple object at 0x7da20e960220> assign[=] call[name[re].subn, parameter[name[self].paren_tag_re, name[_expand_makevar], name[cmd]]] 
<ast.Tuple object at 0x7da20e961f00> assign[=] call[name[re].subn, parameter[name[self].noparen_tag_re, name[_expand_makevar], name[cmd]]] <ast.Tuple object at 0x7da20e961510> assign[=] call[name[re].subn, parameter[constant[\$\$], constant[$], name[cmd]]] name[self].cmd assign[=] name[cmd]
keyword[def] identifier[expand_cmd_labels] ( identifier[self] ): literal[string] identifier[cmd] = identifier[self] . identifier[cmd] keyword[def] identifier[_expand_onesrc] (): literal[string] identifier[outs] = identifier[self] . identifier[rule] . identifier[params] [ literal[string] ] keyword[or] [] keyword[if] identifier[len] ( identifier[outs] )!= literal[int] : keyword[raise] identifier[error] . identifier[TargetBuildFailed] ( identifier[self] . identifier[address] , literal[string] literal[string] %( identifier[len] ( identifier[outs] ), identifier[outs] )) keyword[else] : keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[buildroot] , identifier[self] . identifier[path_to_this_rule] , identifier[outs] [ literal[int] ]) keyword[def] identifier[_expand_makevar] ( identifier[re_match] ): literal[string] identifier[label] = keyword[None] identifier[tagstr] = identifier[re_match] . identifier[groups] ()[ literal[int] ] identifier[tag_location] = identifier[re] . identifier[match] ( literal[string] , identifier[tagstr] ) identifier[tag_locations] = identifier[re] . identifier[match] ( literal[string] , identifier[tagstr] ) keyword[if] identifier[tag_location] : identifier[label] = identifier[tag_location] . identifier[groups] ()[ literal[int] ] keyword[elif] identifier[tag_locations] : identifier[label] = identifier[tag_locations] . identifier[groups] ()[ literal[int] ] keyword[if] identifier[label] : keyword[if] identifier[label] keyword[in] identifier[self] . identifier[rule] . identifier[params] [ literal[string] ]: keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[buildroot] , identifier[self] . identifier[address] . identifier[repo] , identifier[self] . identifier[address] . identifier[path] , identifier[label] ) identifier[addr] = identifier[self] . identifier[rule] . 
identifier[makeaddress] ( identifier[label] ) keyword[if] identifier[addr] keyword[not] keyword[in] identifier[self] . identifier[rule] . identifier[composed_deps] (): keyword[raise] identifier[error] . identifier[TargetBuildFailed] ( identifier[self] . identifier[address] , literal[string] literal[string] % identifier[label] ) keyword[else] : identifier[paths] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[rulefor] ( identifier[addr] ). identifier[output_files] ] keyword[if] identifier[len] ( identifier[paths] ) keyword[is] literal[int] : keyword[raise] identifier[error] . identifier[TargetBuildFailed] ( identifier[self] . identifier[address] , literal[string] ) keyword[elif] identifier[len] ( identifier[paths] )> literal[int] keyword[and] identifier[tag_location] : keyword[raise] identifier[error] . identifier[TargetBuildFailed] ( identifier[self] . identifier[address] , literal[string] literal[string] %( identifier[addr] , identifier[len] ( identifier[paths] ))) keyword[else] : keyword[return] literal[string] . identifier[join] ( [ identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[buildroot] , identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[paths] ]) keyword[elif] identifier[re] . identifier[match] ( literal[string] , identifier[tagstr] ): keyword[return] literal[string] . identifier[join] ( [ identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[buildroot] , identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[self] . identifier[rule] . identifier[output_files] ]) keyword[elif] identifier[re] . identifier[match] ( literal[string] , identifier[tagstr] ): keyword[return] literal[string] . identifier[join] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path_to_this_rule] , identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[self] . identifier[rule] . 
identifier[params] [ literal[string] ] keyword[or] []) keyword[elif] identifier[re] . identifier[match] ( literal[string] , identifier[tagstr] ): identifier[ruledir] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[buildroot] , identifier[self] . identifier[path_to_this_rule] ) keyword[return] identifier[ruledir] keyword[elif] identifier[re] . identifier[match] ( literal[string] , identifier[tagstr] ): keyword[return] identifier[_expand_onesrc] () keyword[else] : keyword[raise] identifier[error] . identifier[TargetBuildFailed] ( identifier[self] . identifier[address] , literal[string] %( identifier[self] . identifier[address] , identifier[re_match] . identifier[group] ())) identifier[cmd] , identifier[_] = identifier[re] . identifier[subn] ( identifier[self] . identifier[paren_tag_re] , identifier[_expand_makevar] , identifier[cmd] ) identifier[cmd] , identifier[_] = identifier[re] . identifier[subn] ( identifier[self] . identifier[noparen_tag_re] , identifier[_expand_makevar] , identifier[cmd] ) identifier[cmd] , identifier[_] = identifier[re] . identifier[subn] ( literal[string] , literal[string] , identifier[cmd] ) identifier[self] . identifier[cmd] = identifier[cmd]
def expand_cmd_labels(self): """Expand make-style variables in cmd parameters. Currently: $(location <foo>) Location of one dependency or output file. $(locations <foo>) Space-delimited list of foo's output files. $(SRCS) Space-delimited list of this rule's source files. $(OUTS) Space-delimited list of this rule's output files. $(@D) Full path to the output directory for this rule. $@ Path to the output (single) file for this rule. """ cmd = self.cmd def _expand_onesrc(): """Expand $@ or $(@) to one output file.""" outs = self.rule.params['outs'] or [] if len(outs) != 1: raise error.TargetBuildFailed(self.address, '$@ substitution requires exactly one output file, but this rule has %s of them: %s' % (len(outs), outs)) # depends on [control=['if'], data=[]] else: return os.path.join(self.buildroot, self.path_to_this_rule, outs[0]) # TODO: this function is dumb and way too long def _expand_makevar(re_match): """Expands one substitution symbol.""" # Expand $(location foo) and $(locations foo): label = None tagstr = re_match.groups()[0] tag_location = re.match('\\s*location\\s+([A-Za-z0-9/\\-_:\\.]+)\\s*', tagstr) tag_locations = re.match('\\s*locations\\s+([A-Za-z0-9/\\-_:\\.]+)\\s*', tagstr) if tag_location: label = tag_location.groups()[0] # depends on [control=['if'], data=[]] elif tag_locations: label = tag_locations.groups()[0] # depends on [control=['if'], data=[]] if label: # Is it a filename found in the outputs of this rule? if label in self.rule.params['outs']: return os.path.join(self.buildroot, self.address.repo, self.address.path, label) # depends on [control=['if'], data=['label']] # Is it an address found in the deps of this rule? addr = self.rule.makeaddress(label) if addr not in self.rule.composed_deps(): raise error.TargetBuildFailed(self.address, '%s is referenced in cmd but is neither an output file from this rule nor a dependency of this rule.' 
% label) # depends on [control=['if'], data=[]] else: paths = [x for x in self.rulefor(addr).output_files] if len(paths) is 0: raise error.TargetBuildFailed(self.address, 'cmd refers to %s, but it has no output files.') # depends on [control=['if'], data=[]] elif len(paths) > 1 and tag_location: raise error.TargetBuildFailed(self.address, 'Bad substitution in cmd: Expected exactly one file, but %s expands to %s files.' % (addr, len(paths))) # depends on [control=['if'], data=[]] else: return ' '.join([os.path.join(self.buildroot, x) for x in paths]) # depends on [control=['if'], data=[]] # Expand $(OUTS): elif re.match('OUTS', tagstr): return ' '.join([os.path.join(self.buildroot, x) for x in self.rule.output_files]) # depends on [control=['if'], data=[]] # Expand $(SRCS): elif re.match('SRCS', tagstr): return ' '.join((os.path.join(self.path_to_this_rule, x) for x in self.rule.params['srcs'] or [])) # depends on [control=['if'], data=[]] # Expand $(@D): elif re.match('\\s*@D\\s*', tagstr): ruledir = os.path.join(self.buildroot, self.path_to_this_rule) return ruledir # depends on [control=['if'], data=[]] # Expand $(@), $@: elif re.match('\\s*@\\s*', tagstr): return _expand_onesrc() # depends on [control=['if'], data=[]] else: raise error.TargetBuildFailed(self.address, '[%s] Unrecognized substitution in cmd: %s' % (self.address, re_match.group())) (cmd, _) = re.subn(self.paren_tag_re, _expand_makevar, cmd) # Match tags starting with $ without parens. Will also catch parens, so # this goes after the tag_re substitutions. (cmd, _) = re.subn(self.noparen_tag_re, _expand_makevar, cmd) # Now that we're done looking for $(blabla) and $bla parameters, clean # up any $$ escaping: (cmd, _) = re.subn('\\$\\$', '$', cmd) # Maybe try heuristic label expansion? Actually on second thought # that's a terrible idea. Use the explicit syntax, you lazy slobs. ;-) # TODO: Maybe consider other expansions from the gnu make manual? # $^ might be useful. 
# http://www.gnu.org/software/make/manual/html_node/Automatic-Variables.html#Automatic-Variables self.cmd = cmd
def main():
    """
    NAME
        eigs_s.py

    DESCRIPTION
        converts eigenparamters format data to s format

    SYNTAX
        eigs_s.py [-h][-i][command line options][<filename]

    OPTIONS
        -h prints help message and quits
        -i allows interactive file name entry
        -f FILE, specifies input file name
        -F FILE, specifies output file name
        < filenmae, reads file from standard input (Unix-like operating systems only)

    INPUT
        tau_i, dec_i inc_i of eigenvectors

    OUTPUT
        x11,x22,x33,x12,x23,x13
    """
    # Renamed from `file` to avoid shadowing the builtin.
    infile = ""
    if '-h' in sys.argv:
        print(main.__doc__)
        sys.exit()
    elif '-i' in sys.argv:
        infile = input("Enter eigenparameters data file name: ")
    elif '-f' in sys.argv:
        ind = sys.argv.index('-f')
        infile = sys.argv[ind + 1]
    if infile != "":
        # Context manager guarantees the input file is closed.
        with open(infile, 'r') as f:
            data = f.readlines()
    else:
        data = sys.stdin.readlines()
    ofile = ""
    out = None
    if '-F' in sys.argv:
        ind = sys.argv.index('-F')
        ofile = sys.argv[ind + 1]
        # BUG FIX: the original used open(ofile, 'w + a'), an invalid mode
        # string that raises ValueError in Python 3.  Plain write mode is used
        # so each run produces a fresh output file.
        out = open(ofile, 'w')
    for line in data:
        # Each input line: tau_1 dec_1 inc_1 tau_2 dec_2 inc_2 tau_3 dec_3 inc_3
        tau, Vdirs = [], []
        rec = line.split()
        for k in range(0, 9, 3):
            tau.append(float(rec[k]))
            Vdirs.append((float(rec[k + 1]), float(rec[k + 2])))
        # Convert eigenparameters to the six tensor elements (project helper).
        srot = pmag.doeigs_s(tau, Vdirs)
        outstring = ""
        for s in srot:
            outstring += '%10.8f ' % (s)
        if ofile == "":
            print(outstring)
        else:
            out.write(outstring + '\n')
    if out is not None:
        out.close()
def function[main, parameter[]]: constant[ NAME eigs_s.py DESCRIPTION converts eigenparamters format data to s format SYNTAX eigs_s.py [-h][-i][command line options][<filename] OPTIONS -h prints help message and quits -i allows interactive file name entry -f FILE, specifies input file name -F FILE, specifies output file name < filenmae, reads file from standard input (Unix-like operating systems only) INPUT tau_i, dec_i inc_i of eigenvectors OUTPUT x11,x22,x33,x12,x23,x13 ] variable[file] assign[=] constant[] if compare[constant[-h] in name[sys].argv] begin[:] call[name[print], parameter[name[main].__doc__]] call[name[sys].exit, parameter[]] if compare[name[file] not_equal[!=] constant[]] begin[:] variable[f] assign[=] call[name[open], parameter[name[file], constant[r]]] variable[data] assign[=] call[name[f].readlines, parameter[]] call[name[f].close, parameter[]] variable[ofile] assign[=] constant[] if compare[constant[-F] in name[sys].argv] begin[:] variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-F]]] variable[ofile] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]] variable[out] assign[=] call[name[open], parameter[name[ofile], constant[w + a]]] variable[file_outstring] assign[=] constant[] for taget[name[line]] in starred[name[data]] begin[:] <ast.Tuple object at 0x7da204960520> assign[=] tuple[[<ast.List object at 0x7da204962500>, <ast.List object at 0x7da204963be0>]] variable[rec] assign[=] call[name[line].split, parameter[]] for taget[name[k]] in starred[call[name[range], parameter[constant[0], constant[9], constant[3]]]] begin[:] call[name[tau].append, parameter[call[name[float], parameter[call[name[rec]][name[k]]]]]] call[name[Vdirs].append, parameter[tuple[[<ast.Call object at 0x7da204962e30>, <ast.Call object at 0x7da204960580>]]]] variable[srot] assign[=] call[name[pmag].doeigs_s, parameter[name[tau], name[Vdirs]]] variable[outstring] assign[=] constant[] for taget[name[s]] in starred[name[srot]] begin[:] 
<ast.AugAssign object at 0x7da204962020> if compare[name[ofile] equal[==] constant[]] begin[:] call[name[print], parameter[name[outstring]]]
keyword[def] identifier[main] (): literal[string] identifier[file] = literal[string] keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[print] ( identifier[main] . identifier[__doc__] ) identifier[sys] . identifier[exit] () keyword[elif] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[file] = identifier[input] ( literal[string] ) keyword[elif] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] ) identifier[file] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ] keyword[if] identifier[file] != literal[string] : identifier[f] = identifier[open] ( identifier[file] , literal[string] ) identifier[data] = identifier[f] . identifier[readlines] () identifier[f] . identifier[close] () keyword[else] : identifier[data] = identifier[sys] . identifier[stdin] . identifier[readlines] () identifier[ofile] = literal[string] keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] : identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] ) identifier[ofile] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ] identifier[out] = identifier[open] ( identifier[ofile] , literal[string] ) identifier[file_outstring] = literal[string] keyword[for] identifier[line] keyword[in] identifier[data] : identifier[tau] , identifier[Vdirs] =[],[] identifier[rec] = identifier[line] . identifier[split] () keyword[for] identifier[k] keyword[in] identifier[range] ( literal[int] , literal[int] , literal[int] ): identifier[tau] . identifier[append] ( identifier[float] ( identifier[rec] [ identifier[k] ])) identifier[Vdirs] . identifier[append] (( identifier[float] ( identifier[rec] [ identifier[k] + literal[int] ]), identifier[float] ( identifier[rec] [ identifier[k] + literal[int] ]))) identifier[srot] = identifier[pmag] . 
identifier[doeigs_s] ( identifier[tau] , identifier[Vdirs] ) identifier[outstring] = literal[string] keyword[for] identifier[s] keyword[in] identifier[srot] : identifier[outstring] += literal[string] %( identifier[s] ) keyword[if] identifier[ofile] == literal[string] : identifier[print] ( identifier[outstring] ) keyword[else] : identifier[out] . identifier[write] ( identifier[outstring] + literal[string] )
def main(): """ NAME eigs_s.py DESCRIPTION converts eigenparamters format data to s format SYNTAX eigs_s.py [-h][-i][command line options][<filename] OPTIONS -h prints help message and quits -i allows interactive file name entry -f FILE, specifies input file name -F FILE, specifies output file name < filenmae, reads file from standard input (Unix-like operating systems only) INPUT tau_i, dec_i inc_i of eigenvectors OUTPUT x11,x22,x33,x12,x23,x13 """ file = '' if '-h' in sys.argv: print(main.__doc__) sys.exit() # depends on [control=['if'], data=[]] elif '-i' in sys.argv: file = input('Enter eigenparameters data file name: ') # depends on [control=['if'], data=[]] elif '-f' in sys.argv: ind = sys.argv.index('-f') file = sys.argv[ind + 1] # depends on [control=['if'], data=[]] if file != '': f = open(file, 'r') data = f.readlines() f.close() # depends on [control=['if'], data=['file']] else: data = sys.stdin.readlines() ofile = '' if '-F' in sys.argv: ind = sys.argv.index('-F') ofile = sys.argv[ind + 1] out = open(ofile, 'w + a') # depends on [control=['if'], data=[]] file_outstring = '' for line in data: (tau, Vdirs) = ([], []) rec = line.split() for k in range(0, 9, 3): tau.append(float(rec[k])) Vdirs.append((float(rec[k + 1]), float(rec[k + 2]))) # depends on [control=['for'], data=['k']] srot = pmag.doeigs_s(tau, Vdirs) outstring = '' for s in srot: outstring += '%10.8f ' % s # depends on [control=['for'], data=['s']] if ofile == '': print(outstring) # depends on [control=['if'], data=[]] else: out.write(outstring + '\n') # depends on [control=['for'], data=['line']]
def _split_iso9660_filename(fullname): # type: (bytes) -> Tuple[bytes, bytes, bytes] ''' A function to split an ISO 9660 filename into its constituent parts. This is the name, the extension, and the version number. Parameters: fullname - The name to split. Returns: A tuple containing the name, extension, and version. ''' namesplit = fullname.split(b';') version = b'' if len(namesplit) > 1: version = namesplit.pop() rest = b';'.join(namesplit) dotsplit = rest.split(b'.') if len(dotsplit) == 1: name = dotsplit[0] extension = b'' else: name = b'.'.join(dotsplit[:-1]) extension = dotsplit[-1] return (name, extension, version)
def function[_split_iso9660_filename, parameter[fullname]]: constant[ A function to split an ISO 9660 filename into its constituent parts. This is the name, the extension, and the version number. Parameters: fullname - The name to split. Returns: A tuple containing the name, extension, and version. ] variable[namesplit] assign[=] call[name[fullname].split, parameter[constant[b';']]] variable[version] assign[=] constant[b''] if compare[call[name[len], parameter[name[namesplit]]] greater[>] constant[1]] begin[:] variable[version] assign[=] call[name[namesplit].pop, parameter[]] variable[rest] assign[=] call[constant[b';'].join, parameter[name[namesplit]]] variable[dotsplit] assign[=] call[name[rest].split, parameter[constant[b'.']]] if compare[call[name[len], parameter[name[dotsplit]]] equal[==] constant[1]] begin[:] variable[name] assign[=] call[name[dotsplit]][constant[0]] variable[extension] assign[=] constant[b''] return[tuple[[<ast.Name object at 0x7da20c6c7430>, <ast.Name object at 0x7da20c6c65c0>, <ast.Name object at 0x7da20c6c5780>]]]
keyword[def] identifier[_split_iso9660_filename] ( identifier[fullname] ): literal[string] identifier[namesplit] = identifier[fullname] . identifier[split] ( literal[string] ) identifier[version] = literal[string] keyword[if] identifier[len] ( identifier[namesplit] )> literal[int] : identifier[version] = identifier[namesplit] . identifier[pop] () identifier[rest] = literal[string] . identifier[join] ( identifier[namesplit] ) identifier[dotsplit] = identifier[rest] . identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[dotsplit] )== literal[int] : identifier[name] = identifier[dotsplit] [ literal[int] ] identifier[extension] = literal[string] keyword[else] : identifier[name] = literal[string] . identifier[join] ( identifier[dotsplit] [:- literal[int] ]) identifier[extension] = identifier[dotsplit] [- literal[int] ] keyword[return] ( identifier[name] , identifier[extension] , identifier[version] )
def _split_iso9660_filename(fullname): # type: (bytes) -> Tuple[bytes, bytes, bytes] '\n A function to split an ISO 9660 filename into its constituent parts. This\n is the name, the extension, and the version number.\n\n Parameters:\n fullname - The name to split.\n Returns:\n A tuple containing the name, extension, and version.\n ' namesplit = fullname.split(b';') version = b'' if len(namesplit) > 1: version = namesplit.pop() # depends on [control=['if'], data=[]] rest = b';'.join(namesplit) dotsplit = rest.split(b'.') if len(dotsplit) == 1: name = dotsplit[0] extension = b'' # depends on [control=['if'], data=[]] else: name = b'.'.join(dotsplit[:-1]) extension = dotsplit[-1] return (name, extension, version)
def normalizeIdentifier(value):
    """
    Normalizes identifier.

    * **value** must be an :ref:`type-string` or `None`.
    * **value** must not be longer than 100 characters.
    * **value** must not contain a character out the range of 0x20 - 0x7E.
    * Returned value is an unencoded ``unicode`` string.
    """
    # None passes through untouched.
    if value is None:
        return value
    # NOTE: `basestring`/`unicode` indicate this module targets Python 2.
    if not isinstance(value, basestring):
        raise TypeError("Identifiers must be strings, not %s."
                        % type(value).__name__)
    length = len(value)
    if length == 0:
        raise ValueError("The identifier string is empty.")
    if length > 100:
        raise ValueError("The identifier string has a length (%d) greater "
                         "than the maximum allowed (100)." % length)
    # Every character must be printable ASCII (0x20 - 0x7E inclusive).
    for character in value:
        code = ord(character)
        if not (0x20 <= code <= 0x7E):
            raise ValueError("The identifier string ('%s') contains a "
                             "character out size of the range 0x20 - 0x7E."
                             % value)
    return unicode(value)
def function[normalizeIdentifier, parameter[value]]: constant[ Normalizes identifier. * **value** must be an :ref:`type-string` or `None`. * **value** must not be longer than 100 characters. * **value** must not contain a character out the range of 0x20 - 0x7E. * Returned value is an unencoded ``unicode`` string. ] if compare[name[value] is constant[None]] begin[:] return[name[value]] if <ast.UnaryOp object at 0x7da20c992590> begin[:] <ast.Raise object at 0x7da20c9905b0> if compare[call[name[len], parameter[name[value]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da20c9907f0> if compare[call[name[len], parameter[name[value]]] greater[>] constant[100]] begin[:] <ast.Raise object at 0x7da20c990460> for taget[name[c]] in starred[name[value]] begin[:] variable[v] assign[=] call[name[ord], parameter[name[c]]] if <ast.BoolOp object at 0x7da2041d8400> begin[:] <ast.Raise object at 0x7da2041dbee0> return[call[name[unicode], parameter[name[value]]]]
keyword[def] identifier[normalizeIdentifier] ( identifier[value] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[None] : keyword[return] identifier[value] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[basestring] ): keyword[raise] identifier[TypeError] ( literal[string] % identifier[type] ( identifier[value] ). identifier[__name__] ) keyword[if] identifier[len] ( identifier[value] )== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[len] ( identifier[value] )> literal[int] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] % identifier[len] ( identifier[value] )) keyword[for] identifier[c] keyword[in] identifier[value] : identifier[v] = identifier[ord] ( identifier[c] ) keyword[if] identifier[v] < literal[int] keyword[or] identifier[v] > literal[int] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] % identifier[value] ) keyword[return] identifier[unicode] ( identifier[value] )
def normalizeIdentifier(value): """ Normalizes identifier. * **value** must be an :ref:`type-string` or `None`. * **value** must not be longer than 100 characters. * **value** must not contain a character out the range of 0x20 - 0x7E. * Returned value is an unencoded ``unicode`` string. """ if value is None: return value # depends on [control=['if'], data=['value']] if not isinstance(value, basestring): raise TypeError('Identifiers must be strings, not %s.' % type(value).__name__) # depends on [control=['if'], data=[]] if len(value) == 0: raise ValueError('The identifier string is empty.') # depends on [control=['if'], data=[]] if len(value) > 100: raise ValueError('The identifier string has a length (%d) greater than the maximum allowed (100).' % len(value)) # depends on [control=['if'], data=[]] for c in value: v = ord(c) if v < 32 or v > 126: raise ValueError("The identifier string ('%s') contains a character out size of the range 0x20 - 0x7E." % value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] return unicode(value)
def resize(image, width, height, channels=None, decode=False, resample='nearest'):
    """
    Resizes the image or SArray of Images to a specific width, height, and
    number of channels.

    Parameters
    ----------
    image : turicreate.Image | SArray
        The image or SArray of images to be resized.
    width : int
        The width the image is resized to.
    height : int
        The height the image is resized to.
    channels : int, optional
        The number of channels the image is resized to. 1 channel
        corresponds to grayscale, 3 channels corresponds to RGB, and 4
        channels corresponds to RGBA images.
    decode : bool, optional
        Whether to store the resized image in decoded format. Decoded takes
        more space, but makes the resize and future operations on the image
        faster.
    resample : 'nearest' or 'bilinear'
        Specify the resampling filter:

            - ``'nearest'``: Nearest neigbhor, extremely fast
            - ``'bilinear'``: Bilinear, fast and with less aliasing artifacts

    Returns
    -------
    out : turicreate.Image
        Returns a resized Image object.

    Notes
    -----
    Grayscale Images -> Images with one channel, representing a scale from
    white to black

    RGB Images -> Images with 3 channels, with each pixel having Green, Red,
    and Blue values.

    RGBA Images -> An RGB image with an opacity channel.

    Examples
    --------

    Resize a single image

    >>> img = turicreate.Image('https://static.turi.com/datasets/images/sample.jpg')
    >>> resized_img = turicreate.image_analysis.resize(img,100,100,1)

    Resize an SArray of images

    >>> url ='https://static.turi.com/datasets/images/nested'
    >>> image_sframe = turicreate.image_analysis.load_images(url, "auto", with_path=False,
    ...                                                      recursive=True)
    >>> image_sarray = image_sframe["image"]
    >>> resized_images = turicreate.image_analysis.resize(image_sarray, 100, 100, 1)
    """
    if height < 0 or width < 0:
        raise ValueError("Cannot resize to negative sizes")

    # Map the resampling filter name to the integer code the C++ extension expects.
    if resample == 'nearest':
        resample_method = 0
    elif resample == 'bilinear':
        resample_method = 1
    else:
        raise ValueError("Unknown resample option: '%s'" % resample)

    # Local imports avoid a circular dependency at module load time.
    from ...data_structures.sarray import SArray as _SArray
    from ... import extensions as _extensions

    # IMPROVEMENT: use isinstance instead of the `type(x) is T` anti-pattern;
    # this also correctly accepts subclasses of Image / SArray.
    if isinstance(image, _Image):
        if channels is None:
            # Preserve the source image's channel count by default.
            channels = image.channels
        if channels <= 0:
            raise ValueError("cannot resize images to 0 or fewer channels")
        return _extensions.resize_image(image, width, height, channels, decode,
                                        resample_method)

    elif isinstance(image, _SArray):
        if channels is None:
            # Default to RGB when resizing a whole SArray of images.
            channels = 3
        if channels <= 0:
            raise ValueError("cannot resize images to 0 or fewer channels")
        return image.apply(lambda x: _extensions.resize_image(x, width, height,
                                                              channels, decode,
                                                              resample_method))

    else:
        raise ValueError("Cannot call 'resize' on objects that are not either an Image or SArray of Images")
def function[resize, parameter[image, width, height, channels, decode, resample]]: constant[ Resizes the image or SArray of Images to a specific width, height, and number of channels. Parameters ---------- image : turicreate.Image | SArray The image or SArray of images to be resized. width : int The width the image is resized to. height : int The height the image is resized to. channels : int, optional The number of channels the image is resized to. 1 channel corresponds to grayscale, 3 channels corresponds to RGB, and 4 channels corresponds to RGBA images. decode : bool, optional Whether to store the resized image in decoded format. Decoded takes more space, but makes the resize and future operations on the image faster. resample : 'nearest' or 'bilinear' Specify the resampling filter: - ``'nearest'``: Nearest neigbhor, extremely fast - ``'bilinear'``: Bilinear, fast and with less aliasing artifacts Returns ------- out : turicreate.Image Returns a resized Image object. Notes ----- Grayscale Images -> Images with one channel, representing a scale from white to black RGB Images -> Images with 3 channels, with each pixel having Green, Red, and Blue values. RGBA Images -> An RGB image with an opacity channel. Examples -------- Resize a single image >>> img = turicreate.Image('https://static.turi.com/datasets/images/sample.jpg') >>> resized_img = turicreate.image_analysis.resize(img,100,100,1) Resize an SArray of images >>> url ='https://static.turi.com/datasets/images/nested' >>> image_sframe = turicreate.image_analysis.load_images(url, "auto", with_path=False, ... 
recursive=True) >>> image_sarray = image_sframe["image"] >>> resized_images = turicreate.image_analysis.resize(image_sarray, 100, 100, 1) ] if <ast.BoolOp object at 0x7da1b20ee290> begin[:] <ast.Raise object at 0x7da1b20edab0> if compare[name[resample] equal[==] constant[nearest]] begin[:] variable[resample_method] assign[=] constant[0] from relative_module[data_structures.sarray] import module[SArray] from relative_module[None] import module[extensions] if compare[call[name[type], parameter[name[image]]] is name[_Image]] begin[:] if compare[name[channels] is constant[None]] begin[:] variable[channels] assign[=] name[image].channels if compare[name[channels] less_or_equal[<=] constant[0]] begin[:] <ast.Raise object at 0x7da1b20edfc0> return[call[name[_extensions].resize_image, parameter[name[image], name[width], name[height], name[channels], name[decode], name[resample_method]]]]
keyword[def] identifier[resize] ( identifier[image] , identifier[width] , identifier[height] , identifier[channels] = keyword[None] , identifier[decode] = keyword[False] , identifier[resample] = literal[string] ): literal[string] keyword[if] identifier[height] < literal[int] keyword[or] identifier[width] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[resample] == literal[string] : identifier[resample_method] = literal[int] keyword[elif] identifier[resample] == literal[string] : identifier[resample_method] = literal[int] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[resample] ) keyword[from] ... identifier[data_structures] . identifier[sarray] keyword[import] identifier[SArray] keyword[as] identifier[_SArray] keyword[from] ... keyword[import] identifier[extensions] keyword[as] identifier[_extensions] keyword[if] identifier[type] ( identifier[image] ) keyword[is] identifier[_Image] : keyword[if] identifier[channels] keyword[is] keyword[None] : identifier[channels] = identifier[image] . identifier[channels] keyword[if] identifier[channels] <= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[_extensions] . identifier[resize_image] ( identifier[image] , identifier[width] , identifier[height] , identifier[channels] , identifier[decode] , identifier[resample_method] ) keyword[elif] identifier[type] ( identifier[image] ) keyword[is] identifier[_SArray] : keyword[if] identifier[channels] keyword[is] keyword[None] : identifier[channels] = literal[int] keyword[if] identifier[channels] <= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[image] . identifier[apply] ( keyword[lambda] identifier[x] : identifier[_extensions] . 
identifier[resize_image] ( identifier[x] , identifier[width] , identifier[height] , identifier[channels] , identifier[decode] , identifier[resample_method] )) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] )
def resize(image, width, height, channels=None, decode=False, resample='nearest'): """ Resizes the image or SArray of Images to a specific width, height, and number of channels. Parameters ---------- image : turicreate.Image | SArray The image or SArray of images to be resized. width : int The width the image is resized to. height : int The height the image is resized to. channels : int, optional The number of channels the image is resized to. 1 channel corresponds to grayscale, 3 channels corresponds to RGB, and 4 channels corresponds to RGBA images. decode : bool, optional Whether to store the resized image in decoded format. Decoded takes more space, but makes the resize and future operations on the image faster. resample : 'nearest' or 'bilinear' Specify the resampling filter: - ``'nearest'``: Nearest neigbhor, extremely fast - ``'bilinear'``: Bilinear, fast and with less aliasing artifacts Returns ------- out : turicreate.Image Returns a resized Image object. Notes ----- Grayscale Images -> Images with one channel, representing a scale from white to black RGB Images -> Images with 3 channels, with each pixel having Green, Red, and Blue values. RGBA Images -> An RGB image with an opacity channel. Examples -------- Resize a single image >>> img = turicreate.Image('https://static.turi.com/datasets/images/sample.jpg') >>> resized_img = turicreate.image_analysis.resize(img,100,100,1) Resize an SArray of images >>> url ='https://static.turi.com/datasets/images/nested' >>> image_sframe = turicreate.image_analysis.load_images(url, "auto", with_path=False, ... 
recursive=True) >>> image_sarray = image_sframe["image"] >>> resized_images = turicreate.image_analysis.resize(image_sarray, 100, 100, 1) """ if height < 0 or width < 0: raise ValueError('Cannot resize to negative sizes') # depends on [control=['if'], data=[]] if resample == 'nearest': resample_method = 0 # depends on [control=['if'], data=[]] elif resample == 'bilinear': resample_method = 1 # depends on [control=['if'], data=[]] else: raise ValueError("Unknown resample option: '%s'" % resample) from ...data_structures.sarray import SArray as _SArray from ... import extensions as _extensions if type(image) is _Image: if channels is None: channels = image.channels # depends on [control=['if'], data=['channels']] if channels <= 0: raise ValueError('cannot resize images to 0 or fewer channels') # depends on [control=['if'], data=[]] return _extensions.resize_image(image, width, height, channels, decode, resample_method) # depends on [control=['if'], data=[]] elif type(image) is _SArray: if channels is None: channels = 3 # depends on [control=['if'], data=['channels']] if channels <= 0: raise ValueError('cannot resize images to 0 or fewer channels') # depends on [control=['if'], data=[]] return image.apply(lambda x: _extensions.resize_image(x, width, height, channels, decode, resample_method)) # depends on [control=['if'], data=[]] else: raise ValueError("Cannot call 'resize' on objects that are not either an Image or SArray of Images")
def generate_contentinfo_from_channeldir(self, args, options):
    """ Create rows in Content.csv for each folder and file in `self.channeldir`. """
    LOGGER.info('Generating Content.csv rows for folders and files in channeldir')
    file_path = get_metadata_file_path(self.channeldir, self.contentinfo)
    with open(file_path, 'a') as csv_file:
        csvwriter = csv.DictWriter(csv_file, CONTENT_INFO_HEADER)
        channeldir = args['channeldir']
        # BUG FIX: str.rstrip returns a new string; the original discarded the
        # result, so a trailing path separator was never actually removed.
        if channeldir.endswith(os.path.sep):
            channeldir = channeldir.rstrip(os.path.sep)

        # MAIN PROCESSING OF os.walk OUTPUT
        # Sorting makes row order deterministic across runs/filesystems.
        content_folders = sorted(os.walk(channeldir))
        _ = content_folders.pop(0)  # Skip over channel root folder

        for rel_path, _subfolders, filenames in content_folders:
            LOGGER.info('processing folder ' + str(rel_path))
            sorted_filenames = sorted(filenames)
            self.generate_contentinfo_from_folder(csvwriter, rel_path, sorted_filenames)
    LOGGER.info('Generated {} rows for all folders and files in {}'.format(self.contentinfo, self.channeldir))
def function[generate_contentinfo_from_channeldir, parameter[self, args, options]]: constant[ Create rows in Content.csv for each folder and file in `self.channeldir`. ] call[name[LOGGER].info, parameter[constant[Generating Content.csv rows folders and file in channeldir]]] variable[file_path] assign[=] call[name[get_metadata_file_path], parameter[name[self].channeldir, name[self].contentinfo]] with call[name[open], parameter[name[file_path], constant[a]]] begin[:] variable[csvwriter] assign[=] call[name[csv].DictWriter, parameter[name[csv_file], name[CONTENT_INFO_HEADER]]] variable[channeldir] assign[=] call[name[args]][constant[channeldir]] if call[name[channeldir].endswith, parameter[name[os].path.sep]] begin[:] call[name[channeldir].rstrip, parameter[name[os].path.sep]] variable[content_folders] assign[=] call[name[sorted], parameter[call[name[os].walk, parameter[name[channeldir]]]]] variable[_] assign[=] call[name[content_folders].pop, parameter[constant[0]]] for taget[tuple[[<ast.Name object at 0x7da20e960fa0>, <ast.Name object at 0x7da20e9610f0>, <ast.Name object at 0x7da20e9602e0>]]] in starred[name[content_folders]] begin[:] call[name[LOGGER].info, parameter[binary_operation[constant[processing folder ] + call[name[str], parameter[name[rel_path]]]]]] variable[sorted_filenames] assign[=] call[name[sorted], parameter[name[filenames]]] call[name[self].generate_contentinfo_from_folder, parameter[name[csvwriter], name[rel_path], name[sorted_filenames]]] call[name[LOGGER].info, parameter[call[constant[Generted {} row for all folders and files in {}].format, parameter[name[self].contentinfo, name[self].channeldir]]]]
keyword[def] identifier[generate_contentinfo_from_channeldir] ( identifier[self] , identifier[args] , identifier[options] ): literal[string] identifier[LOGGER] . identifier[info] ( literal[string] ) identifier[file_path] = identifier[get_metadata_file_path] ( identifier[self] . identifier[channeldir] , identifier[self] . identifier[contentinfo] ) keyword[with] identifier[open] ( identifier[file_path] , literal[string] ) keyword[as] identifier[csv_file] : identifier[csvwriter] = identifier[csv] . identifier[DictWriter] ( identifier[csv_file] , identifier[CONTENT_INFO_HEADER] ) identifier[channeldir] = identifier[args] [ literal[string] ] keyword[if] identifier[channeldir] . identifier[endswith] ( identifier[os] . identifier[path] . identifier[sep] ): identifier[channeldir] . identifier[rstrip] ( identifier[os] . identifier[path] . identifier[sep] ) identifier[content_folders] = identifier[sorted] ( identifier[os] . identifier[walk] ( identifier[channeldir] )) identifier[_] = identifier[content_folders] . identifier[pop] ( literal[int] ) keyword[for] identifier[rel_path] , identifier[_subfolders] , identifier[filenames] keyword[in] identifier[content_folders] : identifier[LOGGER] . identifier[info] ( literal[string] + identifier[str] ( identifier[rel_path] )) identifier[sorted_filenames] = identifier[sorted] ( identifier[filenames] ) identifier[self] . identifier[generate_contentinfo_from_folder] ( identifier[csvwriter] , identifier[rel_path] , identifier[sorted_filenames] ) identifier[LOGGER] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[contentinfo] , identifier[self] . identifier[channeldir] ))
def generate_contentinfo_from_channeldir(self, args, options): """ Create rows in Content.csv for each folder and file in `self.channeldir`. """ LOGGER.info('Generating Content.csv rows folders and file in channeldir') file_path = get_metadata_file_path(self.channeldir, self.contentinfo) with open(file_path, 'a') as csv_file: csvwriter = csv.DictWriter(csv_file, CONTENT_INFO_HEADER) channeldir = args['channeldir'] if channeldir.endswith(os.path.sep): channeldir.rstrip(os.path.sep) # depends on [control=['if'], data=[]] # MAIN PROCESSING OF os.walk OUTPUT content_folders = sorted(os.walk(channeldir)) _ = content_folders.pop(0) # Skip over channel root folder for (rel_path, _subfolders, filenames) in content_folders: LOGGER.info('processing folder ' + str(rel_path)) sorted_filenames = sorted(filenames) self.generate_contentinfo_from_folder(csvwriter, rel_path, sorted_filenames) # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['csv_file']] LOGGER.info('Generted {} row for all folders and files in {}'.format(self.contentinfo, self.channeldir))
def name_to_system_object(self, name): """ Give SystemObject instance corresponding to the name """ if isinstance(name, str): if self.allow_name_referencing: name = name else: raise NameError('System.allow_name_referencing is set to False, cannot convert string to name') elif isinstance(name, Object): name = str(name) return self.namespace.get(name, None)
def function[name_to_system_object, parameter[self, name]]: constant[ Give SystemObject instance corresponding to the name ] if call[name[isinstance], parameter[name[name], name[str]]] begin[:] if name[self].allow_name_referencing begin[:] variable[name] assign[=] name[name] return[call[name[self].namespace.get, parameter[name[name], constant[None]]]]
keyword[def] identifier[name_to_system_object] ( identifier[self] , identifier[name] ): literal[string] keyword[if] identifier[isinstance] ( identifier[name] , identifier[str] ): keyword[if] identifier[self] . identifier[allow_name_referencing] : identifier[name] = identifier[name] keyword[else] : keyword[raise] identifier[NameError] ( literal[string] ) keyword[elif] identifier[isinstance] ( identifier[name] , identifier[Object] ): identifier[name] = identifier[str] ( identifier[name] ) keyword[return] identifier[self] . identifier[namespace] . identifier[get] ( identifier[name] , keyword[None] )
def name_to_system_object(self, name): """ Give SystemObject instance corresponding to the name """ if isinstance(name, str): if self.allow_name_referencing: name = name # depends on [control=['if'], data=[]] else: raise NameError('System.allow_name_referencing is set to False, cannot convert string to name') # depends on [control=['if'], data=[]] elif isinstance(name, Object): name = str(name) # depends on [control=['if'], data=[]] return self.namespace.get(name, None)
def libvlc_vlm_set_input(p_instance, psz_name, psz_input): '''Set a media's input MRL. This will delete all existing inputs and add the specified one. @param p_instance: the instance. @param psz_name: the media to work on. @param psz_input: the input MRL. @return: 0 on success, -1 on error. ''' f = _Cfunctions.get('libvlc_vlm_set_input', None) or \ _Cfunction('libvlc_vlm_set_input', ((1,), (1,), (1,),), None, ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_char_p) return f(p_instance, psz_name, psz_input)
def function[libvlc_vlm_set_input, parameter[p_instance, psz_name, psz_input]]: constant[Set a media's input MRL. This will delete all existing inputs and add the specified one. @param p_instance: the instance. @param psz_name: the media to work on. @param psz_input: the input MRL. @return: 0 on success, -1 on error. ] variable[f] assign[=] <ast.BoolOp object at 0x7da1b2345390> return[call[name[f], parameter[name[p_instance], name[psz_name], name[psz_input]]]]
keyword[def] identifier[libvlc_vlm_set_input] ( identifier[p_instance] , identifier[psz_name] , identifier[psz_input] ): literal[string] identifier[f] = identifier[_Cfunctions] . identifier[get] ( literal[string] , keyword[None] ) keyword[or] identifier[_Cfunction] ( literal[string] ,(( literal[int] ,),( literal[int] ,),( literal[int] ,),), keyword[None] , identifier[ctypes] . identifier[c_int] , identifier[Instance] , identifier[ctypes] . identifier[c_char_p] , identifier[ctypes] . identifier[c_char_p] ) keyword[return] identifier[f] ( identifier[p_instance] , identifier[psz_name] , identifier[psz_input] )
def libvlc_vlm_set_input(p_instance, psz_name, psz_input): """Set a media's input MRL. This will delete all existing inputs and add the specified one. @param p_instance: the instance. @param psz_name: the media to work on. @param psz_input: the input MRL. @return: 0 on success, -1 on error. """ f = _Cfunctions.get('libvlc_vlm_set_input', None) or _Cfunction('libvlc_vlm_set_input', ((1,), (1,), (1,)), None, ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_char_p) return f(p_instance, psz_name, psz_input)
def parse_header(head): """ Parses the header part of packet Returns a dict """ try: (fromcall, path) = head.split('>', 1) except: raise ParseError("invalid packet header") if (not 1 <= len(fromcall) <= 9 or not re.findall(r"^[a-z0-9]{0,9}(\-[a-z0-9]{1,8})?$", fromcall, re.I)): raise ParseError("fromcallsign is invalid") path = path.split(',') if len(path[0]) == 0: raise ParseError("no tocallsign in header") tocall = path[0] path = path[1:] validate_callsign(tocall, "tocallsign") for digi in path: if not re.findall(r"^[A-Z0-9\-]{1,9}\*?$", digi, re.I): raise ParseError("invalid callsign in path") parsed = { 'from': fromcall, 'to': tocall, 'path': path, } viacall = "" if len(path) >= 2 and re.match(r"^q..$", path[-2]): viacall = path[-1] parsed.update({'via': viacall}) return parsed
def function[parse_header, parameter[head]]: constant[ Parses the header part of packet Returns a dict ] <ast.Try object at 0x7da18ede6620> if <ast.BoolOp object at 0x7da18ede4c70> begin[:] <ast.Raise object at 0x7da18ede7460> variable[path] assign[=] call[name[path].split, parameter[constant[,]]] if compare[call[name[len], parameter[call[name[path]][constant[0]]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da18ede6fb0> variable[tocall] assign[=] call[name[path]][constant[0]] variable[path] assign[=] call[name[path]][<ast.Slice object at 0x7da1aff8c8e0>] call[name[validate_callsign], parameter[name[tocall], constant[tocallsign]]] for taget[name[digi]] in starred[name[path]] begin[:] if <ast.UnaryOp object at 0x7da1aff8dcf0> begin[:] <ast.Raise object at 0x7da1aff8f5b0> variable[parsed] assign[=] dictionary[[<ast.Constant object at 0x7da1aff8cdc0>, <ast.Constant object at 0x7da1aff8de70>, <ast.Constant object at 0x7da1aff8d240>], [<ast.Name object at 0x7da1aff8f7f0>, <ast.Name object at 0x7da1aff8dde0>, <ast.Name object at 0x7da1aff8e770>]] variable[viacall] assign[=] constant[] if <ast.BoolOp object at 0x7da1aff8e230> begin[:] variable[viacall] assign[=] call[name[path]][<ast.UnaryOp object at 0x7da1aff8d630>] call[name[parsed].update, parameter[dictionary[[<ast.Constant object at 0x7da1aff8cb50>], [<ast.Name object at 0x7da1aff8e8c0>]]]] return[name[parsed]]
keyword[def] identifier[parse_header] ( identifier[head] ): literal[string] keyword[try] : ( identifier[fromcall] , identifier[path] )= identifier[head] . identifier[split] ( literal[string] , literal[int] ) keyword[except] : keyword[raise] identifier[ParseError] ( literal[string] ) keyword[if] ( keyword[not] literal[int] <= identifier[len] ( identifier[fromcall] )<= literal[int] keyword[or] keyword[not] identifier[re] . identifier[findall] ( literal[string] , identifier[fromcall] , identifier[re] . identifier[I] )): keyword[raise] identifier[ParseError] ( literal[string] ) identifier[path] = identifier[path] . identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[path] [ literal[int] ])== literal[int] : keyword[raise] identifier[ParseError] ( literal[string] ) identifier[tocall] = identifier[path] [ literal[int] ] identifier[path] = identifier[path] [ literal[int] :] identifier[validate_callsign] ( identifier[tocall] , literal[string] ) keyword[for] identifier[digi] keyword[in] identifier[path] : keyword[if] keyword[not] identifier[re] . identifier[findall] ( literal[string] , identifier[digi] , identifier[re] . identifier[I] ): keyword[raise] identifier[ParseError] ( literal[string] ) identifier[parsed] ={ literal[string] : identifier[fromcall] , literal[string] : identifier[tocall] , literal[string] : identifier[path] , } identifier[viacall] = literal[string] keyword[if] identifier[len] ( identifier[path] )>= literal[int] keyword[and] identifier[re] . identifier[match] ( literal[string] , identifier[path] [- literal[int] ]): identifier[viacall] = identifier[path] [- literal[int] ] identifier[parsed] . identifier[update] ({ literal[string] : identifier[viacall] }) keyword[return] identifier[parsed]
def parse_header(head): """ Parses the header part of packet Returns a dict """ try: (fromcall, path) = head.split('>', 1) # depends on [control=['try'], data=[]] except: raise ParseError('invalid packet header') # depends on [control=['except'], data=[]] if not 1 <= len(fromcall) <= 9 or not re.findall('^[a-z0-9]{0,9}(\\-[a-z0-9]{1,8})?$', fromcall, re.I): raise ParseError('fromcallsign is invalid') # depends on [control=['if'], data=[]] path = path.split(',') if len(path[0]) == 0: raise ParseError('no tocallsign in header') # depends on [control=['if'], data=[]] tocall = path[0] path = path[1:] validate_callsign(tocall, 'tocallsign') for digi in path: if not re.findall('^[A-Z0-9\\-]{1,9}\\*?$', digi, re.I): raise ParseError('invalid callsign in path') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['digi']] parsed = {'from': fromcall, 'to': tocall, 'path': path} viacall = '' if len(path) >= 2 and re.match('^q..$', path[-2]): viacall = path[-1] # depends on [control=['if'], data=[]] parsed.update({'via': viacall}) return parsed
def whatIfOrder(self, contract: Contract, order: Order) -> OrderState: """ Retrieve commission and margin impact without actually placing the order. The given order will not be modified in any way. This method is blocking. Args: contract: Contract to test. order: Order to test. """ return self._run(self.whatIfOrderAsync(contract, order))
def function[whatIfOrder, parameter[self, contract, order]]: constant[ Retrieve commission and margin impact without actually placing the order. The given order will not be modified in any way. This method is blocking. Args: contract: Contract to test. order: Order to test. ] return[call[name[self]._run, parameter[call[name[self].whatIfOrderAsync, parameter[name[contract], name[order]]]]]]
keyword[def] identifier[whatIfOrder] ( identifier[self] , identifier[contract] : identifier[Contract] , identifier[order] : identifier[Order] )-> identifier[OrderState] : literal[string] keyword[return] identifier[self] . identifier[_run] ( identifier[self] . identifier[whatIfOrderAsync] ( identifier[contract] , identifier[order] ))
def whatIfOrder(self, contract: Contract, order: Order) -> OrderState: """ Retrieve commission and margin impact without actually placing the order. The given order will not be modified in any way. This method is blocking. Args: contract: Contract to test. order: Order to test. """ return self._run(self.whatIfOrderAsync(contract, order))
def repair(self, volume_id_or_uri, timeout=-1): """ Removes extra presentations from a specified volume on the storage system. Args: volume_id_or_uri: Can be either the volume id or the volume uri. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView, just stops waiting for its completion. Returns: dict: Storage volume. """ data = { "type": "ExtraManagedStorageVolumePaths", "resourceUri": self._client.build_uri(volume_id_or_uri) } custom_headers = {'Accept-Language': 'en_US'} uri = self.URI + '/repair' return self._client.create(data, uri=uri, timeout=timeout, custom_headers=custom_headers)
def function[repair, parameter[self, volume_id_or_uri, timeout]]: constant[ Removes extra presentations from a specified volume on the storage system. Args: volume_id_or_uri: Can be either the volume id or the volume uri. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView, just stops waiting for its completion. Returns: dict: Storage volume. ] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18f00de40>, <ast.Constant object at 0x7da18f00e380>], [<ast.Constant object at 0x7da18f00e110>, <ast.Call object at 0x7da18f00e590>]] variable[custom_headers] assign[=] dictionary[[<ast.Constant object at 0x7da18f00fac0>], [<ast.Constant object at 0x7da18f00cdc0>]] variable[uri] assign[=] binary_operation[name[self].URI + constant[/repair]] return[call[name[self]._client.create, parameter[name[data]]]]
keyword[def] identifier[repair] ( identifier[self] , identifier[volume_id_or_uri] , identifier[timeout] =- literal[int] ): literal[string] identifier[data] ={ literal[string] : literal[string] , literal[string] : identifier[self] . identifier[_client] . identifier[build_uri] ( identifier[volume_id_or_uri] ) } identifier[custom_headers] ={ literal[string] : literal[string] } identifier[uri] = identifier[self] . identifier[URI] + literal[string] keyword[return] identifier[self] . identifier[_client] . identifier[create] ( identifier[data] , identifier[uri] = identifier[uri] , identifier[timeout] = identifier[timeout] , identifier[custom_headers] = identifier[custom_headers] )
def repair(self, volume_id_or_uri, timeout=-1): """ Removes extra presentations from a specified volume on the storage system. Args: volume_id_or_uri: Can be either the volume id or the volume uri. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView, just stops waiting for its completion. Returns: dict: Storage volume. """ data = {'type': 'ExtraManagedStorageVolumePaths', 'resourceUri': self._client.build_uri(volume_id_or_uri)} custom_headers = {'Accept-Language': 'en_US'} uri = self.URI + '/repair' return self._client.create(data, uri=uri, timeout=timeout, custom_headers=custom_headers)
def _base_placeholder(self): """ Return the master placeholder this layout placeholder inherits from. """ base_ph_type = { PP_PLACEHOLDER.BODY: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.CHART: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.BITMAP: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.CENTER_TITLE: PP_PLACEHOLDER.TITLE, PP_PLACEHOLDER.ORG_CHART: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.DATE: PP_PLACEHOLDER.DATE, PP_PLACEHOLDER.FOOTER: PP_PLACEHOLDER.FOOTER, PP_PLACEHOLDER.MEDIA_CLIP: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.OBJECT: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.PICTURE: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.SLIDE_NUMBER: PP_PLACEHOLDER.SLIDE_NUMBER, PP_PLACEHOLDER.SUBTITLE: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.TABLE: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.TITLE: PP_PLACEHOLDER.TITLE, }[self._element.ph_type] slide_master = self.part.slide_master return slide_master.placeholders.get(base_ph_type, None)
def function[_base_placeholder, parameter[self]]: constant[ Return the master placeholder this layout placeholder inherits from. ] variable[base_ph_type] assign[=] call[dictionary[[<ast.Attribute object at 0x7da20c6a87f0>, <ast.Attribute object at 0x7da20c6aa5c0>, <ast.Attribute object at 0x7da20c6ab1c0>, <ast.Attribute object at 0x7da20c6a9de0>, <ast.Attribute object at 0x7da20c6ab310>, <ast.Attribute object at 0x7da20c6a97e0>, <ast.Attribute object at 0x7da20c6a8220>, <ast.Attribute object at 0x7da20c6ab610>, <ast.Attribute object at 0x7da20c6aa080>, <ast.Attribute object at 0x7da20c6a9cf0>, <ast.Attribute object at 0x7da20c6aadd0>, <ast.Attribute object at 0x7da20c6a82e0>, <ast.Attribute object at 0x7da20c6a9630>, <ast.Attribute object at 0x7da20c6aa590>], [<ast.Attribute object at 0x7da20c6a9ea0>, <ast.Attribute object at 0x7da20c6a85e0>, <ast.Attribute object at 0x7da20c6a8b20>, <ast.Attribute object at 0x7da20c6a92d0>, <ast.Attribute object at 0x7da20c6a8e50>, <ast.Attribute object at 0x7da20c6ab550>, <ast.Attribute object at 0x7da20c6a9a80>, <ast.Attribute object at 0x7da20c6aac80>, <ast.Attribute object at 0x7da20c6a89a0>, <ast.Attribute object at 0x7da20c6aa2f0>, <ast.Attribute object at 0x7da20c6a8a30>, <ast.Attribute object at 0x7da20c6a9090>, <ast.Attribute object at 0x7da20c6aa050>, <ast.Attribute object at 0x7da20c6aabc0>]]][name[self]._element.ph_type] variable[slide_master] assign[=] name[self].part.slide_master return[call[name[slide_master].placeholders.get, parameter[name[base_ph_type], constant[None]]]]
keyword[def] identifier[_base_placeholder] ( identifier[self] ): literal[string] identifier[base_ph_type] ={ identifier[PP_PLACEHOLDER] . identifier[BODY] : identifier[PP_PLACEHOLDER] . identifier[BODY] , identifier[PP_PLACEHOLDER] . identifier[CHART] : identifier[PP_PLACEHOLDER] . identifier[BODY] , identifier[PP_PLACEHOLDER] . identifier[BITMAP] : identifier[PP_PLACEHOLDER] . identifier[BODY] , identifier[PP_PLACEHOLDER] . identifier[CENTER_TITLE] : identifier[PP_PLACEHOLDER] . identifier[TITLE] , identifier[PP_PLACEHOLDER] . identifier[ORG_CHART] : identifier[PP_PLACEHOLDER] . identifier[BODY] , identifier[PP_PLACEHOLDER] . identifier[DATE] : identifier[PP_PLACEHOLDER] . identifier[DATE] , identifier[PP_PLACEHOLDER] . identifier[FOOTER] : identifier[PP_PLACEHOLDER] . identifier[FOOTER] , identifier[PP_PLACEHOLDER] . identifier[MEDIA_CLIP] : identifier[PP_PLACEHOLDER] . identifier[BODY] , identifier[PP_PLACEHOLDER] . identifier[OBJECT] : identifier[PP_PLACEHOLDER] . identifier[BODY] , identifier[PP_PLACEHOLDER] . identifier[PICTURE] : identifier[PP_PLACEHOLDER] . identifier[BODY] , identifier[PP_PLACEHOLDER] . identifier[SLIDE_NUMBER] : identifier[PP_PLACEHOLDER] . identifier[SLIDE_NUMBER] , identifier[PP_PLACEHOLDER] . identifier[SUBTITLE] : identifier[PP_PLACEHOLDER] . identifier[BODY] , identifier[PP_PLACEHOLDER] . identifier[TABLE] : identifier[PP_PLACEHOLDER] . identifier[BODY] , identifier[PP_PLACEHOLDER] . identifier[TITLE] : identifier[PP_PLACEHOLDER] . identifier[TITLE] , }[ identifier[self] . identifier[_element] . identifier[ph_type] ] identifier[slide_master] = identifier[self] . identifier[part] . identifier[slide_master] keyword[return] identifier[slide_master] . identifier[placeholders] . identifier[get] ( identifier[base_ph_type] , keyword[None] )
def _base_placeholder(self): """ Return the master placeholder this layout placeholder inherits from. """ base_ph_type = {PP_PLACEHOLDER.BODY: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.CHART: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.BITMAP: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.CENTER_TITLE: PP_PLACEHOLDER.TITLE, PP_PLACEHOLDER.ORG_CHART: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.DATE: PP_PLACEHOLDER.DATE, PP_PLACEHOLDER.FOOTER: PP_PLACEHOLDER.FOOTER, PP_PLACEHOLDER.MEDIA_CLIP: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.OBJECT: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.PICTURE: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.SLIDE_NUMBER: PP_PLACEHOLDER.SLIDE_NUMBER, PP_PLACEHOLDER.SUBTITLE: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.TABLE: PP_PLACEHOLDER.BODY, PP_PLACEHOLDER.TITLE: PP_PLACEHOLDER.TITLE}[self._element.ph_type] slide_master = self.part.slide_master return slide_master.placeholders.get(base_ph_type, None)
def retrieve(cls, *args, **kwargs): """Return parent method.""" return super(Subscription, cls).retrieve(*args, **kwargs)
def function[retrieve, parameter[cls]]: constant[Return parent method.] return[call[call[name[super], parameter[name[Subscription], name[cls]]].retrieve, parameter[<ast.Starred object at 0x7da1b16e1c30>]]]
keyword[def] identifier[retrieve] ( identifier[cls] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[super] ( identifier[Subscription] , identifier[cls] ). identifier[retrieve] (* identifier[args] ,** identifier[kwargs] )
def retrieve(cls, *args, **kwargs): """Return parent method.""" return super(Subscription, cls).retrieve(*args, **kwargs)
def chat_delete(self, *, channel: str, ts: str, **kwargs) -> SlackResponse: """Deletes a message. Args: channel (str): Channel containing the message to be deleted. e.g. 'C1234567890' ts (str): Timestamp of the message to be deleted. e.g. '1234567890.123456' """ kwargs.update({"channel": channel, "ts": ts}) return self.api_call("chat.delete", json=kwargs)
def function[chat_delete, parameter[self]]: constant[Deletes a message. Args: channel (str): Channel containing the message to be deleted. e.g. 'C1234567890' ts (str): Timestamp of the message to be deleted. e.g. '1234567890.123456' ] call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1b18400>, <ast.Constant object at 0x7da1b1b18940>], [<ast.Name object at 0x7da1b1b1a1d0>, <ast.Name object at 0x7da1b1b1aa40>]]]] return[call[name[self].api_call, parameter[constant[chat.delete]]]]
keyword[def] identifier[chat_delete] ( identifier[self] ,*, identifier[channel] : identifier[str] , identifier[ts] : identifier[str] ,** identifier[kwargs] )-> identifier[SlackResponse] : literal[string] identifier[kwargs] . identifier[update] ({ literal[string] : identifier[channel] , literal[string] : identifier[ts] }) keyword[return] identifier[self] . identifier[api_call] ( literal[string] , identifier[json] = identifier[kwargs] )
def chat_delete(self, *, channel: str, ts: str, **kwargs) -> SlackResponse: """Deletes a message. Args: channel (str): Channel containing the message to be deleted. e.g. 'C1234567890' ts (str): Timestamp of the message to be deleted. e.g. '1234567890.123456' """ kwargs.update({'channel': channel, 'ts': ts}) return self.api_call('chat.delete', json=kwargs)
def reorder_levels(self, dim_order=None, inplace=None, **dim_order_kwargs): """Rearrange index levels using input order. Parameters ---------- dim_order : optional Mapping from names matching dimensions and values given by lists representing new level orders. Every given dimension must have a multi-index. inplace : bool, optional If True, modify the dataarray in-place. Otherwise, return a new DataArray object. **dim_order_kwargs: optional The keyword arguments form of ``dim_order``. One of dim_order or dim_order_kwargs must be provided. Returns ------- obj : DataArray Another dataarray, with this dataarray's data but replaced coordinates. """ inplace = _check_inplace(inplace) dim_order = either_dict_or_kwargs(dim_order, dim_order_kwargs, 'reorder_levels') replace_coords = {} for dim, order in dim_order.items(): coord = self._coords[dim] index = coord.to_index() if not isinstance(index, pd.MultiIndex): raise ValueError("coordinate %r has no MultiIndex" % dim) replace_coords[dim] = IndexVariable(coord.dims, index.reorder_levels(order)) coords = self._coords.copy() coords.update(replace_coords) if inplace: self._coords = coords else: return self._replace(coords=coords)
def function[reorder_levels, parameter[self, dim_order, inplace]]: constant[Rearrange index levels using input order. Parameters ---------- dim_order : optional Mapping from names matching dimensions and values given by lists representing new level orders. Every given dimension must have a multi-index. inplace : bool, optional If True, modify the dataarray in-place. Otherwise, return a new DataArray object. **dim_order_kwargs: optional The keyword arguments form of ``dim_order``. One of dim_order or dim_order_kwargs must be provided. Returns ------- obj : DataArray Another dataarray, with this dataarray's data but replaced coordinates. ] variable[inplace] assign[=] call[name[_check_inplace], parameter[name[inplace]]] variable[dim_order] assign[=] call[name[either_dict_or_kwargs], parameter[name[dim_order], name[dim_order_kwargs], constant[reorder_levels]]] variable[replace_coords] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da20e955060>, <ast.Name object at 0x7da20e955d80>]]] in starred[call[name[dim_order].items, parameter[]]] begin[:] variable[coord] assign[=] call[name[self]._coords][name[dim]] variable[index] assign[=] call[name[coord].to_index, parameter[]] if <ast.UnaryOp object at 0x7da2043467d0> begin[:] <ast.Raise object at 0x7da2043454b0> call[name[replace_coords]][name[dim]] assign[=] call[name[IndexVariable], parameter[name[coord].dims, call[name[index].reorder_levels, parameter[name[order]]]]] variable[coords] assign[=] call[name[self]._coords.copy, parameter[]] call[name[coords].update, parameter[name[replace_coords]]] if name[inplace] begin[:] name[self]._coords assign[=] name[coords]
keyword[def] identifier[reorder_levels] ( identifier[self] , identifier[dim_order] = keyword[None] , identifier[inplace] = keyword[None] , ** identifier[dim_order_kwargs] ): literal[string] identifier[inplace] = identifier[_check_inplace] ( identifier[inplace] ) identifier[dim_order] = identifier[either_dict_or_kwargs] ( identifier[dim_order] , identifier[dim_order_kwargs] , literal[string] ) identifier[replace_coords] ={} keyword[for] identifier[dim] , identifier[order] keyword[in] identifier[dim_order] . identifier[items] (): identifier[coord] = identifier[self] . identifier[_coords] [ identifier[dim] ] identifier[index] = identifier[coord] . identifier[to_index] () keyword[if] keyword[not] identifier[isinstance] ( identifier[index] , identifier[pd] . identifier[MultiIndex] ): keyword[raise] identifier[ValueError] ( literal[string] % identifier[dim] ) identifier[replace_coords] [ identifier[dim] ]= identifier[IndexVariable] ( identifier[coord] . identifier[dims] , identifier[index] . identifier[reorder_levels] ( identifier[order] )) identifier[coords] = identifier[self] . identifier[_coords] . identifier[copy] () identifier[coords] . identifier[update] ( identifier[replace_coords] ) keyword[if] identifier[inplace] : identifier[self] . identifier[_coords] = identifier[coords] keyword[else] : keyword[return] identifier[self] . identifier[_replace] ( identifier[coords] = identifier[coords] )
def reorder_levels(self, dim_order=None, inplace=None, **dim_order_kwargs): """Rearrange index levels using input order. Parameters ---------- dim_order : optional Mapping from names matching dimensions and values given by lists representing new level orders. Every given dimension must have a multi-index. inplace : bool, optional If True, modify the dataarray in-place. Otherwise, return a new DataArray object. **dim_order_kwargs: optional The keyword arguments form of ``dim_order``. One of dim_order or dim_order_kwargs must be provided. Returns ------- obj : DataArray Another dataarray, with this dataarray's data but replaced coordinates. """ inplace = _check_inplace(inplace) dim_order = either_dict_or_kwargs(dim_order, dim_order_kwargs, 'reorder_levels') replace_coords = {} for (dim, order) in dim_order.items(): coord = self._coords[dim] index = coord.to_index() if not isinstance(index, pd.MultiIndex): raise ValueError('coordinate %r has no MultiIndex' % dim) # depends on [control=['if'], data=[]] replace_coords[dim] = IndexVariable(coord.dims, index.reorder_levels(order)) # depends on [control=['for'], data=[]] coords = self._coords.copy() coords.update(replace_coords) if inplace: self._coords = coords # depends on [control=['if'], data=[]] else: return self._replace(coords=coords)
def close_connection(self, host): """close connection(s) to <host> host is the host:port spec, as in 'www.cnn.com:8080' as passed in. no error occurs if there is no connection to that host.""" for h in self._cm.get_all(host): self._cm.remove(h) h.close()
def function[close_connection, parameter[self, host]]: constant[close connection(s) to <host> host is the host:port spec, as in 'www.cnn.com:8080' as passed in. no error occurs if there is no connection to that host.] for taget[name[h]] in starred[call[name[self]._cm.get_all, parameter[name[host]]]] begin[:] call[name[self]._cm.remove, parameter[name[h]]] call[name[h].close, parameter[]]
keyword[def] identifier[close_connection] ( identifier[self] , identifier[host] ): literal[string] keyword[for] identifier[h] keyword[in] identifier[self] . identifier[_cm] . identifier[get_all] ( identifier[host] ): identifier[self] . identifier[_cm] . identifier[remove] ( identifier[h] ) identifier[h] . identifier[close] ()
def close_connection(self, host): """close connection(s) to <host> host is the host:port spec, as in 'www.cnn.com:8080' as passed in. no error occurs if there is no connection to that host.""" for h in self._cm.get_all(host): self._cm.remove(h) h.close() # depends on [control=['for'], data=['h']]
def add_element(self, element, override=False): """Add an element to the parser. :param element: the element class. :param override: whether to replace the default element based on. .. note:: If one needs to call it inside ``__init__()``, please call it after ``super().__init__()`` is called. """ if issubclass(element, inline.InlineElement): dest = self.inline_elements elif issubclass(element, block.BlockElement): dest = self.block_elements else: raise TypeError( 'The element should be a subclass of either `BlockElement` or ' '`InlineElement`.' ) if not override: dest[element.__name__] = element else: for cls in element.__bases__: if cls in dest.values(): dest[cls.__name__] = element break else: dest[element.__name__] = element
def function[add_element, parameter[self, element, override]]: constant[Add an element to the parser. :param element: the element class. :param override: whether to replace the default element based on. .. note:: If one needs to call it inside ``__init__()``, please call it after ``super().__init__()`` is called. ] if call[name[issubclass], parameter[name[element], name[inline].InlineElement]] begin[:] variable[dest] assign[=] name[self].inline_elements if <ast.UnaryOp object at 0x7da2054a6560> begin[:] call[name[dest]][name[element].__name__] assign[=] name[element]
keyword[def] identifier[add_element] ( identifier[self] , identifier[element] , identifier[override] = keyword[False] ): literal[string] keyword[if] identifier[issubclass] ( identifier[element] , identifier[inline] . identifier[InlineElement] ): identifier[dest] = identifier[self] . identifier[inline_elements] keyword[elif] identifier[issubclass] ( identifier[element] , identifier[block] . identifier[BlockElement] ): identifier[dest] = identifier[self] . identifier[block_elements] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] literal[string] ) keyword[if] keyword[not] identifier[override] : identifier[dest] [ identifier[element] . identifier[__name__] ]= identifier[element] keyword[else] : keyword[for] identifier[cls] keyword[in] identifier[element] . identifier[__bases__] : keyword[if] identifier[cls] keyword[in] identifier[dest] . identifier[values] (): identifier[dest] [ identifier[cls] . identifier[__name__] ]= identifier[element] keyword[break] keyword[else] : identifier[dest] [ identifier[element] . identifier[__name__] ]= identifier[element]
def add_element(self, element, override=False): """Add an element to the parser. :param element: the element class. :param override: whether to replace the default element based on. .. note:: If one needs to call it inside ``__init__()``, please call it after ``super().__init__()`` is called. """ if issubclass(element, inline.InlineElement): dest = self.inline_elements # depends on [control=['if'], data=[]] elif issubclass(element, block.BlockElement): dest = self.block_elements # depends on [control=['if'], data=[]] else: raise TypeError('The element should be a subclass of either `BlockElement` or `InlineElement`.') if not override: dest[element.__name__] = element # depends on [control=['if'], data=[]] else: for cls in element.__bases__: if cls in dest.values(): dest[cls.__name__] = element break # depends on [control=['if'], data=['cls']] # depends on [control=['for'], data=['cls']] else: dest[element.__name__] = element
def stop_script(self, script_id): """ Stops a running script. script_id:= id of stored script. ... status = pi.stop_script(sid) ... """ res = yield from self._pigpio_aio_command(_PI_CMD_PROCS, script_id, 0) return _u2i(res)
def function[stop_script, parameter[self, script_id]]: constant[ Stops a running script. script_id:= id of stored script. ... status = pi.stop_script(sid) ... ] variable[res] assign[=] <ast.YieldFrom object at 0x7da1b24aea40> return[call[name[_u2i], parameter[name[res]]]]
keyword[def] identifier[stop_script] ( identifier[self] , identifier[script_id] ): literal[string] identifier[res] = keyword[yield] keyword[from] identifier[self] . identifier[_pigpio_aio_command] ( identifier[_PI_CMD_PROCS] , identifier[script_id] , literal[int] ) keyword[return] identifier[_u2i] ( identifier[res] )
def stop_script(self, script_id): """ Stops a running script. script_id:= id of stored script. ... status = pi.stop_script(sid) ... """ res = (yield from self._pigpio_aio_command(_PI_CMD_PROCS, script_id, 0)) return _u2i(res)
def connect(): """Connect controller to handle token exchange and query Uber API.""" # Exchange authorization code for acceess token and create session session = auth_flow.get_session(request.url) client = UberRidesClient(session) # Fetch profile for driver profile = client.get_driver_profile().json # Fetch last 50 trips and payments for driver trips = client.get_driver_trips(0, 50).json payments = client.get_driver_payments(0, 50).json return render_template('driver_dashboard.html', profile=profile, trips=trips['trips'], payments=payments['payments'] )
def function[connect, parameter[]]: constant[Connect controller to handle token exchange and query Uber API.] variable[session] assign[=] call[name[auth_flow].get_session, parameter[name[request].url]] variable[client] assign[=] call[name[UberRidesClient], parameter[name[session]]] variable[profile] assign[=] call[name[client].get_driver_profile, parameter[]].json variable[trips] assign[=] call[name[client].get_driver_trips, parameter[constant[0], constant[50]]].json variable[payments] assign[=] call[name[client].get_driver_payments, parameter[constant[0], constant[50]]].json return[call[name[render_template], parameter[constant[driver_dashboard.html]]]]
keyword[def] identifier[connect] (): literal[string] identifier[session] = identifier[auth_flow] . identifier[get_session] ( identifier[request] . identifier[url] ) identifier[client] = identifier[UberRidesClient] ( identifier[session] ) identifier[profile] = identifier[client] . identifier[get_driver_profile] (). identifier[json] identifier[trips] = identifier[client] . identifier[get_driver_trips] ( literal[int] , literal[int] ). identifier[json] identifier[payments] = identifier[client] . identifier[get_driver_payments] ( literal[int] , literal[int] ). identifier[json] keyword[return] identifier[render_template] ( literal[string] , identifier[profile] = identifier[profile] , identifier[trips] = identifier[trips] [ literal[string] ], identifier[payments] = identifier[payments] [ literal[string] ] )
def connect(): """Connect controller to handle token exchange and query Uber API.""" # Exchange authorization code for acceess token and create session session = auth_flow.get_session(request.url) client = UberRidesClient(session) # Fetch profile for driver profile = client.get_driver_profile().json # Fetch last 50 trips and payments for driver trips = client.get_driver_trips(0, 50).json payments = client.get_driver_payments(0, 50).json return render_template('driver_dashboard.html', profile=profile, trips=trips['trips'], payments=payments['payments'])
def p_field_optional2_2(self, p): """ field : name arguments selection_set """ p[0] = Field(name=p[1], arguments=p[2], selections=p[3])
def function[p_field_optional2_2, parameter[self, p]]: constant[ field : name arguments selection_set ] call[name[p]][constant[0]] assign[=] call[name[Field], parameter[]]
keyword[def] identifier[p_field_optional2_2] ( identifier[self] , identifier[p] ): literal[string] identifier[p] [ literal[int] ]= identifier[Field] ( identifier[name] = identifier[p] [ literal[int] ], identifier[arguments] = identifier[p] [ literal[int] ], identifier[selections] = identifier[p] [ literal[int] ])
def p_field_optional2_2(self, p): """ field : name arguments selection_set """ p[0] = Field(name=p[1], arguments=p[2], selections=p[3])
def insert_runner(fun, args=None, kwargs=None, queue=None, backend=None): ''' Insert a reference to a runner into the queue so that it can be run later. fun The runner function that is going to be run args list or comma-seperated string of args to send to fun kwargs dictionary of keyword arguments to send to fun queue queue to insert the runner reference into backend backend that to use for the queue CLI Example: .. code-block:: bash salt-run queue.insert_runner test.stdout_print salt-run queue.insert_runner event.send test_insert_runner kwargs='{"data": {"foo": "bar"}}' ''' if args is None: args = [] elif isinstance(args, six.string_types): args = args.split(',') if kwargs is None: kwargs = {} queue_kwargs = __get_queue_opts(queue=queue, backend=backend) data = {'fun': fun, 'args': args, 'kwargs': kwargs} return insert(items=data, **queue_kwargs)
def function[insert_runner, parameter[fun, args, kwargs, queue, backend]]: constant[ Insert a reference to a runner into the queue so that it can be run later. fun The runner function that is going to be run args list or comma-seperated string of args to send to fun kwargs dictionary of keyword arguments to send to fun queue queue to insert the runner reference into backend backend that to use for the queue CLI Example: .. code-block:: bash salt-run queue.insert_runner test.stdout_print salt-run queue.insert_runner event.send test_insert_runner kwargs='{"data": {"foo": "bar"}}' ] if compare[name[args] is constant[None]] begin[:] variable[args] assign[=] list[[]] if compare[name[kwargs] is constant[None]] begin[:] variable[kwargs] assign[=] dictionary[[], []] variable[queue_kwargs] assign[=] call[name[__get_queue_opts], parameter[]] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da207f036a0>, <ast.Constant object at 0x7da207f01a80>, <ast.Constant object at 0x7da207f017b0>], [<ast.Name object at 0x7da207f00c40>, <ast.Name object at 0x7da207f025f0>, <ast.Name object at 0x7da207f00f70>]] return[call[name[insert], parameter[]]]
keyword[def] identifier[insert_runner] ( identifier[fun] , identifier[args] = keyword[None] , identifier[kwargs] = keyword[None] , identifier[queue] = keyword[None] , identifier[backend] = keyword[None] ): literal[string] keyword[if] identifier[args] keyword[is] keyword[None] : identifier[args] =[] keyword[elif] identifier[isinstance] ( identifier[args] , identifier[six] . identifier[string_types] ): identifier[args] = identifier[args] . identifier[split] ( literal[string] ) keyword[if] identifier[kwargs] keyword[is] keyword[None] : identifier[kwargs] ={} identifier[queue_kwargs] = identifier[__get_queue_opts] ( identifier[queue] = identifier[queue] , identifier[backend] = identifier[backend] ) identifier[data] ={ literal[string] : identifier[fun] , literal[string] : identifier[args] , literal[string] : identifier[kwargs] } keyword[return] identifier[insert] ( identifier[items] = identifier[data] ,** identifier[queue_kwargs] )
def insert_runner(fun, args=None, kwargs=None, queue=None, backend=None): """ Insert a reference to a runner into the queue so that it can be run later. fun The runner function that is going to be run args list or comma-seperated string of args to send to fun kwargs dictionary of keyword arguments to send to fun queue queue to insert the runner reference into backend backend that to use for the queue CLI Example: .. code-block:: bash salt-run queue.insert_runner test.stdout_print salt-run queue.insert_runner event.send test_insert_runner kwargs='{"data": {"foo": "bar"}}' """ if args is None: args = [] # depends on [control=['if'], data=['args']] elif isinstance(args, six.string_types): args = args.split(',') # depends on [control=['if'], data=[]] if kwargs is None: kwargs = {} # depends on [control=['if'], data=['kwargs']] queue_kwargs = __get_queue_opts(queue=queue, backend=backend) data = {'fun': fun, 'args': args, 'kwargs': kwargs} return insert(items=data, **queue_kwargs)
def restart_kernel(self, kernel_id): """Restart a kernel while keeping clients connected.""" self._check_kernel_id(kernel_id) km = self.get_kernel(kernel_id) km.restart_kernel() self.log.info("Kernel restarted: %s" % kernel_id) return kernel_id # the following remains, in case the KM restart machinery is # somehow unacceptable # Get the notebook_id to preserve the kernel/notebook association. notebook_id = self.notebook_for_kernel(kernel_id) # Create the new kernel first so we can move the clients over. new_kernel_id = self.start_kernel() # Now kill the old kernel. self.kill_kernel(kernel_id) # Now save the new kernel/notebook association. We have to save it # after the old kernel is killed as that will delete the mapping. self.set_kernel_for_notebook(notebook_id, new_kernel_id) self.log.info("Kernel restarted: %s" % new_kernel_id) return new_kernel_id
def function[restart_kernel, parameter[self, kernel_id]]: constant[Restart a kernel while keeping clients connected.] call[name[self]._check_kernel_id, parameter[name[kernel_id]]] variable[km] assign[=] call[name[self].get_kernel, parameter[name[kernel_id]]] call[name[km].restart_kernel, parameter[]] call[name[self].log.info, parameter[binary_operation[constant[Kernel restarted: %s] <ast.Mod object at 0x7da2590d6920> name[kernel_id]]]] return[name[kernel_id]] variable[notebook_id] assign[=] call[name[self].notebook_for_kernel, parameter[name[kernel_id]]] variable[new_kernel_id] assign[=] call[name[self].start_kernel, parameter[]] call[name[self].kill_kernel, parameter[name[kernel_id]]] call[name[self].set_kernel_for_notebook, parameter[name[notebook_id], name[new_kernel_id]]] call[name[self].log.info, parameter[binary_operation[constant[Kernel restarted: %s] <ast.Mod object at 0x7da2590d6920> name[new_kernel_id]]]] return[name[new_kernel_id]]
keyword[def] identifier[restart_kernel] ( identifier[self] , identifier[kernel_id] ): literal[string] identifier[self] . identifier[_check_kernel_id] ( identifier[kernel_id] ) identifier[km] = identifier[self] . identifier[get_kernel] ( identifier[kernel_id] ) identifier[km] . identifier[restart_kernel] () identifier[self] . identifier[log] . identifier[info] ( literal[string] % identifier[kernel_id] ) keyword[return] identifier[kernel_id] identifier[notebook_id] = identifier[self] . identifier[notebook_for_kernel] ( identifier[kernel_id] ) identifier[new_kernel_id] = identifier[self] . identifier[start_kernel] () identifier[self] . identifier[kill_kernel] ( identifier[kernel_id] ) identifier[self] . identifier[set_kernel_for_notebook] ( identifier[notebook_id] , identifier[new_kernel_id] ) identifier[self] . identifier[log] . identifier[info] ( literal[string] % identifier[new_kernel_id] ) keyword[return] identifier[new_kernel_id]
def restart_kernel(self, kernel_id): """Restart a kernel while keeping clients connected.""" self._check_kernel_id(kernel_id) km = self.get_kernel(kernel_id) km.restart_kernel() self.log.info('Kernel restarted: %s' % kernel_id) return kernel_id # the following remains, in case the KM restart machinery is # somehow unacceptable # Get the notebook_id to preserve the kernel/notebook association. notebook_id = self.notebook_for_kernel(kernel_id) # Create the new kernel first so we can move the clients over. new_kernel_id = self.start_kernel() # Now kill the old kernel. self.kill_kernel(kernel_id) # Now save the new kernel/notebook association. We have to save it # after the old kernel is killed as that will delete the mapping. self.set_kernel_for_notebook(notebook_id, new_kernel_id) self.log.info('Kernel restarted: %s' % new_kernel_id) return new_kernel_id
def add_prefix(self, ncname: str) -> None: """ Look up ncname and add it to the prefix map if necessary @param ncname: name to add """ if ncname not in self.prefixmap: uri = cu.expand_uri(ncname + ':', self.curi_maps) if uri and '://' in uri: self.prefixmap[ncname] = uri else: print(f"Unrecognized prefix: {ncname}", file=sys.stderr) self.prefixmap[ncname] = f"http://example.org/unknown/{ncname}/"
def function[add_prefix, parameter[self, ncname]]: constant[ Look up ncname and add it to the prefix map if necessary @param ncname: name to add ] if compare[name[ncname] <ast.NotIn object at 0x7da2590d7190> name[self].prefixmap] begin[:] variable[uri] assign[=] call[name[cu].expand_uri, parameter[binary_operation[name[ncname] + constant[:]], name[self].curi_maps]] if <ast.BoolOp object at 0x7da18f09d420> begin[:] call[name[self].prefixmap][name[ncname]] assign[=] name[uri]
keyword[def] identifier[add_prefix] ( identifier[self] , identifier[ncname] : identifier[str] )-> keyword[None] : literal[string] keyword[if] identifier[ncname] keyword[not] keyword[in] identifier[self] . identifier[prefixmap] : identifier[uri] = identifier[cu] . identifier[expand_uri] ( identifier[ncname] + literal[string] , identifier[self] . identifier[curi_maps] ) keyword[if] identifier[uri] keyword[and] literal[string] keyword[in] identifier[uri] : identifier[self] . identifier[prefixmap] [ identifier[ncname] ]= identifier[uri] keyword[else] : identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] ) identifier[self] . identifier[prefixmap] [ identifier[ncname] ]= literal[string]
def add_prefix(self, ncname: str) -> None: """ Look up ncname and add it to the prefix map if necessary @param ncname: name to add """ if ncname not in self.prefixmap: uri = cu.expand_uri(ncname + ':', self.curi_maps) if uri and '://' in uri: self.prefixmap[ncname] = uri # depends on [control=['if'], data=[]] else: print(f'Unrecognized prefix: {ncname}', file=sys.stderr) self.prefixmap[ncname] = f'http://example.org/unknown/{ncname}/' # depends on [control=['if'], data=['ncname']]
def _references(self, i, sequence=False): """Handle references.""" value = '' c = next(i) if c == '\\': # \\ if sequence and self.bslash_abort: raise PathNameException value = c elif c == '/': # \/ if sequence: raise PathNameException i.rewind(1) else: # \a, \b, \c, etc. pass return value
def function[_references, parameter[self, i, sequence]]: constant[Handle references.] variable[value] assign[=] constant[] variable[c] assign[=] call[name[next], parameter[name[i]]] if compare[name[c] equal[==] constant[\]] begin[:] if <ast.BoolOp object at 0x7da1b0203340> begin[:] <ast.Raise object at 0x7da1b0202770> variable[value] assign[=] name[c] return[name[value]]
keyword[def] identifier[_references] ( identifier[self] , identifier[i] , identifier[sequence] = keyword[False] ): literal[string] identifier[value] = literal[string] identifier[c] = identifier[next] ( identifier[i] ) keyword[if] identifier[c] == literal[string] : keyword[if] identifier[sequence] keyword[and] identifier[self] . identifier[bslash_abort] : keyword[raise] identifier[PathNameException] identifier[value] = identifier[c] keyword[elif] identifier[c] == literal[string] : keyword[if] identifier[sequence] : keyword[raise] identifier[PathNameException] identifier[i] . identifier[rewind] ( literal[int] ) keyword[else] : keyword[pass] keyword[return] identifier[value]
def _references(self, i, sequence=False): """Handle references.""" value = '' c = next(i) if c == '\\': # \\ if sequence and self.bslash_abort: raise PathNameException # depends on [control=['if'], data=[]] value = c # depends on [control=['if'], data=['c']] elif c == '/': # \/ if sequence: raise PathNameException # depends on [control=['if'], data=[]] i.rewind(1) # depends on [control=['if'], data=[]] else: # \a, \b, \c, etc. pass return value
def store_meta_data(self, copy_path=None): """Save meta data of state model to the file system This method generates a dictionary of the meta data of the state together with the meta data of all state elements (data ports, outcomes, etc.) and stores it on the filesystem. Secure that the store meta data method is called after storing the core data otherwise the last_stored_path is maybe wrong or None. The copy path is considered to be a state machine file system path but not the current one but e.g. of a as copy saved state machine. The meta data will be stored in respective relative state folder in the state machine hierarchy. This folder has to exist. Dues the core elements of the state machine has to be stored first. :param str copy_path: Optional copy path if meta data is not stored to the file system path of state machine """ if copy_path: meta_file_path_json = os.path.join(copy_path, self.state.get_storage_path(), storage.FILE_NAME_META_DATA) else: if self.state.file_system_path is None: logger.error("Meta data of {0} can be stored temporary arbitrary but by default first after the " "respective state was stored and a file system path is set.".format(self)) return meta_file_path_json = os.path.join(self.state.file_system_path, storage.FILE_NAME_META_DATA) meta_data = deepcopy(self.meta) self._generate_element_meta_data(meta_data) storage_utils.write_dict_to_json(meta_data, meta_file_path_json)
def function[store_meta_data, parameter[self, copy_path]]: constant[Save meta data of state model to the file system This method generates a dictionary of the meta data of the state together with the meta data of all state elements (data ports, outcomes, etc.) and stores it on the filesystem. Secure that the store meta data method is called after storing the core data otherwise the last_stored_path is maybe wrong or None. The copy path is considered to be a state machine file system path but not the current one but e.g. of a as copy saved state machine. The meta data will be stored in respective relative state folder in the state machine hierarchy. This folder has to exist. Dues the core elements of the state machine has to be stored first. :param str copy_path: Optional copy path if meta data is not stored to the file system path of state machine ] if name[copy_path] begin[:] variable[meta_file_path_json] assign[=] call[name[os].path.join, parameter[name[copy_path], call[name[self].state.get_storage_path, parameter[]], name[storage].FILE_NAME_META_DATA]] variable[meta_data] assign[=] call[name[deepcopy], parameter[name[self].meta]] call[name[self]._generate_element_meta_data, parameter[name[meta_data]]] call[name[storage_utils].write_dict_to_json, parameter[name[meta_data], name[meta_file_path_json]]]
keyword[def] identifier[store_meta_data] ( identifier[self] , identifier[copy_path] = keyword[None] ): literal[string] keyword[if] identifier[copy_path] : identifier[meta_file_path_json] = identifier[os] . identifier[path] . identifier[join] ( identifier[copy_path] , identifier[self] . identifier[state] . identifier[get_storage_path] (), identifier[storage] . identifier[FILE_NAME_META_DATA] ) keyword[else] : keyword[if] identifier[self] . identifier[state] . identifier[file_system_path] keyword[is] keyword[None] : identifier[logger] . identifier[error] ( literal[string] literal[string] . identifier[format] ( identifier[self] )) keyword[return] identifier[meta_file_path_json] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[state] . identifier[file_system_path] , identifier[storage] . identifier[FILE_NAME_META_DATA] ) identifier[meta_data] = identifier[deepcopy] ( identifier[self] . identifier[meta] ) identifier[self] . identifier[_generate_element_meta_data] ( identifier[meta_data] ) identifier[storage_utils] . identifier[write_dict_to_json] ( identifier[meta_data] , identifier[meta_file_path_json] )
def store_meta_data(self, copy_path=None): """Save meta data of state model to the file system This method generates a dictionary of the meta data of the state together with the meta data of all state elements (data ports, outcomes, etc.) and stores it on the filesystem. Secure that the store meta data method is called after storing the core data otherwise the last_stored_path is maybe wrong or None. The copy path is considered to be a state machine file system path but not the current one but e.g. of a as copy saved state machine. The meta data will be stored in respective relative state folder in the state machine hierarchy. This folder has to exist. Dues the core elements of the state machine has to be stored first. :param str copy_path: Optional copy path if meta data is not stored to the file system path of state machine """ if copy_path: meta_file_path_json = os.path.join(copy_path, self.state.get_storage_path(), storage.FILE_NAME_META_DATA) # depends on [control=['if'], data=[]] else: if self.state.file_system_path is None: logger.error('Meta data of {0} can be stored temporary arbitrary but by default first after the respective state was stored and a file system path is set.'.format(self)) return # depends on [control=['if'], data=[]] meta_file_path_json = os.path.join(self.state.file_system_path, storage.FILE_NAME_META_DATA) meta_data = deepcopy(self.meta) self._generate_element_meta_data(meta_data) storage_utils.write_dict_to_json(meta_data, meta_file_path_json)
def byteswap(self, fmt=None, start=None, end=None, repeat=True): """Change the endianness in-place. Return number of repeats of fmt done. fmt -- A compact structure string, an integer number of bytes or an iterable of integers. Defaults to 0, which byte reverses the whole bitstring. start -- Start bit position, defaults to 0. end -- End bit position, defaults to self.len. repeat -- If True (the default) the byte swapping pattern is repeated as much as possible. """ start, end = self._validate_slice(start, end) if fmt is None or fmt == 0: # reverse all of the whole bytes. bytesizes = [(end - start) // 8] elif isinstance(fmt, numbers.Integral): if fmt < 0: raise ValueError("Improper byte length {0}.".format(fmt)) bytesizes = [fmt] elif isinstance(fmt, basestring): m = STRUCT_PACK_RE.match(fmt) if not m: raise ValueError("Cannot parse format string {0}.".format(fmt)) # Split the format string into a list of 'q', '4h' etc. formatlist = re.findall(STRUCT_SPLIT_RE, m.group('fmt')) # Now deal with multiplicative factors, 4h -> hhhh etc. bytesizes = [] for f in formatlist: if len(f) == 1: bytesizes.append(PACK_CODE_SIZE[f]) else: bytesizes.extend([PACK_CODE_SIZE[f[-1]]] * int(f[:-1])) elif isinstance(fmt, collections.Iterable): bytesizes = fmt for bytesize in bytesizes: if not isinstance(bytesize, numbers.Integral) or bytesize < 0: raise ValueError("Improper byte length {0}.".format(bytesize)) else: raise TypeError("Format must be an integer, string or iterable.") repeats = 0 totalbitsize = 8 * sum(bytesizes) if not totalbitsize: return 0 if repeat: # Try to repeat up to the end of the bitstring. finalbit = end else: # Just try one (set of) byteswap(s). finalbit = start + totalbitsize for patternend in xrange(start + totalbitsize, finalbit + 1, totalbitsize): bytestart = patternend - totalbitsize for bytesize in bytesizes: byteend = bytestart + bytesize * 8 self._reversebytes(bytestart, byteend) bytestart += bytesize * 8 repeats += 1 return repeats
def function[byteswap, parameter[self, fmt, start, end, repeat]]: constant[Change the endianness in-place. Return number of repeats of fmt done. fmt -- A compact structure string, an integer number of bytes or an iterable of integers. Defaults to 0, which byte reverses the whole bitstring. start -- Start bit position, defaults to 0. end -- End bit position, defaults to self.len. repeat -- If True (the default) the byte swapping pattern is repeated as much as possible. ] <ast.Tuple object at 0x7da1b1039330> assign[=] call[name[self]._validate_slice, parameter[name[start], name[end]]] if <ast.BoolOp object at 0x7da1b103a620> begin[:] variable[bytesizes] assign[=] list[[<ast.BinOp object at 0x7da1b103a830>]] variable[repeats] assign[=] constant[0] variable[totalbitsize] assign[=] binary_operation[constant[8] * call[name[sum], parameter[name[bytesizes]]]] if <ast.UnaryOp object at 0x7da1b1029e40> begin[:] return[constant[0]] if name[repeat] begin[:] variable[finalbit] assign[=] name[end] for taget[name[patternend]] in starred[call[name[xrange], parameter[binary_operation[name[start] + name[totalbitsize]], binary_operation[name[finalbit] + constant[1]], name[totalbitsize]]]] begin[:] variable[bytestart] assign[=] binary_operation[name[patternend] - name[totalbitsize]] for taget[name[bytesize]] in starred[name[bytesizes]] begin[:] variable[byteend] assign[=] binary_operation[name[bytestart] + binary_operation[name[bytesize] * constant[8]]] call[name[self]._reversebytes, parameter[name[bytestart], name[byteend]]] <ast.AugAssign object at 0x7da1b102bb20> <ast.AugAssign object at 0x7da1b102b6d0> return[name[repeats]]
keyword[def] identifier[byteswap] ( identifier[self] , identifier[fmt] = keyword[None] , identifier[start] = keyword[None] , identifier[end] = keyword[None] , identifier[repeat] = keyword[True] ): literal[string] identifier[start] , identifier[end] = identifier[self] . identifier[_validate_slice] ( identifier[start] , identifier[end] ) keyword[if] identifier[fmt] keyword[is] keyword[None] keyword[or] identifier[fmt] == literal[int] : identifier[bytesizes] =[( identifier[end] - identifier[start] )// literal[int] ] keyword[elif] identifier[isinstance] ( identifier[fmt] , identifier[numbers] . identifier[Integral] ): keyword[if] identifier[fmt] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[fmt] )) identifier[bytesizes] =[ identifier[fmt] ] keyword[elif] identifier[isinstance] ( identifier[fmt] , identifier[basestring] ): identifier[m] = identifier[STRUCT_PACK_RE] . identifier[match] ( identifier[fmt] ) keyword[if] keyword[not] identifier[m] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[fmt] )) identifier[formatlist] = identifier[re] . identifier[findall] ( identifier[STRUCT_SPLIT_RE] , identifier[m] . identifier[group] ( literal[string] )) identifier[bytesizes] =[] keyword[for] identifier[f] keyword[in] identifier[formatlist] : keyword[if] identifier[len] ( identifier[f] )== literal[int] : identifier[bytesizes] . identifier[append] ( identifier[PACK_CODE_SIZE] [ identifier[f] ]) keyword[else] : identifier[bytesizes] . identifier[extend] ([ identifier[PACK_CODE_SIZE] [ identifier[f] [- literal[int] ]]]* identifier[int] ( identifier[f] [:- literal[int] ])) keyword[elif] identifier[isinstance] ( identifier[fmt] , identifier[collections] . identifier[Iterable] ): identifier[bytesizes] = identifier[fmt] keyword[for] identifier[bytesize] keyword[in] identifier[bytesizes] : keyword[if] keyword[not] identifier[isinstance] ( identifier[bytesize] , identifier[numbers] . 
identifier[Integral] ) keyword[or] identifier[bytesize] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[bytesize] )) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[repeats] = literal[int] identifier[totalbitsize] = literal[int] * identifier[sum] ( identifier[bytesizes] ) keyword[if] keyword[not] identifier[totalbitsize] : keyword[return] literal[int] keyword[if] identifier[repeat] : identifier[finalbit] = identifier[end] keyword[else] : identifier[finalbit] = identifier[start] + identifier[totalbitsize] keyword[for] identifier[patternend] keyword[in] identifier[xrange] ( identifier[start] + identifier[totalbitsize] , identifier[finalbit] + literal[int] , identifier[totalbitsize] ): identifier[bytestart] = identifier[patternend] - identifier[totalbitsize] keyword[for] identifier[bytesize] keyword[in] identifier[bytesizes] : identifier[byteend] = identifier[bytestart] + identifier[bytesize] * literal[int] identifier[self] . identifier[_reversebytes] ( identifier[bytestart] , identifier[byteend] ) identifier[bytestart] += identifier[bytesize] * literal[int] identifier[repeats] += literal[int] keyword[return] identifier[repeats]
def byteswap(self, fmt=None, start=None, end=None, repeat=True): """Change the endianness in-place. Return number of repeats of fmt done. fmt -- A compact structure string, an integer number of bytes or an iterable of integers. Defaults to 0, which byte reverses the whole bitstring. start -- Start bit position, defaults to 0. end -- End bit position, defaults to self.len. repeat -- If True (the default) the byte swapping pattern is repeated as much as possible. """ (start, end) = self._validate_slice(start, end) if fmt is None or fmt == 0: # reverse all of the whole bytes. bytesizes = [(end - start) // 8] # depends on [control=['if'], data=[]] elif isinstance(fmt, numbers.Integral): if fmt < 0: raise ValueError('Improper byte length {0}.'.format(fmt)) # depends on [control=['if'], data=['fmt']] bytesizes = [fmt] # depends on [control=['if'], data=[]] elif isinstance(fmt, basestring): m = STRUCT_PACK_RE.match(fmt) if not m: raise ValueError('Cannot parse format string {0}.'.format(fmt)) # depends on [control=['if'], data=[]] # Split the format string into a list of 'q', '4h' etc. formatlist = re.findall(STRUCT_SPLIT_RE, m.group('fmt')) # Now deal with multiplicative factors, 4h -> hhhh etc. 
bytesizes = [] for f in formatlist: if len(f) == 1: bytesizes.append(PACK_CODE_SIZE[f]) # depends on [control=['if'], data=[]] else: bytesizes.extend([PACK_CODE_SIZE[f[-1]]] * int(f[:-1])) # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]] elif isinstance(fmt, collections.Iterable): bytesizes = fmt for bytesize in bytesizes: if not isinstance(bytesize, numbers.Integral) or bytesize < 0: raise ValueError('Improper byte length {0}.'.format(bytesize)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['bytesize']] # depends on [control=['if'], data=[]] else: raise TypeError('Format must be an integer, string or iterable.') repeats = 0 totalbitsize = 8 * sum(bytesizes) if not totalbitsize: return 0 # depends on [control=['if'], data=[]] if repeat: # Try to repeat up to the end of the bitstring. finalbit = end # depends on [control=['if'], data=[]] else: # Just try one (set of) byteswap(s). finalbit = start + totalbitsize for patternend in xrange(start + totalbitsize, finalbit + 1, totalbitsize): bytestart = patternend - totalbitsize for bytesize in bytesizes: byteend = bytestart + bytesize * 8 self._reversebytes(bytestart, byteend) bytestart += bytesize * 8 # depends on [control=['for'], data=['bytesize']] repeats += 1 # depends on [control=['for'], data=['patternend']] return repeats
def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument
def function[find_return_with_argument, parameter[self, node]]: constant[Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. ] for taget[name[item]] in starred[name[node].body] begin[:] if <ast.BoolOp object at 0x7da1b1939360> begin[:] return[name[item]]
keyword[def] identifier[find_return_with_argument] ( identifier[self] , identifier[node] ): literal[string] keyword[for] identifier[item] keyword[in] identifier[node] . identifier[body] : keyword[if] identifier[isinstance] ( identifier[item] , identifier[ast] . identifier[Return] ) keyword[and] identifier[item] . identifier[value] : keyword[return] identifier[item] keyword[elif] keyword[not] identifier[isinstance] ( identifier[item] , identifier[ast] . identifier[FunctionDef] ) keyword[and] identifier[hasattr] ( identifier[item] , literal[string] ): identifier[return_with_argument] = identifier[self] . identifier[find_return_with_argument] ( identifier[item] ) keyword[if] identifier[return_with_argument] : keyword[return] identifier[return_with_argument]
def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item # depends on [control=['if'], data=[]] elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']]
def _to_png(self, delay=3): """Export the HTML to byte representation of a PNG image. Uses selenium to render the HTML and record a PNG. You may need to adjust the `delay` time keyword argument if maps render without data or tiles. Examples -------- >>> m._to_png() >>> m._to_png(time=10) # Wait 10 seconds between render and snapshot. """ if self._png_image is None: from selenium import webdriver options = webdriver.firefox.options.Options() options.add_argument('--headless') driver = webdriver.Firefox(options=options) html = self.get_root().render() with _tmp_html(html) as fname: # We need the tempfile to avoid JS security issues. driver.get('file:///{path}'.format(path=fname)) driver.maximize_window() time.sleep(delay) png = driver.get_screenshot_as_png() driver.quit() self._png_image = png return self._png_image
def function[_to_png, parameter[self, delay]]: constant[Export the HTML to byte representation of a PNG image. Uses selenium to render the HTML and record a PNG. You may need to adjust the `delay` time keyword argument if maps render without data or tiles. Examples -------- >>> m._to_png() >>> m._to_png(time=10) # Wait 10 seconds between render and snapshot. ] if compare[name[self]._png_image is constant[None]] begin[:] from relative_module[selenium] import module[webdriver] variable[options] assign[=] call[name[webdriver].firefox.options.Options, parameter[]] call[name[options].add_argument, parameter[constant[--headless]]] variable[driver] assign[=] call[name[webdriver].Firefox, parameter[]] variable[html] assign[=] call[call[name[self].get_root, parameter[]].render, parameter[]] with call[name[_tmp_html], parameter[name[html]]] begin[:] call[name[driver].get, parameter[call[constant[file:///{path}].format, parameter[]]]] call[name[driver].maximize_window, parameter[]] call[name[time].sleep, parameter[name[delay]]] variable[png] assign[=] call[name[driver].get_screenshot_as_png, parameter[]] call[name[driver].quit, parameter[]] name[self]._png_image assign[=] name[png] return[name[self]._png_image]
keyword[def] identifier[_to_png] ( identifier[self] , identifier[delay] = literal[int] ): literal[string] keyword[if] identifier[self] . identifier[_png_image] keyword[is] keyword[None] : keyword[from] identifier[selenium] keyword[import] identifier[webdriver] identifier[options] = identifier[webdriver] . identifier[firefox] . identifier[options] . identifier[Options] () identifier[options] . identifier[add_argument] ( literal[string] ) identifier[driver] = identifier[webdriver] . identifier[Firefox] ( identifier[options] = identifier[options] ) identifier[html] = identifier[self] . identifier[get_root] (). identifier[render] () keyword[with] identifier[_tmp_html] ( identifier[html] ) keyword[as] identifier[fname] : identifier[driver] . identifier[get] ( literal[string] . identifier[format] ( identifier[path] = identifier[fname] )) identifier[driver] . identifier[maximize_window] () identifier[time] . identifier[sleep] ( identifier[delay] ) identifier[png] = identifier[driver] . identifier[get_screenshot_as_png] () identifier[driver] . identifier[quit] () identifier[self] . identifier[_png_image] = identifier[png] keyword[return] identifier[self] . identifier[_png_image]
def _to_png(self, delay=3): """Export the HTML to byte representation of a PNG image. Uses selenium to render the HTML and record a PNG. You may need to adjust the `delay` time keyword argument if maps render without data or tiles. Examples -------- >>> m._to_png() >>> m._to_png(time=10) # Wait 10 seconds between render and snapshot. """ if self._png_image is None: from selenium import webdriver options = webdriver.firefox.options.Options() options.add_argument('--headless') driver = webdriver.Firefox(options=options) html = self.get_root().render() with _tmp_html(html) as fname: # We need the tempfile to avoid JS security issues. driver.get('file:///{path}'.format(path=fname)) driver.maximize_window() time.sleep(delay) png = driver.get_screenshot_as_png() driver.quit() # depends on [control=['with'], data=['fname']] self._png_image = png # depends on [control=['if'], data=[]] return self._png_image
def render_tooltip(self, tooltip, obj):
    """Populate ``tooltip`` for ``obj`` from this column's configuration.

    The value comes from ``obj``'s attribute when ``tooltip_attr`` is set,
    otherwise from the static ``tooltip_value``. Returns True when the
    tooltip was populated, False when there is nothing to show.
    """
    if self.tooltip_attr:
        value = getattr(obj, self.tooltip_attr)
    elif self.tooltip_value:
        value = self.tooltip_value
    else:
        # Neither a source attribute nor a static value: nothing to render.
        return False
    # Sized tooltip types (e.g. images) take an extra size argument.
    args = (value,)
    if self.tooltip_type in TOOLTIP_SIZED_TYPES:
        args = (value, self.tooltip_image_size)
    setter_name = TOOLTIP_SETTERS.get(self.tooltip_type)
    getattr(tooltip, setter_name)(*args)
    return True
def function[render_tooltip, parameter[self, tooltip, obj]]: constant[Render the tooltip for this column for an object ] if name[self].tooltip_attr begin[:] variable[val] assign[=] call[name[getattr], parameter[name[obj], name[self].tooltip_attr]] variable[setter] assign[=] call[name[getattr], parameter[name[tooltip], call[name[TOOLTIP_SETTERS].get, parameter[name[self].tooltip_type]]]] if compare[name[self].tooltip_type in name[TOOLTIP_SIZED_TYPES]] begin[:] call[name[setter], parameter[name[val], name[self].tooltip_image_size]] return[constant[True]]
keyword[def] identifier[render_tooltip] ( identifier[self] , identifier[tooltip] , identifier[obj] ): literal[string] keyword[if] identifier[self] . identifier[tooltip_attr] : identifier[val] = identifier[getattr] ( identifier[obj] , identifier[self] . identifier[tooltip_attr] ) keyword[elif] identifier[self] . identifier[tooltip_value] : identifier[val] = identifier[self] . identifier[tooltip_value] keyword[else] : keyword[return] keyword[False] identifier[setter] = identifier[getattr] ( identifier[tooltip] , identifier[TOOLTIP_SETTERS] . identifier[get] ( identifier[self] . identifier[tooltip_type] )) keyword[if] identifier[self] . identifier[tooltip_type] keyword[in] identifier[TOOLTIP_SIZED_TYPES] : identifier[setter] ( identifier[val] , identifier[self] . identifier[tooltip_image_size] ) keyword[else] : identifier[setter] ( identifier[val] ) keyword[return] keyword[True]
def render_tooltip(self, tooltip, obj): """Render the tooltip for this column for an object """ if self.tooltip_attr: val = getattr(obj, self.tooltip_attr) # depends on [control=['if'], data=[]] elif self.tooltip_value: val = self.tooltip_value # depends on [control=['if'], data=[]] else: return False setter = getattr(tooltip, TOOLTIP_SETTERS.get(self.tooltip_type)) if self.tooltip_type in TOOLTIP_SIZED_TYPES: setter(val, self.tooltip_image_size) # depends on [control=['if'], data=[]] else: setter(val) return True
def identify_hosting_service(repo_url, hosting_services=HOSTING_SERVICES):
    """
    Determines the hosting service of `repo_url`.

    The first known service whose name occurs as a substring of the URL
    wins, following the iteration order of `hosting_services`.

    :param repo_url: Repo URL of unknown type.
    :returns: Hosting service or raises UnknownHostingService exception.
    """
    url_text = unicode(repo_url)
    match = next(
        (service for service in hosting_services if service in url_text),
        None,
    )
    if match is None:
        raise UnknownHostingService
    return match
def function[identify_hosting_service, parameter[repo_url, hosting_services]]: constant[ Determines the hosting service of `repo_url`. :param repo_url: Repo URL of unknown type. :returns: Hosting service or raises UnknownHostingService exception. ] variable[repo_url] assign[=] call[name[unicode], parameter[name[repo_url]]] for taget[name[service]] in starred[name[hosting_services]] begin[:] if compare[name[service] in name[repo_url]] begin[:] return[name[service]] <ast.Raise object at 0x7da2041d81c0>
keyword[def] identifier[identify_hosting_service] ( identifier[repo_url] , identifier[hosting_services] = identifier[HOSTING_SERVICES] ): literal[string] identifier[repo_url] = identifier[unicode] ( identifier[repo_url] ) keyword[for] identifier[service] keyword[in] identifier[hosting_services] : keyword[if] identifier[service] keyword[in] identifier[repo_url] : keyword[return] identifier[service] keyword[raise] identifier[UnknownHostingService]
def identify_hosting_service(repo_url, hosting_services=HOSTING_SERVICES): """ Determines the hosting service of `repo_url`. :param repo_url: Repo URL of unknown type. :returns: Hosting service or raises UnknownHostingService exception. """ repo_url = unicode(repo_url) for service in hosting_services: if service in repo_url: return service # depends on [control=['if'], data=['service']] # depends on [control=['for'], data=['service']] raise UnknownHostingService
def post(self, request, enterprise_customer_uuid):
    """
    Handle POST request - handle form submissions.

    Arguments:
        request (django.http.request.HttpRequest): Request instance
        enterprise_customer_uuid (str): Enterprise Customer UUID
    """
    form = TransmitEnterpriseCoursesForm(request.POST)
    if not form.is_valid():
        # Re-render the page with the bound form so validation errors
        # are displayed to the user.
        context = self._build_context(request, enterprise_customer_uuid)
        context.update({self.ContextParameters.TRANSMIT_COURSES_METADATA_FORM: form})
        return render(request, self.template, context)
    # Trigger transmission of enterprise course metadata, running as the
    # submitted channel worker user.
    call_command(
        'transmit_content_metadata',
        '--catalog_user',
        form.cleaned_data['channel_worker_username'],
        enterprise_customer=enterprise_customer_uuid,
    )
    # POST/redirect/GET: send the browser back to the GET view.
    return HttpResponseRedirect('')
def function[post, parameter[self, request, enterprise_customer_uuid]]: constant[ Handle POST request - handle form submissions. Arguments: request (django.http.request.HttpRequest): Request instance enterprise_customer_uuid (str): Enterprise Customer UUID ] variable[transmit_courses_metadata_form] assign[=] call[name[TransmitEnterpriseCoursesForm], parameter[name[request].POST]] if call[name[transmit_courses_metadata_form].is_valid, parameter[]] begin[:] variable[channel_worker_username] assign[=] call[name[transmit_courses_metadata_form].cleaned_data][constant[channel_worker_username]] call[name[call_command], parameter[constant[transmit_content_metadata], constant[--catalog_user], name[channel_worker_username]]] return[call[name[HttpResponseRedirect], parameter[constant[]]]] variable[context] assign[=] call[name[self]._build_context, parameter[name[request], name[enterprise_customer_uuid]]] call[name[context].update, parameter[dictionary[[<ast.Attribute object at 0x7da1b0051030>], [<ast.Name object at 0x7da1b0050fa0>]]]] return[call[name[render], parameter[name[request], name[self].template, name[context]]]]
keyword[def] identifier[post] ( identifier[self] , identifier[request] , identifier[enterprise_customer_uuid] ): literal[string] identifier[transmit_courses_metadata_form] = identifier[TransmitEnterpriseCoursesForm] ( identifier[request] . identifier[POST] ) keyword[if] identifier[transmit_courses_metadata_form] . identifier[is_valid] (): identifier[channel_worker_username] = identifier[transmit_courses_metadata_form] . identifier[cleaned_data] [ literal[string] ] identifier[call_command] ( literal[string] , literal[string] , identifier[channel_worker_username] , identifier[enterprise_customer] = identifier[enterprise_customer_uuid] ) keyword[return] identifier[HttpResponseRedirect] ( literal[string] ) identifier[context] = identifier[self] . identifier[_build_context] ( identifier[request] , identifier[enterprise_customer_uuid] ) identifier[context] . identifier[update] ({ identifier[self] . identifier[ContextParameters] . identifier[TRANSMIT_COURSES_METADATA_FORM] : identifier[transmit_courses_metadata_form] }) keyword[return] identifier[render] ( identifier[request] , identifier[self] . identifier[template] , identifier[context] )
def post(self, request, enterprise_customer_uuid): """ Handle POST request - handle form submissions. Arguments: request (django.http.request.HttpRequest): Request instance enterprise_customer_uuid (str): Enterprise Customer UUID """ transmit_courses_metadata_form = TransmitEnterpriseCoursesForm(request.POST) # check that form data is well-formed if transmit_courses_metadata_form.is_valid(): channel_worker_username = transmit_courses_metadata_form.cleaned_data['channel_worker_username'] # call `transmit_content_metadata` management command to trigger # transmission of enterprise courses metadata call_command('transmit_content_metadata', '--catalog_user', channel_worker_username, enterprise_customer=enterprise_customer_uuid) # Redirect to GET return HttpResponseRedirect('') # depends on [control=['if'], data=[]] context = self._build_context(request, enterprise_customer_uuid) context.update({self.ContextParameters.TRANSMIT_COURSES_METADATA_FORM: transmit_courses_metadata_form}) return render(request, self.template, context)
def solve_sdp(sdp, solver=None, solverparameters=None):
    """Call a solver on the SDP relaxation. Upon successful solution, it
    returns the primal and dual objective values along with the solution
    matrices. The results are also stored on ``sdp`` (``primal``, ``dual``,
    ``x_mat``, ``y_mat``, ``status``, ``solution_time``) as a side effect.

    :param sdpRelaxation: The SDP relaxation to be solved.
    :type sdpRelaxation: :class:`ncpol2sdpa.SdpRelaxation`.
    :param solver: The solver to be called, either `None`, "sdpa", "mosek",
                   "cvxpy", "scs", or "cvxopt". The default is `None`,
                   which triggers autodetect.
    :type solver: str.
    :param solverparameters: Parameters to be passed to the solver. Actual
                             options depend on the solver:

                             SDPA:

                             - `"executable"`:
                               Specify the executable for SDPA. E.g.,
                               `"executable":"/usr/local/bin/sdpa"`, or
                               `"executable":"sdpa_gmp"`
                             - `"paramsfile"`: Specify the parameter file

                             Mosek:
                             Refer to the Mosek documentation. All
                             arguments are passed on.

                             Cvxopt:
                             Refer to the PICOS documentation. All
                             arguments are passed on.

                             Cvxpy:
                             Refer to the Cvxpy documentation. All
                             arguments are passed on.

                             SCS:
                             Refer to the Cvxpy documentation. All
                             arguments are passed on.
    :type solverparameters: dict of str.
    :returns: tuple of the primal and dual optimum, and the solutions for
              the primal and dual.
    :rtype: (float, float, list of `numpy.array`, list of `numpy.array`)
    """
    solvers = autodetect_solvers(solverparameters)
    # Normalize the requested solver name; leave None alone so the
    # autodetect branch below can pick the first available solver.
    solver = solver.lower() if solver is not None else solver
    if solvers == []:
        raise Exception("Could not find any SDP solver. Please install SDPA," +
                        " Mosek, Cvxpy, or Picos with Cvxopt")
    elif solver is not None and solver not in solvers:
        print("Available solvers: " + str(solvers))
        if solver == "cvxopt":
            # Distinguish "cvxopt missing" from "cvxopt present but its
            # Picos wrapper missing" to give a more helpful error.
            try:
                import cvxopt
            except ImportError:
                pass
            else:
                raise Exception("Cvxopt is detected, but Picos is not. "
                                "Please install Picos to use Cvxopt")
        raise Exception("Could not detect requested " + solver)
    elif solver is None:
        solver = solvers[0]
    primal, dual, x_mat, y_mat, status = None, None, None, None, None
    tstart = time.time()  # wall-clock solve time, recorded on `sdp` below
    if solver == "sdpa":
        primal, dual, x_mat, y_mat, status = \
            solve_with_sdpa(sdp, solverparameters)
    elif solver == "cvxpy":
        primal, dual, x_mat, y_mat, status = \
            solve_with_cvxpy(sdp, solverparameters)
    elif solver == "scs":
        # SCS is driven through cvxpy: force the backend selection on a
        # copy so the caller's parameter dict is not mutated.
        if solverparameters is None:
            solverparameters_ = {"solver": "SCS"}
        else:
            solverparameters_ = solverparameters.copy()
            solverparameters_["solver"] = "SCS"
        primal, dual, x_mat, y_mat, status = \
            solve_with_cvxpy(sdp, solverparameters_)
    elif solver == "mosek":
        primal, dual, x_mat, y_mat, status = \
            solve_with_mosek(sdp, solverparameters)
    elif solver == "cvxopt":
        primal, dual, x_mat, y_mat, status = \
            solve_with_cvxopt(sdp, solverparameters)
        # We have to compensate for the equality constraints: keep only the
        # first block index recorded for each equality-derived constraint.
        # NOTE(review): cvxopt-specific fixup that mutates sdp state; the
        # exact rationale presumably lives in solve_with_cvxopt -- confirm.
        for constraint in sdp.constraints[sdp._n_inequalities:]:
            idx = sdp._constraint_to_block_index[constraint]
            sdp._constraint_to_block_index[constraint] = (idx[0],)
    else:
        # NOTE(review): "Unkown" is a typo in this user-facing message;
        # left as-is here since callers/tests may match on it.
        raise Exception("Unkown solver: " + solver)
    # Record results on the relaxation object for later inspection.
    sdp.solution_time = time.time() - tstart
    sdp.primal = primal
    sdp.dual = dual
    sdp.x_mat = x_mat
    sdp.y_mat = y_mat
    sdp.status = status
    return primal, dual, x_mat, y_mat
def function[solve_sdp, parameter[sdp, solver, solverparameters]]: constant[Call a solver on the SDP relaxation. Upon successful solution, it returns the primal and dual objective values along with the solution matrices. :param sdpRelaxation: The SDP relaxation to be solved. :type sdpRelaxation: :class:`ncpol2sdpa.SdpRelaxation`. :param solver: The solver to be called, either `None`, "sdpa", "mosek", "cvxpy", "scs", or "cvxopt". The default is `None`, which triggers autodetect. :type solver: str. :param solverparameters: Parameters to be passed to the solver. Actual options depend on the solver: SDPA: - `"executable"`: Specify the executable for SDPA. E.g., `"executable":"/usr/local/bin/sdpa"`, or `"executable":"sdpa_gmp"` - `"paramsfile"`: Specify the parameter file Mosek: Refer to the Mosek documentation. All arguments are passed on. Cvxopt: Refer to the PICOS documentation. All arguments are passed on. Cvxpy: Refer to the Cvxpy documentation. All arguments are passed on. SCS: Refer to the Cvxpy documentation. All arguments are passed on. :type solverparameters: dict of str. :returns: tuple of the primal and dual optimum, and the solutions for the primal and dual. 
:rtype: (float, float, list of `numpy.array`, list of `numpy.array`) ] variable[solvers] assign[=] call[name[autodetect_solvers], parameter[name[solverparameters]]] variable[solver] assign[=] <ast.IfExp object at 0x7da1b0fa7e80> if compare[name[solvers] equal[==] list[[]]] begin[:] <ast.Raise object at 0x7da1b0fa7190> <ast.Tuple object at 0x7da1b0f0c6a0> assign[=] tuple[[<ast.Constant object at 0x7da1b0f0dab0>, <ast.Constant object at 0x7da1b0f0e1d0>, <ast.Constant object at 0x7da1b0f0e200>, <ast.Constant object at 0x7da1b0f0e440>, <ast.Constant object at 0x7da1b0f0d1b0>]] variable[tstart] assign[=] call[name[time].time, parameter[]] if compare[name[solver] equal[==] constant[sdpa]] begin[:] <ast.Tuple object at 0x7da1b0f0e170> assign[=] call[name[solve_with_sdpa], parameter[name[sdp], name[solverparameters]]] name[sdp].solution_time assign[=] binary_operation[call[name[time].time, parameter[]] - name[tstart]] name[sdp].primal assign[=] name[primal] name[sdp].dual assign[=] name[dual] name[sdp].x_mat assign[=] name[x_mat] name[sdp].y_mat assign[=] name[y_mat] name[sdp].status assign[=] name[status] return[tuple[[<ast.Name object at 0x7da1b0f53ca0>, <ast.Name object at 0x7da1b0f53280>, <ast.Name object at 0x7da1b0f53c40>, <ast.Name object at 0x7da1b0f53cd0>]]]
keyword[def] identifier[solve_sdp] ( identifier[sdp] , identifier[solver] = keyword[None] , identifier[solverparameters] = keyword[None] ): literal[string] identifier[solvers] = identifier[autodetect_solvers] ( identifier[solverparameters] ) identifier[solver] = identifier[solver] . identifier[lower] () keyword[if] identifier[solver] keyword[is] keyword[not] keyword[None] keyword[else] identifier[solver] keyword[if] identifier[solvers] ==[]: keyword[raise] identifier[Exception] ( literal[string] + literal[string] ) keyword[elif] identifier[solver] keyword[is] keyword[not] keyword[None] keyword[and] identifier[solver] keyword[not] keyword[in] identifier[solvers] : identifier[print] ( literal[string] + identifier[str] ( identifier[solvers] )) keyword[if] identifier[solver] == literal[string] : keyword[try] : keyword[import] identifier[cvxopt] keyword[except] identifier[ImportError] : keyword[pass] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] literal[string] ) keyword[raise] identifier[Exception] ( literal[string] + identifier[solver] ) keyword[elif] identifier[solver] keyword[is] keyword[None] : identifier[solver] = identifier[solvers] [ literal[int] ] identifier[primal] , identifier[dual] , identifier[x_mat] , identifier[y_mat] , identifier[status] = keyword[None] , keyword[None] , keyword[None] , keyword[None] , keyword[None] identifier[tstart] = identifier[time] . 
identifier[time] () keyword[if] identifier[solver] == literal[string] : identifier[primal] , identifier[dual] , identifier[x_mat] , identifier[y_mat] , identifier[status] = identifier[solve_with_sdpa] ( identifier[sdp] , identifier[solverparameters] ) keyword[elif] identifier[solver] == literal[string] : identifier[primal] , identifier[dual] , identifier[x_mat] , identifier[y_mat] , identifier[status] = identifier[solve_with_cvxpy] ( identifier[sdp] , identifier[solverparameters] ) keyword[elif] identifier[solver] == literal[string] : keyword[if] identifier[solverparameters] keyword[is] keyword[None] : identifier[solverparameters_] ={ literal[string] : literal[string] } keyword[else] : identifier[solverparameters_] = identifier[solverparameters] . identifier[copy] () identifier[solverparameters_] [ literal[string] ]= literal[string] identifier[primal] , identifier[dual] , identifier[x_mat] , identifier[y_mat] , identifier[status] = identifier[solve_with_cvxpy] ( identifier[sdp] , identifier[solverparameters_] ) keyword[elif] identifier[solver] == literal[string] : identifier[primal] , identifier[dual] , identifier[x_mat] , identifier[y_mat] , identifier[status] = identifier[solve_with_mosek] ( identifier[sdp] , identifier[solverparameters] ) keyword[elif] identifier[solver] == literal[string] : identifier[primal] , identifier[dual] , identifier[x_mat] , identifier[y_mat] , identifier[status] = identifier[solve_with_cvxopt] ( identifier[sdp] , identifier[solverparameters] ) keyword[for] identifier[constraint] keyword[in] identifier[sdp] . identifier[constraints] [ identifier[sdp] . identifier[_n_inequalities] :]: identifier[idx] = identifier[sdp] . identifier[_constraint_to_block_index] [ identifier[constraint] ] identifier[sdp] . identifier[_constraint_to_block_index] [ identifier[constraint] ]=( identifier[idx] [ literal[int] ],) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] + identifier[solver] ) identifier[sdp] . 
identifier[solution_time] = identifier[time] . identifier[time] ()- identifier[tstart] identifier[sdp] . identifier[primal] = identifier[primal] identifier[sdp] . identifier[dual] = identifier[dual] identifier[sdp] . identifier[x_mat] = identifier[x_mat] identifier[sdp] . identifier[y_mat] = identifier[y_mat] identifier[sdp] . identifier[status] = identifier[status] keyword[return] identifier[primal] , identifier[dual] , identifier[x_mat] , identifier[y_mat]
def solve_sdp(sdp, solver=None, solverparameters=None): """Call a solver on the SDP relaxation. Upon successful solution, it returns the primal and dual objective values along with the solution matrices. :param sdpRelaxation: The SDP relaxation to be solved. :type sdpRelaxation: :class:`ncpol2sdpa.SdpRelaxation`. :param solver: The solver to be called, either `None`, "sdpa", "mosek", "cvxpy", "scs", or "cvxopt". The default is `None`, which triggers autodetect. :type solver: str. :param solverparameters: Parameters to be passed to the solver. Actual options depend on the solver: SDPA: - `"executable"`: Specify the executable for SDPA. E.g., `"executable":"/usr/local/bin/sdpa"`, or `"executable":"sdpa_gmp"` - `"paramsfile"`: Specify the parameter file Mosek: Refer to the Mosek documentation. All arguments are passed on. Cvxopt: Refer to the PICOS documentation. All arguments are passed on. Cvxpy: Refer to the Cvxpy documentation. All arguments are passed on. SCS: Refer to the Cvxpy documentation. All arguments are passed on. :type solverparameters: dict of str. :returns: tuple of the primal and dual optimum, and the solutions for the primal and dual. :rtype: (float, float, list of `numpy.array`, list of `numpy.array`) """ solvers = autodetect_solvers(solverparameters) solver = solver.lower() if solver is not None else solver if solvers == []: raise Exception('Could not find any SDP solver. Please install SDPA,' + ' Mosek, Cvxpy, or Picos with Cvxopt') # depends on [control=['if'], data=[]] elif solver is not None and solver not in solvers: print('Available solvers: ' + str(solvers)) if solver == 'cvxopt': try: import cvxopt # depends on [control=['try'], data=[]] except ImportError: pass # depends on [control=['except'], data=[]] else: raise Exception('Cvxopt is detected, but Picos is not. 
Please install Picos to use Cvxopt') # depends on [control=['if'], data=[]] raise Exception('Could not detect requested ' + solver) # depends on [control=['if'], data=[]] elif solver is None: solver = solvers[0] # depends on [control=['if'], data=['solver']] (primal, dual, x_mat, y_mat, status) = (None, None, None, None, None) tstart = time.time() if solver == 'sdpa': (primal, dual, x_mat, y_mat, status) = solve_with_sdpa(sdp, solverparameters) # depends on [control=['if'], data=[]] elif solver == 'cvxpy': (primal, dual, x_mat, y_mat, status) = solve_with_cvxpy(sdp, solverparameters) # depends on [control=['if'], data=[]] elif solver == 'scs': if solverparameters is None: solverparameters_ = {'solver': 'SCS'} # depends on [control=['if'], data=[]] else: solverparameters_ = solverparameters.copy() solverparameters_['solver'] = 'SCS' (primal, dual, x_mat, y_mat, status) = solve_with_cvxpy(sdp, solverparameters_) # depends on [control=['if'], data=[]] elif solver == 'mosek': (primal, dual, x_mat, y_mat, status) = solve_with_mosek(sdp, solverparameters) # depends on [control=['if'], data=[]] elif solver == 'cvxopt': (primal, dual, x_mat, y_mat, status) = solve_with_cvxopt(sdp, solverparameters) # We have to compensate for the equality constraints for constraint in sdp.constraints[sdp._n_inequalities:]: idx = sdp._constraint_to_block_index[constraint] sdp._constraint_to_block_index[constraint] = (idx[0],) # depends on [control=['for'], data=['constraint']] # depends on [control=['if'], data=[]] else: raise Exception('Unkown solver: ' + solver) sdp.solution_time = time.time() - tstart sdp.primal = primal sdp.dual = dual sdp.x_mat = x_mat sdp.y_mat = y_mat sdp.status = status return (primal, dual, x_mat, y_mat)
def set_element_text(parent_to_parse, element_path=None, element_text=u''):
    """
    Assigns a string value to the parsed parent element and then returns it.
    If element_path is provided and doesn't exist, it is inserted with element_text.

    :param parent_to_parse: the element (or parsable source) whose text is set
    :param element_path: optional path to a child element relative to the parent
    :param element_text: the text value to assign (empty string by default)
    :see: get_element(parent_to_parse, element_path)
    """
    # Thin wrapper: _ELEM_TEXT selects the "text" property on the target.
    return _set_element_property(parent_to_parse, element_path, _ELEM_TEXT, element_text)
def function[set_element_text, parameter[parent_to_parse, element_path, element_text]]: constant[ Assigns a string value to the parsed parent element and then returns it. If element_path is provided and doesn't exist, it is inserted with element_text. :see: get_element(parent_to_parse, element_path) ] return[call[name[_set_element_property], parameter[name[parent_to_parse], name[element_path], name[_ELEM_TEXT], name[element_text]]]]
keyword[def] identifier[set_element_text] ( identifier[parent_to_parse] , identifier[element_path] = keyword[None] , identifier[element_text] = literal[string] ): literal[string] keyword[return] identifier[_set_element_property] ( identifier[parent_to_parse] , identifier[element_path] , identifier[_ELEM_TEXT] , identifier[element_text] )
def set_element_text(parent_to_parse, element_path=None, element_text=u''): """ Assigns a string value to the parsed parent element and then returns it. If element_path is provided and doesn't exist, it is inserted with element_text. :see: get_element(parent_to_parse, element_path) """ return _set_element_property(parent_to_parse, element_path, _ELEM_TEXT, element_text)
def pop_indexes(ol, indexes, **kwargs):
    """Remove the elements of ``ol`` sitting at the given ``indexes``.

    Indexes may be negative; each is normalized against ``len(ol)`` via
    ``uniform_index`` before use.

    :param ol: the source list
    :param indexes: iterable of (possibly negative) indexes to pop
    :param kwargs: ``mode`` -- ``"new"`` (default) leaves ``ol`` untouched and
        returns ``{'popped': [...], 'list': [...]}`` built from a deep copy;
        any other value (e.g. ``"original"``) mutates ``ol`` in place and
        returns just the popped elements.

    Examples::

        from elist.elist import *
        ol = [1,2,3,4,5,6]
        rslt = pop_indexes(ol,{0,-3,5})
        ####
        ol = [1,2,3,4,5,6]
        rslt = pop_indexes(ol,{0,-3,5},mode="original")
    """
    length = ol.__len__()
    # Normalize once, then use a set for O(1) membership tests in the scan
    # loop (the original kept a list, making each test O(k)).
    normalized = set(uniform_index(index, length) for index in indexes)
    mode = kwargs.get('mode', 'new')
    if mode == 'new':
        # Work on a deep copy so the caller's list is left untouched.
        cpol = copy.deepcopy(ol)
        popped = []
        kept = []
        for i in range(length):
            (popped if i in normalized else kept).append(cpol[i])
        return {'popped': popped, 'list': kept}
    else:
        popped = []
        kept = []
        for i in range(length):
            (popped if i in normalized else kept).append(ol[i])
        # Slice assignment mutates in place, preserving the identity of
        # ``ol`` exactly like the original clear()+append loop.
        ol[:] = kept
        return popped
def function[pop_indexes, parameter[ol, indexes]]: constant[ from elist.jprint import pobj from elist.elist import * ol = [1,2,3,4,5,6] id(ol) rslt = pop_indexes(ol,{0,-3,5}) ol id(ol) id(rslt['list']) #### ol = [1,2,3,4,5,6] id(ol) rslt = pop_indexes(ol,{0,-3,5},mode="original") rslt ol id(ol) ] variable[length] assign[=] call[name[ol].__len__, parameter[]] variable[indexes] assign[=] call[name[list], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1aff010c0>, call[name[list], parameter[name[indexes]]]]]]] if compare[constant[mode] in name[kwargs]] begin[:] variable[mode] assign[=] call[name[kwargs]][constant[mode]] if compare[name[mode] equal[==] constant[new]] begin[:] variable[cpol] assign[=] call[name[copy].deepcopy, parameter[name[ol]]] variable[new] assign[=] list[[]] variable[popped] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[length]]]] begin[:] if compare[name[i] in name[indexes]] begin[:] call[name[popped].append, parameter[call[name[cpol]][name[i]]]] return[dictionary[[<ast.Constant object at 0x7da1aff00100>, <ast.Constant object at 0x7da1aff01360>], [<ast.Name object at 0x7da1aff01270>, <ast.Name object at 0x7da1aff00340>]]]
keyword[def] identifier[pop_indexes] ( identifier[ol] , identifier[indexes] ,** identifier[kwargs] ): literal[string] identifier[length] = identifier[ol] . identifier[__len__] () identifier[indexes] = identifier[list] ( identifier[map] ( keyword[lambda] identifier[index] : identifier[uniform_index] ( identifier[index] , identifier[length] ), identifier[list] ( identifier[indexes] ))) keyword[if] ( literal[string] keyword[in] identifier[kwargs] ): identifier[mode] = identifier[kwargs] [ literal[string] ] keyword[else] : identifier[mode] = literal[string] keyword[if] ( identifier[mode] == literal[string] ): identifier[cpol] = identifier[copy] . identifier[deepcopy] ( identifier[ol] ) identifier[new] =[] identifier[popped] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[length] ): keyword[if] ( identifier[i] keyword[in] identifier[indexes] ): identifier[popped] . identifier[append] ( identifier[cpol] [ identifier[i] ]) keyword[else] : identifier[new] . identifier[append] ( identifier[cpol] [ identifier[i] ]) keyword[return] ({ literal[string] : identifier[popped] , literal[string] : identifier[new] }) keyword[else] : identifier[tmp] =[] identifier[popped] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[length] ): keyword[if] ( identifier[i] keyword[in] identifier[indexes] ): identifier[popped] . identifier[append] ( identifier[ol] [ identifier[i] ]) keyword[else] : identifier[tmp] . identifier[append] ( identifier[ol] [ identifier[i] ]) identifier[ol] . identifier[clear] () keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[tmp] . identifier[__len__] ()): identifier[ol] . identifier[append] ( identifier[tmp] [ identifier[i] ]) keyword[return] ( identifier[popped] )
def pop_indexes(ol, indexes, **kwargs): """ from elist.jprint import pobj from elist.elist import * ol = [1,2,3,4,5,6] id(ol) rslt = pop_indexes(ol,{0,-3,5}) ol id(ol) id(rslt['list']) #### ol = [1,2,3,4,5,6] id(ol) rslt = pop_indexes(ol,{0,-3,5},mode="original") rslt ol id(ol) """ length = ol.__len__() indexes = list(map(lambda index: uniform_index(index, length), list(indexes))) if 'mode' in kwargs: mode = kwargs['mode'] # depends on [control=['if'], data=['kwargs']] else: mode = 'new' if mode == 'new': cpol = copy.deepcopy(ol) new = [] popped = [] for i in range(0, length): if i in indexes: popped.append(cpol[i]) # depends on [control=['if'], data=['i']] else: new.append(cpol[i]) # depends on [control=['for'], data=['i']] return {'popped': popped, 'list': new} # depends on [control=['if'], data=[]] else: tmp = [] popped = [] for i in range(0, length): if i in indexes: popped.append(ol[i]) # depends on [control=['if'], data=['i']] else: tmp.append(ol[i]) # depends on [control=['for'], data=['i']] ol.clear() for i in range(0, tmp.__len__()): ol.append(tmp[i]) # depends on [control=['for'], data=['i']] return popped
def create_edges(cells_nodes): """Setup edge-node and edge-cell relations. Adapted from voropy. """ # Create the idx_hierarchy (nodes->edges->cells), i.e., the value of # `self.idx_hierarchy[0, 2, 27]` is the index of the node of cell 27, edge # 2, node 0. The shape of `self.idx_hierarchy` is `(2, 3, n)`, where `n` is # the number of cells. Make sure that the k-th edge is opposite of the k-th # point in the triangle. local_idx = numpy.array([[1, 2], [2, 0], [0, 1]]).T # Map idx back to the nodes. This is useful if quantities which are in # idx shape need to be added up into nodes (e.g., equation system rhs). nds = cells_nodes.T idx_hierarchy = nds[local_idx] s = idx_hierarchy.shape a = numpy.sort(idx_hierarchy.reshape(s[0], s[1] * s[2]).T) b = numpy.ascontiguousarray(a).view( numpy.dtype((numpy.void, a.dtype.itemsize * a.shape[1])) ) _, idx, inv, cts = numpy.unique( b, return_index=True, return_inverse=True, return_counts=True ) # No edge has more than 2 cells. This assertion fails, for example, if # cells are listed twice. assert all(cts < 3) edge_nodes = a[idx] cells_edges = inv.reshape(3, -1).T return edge_nodes, cells_edges
def function[create_edges, parameter[cells_nodes]]: constant[Setup edge-node and edge-cell relations. Adapted from voropy. ] variable[local_idx] assign[=] call[name[numpy].array, parameter[list[[<ast.List object at 0x7da2047eb4c0>, <ast.List object at 0x7da2047ea230>, <ast.List object at 0x7da2047e8880>]]]].T variable[nds] assign[=] name[cells_nodes].T variable[idx_hierarchy] assign[=] call[name[nds]][name[local_idx]] variable[s] assign[=] name[idx_hierarchy].shape variable[a] assign[=] call[name[numpy].sort, parameter[call[name[idx_hierarchy].reshape, parameter[call[name[s]][constant[0]], binary_operation[call[name[s]][constant[1]] * call[name[s]][constant[2]]]]].T]] variable[b] assign[=] call[call[name[numpy].ascontiguousarray, parameter[name[a]]].view, parameter[call[name[numpy].dtype, parameter[tuple[[<ast.Attribute object at 0x7da20e956050>, <ast.BinOp object at 0x7da20e956d40>]]]]]] <ast.Tuple object at 0x7da20e957670> assign[=] call[name[numpy].unique, parameter[name[b]]] assert[call[name[all], parameter[compare[name[cts] less[<] constant[3]]]]] variable[edge_nodes] assign[=] call[name[a]][name[idx]] variable[cells_edges] assign[=] call[name[inv].reshape, parameter[constant[3], <ast.UnaryOp object at 0x7da20e957310>]].T return[tuple[[<ast.Name object at 0x7da20e955750>, <ast.Name object at 0x7da20e956440>]]]
keyword[def] identifier[create_edges] ( identifier[cells_nodes] ): literal[string] identifier[local_idx] = identifier[numpy] . identifier[array] ([[ literal[int] , literal[int] ],[ literal[int] , literal[int] ],[ literal[int] , literal[int] ]]). identifier[T] identifier[nds] = identifier[cells_nodes] . identifier[T] identifier[idx_hierarchy] = identifier[nds] [ identifier[local_idx] ] identifier[s] = identifier[idx_hierarchy] . identifier[shape] identifier[a] = identifier[numpy] . identifier[sort] ( identifier[idx_hierarchy] . identifier[reshape] ( identifier[s] [ literal[int] ], identifier[s] [ literal[int] ]* identifier[s] [ literal[int] ]). identifier[T] ) identifier[b] = identifier[numpy] . identifier[ascontiguousarray] ( identifier[a] ). identifier[view] ( identifier[numpy] . identifier[dtype] (( identifier[numpy] . identifier[void] , identifier[a] . identifier[dtype] . identifier[itemsize] * identifier[a] . identifier[shape] [ literal[int] ])) ) identifier[_] , identifier[idx] , identifier[inv] , identifier[cts] = identifier[numpy] . identifier[unique] ( identifier[b] , identifier[return_index] = keyword[True] , identifier[return_inverse] = keyword[True] , identifier[return_counts] = keyword[True] ) keyword[assert] identifier[all] ( identifier[cts] < literal[int] ) identifier[edge_nodes] = identifier[a] [ identifier[idx] ] identifier[cells_edges] = identifier[inv] . identifier[reshape] ( literal[int] ,- literal[int] ). identifier[T] keyword[return] identifier[edge_nodes] , identifier[cells_edges]
def create_edges(cells_nodes): """Setup edge-node and edge-cell relations. Adapted from voropy. """ # Create the idx_hierarchy (nodes->edges->cells), i.e., the value of # `self.idx_hierarchy[0, 2, 27]` is the index of the node of cell 27, edge # 2, node 0. The shape of `self.idx_hierarchy` is `(2, 3, n)`, where `n` is # the number of cells. Make sure that the k-th edge is opposite of the k-th # point in the triangle. local_idx = numpy.array([[1, 2], [2, 0], [0, 1]]).T # Map idx back to the nodes. This is useful if quantities which are in # idx shape need to be added up into nodes (e.g., equation system rhs). nds = cells_nodes.T idx_hierarchy = nds[local_idx] s = idx_hierarchy.shape a = numpy.sort(idx_hierarchy.reshape(s[0], s[1] * s[2]).T) b = numpy.ascontiguousarray(a).view(numpy.dtype((numpy.void, a.dtype.itemsize * a.shape[1]))) (_, idx, inv, cts) = numpy.unique(b, return_index=True, return_inverse=True, return_counts=True) # No edge has more than 2 cells. This assertion fails, for example, if # cells are listed twice. assert all(cts < 3) edge_nodes = a[idx] cells_edges = inv.reshape(3, -1).T return (edge_nodes, cells_edges)
def to_df(self): '''Conversion method to Pandas DataFrame. To be attached to ResultDict. Returns ======== List : of Pandas DataFrames in order of Total, First, Second ''' total, first, (idx, second) = Si_to_pandas_dict(self) names = self.problem['names'] ret = [pd.DataFrame(total, index=names), pd.DataFrame(first, index=names)] if second: ret += [pd.DataFrame(second, index=idx)] return ret
def function[to_df, parameter[self]]: constant[Conversion method to Pandas DataFrame. To be attached to ResultDict. Returns ======== List : of Pandas DataFrames in order of Total, First, Second ] <ast.Tuple object at 0x7da1b1653d60> assign[=] call[name[Si_to_pandas_dict], parameter[name[self]]] variable[names] assign[=] call[name[self].problem][constant[names]] variable[ret] assign[=] list[[<ast.Call object at 0x7da1b1634040>, <ast.Call object at 0x7da1b1634190>]] if name[second] begin[:] <ast.AugAssign object at 0x7da1b1634370> return[name[ret]]
keyword[def] identifier[to_df] ( identifier[self] ): literal[string] identifier[total] , identifier[first] ,( identifier[idx] , identifier[second] )= identifier[Si_to_pandas_dict] ( identifier[self] ) identifier[names] = identifier[self] . identifier[problem] [ literal[string] ] identifier[ret] =[ identifier[pd] . identifier[DataFrame] ( identifier[total] , identifier[index] = identifier[names] ), identifier[pd] . identifier[DataFrame] ( identifier[first] , identifier[index] = identifier[names] )] keyword[if] identifier[second] : identifier[ret] +=[ identifier[pd] . identifier[DataFrame] ( identifier[second] , identifier[index] = identifier[idx] )] keyword[return] identifier[ret]
def to_df(self): """Conversion method to Pandas DataFrame. To be attached to ResultDict. Returns ======== List : of Pandas DataFrames in order of Total, First, Second """ (total, first, (idx, second)) = Si_to_pandas_dict(self) names = self.problem['names'] ret = [pd.DataFrame(total, index=names), pd.DataFrame(first, index=names)] if second: ret += [pd.DataFrame(second, index=idx)] # depends on [control=['if'], data=[]] return ret
def sidpath(self, sid): """ Parameters ---------- sid : int Asset identifier. Returns ------- out : string Full path to the bcolz rootdir for the given sid. """ sid_subdir = _sid_subdir_path(sid) return join(self._rootdir, sid_subdir)
def function[sidpath, parameter[self, sid]]: constant[ Parameters ---------- sid : int Asset identifier. Returns ------- out : string Full path to the bcolz rootdir for the given sid. ] variable[sid_subdir] assign[=] call[name[_sid_subdir_path], parameter[name[sid]]] return[call[name[join], parameter[name[self]._rootdir, name[sid_subdir]]]]
keyword[def] identifier[sidpath] ( identifier[self] , identifier[sid] ): literal[string] identifier[sid_subdir] = identifier[_sid_subdir_path] ( identifier[sid] ) keyword[return] identifier[join] ( identifier[self] . identifier[_rootdir] , identifier[sid_subdir] )
def sidpath(self, sid): """ Parameters ---------- sid : int Asset identifier. Returns ------- out : string Full path to the bcolz rootdir for the given sid. """ sid_subdir = _sid_subdir_path(sid) return join(self._rootdir, sid_subdir)
def accumulation_distribution(close_data, high_data, low_data, volume): """ Accumulation/Distribution. Formula: A/D = (Ct - Lt) - (Ht - Ct) / (Ht - Lt) * Vt + A/Dt-1 """ catch_errors.check_for_input_len_diff( close_data, high_data, low_data, volume ) ad = np.zeros(len(close_data)) for idx in range(1, len(close_data)): ad[idx] = ( (((close_data[idx] - low_data[idx]) - (high_data[idx] - close_data[idx])) / (high_data[idx] - low_data[idx]) * volume[idx]) + ad[idx-1] ) return ad
def function[accumulation_distribution, parameter[close_data, high_data, low_data, volume]]: constant[ Accumulation/Distribution. Formula: A/D = (Ct - Lt) - (Ht - Ct) / (Ht - Lt) * Vt + A/Dt-1 ] call[name[catch_errors].check_for_input_len_diff, parameter[name[close_data], name[high_data], name[low_data], name[volume]]] variable[ad] assign[=] call[name[np].zeros, parameter[call[name[len], parameter[name[close_data]]]]] for taget[name[idx]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[close_data]]]]]] begin[:] call[name[ad]][name[idx]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[close_data]][name[idx]] - call[name[low_data]][name[idx]]] - binary_operation[call[name[high_data]][name[idx]] - call[name[close_data]][name[idx]]]] / binary_operation[call[name[high_data]][name[idx]] - call[name[low_data]][name[idx]]]] * call[name[volume]][name[idx]]] + call[name[ad]][binary_operation[name[idx] - constant[1]]]] return[name[ad]]
keyword[def] identifier[accumulation_distribution] ( identifier[close_data] , identifier[high_data] , identifier[low_data] , identifier[volume] ): literal[string] identifier[catch_errors] . identifier[check_for_input_len_diff] ( identifier[close_data] , identifier[high_data] , identifier[low_data] , identifier[volume] ) identifier[ad] = identifier[np] . identifier[zeros] ( identifier[len] ( identifier[close_data] )) keyword[for] identifier[idx] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[close_data] )): identifier[ad] [ identifier[idx] ]=( ((( identifier[close_data] [ identifier[idx] ]- identifier[low_data] [ identifier[idx] ])- ( identifier[high_data] [ identifier[idx] ]- identifier[close_data] [ identifier[idx] ]))/ ( identifier[high_data] [ identifier[idx] ]- identifier[low_data] [ identifier[idx] ])* identifier[volume] [ identifier[idx] ])+ identifier[ad] [ identifier[idx] - literal[int] ] ) keyword[return] identifier[ad]
def accumulation_distribution(close_data, high_data, low_data, volume): """ Accumulation/Distribution. Formula: A/D = (Ct - Lt) - (Ht - Ct) / (Ht - Lt) * Vt + A/Dt-1 """ catch_errors.check_for_input_len_diff(close_data, high_data, low_data, volume) ad = np.zeros(len(close_data)) for idx in range(1, len(close_data)): ad[idx] = (close_data[idx] - low_data[idx] - (high_data[idx] - close_data[idx])) / (high_data[idx] - low_data[idx]) * volume[idx] + ad[idx - 1] # depends on [control=['for'], data=['idx']] return ad
def print_tree(self, ast_obj=None): """Convert AST object to tree view of BEL AST Returns: prints tree of BEL AST to STDOUT """ if not ast_obj: ast_obj = self if hasattr(self, "bel_subject"): print("Subject:") self.bel_subject.print_tree(self.bel_subject, indent=0) if hasattr(self, "bel_relation"): print("Relation:", self.bel_relation) if hasattr(self, "bel_object"): if self.bel_object.type == "BELAst": if hasattr(self, "bel_subject"): print("Nested Subject:") self.bel_object.bel_subject.print_tree(indent=0) if hasattr(self, "bel_relation"): print("Nested Relation:", self.bel_object.bel_relation) if hasattr(self, "bel_object"): print("Nested Object:") self.bel_object.bel_object.print_tree(indent=0) else: print("Object:") self.bel_object.print_tree(self.bel_object, indent=0) return self
def function[print_tree, parameter[self, ast_obj]]: constant[Convert AST object to tree view of BEL AST Returns: prints tree of BEL AST to STDOUT ] if <ast.UnaryOp object at 0x7da1b19cdc30> begin[:] variable[ast_obj] assign[=] name[self] if call[name[hasattr], parameter[name[self], constant[bel_subject]]] begin[:] call[name[print], parameter[constant[Subject:]]] call[name[self].bel_subject.print_tree, parameter[name[self].bel_subject]] if call[name[hasattr], parameter[name[self], constant[bel_relation]]] begin[:] call[name[print], parameter[constant[Relation:], name[self].bel_relation]] if call[name[hasattr], parameter[name[self], constant[bel_object]]] begin[:] if compare[name[self].bel_object.type equal[==] constant[BELAst]] begin[:] if call[name[hasattr], parameter[name[self], constant[bel_subject]]] begin[:] call[name[print], parameter[constant[Nested Subject:]]] call[name[self].bel_object.bel_subject.print_tree, parameter[]] if call[name[hasattr], parameter[name[self], constant[bel_relation]]] begin[:] call[name[print], parameter[constant[Nested Relation:], name[self].bel_object.bel_relation]] if call[name[hasattr], parameter[name[self], constant[bel_object]]] begin[:] call[name[print], parameter[constant[Nested Object:]]] call[name[self].bel_object.bel_object.print_tree, parameter[]] return[name[self]]
keyword[def] identifier[print_tree] ( identifier[self] , identifier[ast_obj] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[ast_obj] : identifier[ast_obj] = identifier[self] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[print] ( literal[string] ) identifier[self] . identifier[bel_subject] . identifier[print_tree] ( identifier[self] . identifier[bel_subject] , identifier[indent] = literal[int] ) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[print] ( literal[string] , identifier[self] . identifier[bel_relation] ) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[if] identifier[self] . identifier[bel_object] . identifier[type] == literal[string] : keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[print] ( literal[string] ) identifier[self] . identifier[bel_object] . identifier[bel_subject] . identifier[print_tree] ( identifier[indent] = literal[int] ) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[print] ( literal[string] , identifier[self] . identifier[bel_object] . identifier[bel_relation] ) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[print] ( literal[string] ) identifier[self] . identifier[bel_object] . identifier[bel_object] . identifier[print_tree] ( identifier[indent] = literal[int] ) keyword[else] : identifier[print] ( literal[string] ) identifier[self] . identifier[bel_object] . identifier[print_tree] ( identifier[self] . identifier[bel_object] , identifier[indent] = literal[int] ) keyword[return] identifier[self]
def print_tree(self, ast_obj=None): """Convert AST object to tree view of BEL AST Returns: prints tree of BEL AST to STDOUT """ if not ast_obj: ast_obj = self # depends on [control=['if'], data=[]] if hasattr(self, 'bel_subject'): print('Subject:') self.bel_subject.print_tree(self.bel_subject, indent=0) # depends on [control=['if'], data=[]] if hasattr(self, 'bel_relation'): print('Relation:', self.bel_relation) # depends on [control=['if'], data=[]] if hasattr(self, 'bel_object'): if self.bel_object.type == 'BELAst': if hasattr(self, 'bel_subject'): print('Nested Subject:') self.bel_object.bel_subject.print_tree(indent=0) # depends on [control=['if'], data=[]] if hasattr(self, 'bel_relation'): print('Nested Relation:', self.bel_object.bel_relation) # depends on [control=['if'], data=[]] if hasattr(self, 'bel_object'): print('Nested Object:') self.bel_object.bel_object.print_tree(indent=0) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: print('Object:') self.bel_object.print_tree(self.bel_object, indent=0) # depends on [control=['if'], data=[]] return self
def GET_savedmodifiedconditionitemvalues(self) -> None: """ToDo: extend functionality and add tests""" dict_ = state.modifiedconditionitemvalues.get(self._id) if dict_ is None: self.GET_conditionitemvalues() else: for name, value in dict_.items(): self._outputs[name] = value
def function[GET_savedmodifiedconditionitemvalues, parameter[self]]: constant[ToDo: extend functionality and add tests] variable[dict_] assign[=] call[name[state].modifiedconditionitemvalues.get, parameter[name[self]._id]] if compare[name[dict_] is constant[None]] begin[:] call[name[self].GET_conditionitemvalues, parameter[]]
keyword[def] identifier[GET_savedmodifiedconditionitemvalues] ( identifier[self] )-> keyword[None] : literal[string] identifier[dict_] = identifier[state] . identifier[modifiedconditionitemvalues] . identifier[get] ( identifier[self] . identifier[_id] ) keyword[if] identifier[dict_] keyword[is] keyword[None] : identifier[self] . identifier[GET_conditionitemvalues] () keyword[else] : keyword[for] identifier[name] , identifier[value] keyword[in] identifier[dict_] . identifier[items] (): identifier[self] . identifier[_outputs] [ identifier[name] ]= identifier[value]
def GET_savedmodifiedconditionitemvalues(self) -> None: """ToDo: extend functionality and add tests""" dict_ = state.modifiedconditionitemvalues.get(self._id) if dict_ is None: self.GET_conditionitemvalues() # depends on [control=['if'], data=[]] else: for (name, value) in dict_.items(): self._outputs[name] = value # depends on [control=['for'], data=[]]
def _maybe_pack_examples(self, generator): """Wraps generator with packer if self.packed_length.""" if not self.packed_length: return generator return generator_utils.pack_examples( generator, self.has_inputs, self.packed_length, spacing=self.packed_spacing, chop_long_sequences=not self.has_inputs)
def function[_maybe_pack_examples, parameter[self, generator]]: constant[Wraps generator with packer if self.packed_length.] if <ast.UnaryOp object at 0x7da204567670> begin[:] return[name[generator]] return[call[name[generator_utils].pack_examples, parameter[name[generator], name[self].has_inputs, name[self].packed_length]]]
keyword[def] identifier[_maybe_pack_examples] ( identifier[self] , identifier[generator] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[packed_length] : keyword[return] identifier[generator] keyword[return] identifier[generator_utils] . identifier[pack_examples] ( identifier[generator] , identifier[self] . identifier[has_inputs] , identifier[self] . identifier[packed_length] , identifier[spacing] = identifier[self] . identifier[packed_spacing] , identifier[chop_long_sequences] = keyword[not] identifier[self] . identifier[has_inputs] )
def _maybe_pack_examples(self, generator): """Wraps generator with packer if self.packed_length.""" if not self.packed_length: return generator # depends on [control=['if'], data=[]] return generator_utils.pack_examples(generator, self.has_inputs, self.packed_length, spacing=self.packed_spacing, chop_long_sequences=not self.has_inputs)
def stream_log(self, callback, connection_id='monitor'): """ Stream Redis activity one line at a time to the given callback. :param callback: A function that accepts a single argument, the Redis command. """ conn = self.connection_pool.get_connection(connection_id, None) conn.send_command('monitor') while callback(conn.read_response()): pass
def function[stream_log, parameter[self, callback, connection_id]]: constant[ Stream Redis activity one line at a time to the given callback. :param callback: A function that accepts a single argument, the Redis command. ] variable[conn] assign[=] call[name[self].connection_pool.get_connection, parameter[name[connection_id], constant[None]]] call[name[conn].send_command, parameter[constant[monitor]]] while call[name[callback], parameter[call[name[conn].read_response, parameter[]]]] begin[:] pass
keyword[def] identifier[stream_log] ( identifier[self] , identifier[callback] , identifier[connection_id] = literal[string] ): literal[string] identifier[conn] = identifier[self] . identifier[connection_pool] . identifier[get_connection] ( identifier[connection_id] , keyword[None] ) identifier[conn] . identifier[send_command] ( literal[string] ) keyword[while] identifier[callback] ( identifier[conn] . identifier[read_response] ()): keyword[pass]
def stream_log(self, callback, connection_id='monitor'): """ Stream Redis activity one line at a time to the given callback. :param callback: A function that accepts a single argument, the Redis command. """ conn = self.connection_pool.get_connection(connection_id, None) conn.send_command('monitor') while callback(conn.read_response()): pass # depends on [control=['while'], data=[]]