code stringlengths 281 23.7M |
|---|
def run(options):
    """Drive a pool of beem publisher workers and report aggregated stats.

    Workers are either one connection per process (thread_ratio == 1,
    handled by _worker) or thread_ratio connections per process (handled
    by _worker_threaded).  In PSK mode each connection gets its own
    "identity:key" line from options.psk_file.  Per-worker publish stats
    are printed, aggregated, and optionally dumped as JSON.
    """
    time_start = time.time()
    if options.psk_file:
        # TLS-PSK mode: one identity:key line per connection.
        assert options.bridge, 'PSK is only supported with bridging due to python limitations, sorry about that'
        auth_pairs = options.psk_file.readlines()
        assert ((options.thread_ratio * options.processes) <= len(auth_pairs)), "can't handle more threads*procs than keys!"
        # Never spawn more worker processes than we have keys for.
        options.processes = min(options.processes, len(auth_pairs))
        print(('Using first %d keys from: %s' % (options.processes, options.psk_file.name)))
        pool = multiprocessing.Pool(processes=options.processes)
        if (options.thread_ratio == 1):
            # One connection per process: worker x uses key x.
            auth_pairs = auth_pairs[:options.processes]
            result_set = [pool.apply_async(_worker, (options, x, auth.strip())) for (x, auth) in enumerate(auth_pairs)]
        else:
            # thread_ratio connections per process: worker x gets a
            # contiguous slice of thread_ratio keys.
            result_set = []
            for x in range(options.processes):
                ll = options.thread_ratio
                keyset = auth_pairs[(x * ll):((x * ll) + options.thread_ratio)]
                print(('process number: %d using keyset: %s' % (x, keyset)))
                result_set.append(pool.apply_async(_worker_threaded, (options, x, keyset)))
    else:
        # Plain (non-PSK) mode: workers are identified only by index.
        pool = multiprocessing.Pool(processes=options.processes)
        if (options.thread_ratio == 1):
            result_set = [pool.apply_async(_worker, (options, x)) for x in range(options.processes)]
        else:
            result_set = [pool.apply_async(_worker_threaded, (options, x)) for x in range(options.processes)]
    # Poll roughly once per second until every async result is ready.
    completed_set = []
    while (len(completed_set) < options.processes):
        hold_set = []
        for result in result_set:
            if result.ready():
                completed_set.append(result)
            else:
                hold_set.append(result)
        result_set = hold_set
        print(('Completed workers: %d/%d' % (len(completed_set), options.processes)))
        if (len(result_set) > 0):
            time.sleep(1)
    time_end = time.time()
    # Collect per-worker stats (a single stats dict per worker when
    # thread_ratio == 1, otherwise a list of per-thread stats per worker).
    stats_set = []
    for result in completed_set:
        s = result.get()
        if (options.thread_ratio == 1):
            beem.print_publish_stats(s)
        stats_set.append(s)
    if (options.thread_ratio == 1):
        # Aggregate across all workers into one summary.
        agg_stats = beem.aggregate_publish_stats(stats_set)
        agg_stats['time_total'] = (time_end - time_start)
        beem.print_publish_stats(agg_stats)
        if (options.json is not None):
            beem.json_dump_stats(agg_stats, options.json)
    else:
        # Threaded mode: aggregate per worker (one entry per process).
        agg_stats_set = [beem.aggregate_publish_stats(x) for x in stats_set]
        for x in agg_stats_set:
            x['time_total'] = (time_end - time_start)
        [beem.print_publish_stats(x) for x in agg_stats_set]
        if (options.json is not None):
            beem.json_dump_stats(agg_stats_set, options.json) |
def _install_plugins(distribution, skip_docker, only_docker=False):
    """Discover and run every plugin install script under src/plugins.

    Each ``plugins/<type>/<name>/install.py`` module must expose an
    ``Installer`` class.  When ``only_docker`` is set, only the plugin's
    docker images are built (from inside the plugin directory); otherwise
    the installer's full install routine runs.
    """
    plugin_root = Path(get_src_dir() + '/plugins/')
    for script in plugin_root.glob('*/*/install.py'):
        name = script.parent.name
        type_ = script.parent.parent.name
        module = importlib.import_module(f'plugins.{type_}.{name}.install')
        installer = module.Installer(distribution, skip_docker=skip_docker)
        logging.info(f'Installing {name} plugin.')
        if only_docker:
            # Docker builds must run from the plugin's own directory.
            with OperateInDirectory(installer.base_path):
                installer.install_docker_images()
        else:
            installer.install()
        logging.info(f'Finished installing {name} plugin.\n')
def xml_encode(value, key=None, quote=True):
    """Recursively encode *value* as markup under tag *key*.

    Objects may provide their own ``__xml__(key, quote)`` hook; dicts map to
    one child tag per key; lists map to one child tag per item; anything else
    is HTML-escaped.
    """
    if hasattr(value, '__xml__'):
        # The object knows how to serialise itself.
        return value.__xml__(key, quote)
    if isinstance(value, dict):
        return tag[key](*[tag[k](xml_encode(v, None, quote)) for (k, v) in value.items()])
    if isinstance(value, list):
        # NOTE(review): each list element is used both as the child tag name
        # AND as the encoded content (tag[item](xml_encode(item, ...))),
        # unlike the dict branch which separates key and value -- confirm
        # this asymmetry is intentional.
        return tag[key](*[tag[item](xml_encode(item, None, quote)) for item in value])
    return htmlescape(value) |
class op_popup(bpy.types.Operator):
    """Blender operator that shows *message* in a small popup window."""
    bl_idname = 'ui.textools_popup'
    bl_label = 'Message'
    message: StringProperty()
    def execute(self, context):
        # Also surface the message in the info log and the console.
        self.report({'INFO'}, self.message)
        print(self.message)
        return {'FINISHED'}
    def invoke(self, context, event):
        # Open as a popup instead of executing directly.
        wm = context.window_manager
        return wm.invoke_popup(self, width=200)
    def draw(self, context):
        self.layout.label(text=self.message) |
def _diff_bytes_section(label, new_bytes, old_bytes):
    """Return diff text for one stub section ('Data' or 'Text').

    Reports per-byte differences when the lengths match, otherwise just the
    length mismatch.  Returns '' when the sections are identical.
    """
    if new_bytes == old_bytes:
        return ''
    if len(new_bytes) != len(old_bytes):
        return ' {} length: New {} bytes, old {} bytes \n'.format(label, len(new_bytes), len(old_bytes))
    return ''.join(
        ' {} byte {:#x}: new {:#04x} old {:#04x} \n'.format(label, i, new_b, old_b)
        for (i, (new_b, old_b)) in enumerate(zip(new_bytes, old_bytes))
        if new_b != old_b
    )


def diff(path_to_new, path_to_old):
    """Return a human-readable diff between two flasher stub files.

    Compares load addresses, entrypoint, and the data/text section contents
    of the two esptool StubFlasher images; an empty string means identical.
    """
    output = ''
    new = esptool.loader.StubFlasher(path_to_new)
    old = esptool.loader.StubFlasher(path_to_old)
    if new.data_start != old.data_start:
        output += ' Data start: New {:#x}, old {:#x} \n'.format(new.data_start, old.data_start)
    if new.text_start != old.text_start:
        output += ' Text start: New {:#x}, old {:#x} \n'.format(new.text_start, old.text_start)
    if new.entry != old.entry:
        output += ' Entrypoint: New {:#x}, old {:#x} \n'.format(new.entry, old.entry)
    # Section comparison was duplicated for data/text; factored into a helper.
    output += _diff_bytes_section('Data', new.data, old.data)
    output += _diff_bytes_section('Text', new.text, old.text)
    return output
class bsn_time_reply(bsn_header):
    """Big Switch Networks experimenter reply carrying the switch uptime.

    NOTE(review): this looks like loxi/pyloxi generated code; decorators
    such as @staticmethod on unpack() appear to have been stripped in this
    copy of the file -- confirm against the generator output.
    """
    # OpenFlow header constants identifying this experimenter message.
    version = 6
    type = 4
    experimenter = 6035143
    subtype = 45
    def __init__(self, xid=None, time_ms=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (time_ms != None):
            self.time_ms = time_ms
        else:
            self.time_ms = 0
        return
    def pack(self):
        """Serialise the message; the 16-bit length field is patched in last.

        NOTE(review): ''.join() over struct.pack() results only works on
        Python 2 (str == bytes); on Python 3 this raises TypeError -- confirm
        which interpreter this generated code targets.
        """
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder for the total length, patched below once known.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(struct.pack('!Q', self.time_ms))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        # Parse a bsn_time_reply from `reader`, asserting the fixed fields.
        obj = bsn_time_reply()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        # Restrict subsequent reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 45)
        obj.time_ms = reader.read('!Q')[0]
        return obj
    def __eq__(self, other):
        # Equality considers only xid and payload; header constants are fixed.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.time_ms != other.time_ms):
            return False
        return True
    def pretty_print(self, q):
        # Render through a pyloxi-style pretty-printer object `q`.
        q.text('bsn_time_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('time_ms = ')
                q.text(('%#x' % self.time_ms))
            q.breakable()
        q.text('}') |
class Cleanup(Validator):
    """Validator that strips characters matching a pattern from the input.

    By default removes everything outside TAB, LF, CR and the printable
    ASCII range.  Always succeeds: returns (cleaned_value, None).
    """

    # Any character that is not \x09, \x0a, \x0d or printable ASCII.
    rule = re.compile('[^\\x09\\x0a\\x0d\\x20-\\x7e]')

    def __init__(self, regex=None, message=None):
        super().__init__(message=message)
        if regex is None:
            self.regex = self.rule
        else:
            self.regex = re.compile(regex)

    def __call__(self, value):
        text = (to_unicode(value) or '').strip()
        cleaned = self.regex.sub('', text)
        return (cleaned, None)
def infer(text, sdp_ratio, noise_scale, noise_scale_w, length_scale, sid, language, hps, net_g, device):
    """Synthesise audio for *text* with the given TTS model and return it
    as a float numpy array.

    sid is a speaker name looked up in hps.data.spk2id; sdp_ratio,
    noise_scale, noise_scale_w and length_scale are generator sampling knobs
    passed straight through to net_g.infer.
    """
    (bert, ja_bert, phones, tones, lang_ids) = get_text(text, language, hps, device)
    with torch.no_grad():
        # Add a batch dimension of 1 to every model input.
        x_tst = phones.to(device).unsqueeze(0)
        tones = tones.to(device).unsqueeze(0)
        lang_ids = lang_ids.to(device).unsqueeze(0)
        bert = bert.to(device).unsqueeze(0)
        ja_bert = ja_bert.to(device).unsqueeze(0)
        x_tst_lengths = torch.LongTensor([phones.size(0)]).to(device)
        del phones
        speakers = torch.LongTensor([hps.data.spk2id[sid]]).to(device)
        # [0][(0, 0)] selects the waveform of the first (only) batch item.
        audio = net_g.infer(x_tst, x_tst_lengths, speakers, tones, lang_ids, bert, ja_bert, sdp_ratio=sdp_ratio, noise_scale=noise_scale, noise_scale_w=noise_scale_w, length_scale=length_scale)[0][(0, 0)].data.cpu().float().numpy()
        # Free GPU tensors promptly so repeated calls do not accumulate memory.
        del x_tst, x_tst_lengths, speakers, tones, lang_ids, bert, ja_bert
        if torch.cuda.is_available():
            torch.cuda.empty_cache()
        return audio |
class BlockedDofOrderType(DofOrderInfo):
    """DOF ordering where all pressure DOFs precede all velocity DOFs
    ("blocked" layout) within each rank's ownership range.
    """

    def __init__(self, n_DOF_pressure, model_info='no model info set'):
        # Bug fix: forward the caller-supplied model_info; previously the
        # hard-coded default placeholder was always passed to the base class.
        DofOrderInfo.__init__(self, 'blocked', model_info=model_info)
        self.n_DOF_pressure = n_DOF_pressure

    def create_DOF_lists(self, ownership_range, num_equations, num_components):
        """Return [velocityDOF, pressureDOF] index arrays for this rank.

        ownership_range -- (first, last+1) global equation numbers owned here
        num_equations   -- number of locally owned equations
        num_components  -- unused for the blocked layout; kept for interface
                           compatibility with other DofOrderInfo subclasses
        """
        first = ownership_range[0]
        # Pressure DOFs fill the first n_DOF_pressure local slots, velocity
        # DOFs the remainder.
        pressureDOF = numpy.arange(start=first, stop=(first + self.n_DOF_pressure), dtype='i')
        velocityDOF = numpy.arange(start=(first + self.n_DOF_pressure), stop=(first + num_equations), step=1, dtype='i')
        return [velocityDOF, pressureDOF]
class OptionSeriesTilemapSonificationTracksMappingFrequency(Options):
    """Generated Highcharts option wrapper for
    series.tilemap.sonification.tracks.mapping.frequency.

    NOTE(review): each option appears twice (getter then setter); the
    @property / @<name>.setter decorators seem to have been stripped from
    this copy -- as written the setter definition shadows the getter.
    Restore the decorators from the generator output.
    """
    def mapFunction(self):
        # Getter: mapping function name (e.g. 'linear', 'logarithmic').
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data point property the frequency is mapped to.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False) |
class TestRestCompareFirmware():
    """End-to-end test: upload two firmwares over REST, run a comparison and
    fetch the result.
    """
    def _rest_upload_firmware(self, test_client, fw):
        # Upload the firmware binary (base64) through the REST endpoint.
        testfile_path = (Path(get_test_data_dir()) / fw.path)
        file_content = testfile_path.read_bytes()
        data = {'binary': standard_b64encode(file_content).decode(), 'file_name': 'test.zip', 'device_name': 'test_device', 'device_part': 'full', 'device_class': 'test_class', 'version': '1.0', 'vendor': 'test_vendor', 'release_date': '1970-01-01', 'tags': '', 'requested_analysis_systems': ['software_components']}
        rv = test_client.put('/rest/firmware', json=data, follow_redirects=True)
        assert (b'"status": 0' in rv.data), 'rest upload not successful'
        assert (fw.uid.encode() in rv.data), 'uid not found in REST upload reply'
    def _rest_search(self, test_client, fw):
        # Firmware must be findable by device_class after upload.
        query = urllib.parse.quote('{"device_class": "test_class"}')
        rv = test_client.get(f'/rest/firmware?query={query}', follow_redirects=True)
        assert (fw.uid.encode() in rv.data), 'test firmware not found in REST search'
    def _rest_start_compare(self, test_client):
        data = {'uid_list': [test_fw_a.uid, test_fw_c.uid]}
        rv = test_client.put('/rest/compare', json=data, follow_redirects=True)
        assert (b'Compare started' in rv.data), 'could not start REST compare'
    def _rest_get_compare(self, test_client):
        rv = test_client.get(f'/rest/compare/{test_fw_a.uid};{test_fw_c.uid}', follow_redirects=True)
        assert (b'Compare not found in database.' not in rv.data), 'compare not found in database'
        assert (b'"files_in_common": {"' in rv.data), 'REST compare not successful'
    # NOTE(review): the two lines below look like pytest marker decorators
    # whose '@pytest.mark' prefix was stripped during extraction (e.g.
    # '@pytest.mark.SchedulerTestConfig(...)' and '@pytest.mark.usefixtures');
    # as written this is not valid Python -- restore from upstream.
    .SchedulerTestConfig(items_to_analyze=((4 * 2) * 3))
    .usefixtures('intercom_backend_binding')
    def test_run_from_upload_to_show_analysis(self, test_client, analysis_finished_event, comparison_finished_event):
        # Full pipeline: upload both, wait for analysis, search, compare.
        self._rest_upload_firmware(test_client, test_fw_a)
        self._rest_upload_firmware(test_client, test_fw_c)
        assert analysis_finished_event.wait(timeout=20)
        self._rest_search(test_client, test_fw_a)
        self._rest_search(test_client, test_fw_c)
        self._rest_start_compare(test_client)
        assert comparison_finished_event.wait(timeout=20)
        self._rest_get_compare(test_client) |
class OptionSeriesVectorData(Options):
    """Generated Highcharts option wrapper for series.vector.data entries.

    NOTE(review): each scalar option appears twice (getter then setter);
    the @property / @<name>.setter decorators seem to have been stripped
    from this copy -- as written the second definition shadows the first.
    Restore the decorators from the generator output.  Methods returning
    Option* types are nested-option accessors.
    """
    def accessibility(self) -> 'OptionSeriesVectorDataAccessibility':
        return self._config_sub_data('accessibility', OptionSeriesVectorDataAccessibility)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def colorIndex(self):
        return self._config_get(None)
    def colorIndex(self, num: float):
        self._config(num, js_type=False)
    def custom(self):
        return self._config_get(None)
    def custom(self, value: Any):
        self._config(value, js_type=False)
    def dataLabels(self) -> 'OptionSeriesVectorDataDatalabels':
        return self._config_sub_data('dataLabels', OptionSeriesVectorDataDatalabels)
    def description(self):
        return self._config_get(None)
    def description(self, text: str):
        self._config(text, js_type=False)
    def direction(self):
        return self._config_get(None)
    def direction(self, num: float):
        self._config(num, js_type=False)
    def dragDrop(self) -> 'OptionSeriesVectorDataDragdrop':
        return self._config_sub_data('dragDrop', OptionSeriesVectorDataDragdrop)
    def drilldown(self):
        return self._config_get(None)
    def drilldown(self, text: str):
        self._config(text, js_type=False)
    def events(self) -> 'OptionSeriesVectorDataEvents':
        return self._config_sub_data('events', OptionSeriesVectorDataEvents)
    def id(self):
        return self._config_get(None)
    def id(self, text: str):
        self._config(text, js_type=False)
    def labelrank(self):
        return self._config_get(None)
    def labelrank(self, num: float):
        self._config(num, js_type=False)
    def length(self):
        return self._config_get(None)
    def length(self, num: float):
        self._config(num, js_type=False)
    def marker(self) -> 'OptionSeriesVectorDataMarker':
        return self._config_sub_data('marker', OptionSeriesVectorDataMarker)
    def name(self):
        return self._config_get(None)
    def name(self, text: str):
        self._config(text, js_type=False)
    def selected(self):
        # Default is False (point not selected).
        return self._config_get(False)
    def selected(self, flag: bool):
        self._config(flag, js_type=False)
    def x(self):
        return self._config_get(None)
    def x(self, num: float):
        self._config(num, js_type=False)
    def y(self):
        return self._config_get(None)
    def y(self, num: float):
        self._config(num, js_type=False) |
# NOTE(review): the '.parametrize(...)' line below looks like a pytest
# decorator whose '@pytest.mark' prefix was stripped during extraction;
# as written this is not valid Python -- restore '@pytest.mark.parametrize'.
.parametrize('_estimator, _scoring, _distribution, _cv, _n_probes, _random_state', _input_params)
def test_input_params_assignment(_estimator, _scoring, _distribution, _cv, _n_probes, _random_state):
    """Constructor must store every init parameter unmodified."""
    sel = ProbeFeatureSelection(estimator=_estimator, scoring=_scoring, distribution=_distribution, cv=_cv, n_probes=_n_probes, random_state=_random_state)
    assert (sel.estimator == _estimator)
    assert (sel.scoring == _scoring)
    assert (sel.distribution == _distribution)
    assert (sel.cv == _cv)
    assert (sel.n_probes == _n_probes)
    assert (sel.random_state == _random_state) |
class AsymptoteLexer(RegexLexer):
    """Pygments lexer for the Asymptote vector-graphics language.

    The token table follows the usual RegexLexer state-machine layout
    ('root', 'statement', 'function', 'string', ...); known builtin
    function/variable names are re-tagged in get_tokens_unprocessed.
    """
    name = 'Asymptote'
    aliases = ['asy', 'asymptote']
    filenames = ['*.asy']
    mimetypes = ['text/x-asymptote']
    # Whitespace including line/block comments, reused inside 'root' patterns.
    _ws = '(?:\\s|//.*?\\n|/\\*.*?\\*/)+'
    tokens = {'whitespace': [('\\n', Text), ('\\s+', Text), ('\\\\\\n', Text), ('//(\\n|(.|\\n)*?[^\\\\]\\n)', Comment), ('/(\\\\\\n)?\\*(.|\\n)*?\\*(\\\\\\n)?/', Comment)], 'statements': [('"(|\\\\"|[^"])*"', String), ("'", String, 'string'), ('(\\d+\\.\\d*|\\.\\d+|\\d+)[eE][+-]?\\d+[lL]?', Number.Float), ('(\\d+\\.\\d*|\\.\\d+|\\d+[fF])[fF]?', Number.Float), ('0x[0-9a-fA-F]+[Ll]?', Number.Hex), ('0[0-7]+[Ll]?', Number.Oct), ('\\d+[Ll]?', Number.Integer), ('[~!%^&*+=|?:<>/-]', Operator), ('[()\\[\\],.]', Punctuation), ('\\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)), ('(and|controls|tension|atleast|curl|if|else|while|for|do|return|break|continue|struct|typedef|new|access|import|unravel|from|include|quote|static|public|private|restricted|this|explicit|true|false|null|cycle|newframe|operator)\\b', Keyword), ('(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|light|line|linefit|marginT|marker|mass|object|pair|parabola|path|path3|pen|picture|point|position|projection|real|revolution|scaleT|scientific|segment|side|slice|splitface|string|surface|tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|transformation|tree|triangle|trilinear|triple|vector|vertex|void)(?=\\s+[a-zA-Z])', Keyword.Type), ('(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|picture|position|real|revolution|slice|splitface|ticksgridT|tickvalues|tree|triple|vertex|void)\\b', Keyword.Type), ('[a-zA-Z_]\\w*:(?!:)', Name.Label), ('[a-zA-Z_]\\w*', Name)], 'root': [include('whitespace'), ((('((?:[\\w*\\s])+?(?:\\s|\\*))([a-zA-Z_]\\w*)(\\s*\\([^;]*?\\))(' + _ws) + ')(\\{)'), bygroups(using(this), Name.Function, using(this), using(this), Punctuation), 'function'), 
    ((('((?:[\\w*\\s])+?(?:\\s|\\*))([a-zA-Z_]\\w*)(\\s*\\([^;]*?\\))(' + _ws) + ')(;)'), bygroups(using(this), Name.Function, using(this), using(this), Punctuation)), default('statement')], 'statement': [include('whitespace'), include('statements'), ('[{}]', Punctuation), (';', Punctuation, '#pop')], 'function': [include('whitespace'), include('statements'), (';', Punctuation), ('\\{', Punctuation, '#push'), ('\\}', Punctuation, '#pop')], 'string': [("'", String, '#pop'), ('\\\\([\\\\abfnrtv"\\\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), ('\\n', String), ("[^\\\\'\\n]+", String), ('\\\\\\n', String), ('\\\\n', String), ('\\\\', String)]}
    def get_tokens_unprocessed(self, text):
        """Re-tag builtin Asymptote function/variable names after lexing."""
        from .pygments.lexers._asy_builtins import ASYFUNCNAME, ASYVARNAME
        for (index, token, value) in RegexLexer.get_tokens_unprocessed(self, text):
            if ((token is Name) and (value in ASYFUNCNAME)):
                token = Name.Function
            elif ((token is Name) and (value in ASYVARNAME)):
                token = Name.Variable
            (yield (index, token, value)) |
class JsTop():
    """Client-side 'top N' transformation: groups rows by a column value and
    keeps the first/last countItems groups depending on sortType.
    """
    def extendColumns(jsSchema, params):
        # This transformation adds no extra columns.
        # NOTE(review): missing `self`/@staticmethod -- likely stripped; confirm.
        pass
    alias = 'top'
    params = ('countItems', 'value', 'sortType')
    # JavaScript body injected into the report; `countItems`, `value` and
    # `sortType` are substituted from `params` at render time.
    value = "\n    var tmpRec = {};\n    data.forEach(function(rec){\n        if(tmpRec[rec[value]] === undefined){ tmpRec[rec[value]] = [rec] } else {tmpRec[rec[value]].push(rec)}});\n    \n    var result = []; \n    Object.keys(tmpRec).sort().forEach(function(key){\n        tmpRec[key].forEach(function(rec){result.push(rec)})});\n    \n    if (sortType == 'desc'){ result = result.slice(-countItems)}\n    else {result = result.slice(0, countItems)}\n    " |
class EchoNestPlaylist(WebPlaylist):
    """Rhythmbox playlist source fed by the EchoNest similar-artist API."""
    def __init__(self, shell, source):
        WebPlaylist.__init__(self, shell, source, 'echonest_playlist')
    def search_website(self):
        # Build the EchoNest query for artists similar to the current entry.
        apikey = 'N685TONJGZSHBDZMP'
        # NOTE(review): the URL literal below was truncated in this copy of
        # the file (unterminated string). It must be a format string with two
        # placeholders -- api key and artist -- restore it from upstream.
        url = '
        artist = self.search_entry.get_string(RB.RhythmDBPropType.ARTIST)
        artist = urllib.parse.quote(artist.encode('utf8'))
        formatted_url = url.format(urllib.parse.quote(apikey), artist)
        print(formatted_url)
        # Responses are cached per artist.
        cachekey = ('artist:%s' % artist)
        self.info_cache.fetch(cachekey, formatted_url, self.similar_info_cb, None)
    def similar_info_cb(self, data, _):
        """Parse the EchoNest JSON reply and build the artist->titles map."""
        if (not data):
            print('nothing to do')
            self.display_error_message()
            self._clear_next()
            return
        similar = json.loads(data.decode('utf-8'))
        self.artist = {}
        if ('songs' not in similar['response']):
            print('No matching data returned from EchoNest')
            self.display_error_message()
            self._clear_next()
            return
        # Group suggested song titles by (case-folded) artist name.
        for song in similar['response']['songs']:
            name = RB.search_fold(song['artist_name'])
            if (name not in self.artist):
                self.artist[name] = []
            self.artist[name].append(RB.search_fold(song['title']))
        if (len(self.artist) == 0):
            print('no artists returned')
            self._clear_next()
            return
        # Walk the library model looking for matching albums.
        query_model = self.shell.props.library_source.props.base_query_model
        self._load_albums(iter(query_model), albums={}, model=query_model, total=len(query_model), progress=0.0) |
def get_sink_callers(ghidra_analysis, sink_functions):
    """Collect every non-thunk function that references one of the sinks.

    Returns the calling functions in discovery order, with duplicates
    removed.  References whose containing function cannot be resolved are
    skipped.
    """
    callers = []
    for sink in sink_functions:
        references = ghidra_analysis.flat_api.getReferencesTo(sink.getEntryPoint())
        for reference in references:
            caller = ghidra_analysis.flat_api.getFunctionContaining(reference.getFromAddress())
            if caller is None or caller.isThunk():
                continue
            if caller not in callers:
                callers.append(caller)
    return callers
class BillingStatus(ModelNormal):
    """OpenAPI-generated model for a billing status record.

    NOTE(review): the bare '_property' and '_js_args_to_python_args' lines
    below look like decorators whose '@' prefix (and possibly full name,
    e.g. '@cached_property' / '@convert_js_args_to_python_args') was
    stripped during extraction -- as written this is not valid Python;
    restore from the generator output.
    """
    # 'status' is constrained to the enumerated billing states.
    allowed_values = {('status',): {'PENDING': 'Pending', 'OUTSTANDING': 'Outstanding', 'PAID': 'Paid', 'MTD': 'MTD'}}
    validations = {}
    _property
    def additional_properties_type():
        # Accept any JSON-compatible type for undeclared properties.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared property name -> accepted python types.
        return {'status': (str,), 'sent_at': (datetime, none_type)}
    _property
    def discriminator():
        return None
    attribute_map = {'status': 'status', 'sent_at': 'sent_at'}
    read_only_vars = {'sent_at'}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from deserialized server data (read-only vars allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Instantiate from user input; read-only vars are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, user construction may not set
            # read-only attributes.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.') |
def call_pip(pip_args: List[str], timeout: float=300, retry: bool=False) -> None:
    """Run ``pip`` with the given arguments via the current interpreter.

    When *retry* is set and the first run exits with code 1, the command is
    re-run exactly once.  A final non-zero exit code is reported through
    enforce() with pip's stderr attached.
    """
    command = [sys.executable, '-m', 'pip', *pip_args]

    def _run():
        # check=False: the exit code is inspected/enforced by the caller below.
        return subprocess.run(command, stdout=PIPE, stderr=PIPE, timeout=timeout, check=False)

    result = _run()
    if retry and result.returncode == 1:
        result = _run()
    enforce((result.returncode == 0), f'pip install failed. Return code != 0: stderr is {str(result.stderr)}')
class CmdBoot(COMMAND_DEFAULT_CLASS):
    """
    Kick an account off the server.

    Usage:
      boot[/switches] <account> [:reason]

    Switches:
      quiet - silently boot without messaging the account
      sid   - boot by session id instead of account name
    """

    key = 'boot'
    switch_options = ('quiet', 'sid')
    locks = 'cmd:perm(boot) or perm(Admin)'
    help_category = 'Admin'

    def func(self):
        """Implement the boot command."""
        caller = self.caller
        args = self.args
        if not args:
            caller.msg('Usage: boot[/switches] <account> [:reason]')
            return
        if ':' in args:
            (args, reason) = [a.strip() for a in args.split(':', 1)]
        else:
            (args, reason) = (args, '')
        boot_list = []
        # Bug fix: pobj was previously only bound in the account-name branch,
        # so the security-log check at the bottom raised NameError after a
        # /sid boot.  Keep it None in that case (no account to log).
        pobj = None
        if 'sid' in self.switches:
            # Boot by session id.
            try:
                sid = int(args)
            except ValueError:
                # Robustness: a non-numeric session id used to raise an
                # unhandled ValueError.
                caller.msg('Usage: boot/sid <session id> [:reason]')
                return
            sessions = evennia.SESSION_HANDLER.get_sessions(True)
            for sess in sessions:
                if sess.sessid == sid:
                    boot_list.append(sess)
                    break
        else:
            # Boot all sessions of the named account.
            pobj = search.account_search(args)
            if not pobj:
                caller.msg(f'Account {args} was not found.')
                return
            pobj = pobj[0]
            if not pobj.access(caller, 'boot'):
                caller.msg(f"You don't have the permission to boot {pobj.key}.")
                return
            matches = evennia.SESSION_HANDLER.sessions_from_account(pobj)
            for match in matches:
                boot_list.append(match)
        if not boot_list:
            caller.msg('No matching sessions found. The Account does not seem to be online.')
            return
        feedback = None
        if 'quiet' not in self.switches:
            feedback = f'You have been disconnected by {caller.name}.\n'
            if reason:
                feedback += f'\nReason given: {reason}'
        for session in boot_list:
            session.msg(feedback)
            session.account.disconnect_session_from_account(session)
        if pobj and boot_list:
            logger.log_sec(f'Booted: {pobj} (Reason: {reason}, Caller: {caller}, IP: {self.session.address}).')
def build_ait_module_gemm_rcr(*, ms, n, k, split_k, test_name):
    """Compile an AITemplate gemm_rcr_bias module with a dynamic M dimension.

    ms      -- candidate values for the dynamic M dimension (min/max taken)
    n, k    -- static GEMM dimensions; b is (n, k), bias is (n,)
    split_k -- split-K hint forwarded to the op's tuning attributes
    """
    target = detect_target(use_fp16_acc=True)
    input_params = {'dtype': 'float16', 'is_input': True}
    # a has a dynamic first dimension covering the range of `ms`.
    a = Tensor(shape=[shape_utils.gen_int_var_min_max(ms), k], name='a', **input_params)
    b = Tensor(shape=[n, k], name='b', **input_params)
    bias = Tensor(shape=[n], name='bias', **input_params)
    OP = ops.gemm_rcr_bias()
    OP._attrs['split_k_hints'] = (split_k,)
    output = OP(a, b, bias)
    output._attrs['name'] = 'output'
    output._attrs['is_output'] = True
    return compile_model(output, target, './tmp', test_name=test_name) |
class AllListMembers(Rule):
    """Rule combinator that succeeds only when the inner rule succeeds on
    every element of a list, producing the (possibly edited) new list.
    """

    rule: Rule

    def __init__(self, rule: Rule, name: str='all_list_members') -> None:
        Rule.__init__(self, name)
        self.rule = rule

    def apply(self, test: Any) -> RuleResult:
        """Apply the inner rule to each list member; Fail on non-lists or on
        any member failure; otherwise Success with the rewritten list."""
        if not isinstance(test, list):
            return Fail(test)
        if not test:
            # Empty list: trivially succeeds, unchanged.
            return Success(test, test)
        member_results = [self.rule.apply(member) for member in test]
        for member_result in member_results:
            if member_result.is_fail():
                return Fail(test)
        rewritten = list(_expand_edits(member_result.expect_success() for member_result in member_results))
        if _list_unchanged(rewritten, test):
            # Preserve identity when nothing actually changed.
            return Success(test, test)
        return Success(test, rewritten)

    def __str__(self) -> str:
        return f'all_list_members( {str(self.rule)} )'

    def always_succeeds(self) -> bool:
        # Non-list inputs always fail, so this rule can never be total.
        return False
class TestLinkCont(unittest.TestCase):
    """Simulate the SATA CONT inserter/remover pair and check the stream
    passes through unchanged (no shift, no byte errors)."""
    def test_link_cont(self):
        def generator(dut):
            # Stream of SYNC/ALIGN primitives interleaved with data words,
            # repeated 4 times to exercise CONT insertion/removal.
            test_packet = ContPacket(([primitives['SYNC'], primitives['SYNC'], primitives['SYNC'], primitives['SYNC'], primitives['SYNC'], primitives['SYNC'], primitives['ALIGN'], primitives['ALIGN'], primitives['SYNC'], primitives['SYNC'], 0, 1, 2, 3, 4, 5, 6, 7, primitives['SYNC'], primitives['SYNC'], primitives['SYNC'], primitives['SYNC'], primitives['ALIGN'], primitives['ALIGN'], primitives['SYNC'], primitives['SYNC'], primitives['SYNC'], primitives['SYNC']] * 4))
            # NOTE(review): this wraps a ContPacket in another ContPacket
            # (ContPacket(test_packet)) -- confirm the constructor accepts a
            # packet as its data argument or whether the raw list was meant.
            streamer_packet = ContPacket(test_packet)
            (yield from dut.streamer.send_blocking(streamer_packet))
            (yield from dut.logger.receive(len(test_packet)))
            # check() returns (shift, length, error_count).
            (s, l, e) = check(streamer_packet, dut.logger.packet)
            print(((((('shift ' + str(s)) + ' / length ') + str(l)) + ' / errors ') + str(e)))
            self.assertEqual(s, 0)
            self.assertEqual(e, 0)
        class DUT(Module):
            def __init__(self):
                # streamer -> randomizer -> CONT inserter -> CONT remover
                # -> randomizer -> logger; randomizers add backpressure.
                self.submodules.streamer = ContStreamer()
                self.submodules.streamer_randomizer = Randomizer(phy_description(32), level=50)
                self.submodules.inserter = LiteSATACONTInserter(phy_description(32))
                self.submodules.remover = LiteSATACONTRemover(phy_description(32))
                self.submodules.logger_randomizer = Randomizer(phy_description(32), level=50)
                self.submodules.logger = ContLogger()
                self.submodules.pipeline = Pipeline(self.streamer, self.streamer_randomizer, self.inserter, self.remover, self.logger_randomizer, self.logger)
        dut = DUT()
        generators = {'sys': [generator(dut), dut.streamer.generator(), dut.streamer_randomizer.generator(), dut.logger.generator(), dut.logger_randomizer.generator()]}
        clocks = {'sys': 10}
        run_simulation(dut, generators, clocks) |
def logarithmic_utility(utility_params_by_good_id: Dict[(str, float)], quantities_by_good_id: Dict[(str, int)], quantity_shift: int=100) -> float:
    """Compute a shifted logarithmic (Cobb-Douglas style) utility.

    Each good contributes param * log(quantity + quantity_shift); a
    non-positive shifted quantity contributes a large penalty (-10000)
    instead, since the log would be undefined.
    """
    enforce((quantity_shift >= 0), 'The quantity_shift argument must be a non-negative integer.')
    total = 0
    for (good_id, quantity) in quantities_by_good_id.items():
        shifted_quantity = quantity + quantity_shift
        if shifted_quantity > 0:
            total += utility_params_by_good_id[good_id] * math.log(shifted_quantity)
        else:
            total += -10000
    return total
class AttributeDictTranslator():
    """Translate between web3 AttributeDict objects (with HexBytes values)
    and plain JSON-compatible dicts.

    NOTE(review): every method takes `cls` but carries no visible
    @classmethod decorator -- the decorators appear to have been stripped
    in this copy of the file; restore from upstream.
    """
    def _remove_hexbytes(cls, value: Any) -> Any:
        """Recursively replace HexBytes with hex strings for JSON output."""
        if (value is None):
            return value
        if isinstance(value, HexBytes):
            return value.hex()
        if isinstance(value, list):
            return cls._process_list(value, cls._remove_hexbytes)
        if (type(value) in (bool, int, float, str, bytes)):
            return value
        if isinstance(value, AttributeDict):
            return cls.to_dict(value)
        raise NotImplementedError(f'Unknown type conversion. Found type: {type(value)}')
    def _add_hexbytes(cls, value: Any) -> Any:
        """Recursively wrap hex-looking strings back into HexBytes."""
        if (value is None):
            return value
        if isinstance(value, str):
            # Only strings that parse as base-16 integers become HexBytes.
            try:
                int(value, 16)
                return HexBytes(value)
            except Exception:
                return value
        if isinstance(value, list):
            return cls._process_list(value, cls._add_hexbytes)
        if isinstance(value, dict):
            return cls.from_dict(value)
        if (type(value) in (bool, int, float, bytes)):
            return value
        raise NotImplementedError(f'Unknown type conversion. Found type: {type(value)}')
    def _process_list(cls, li: list, callable_name: Callable) -> List:
        # Apply the given converter element-wise.
        return [callable_name(el) for el in li]
    def _valid_key(cls, key: Any) -> str:
        # JSON object keys must be strings.
        if isinstance(key, str):
            return key
        raise ValueError('Key must be string.')
    def to_dict(cls, attr_dict: Union[(AttributeDict, TxReceipt, TxData)]) -> JSONLike:
        """Convert an AttributeDict tree into a JSON-compatible dict."""
        if (not isinstance(attr_dict, AttributeDict)):
            raise ValueError('No AttributeDict provided.')
        result = {cls._valid_key(key): cls._remove_hexbytes(value) for (key, value) in attr_dict.items()}
        return result
    def from_dict(cls, di: JSONLike) -> AttributeDict:
        """Convert a JSON-compatible dict back into an AttributeDict."""
        if (not isinstance(di, dict)):
            raise ValueError('No dict provided.')
        processed_dict = {cls._valid_key(key): cls._add_hexbytes(value) for (key, value) in di.items()}
        return AttributeDict(processed_dict) |
class OptionSeriesNetworkgraphNodesMarkerStatesInactive(Options):
    """Generated Highcharts option wrapper for
    series.networkgraph.nodes.marker.states.inactive.

    NOTE(review): opacity appears twice (getter then setter); the
    @property / @opacity.setter decorators seem to have been stripped in
    this copy -- restore from the generator output.
    """
    def animation(self) -> 'OptionSeriesNetworkgraphNodesMarkerStatesInactiveAnimation':
        return self._config_sub_data('animation', OptionSeriesNetworkgraphNodesMarkerStatesInactiveAnimation)
    def opacity(self):
        # Default inactive-state opacity is 0.3.
        return self._config_get(0.3)
    def opacity(self, num: float):
        self._config(num, js_type=False) |
def test_send_multicast():
    """A multicast to two invalid FCM tokens must fail for every message
    while still returning one response per token."""
    message = messaging.MulticastMessage(
        notification=messaging.Notification('Title', 'Body'),
        tokens=['not-a-token', 'also-not-a-token'],
    )
    batch_response = messaging.send_multicast(message)
    # Both sends fail, none succeed.
    assert (batch_response.failure_count == 2)
    assert (batch_response.success_count == 0)
    responses = batch_response.responses
    assert (len(responses) == 2)
    for single_response in responses:
        assert (single_response.success is False)
        assert (single_response.exception is not None)
        assert (single_response.message_id is None)
# NOTE(review): the bare '_tag' line below looks like the tail of a stripped
# decorator (likely '@register.simple_tag' or similar Django template-tag
# registration); as written this is not valid Python -- restore from upstream.
_tag
def generate_tag_html():
    """Render up to 15 tags as <li> items, with article counts when present.

    The joined HTML is marked safe for direct template insertion.
    """
    tag_list = Tags.objects.all()[:15]
    tag_html = []
    for tag in tag_list:
        if tag.articles_set.all():
            tag_html.append(f'<li>{tag.title} <i>{tag.articles_set.count()}</i></li>')
        else:
            tag_html.append(f'<li>{tag.title}</li>')
    return mark_safe(''.join(tag_html)) |
def get_suggested(form: FlowResult, key: str) -> Any:
    """Return the suggested value attached to *key* in a flow form's schema.

    Returns None when the key exists but carries no 'suggested_value' in its
    description; raises KeyError when the key is absent from the schema.
    """
    for schema_key in form['data_schema'].schema:
        if schema_key != key:
            continue
        description = schema_key.description
        if description is not None and 'suggested_value' in description:
            return description['suggested_value']
        return None
    raise KeyError(f"Key '{key}' not found")
def _maybe_add_data_augmentations_for_example(training_example: TrainingExample, formatted_examples_being_built: List[str], indices_of_all_categories: range, formatter_configs: FormatterConfigs) -> None:
    """Optionally append augmented copies of *training_example* to
    *formatted_examples_being_built* (mutated in place).

    Splits the category indices into violated vs. non-violated sets, then
    delegates to the two augmentation helpers which each decide (per config)
    whether to emit a variant with some categories dropped.
    """
    violated_category_indices = _convert_category_codes_to_indices(training_example.violated_category_codes, formatter_configs)
    # Everything not violated is, by definition, non-violated.
    nonviolated_category_indices = list((set(indices_of_all_categories) - set(violated_category_indices)))
    _maybe_add_example_with_dropped_nonviolated_prompt_categories(training_example, formatted_examples_being_built, indices_of_all_categories, nonviolated_category_indices, formatter_configs)
    _maybe_add_example_with_dropped_violated_and_nonviolated_prompt_categories(training_example, formatted_examples_being_built, indices_of_all_categories, violated_category_indices, nonviolated_category_indices, formatter_configs) |
class BaseMessage(BaseModel, ABC):
    """Abstract base for chat messages exchanged with a model.

    content holds the message text; index/round_index track its position in
    the conversation; additional_kwargs carries provider-specific extras.
    """
    content: str
    index: int = 0
    round_index: int = 0
    additional_kwargs: dict = Field(default_factory=dict)
    def type(self) -> str:
        """Discriminator string identifying the concrete message type.

        NOTE(review): the decorator(s) and body of this method appear to
        have been stripped in this copy (likely @property + @abstractmethod
        with no implementation); to_dict below accesses self.type without
        calling it, consistent with a property -- restore from upstream.
        """
    def pass_to_model(self) -> bool:
        # By default every message is forwarded to the model.
        return True
    def to_dict(self) -> Dict:
        # Serialise including the type discriminator and position info.
        return {'type': self.type, 'data': self.dict(), 'index': self.index, 'round_index': self.round_index} |
class TreeLayout():
    """Named pair of optional tree-style and node-style callbacks.

    ``ts`` is invoked with the tree style object, ``ns`` with each node;
    ``aligned_faces`` additionally enables the style's aligned panel.
    """

    def __init__(self, name, ts=None, ns=None, aligned_faces=False, active=True, legend=True):
        self.name = name
        self.ts = ts
        self.ns = ns
        self.aligned_faces = aligned_faces
        self.active = active
        self.legend = legend
        self.description = ''
        self.always_render = False

    def set_tree_style(self, tree, style):
        """Apply the aligned-panel flag and the optional tree-style hook."""
        if self.aligned_faces:
            style.aligned_panel = True
        if self.ts is not None:
            self.ts(style)

    def set_node_style(self, node):
        """Apply the optional node-style hook to *node*."""
        if self.ns is not None:
            self.ns(node)
def cache_generate_code(kernel, comm):
    """Return generated device code for *kernel*, using an on-disk cache.

    Cache layout: <cachedir>/<first 2 key chars>/<rest of key>. On a miss
    every rank regenerates the code but only rank 0 writes it back. All
    ranks reach the barrier on every path (hit or miss) so the collective
    stays matched across the communicator.
    """
    cache_root = os.environ.get(
        'PYOP2_CACHE_DIR',
        os.path.join(tempfile.gettempdir(), 'pyop2-cache-uid%d' % os.getuid()))
    key = kernel.cache_key[0]
    shard, disk_key = key[:2], key[2:]
    filepath = os.path.join(cache_root, shard, disk_key)
    if os.path.exists(filepath):
        with open(filepath, 'r') as fh:
            code = fh.read()
    else:
        code = loopy.generate_code_v2(kernel.code).device_code()
        if comm.rank == 0:
            os.makedirs(os.path.join(cache_root, shard), exist_ok=True)
            with open(filepath, 'w') as fh:
                fh.write(code)
    comm.barrier()
    return code
class StreamPacket():
    """A packet of byte values (0..255) plus named integer parameters.

    Used to compare a sent packet against a received one byte-by-byte and
    param-by-param.
    """

    def __init__(self, data, params=None):
        """Validate and store the packet payload.

        Bug fix: the original used a mutable default ``params={}`` which was
        shared across every instance constructed without params.
        """
        if params is None:
            params = {}
        assert (type(data) == list)
        for b in data:
            assert ((type(b) == int) and (b >= 0) and (b < 256))
        assert (type(params) == dict)
        for (param_key, param_value) in params.items():
            assert (type(param_key) == str)
            assert (type(param_value) == int)
        self.data = data
        self.params = params

    def compare(self, other, quiet=True, output_target=sys.stdout):
        """Return True when *other* matches byte-for-byte and param-for-param.

        When ``quiet`` is False, the first mismatch is described on
        ``output_target``. Bug fix: the original printed to the hard-coded
        ``sys.stdout`` instead of honoring ``output_target``.
        """
        if (len(self.data) != len(other.data)):
            if (not quiet):
                print('Length mismatch in number of received bytes of packet: {} {}'.format(len(self.data), len(other.data)), file=output_target)
            return False
        for (nbyte, (byte_a, byte_b)) in enumerate(zip(self.data, other.data)):
            if (byte_a != byte_b):
                if (not quiet):
                    print('Mismatch between sent and received bytes {}: 0x{:02x} 0x{:02x}'.format(nbyte, byte_a, byte_b), file=output_target)
                return False
        if (set(self.params.keys()) != set(other.params.keys())):
            if (not quiet):
                print('Sent and received packets have different param fields: {} {}'.format(self.params.keys(), other.params.keys()), file=output_target)
            return False
        for (param_name, self_param_value) in self.params.items():
            other_param_value = other.params[param_name]
            if (self_param_value != other_param_value):
                if (not quiet):
                    print('Sent and received packets have different value for param signal "{}": 0x{:x} 0x{:x}'.format(param_name, self_param_value, other_param_value), file=output_target)
                return False
        return True
def execute(cmd, no_except=True, inline=False, init='', g=None):
    """Execute Python source and build a REPL-style transcript plus colors.

    Parses *cmd* with ``ast``, runs it statement-by-statement via the
    module-level ``evaluate`` helper, and renders each statement with
    ``>>> `` / ``... `` prompts followed by its captured output.

    Parameters:
        cmd: the source to run (may start with an RE_INIT-matched init block).
        no_except: when True, parse/run errors are re-raised as the relevant
            pymdownx exception instead of being rendered as a traceback.
        inline: selects InlineHilite vs SuperFences exception type.
        init: extra source executed first into the same globals.
        g: globals dict; defaults to the coloraide helper namespace.

    Returns (console_text, colors) where colors accumulates the color lists
    extracted from each non-None result via ``get_colors``.
    """
    console = ''
    colors = []
    if (g is None):
        # Default sandbox globals: the documented coloraide helper classes.
        g = {'Ramp': Ramp, 'Steps': Steps, 'Row': Row, 'HtmlRow': HtmlRow, 'HtmlSteps': HtmlSteps, 'HtmlGradient': HtmlGradient}
    if init:
        execute(init.strip(), g=g)
    m = RE_INIT.match(cmd)
    if m:
        # Hidden per-block init section: run it but exclude it from the transcript.
        block_init = m.group(1)
        src = cmd[m.end():]
        execute(block_init, g=g)
    else:
        src = cmd
    lines = src.split('\n')
    try:
        tree = ast.parse(src)
    except Exception as e:
        if no_except:
            if (not inline):
                from pymdownx.superfences import SuperFencesException
                raise SuperFencesException from e
            else:
                from pymdownx.inlinehilite import InlineHiliteException
                raise InlineHiliteException from e
        import traceback
        return ('{}'.format(traceback.format_exc()), colors)
    for node in tree.body:
        result = []
        # Re-render the statement's own source lines with REPL prompts.
        start = node.lineno
        end = node.end_lineno
        stmt = lines[(start - 1):end]
        command = ''
        for (i, line) in enumerate(stmt, 0):
            if (i == 0):
                stmt[i] = ('>>> ' + line)
            else:
                stmt[i] = ('... ' + line)
        command += '\n'.join(stmt)
        if isinstance(node, AST_BLOCKS):
            # Compound statements get the trailing continuation prompt a REPL shows.
            command += '\n... '
        try:
            with StreamOut() as s:
                # evaluate() yields the statement's results; captured stdout is
                # appended as an AtomicString so it is not repr()'d below.
                for x in evaluate(node, g):
                    result.append(x)
                text = s.read()
                if text:
                    result.append(AtomicString(text))
                console += command
        except Exception as e:
            if no_except:
                if (not inline):
                    from pymdownx.superfences import SuperFencesException
                    raise SuperFencesException from e
                else:
                    from pymdownx.inlinehilite import InlineHiliteException
                    raise InlineHiliteException from e
            import traceback
            console += '{}\n{}'.format(command, traceback.format_exc())
            break
        result_text = '\n'
        for r in result:
            if (r is None):
                continue
            # Collect any color swatches the result carries for the renderer.
            for clist in get_colors(r):
                if clist:
                    colors.append(clist)
            result_text += '{}{}'.format((repr(r) if (isinstance(r, str) and (not isinstance(r, AtomicString))) else str(r)), ('\n' if (not isinstance(r, AtomicString)) else ''))
        console += result_text
    return (console, colors)
def downgrade():
    """Revert the 'hours' columns on both flicket tables to Numeric(10, 0)."""
    for table in ('flicket_topic', 'flicket_post'):
        op.alter_column(
            table,
            'hours',
            existing_type=sa.Numeric(precision=10, scale=2),
            type_=sa.Numeric(precision=10, scale=0),
            existing_nullable=True,
            existing_server_default=sa.text("'0'"),
        )
class TestSerializer(TestCase):
    """Exercise sync/async behavior of the project's Serializer classes.

    Covers validation (valid/invalid/partial/ChainMap inputs), the async
    ``adata`` accessor, and the async CRUD hooks (``acreate``, ``aupdate``,
    ``asave``, ``ato_representation``).
    """
    def setUp(self):
        # Three serializer flavors: plain fields only, fields + async CRUD
        # hooks, and a Django ModelSerializer bound to User.
        class SimpleSerializer(Serializer):
            username = serializers.CharField()
            password = serializers.CharField()
            age = serializers.IntegerField()
        class CrudSerializer(Serializer):
            username = serializers.CharField()
            password = serializers.CharField()
            age = serializers.IntegerField()
            async def acreate(self, validated_data):
                return MockObject(**validated_data)
            async def aupdate(self, instance, validated_data):
                return MockObject(**validated_data)
        class MyModelSerializer(ModelSerializer):
            class Meta():
                model = User
                fields = ('username',)
        self.simple_serializer = SimpleSerializer
        self.crud_serializer = CrudSerializer
        self.model_serializer = MyModelSerializer
        self.default_data = {'username': 'test', 'password': 'test', 'age': 25}
        self.default_object = MockObject(**self.default_data)
    async def test_serializer_valid(self):
        """Valid input validates cleanly and round-trips through adata."""
        data = {'username': 'test', 'password': 'test', 'age': 10}
        serializer = self.simple_serializer(data=data)
        assert serializer.is_valid()
        assert ((await serializer.adata) == data)
        assert (serializer.errors == {})
    async def test_modelserializer_valid(self):
        """ModelSerializer validation runs via sync_to_async (ORM access)."""
        data = {'username': 'test'}
        serializer = self.model_serializer(data=data)
        assert (await sync_to_async(serializer.is_valid)())
        assert ((await serializer.adata) == data)
        assert (serializer.errors == {})
    async def test_serializer_invalid(self):
        """A missing required field surfaces in errors, not validated_data."""
        data = {'username': 'test', 'password': 'test'}
        serializer = self.simple_serializer(data=data)
        assert (not serializer.is_valid())
        assert (serializer.validated_data == {})
        assert ((await serializer.adata) == data)
        assert (serializer.errors == {'age': ['This field is required.']})
    async def test_many_argument(self):
        """many=True accepts a list of items."""
        data = [{'username': 'test', 'password': 'test', 'age': 10}]
        serializer = self.simple_serializer(data=data, many=True)
        assert serializer.is_valid()
        assert (serializer.validated_data == data)
        assert ((await serializer.adata) == data)
    async def test_invalid_datatype(self):
        """A list without many=True is a non-field error."""
        data = [{'username': 'test', 'password': 'test', 'age': 10}]
        serializer = self.simple_serializer(data=data)
        assert (not serializer.is_valid())
        assert (serializer.validated_data == {})
        assert ((await serializer.adata) == {})
        assert (serializer.errors == {'non_field_errors': ['Invalid data. Expected a dictionary, but got list.']})
    async def test_partial_validation(self):
        """partial=True tolerates missing fields."""
        data = {'username': 'test', 'password': 'test'}
        serializer = self.simple_serializer(data=data, partial=True)
        assert serializer.is_valid()
        assert (serializer.validated_data == data)
        assert (serializer.errors == {})
    async def test_serialize_chainmap(self):
        """A ChainMap of dicts is accepted as a single mapping."""
        data = ({'username': 'test'}, {'password': 'test'}, {'age': 10})
        serializer = self.simple_serializer(data=ChainMap(*data))
        assert serializer.is_valid()
        assert (serializer.validated_data == {'username': 'test', 'password': 'test', 'age': 10})
        assert (serializer.errors == {})
    async def test_crud_serializer_create(self):
        """acreate builds an object from validated data."""
        data = self.default_data
        serializer = self.crud_serializer(data=data)
        assert serializer.is_valid()
        created_object = (await serializer.acreate(serializer.validated_data))
        assert isinstance(created_object, MockObject)
        assert (created_object.username == data['username'])
        assert (created_object.password == data['password'])
        assert (created_object.age == data['age'])
    async def test_crud_serializer_update(self):
        """aupdate replaces an instance with the new validated data."""
        default_object = self.default_object
        data = {'username': 'test2', 'password': 'test2', 'age': 30}
        serializer = self.crud_serializer(default_object, data=data)
        assert serializer.is_valid()
        updated_object = (await serializer.aupdate(default_object, serializer.validated_data))
        assert isinstance(updated_object, MockObject)
        assert (updated_object.username == data['username'])
        assert (updated_object.password == data['password'])
        assert (updated_object.age == data['age'])
    async def test_crud_serializer_save(self):
        """asave with no instance delegates to acreate."""
        data = self.default_data
        serializer = self.crud_serializer(data=data)
        assert serializer.is_valid()
        created_object = (await serializer.asave())
        assert isinstance(created_object, MockObject)
        assert (created_object.username == data['username'])
        assert (created_object.password == data['password'])
        assert (created_object.age == data['age'])
    async def test_crud_serializer_to_representation(self):
        """ato_representation serializes an instance to a plain dict."""
        default_object = self.default_object
        serializer = self.crud_serializer(default_object)
        representation = (await serializer.ato_representation(default_object))
        assert isinstance(representation, dict)
        assert (representation['username'] == default_object.username)
        assert (representation['password'] == default_object.password)
        assert (representation['age'] == default_object.age)
    def test_sync_serializer_valid(self):
        """The synchronous .data accessor still works alongside adata."""
        data = {'username': 'test', 'password': 'test', 'age': 10}
        serializer = self.simple_serializer(data=data)
        assert serializer.is_valid()
        assert (serializer.data == data)
        assert (serializer.errors == {})
def gas_buffer(*args: Tuple[(float, None)]) -> Union[(float, None)]:
    """Get or set the active network's gas-buffer multiplier.

    With no argument, returns the current setting. With one argument, sets
    it: ``None`` resets to 1, a float/int is stored as-is, anything else
    raises TypeError. Requires an active network connection.
    """
    if not is_connected():
        raise ConnectionError('Not connected to any network')
    settings = CONFIG.active_network['settings']
    if args:
        new_value = args[0]
        if new_value is None:
            settings['gas_buffer'] = 1
        elif isinstance(new_value, (float, int)):
            settings['gas_buffer'] = new_value
        else:
            raise TypeError('Invalid gas buffer - must be given as a float, int or None')
    return settings['gas_buffer']
def pack_A1_0(data):
    """Greedily pack circles whose areas are the (descending-sorted) *data* values.

    Each value's radius is sqrt(value); circles are placed one at a time via
    ``place_new_A1_0`` using the hole-degree/radius weighting heuristic.
    Warns when min/max spread is extreme enough to threaten stability.
    """
    spread = min(data) / max(data)
    if spread < _eps:
        log.warning('min to max ratio is too low at %f and it could cause algorithm stability issues. Try to remove insignificant data', spread)
    assert data == sorted(data, reverse=True), 'data must be sorted (desc)'
    circles = []
    for radius, upcoming in look_ahead([math.sqrt(value) for value in data]):
        circles = place_new_A1_0(radius, upcoming, circles, get_hole_degree_radius_w)
    return circles
class ClassificationPreset(MetricPreset):
    """Preset bundling the standard classification-quality metrics.

    Probability-based plots are included only when the data definition
    exposes prediction probabilities.
    """
    columns: Optional[List[str]]
    probas_threshold: Optional[float]
    k: Optional[int]

    def __init__(self, columns: Optional[List[str]]=None, probas_threshold: Optional[float]=None, k: Optional[int]=None):
        super().__init__()
        self.columns = columns
        self.probas_threshold = probas_threshold
        self.k = k

    def generate_metrics(self, data_definition: DataDefinition, additional_data: Optional[Dict[(str, Any)]]):
        """Build the metric list for this preset."""
        metrics = [
            ClassificationQualityMetric(probas_threshold=self.probas_threshold, k=self.k),
            ClassificationClassBalance(),
            ClassificationConfusionMatrix(probas_threshold=self.probas_threshold, k=self.k),
            ClassificationQualityByClass(probas_threshold=self.probas_threshold, k=self.k),
        ]
        prediction = data_definition.get_prediction_columns()
        if (prediction is not None) and (prediction.prediction_probas is not None):
            metrics.extend([
                ClassificationClassSeparationPlot(),
                ClassificationProbDistribution(),
                ClassificationRocCurve(),
                ClassificationPRCurve(),
                ClassificationPRTable(),
            ])
        metrics.append(ClassificationQualityByFeatureTable(columns=self.columns))
        return metrics
class Websocket(RSGIIngressMixin, _Websocket):
    """RSGI websocket wrapper that defers protocol acceptance until first use."""
    __slots__ = ['_scope', '_proto']
    def __init__(self, scope: Scope, path: str, protocol: WSTransport):
        super().__init__(scope, path, protocol)
        # Flow transform chains; presumably populated elsewhere before the
        # wrapped receive/send run (iterating None would raise) — TODO confirm.
        self._flow_receive = None
        self._flow_send = None
        # Until accept() runs, receive/send first perform the handshake.
        self.receive = self._accept_and_receive
        self.send = self._accept_and_send
    async def accept(self, headers: Optional[Dict[(str, str)]]=None, subprotocol: Optional[str]=None):
        """Accept the websocket connection; idempotent once the transport exists.

        NOTE(review): *headers* and *subprotocol* are accepted but not used
        here — presumably for interface compatibility; confirm with callers.
        """
        if self._proto.transport:
            return
        (await self._proto.init())
        # After the handshake, bypass the accept-wrapping entry points.
        self.receive = self._wrapped_receive
        self.send = self._wrapped_send
    async def _wrapped_receive(self) -> Any:
        # Apply each receive-flow transform, in order, to the incoming payload.
        data = (await self._proto.receive()).data
        for method in self._flow_receive:
            data = method(data)
        return data
    async def _wrapped_send(self, data: Any):
        # Apply send-flow transforms, then dispatch on str vs bytes payloads.
        for method in self._flow_send:
            data = method(data)
        trx = (self._proto.transport.send_str if isinstance(data, str) else self._proto.transport.send_bytes)
        try:
            (await trx(data))
        except ProtocolClosed:
            # Only swallow the closure when the protocol was deliberately
            # interrupted; otherwise surface it to the caller.
            if (not self._proto.interrupted):
                raise
            (await self._proto.noop.wait())
class OptionSeriesHeatmapDatalabelsFilter(Options):
    """Typed accessors for the heatmap series dataLabels.filter options.

    NOTE(review): the original had getter/setter pairs with identical names
    and no decorators, so each second ``def`` silently shadowed the first and
    the getters were unreachable. Restored the standard ``@property`` /
    ``@<name>.setter`` pattern — confirm against version control.
    """

    @property
    def operator(self):
        # Comparison operator for the filter; no configured default.
        return self._config_get(None)

    @operator.setter
    def operator(self, value: Any):
        self._config(value, js_type=False)

    @property
    def property(self):
        # Point property the filter compares against; no configured default.
        return self._config_get(None)

    @property.setter
    def property(self, text: str):
        self._config(text, js_type=False)
@_recovery_question.command()
@_context
def execute(ctx):
    """Set the recovery question/answer for the configured Okta user ID.

    NOTE(review): the two decorator lines above were missing their '@' prefix
    in the original (a bare expression and a bare name); restored — confirm
    against version control.
    """
    error = MODULE.check_options()
    if error:
        return
    msg = f"Attempting to set the recovery question and answer for user ID {MODULE_OPTIONS['id']['value']}"
    LOGGER.info(msg)
    index_event(ctx.obj.es, module=__name__, event_type='INFO', event=msg)
    click.echo(f'[*] {msg}')
    url = f"{ctx.obj.base_url}/users/{MODULE_OPTIONS['id']['value']}"
    headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': f'SSWS {ctx.obj.api_token}'}
    params = {}
    payload = {'credentials': {'recovery_question': {'question': MODULE_OPTIONS['question']['value'], 'answer': MODULE_OPTIONS['answer']['value']}}}
    try:
        response = ctx.obj.session.post(url, headers=headers, params=params, json=payload, timeout=7)
    except Exception as e:
        LOGGER.error(e, exc_info=True)
        index_event(ctx.obj.es, module=__name__, event_type='ERROR', event=e)
        click.secho(f'[!] {URL_OR_API_TOKEN_ERROR}', fg='red')
        # Bug fix: the original set response = None and fell through to
        # response.ok below, raising AttributeError; bail out instead.
        return
    if response.ok:
        msg = f"Recovery question and answer set for user {MODULE_OPTIONS['id']['value']}"
        LOGGER.info(msg)
        index_event(ctx.obj.es, module=__name__, event_type='INFO', event=msg)
        click.secho(f'[*] {msg}', fg='green')
        ctx.obj.okta.get_user(ctx, MODULE_OPTIONS['id']['value'])
    else:
        msg = f'''Error setting recovery question and answer for Okta user
Response Code: {response.status_code} | Response Reason: {response.reason}
Error Code: {response.json().get('errorCode')} | Error Summary: {response.json().get('errorSummary')}'''
        LOGGER.error(msg)
        index_event(ctx.obj.es, module=__name__, event_type='ERROR', event=msg)
        click.secho(f'[!] {msg}', fg='red')
    return
def make_list_for_entry(center, data, list_for_entry, error_logs):
    """Append a stock-entry row for every positive-quantity product in *data*.

    When the center lacks an ERPNext warehouse link, an error message is
    logged per qualifying entry instead. Returns the (mutated) list.
    """
    warehouse = center.get('erpnext_warehouse')
    for item in data['list']:
        if item['total_quantity'] <= 0:
            continue
        if not warehouse:
            err_msg = _('Center {0} is not linked to any ERPNext Warehouse.').format(frappe.bold(center.get('center_name')))
            error_logs.append(err_msg)
            continue
        list_for_entry.append({
            'item_code': item['product_code'],
            'item_name': item['product_name'],
            'warehouse': warehouse,
            'qty': item['total_quantity'],
            'allow_zero_valuation_rate': 1,
        })
    return list_for_entry
def data_processer(json_data, logger: Logger):
    """Convert a MISO fuel-mix payload into (timestamp, production-by-fuel).

    Unknown fuel categories are logged and accumulated under 'unknown'.
    The RefId timestamp is re-parsed in America/New_York; a non-EST zone
    label means the upstream format changed and is treated as an error.
    """
    production = {}
    for fuel in json_data['Fuel']['Type']:
        try:
            fuel_key = mapping[fuel['CATEGORY']]
        except KeyError:
            logger.warning("Key '{}' is missing from the MISO fuel mapping.".format(fuel['CATEGORY']))
            fuel_key = 'unknown'
        production[fuel_key] = production.get(fuel_key, 0.0) + float(fuel['ACT'])
    raw_parts = json_data['RefId'].split(' ')
    # Drop the filler tokens at positions 1 and 2, keeping date + zone label.
    useful_time_parts = [part for (idx, part) in enumerate(raw_parts) if idx not in (1, 2)]
    if useful_time_parts[-1] != 'EST':
        raise ValueError('Timezone reported for US-MISO has changed.')
    dt = parser.parse(' '.join(useful_time_parts), tzinfos={'EST': tz.gettz('America/New_York')})
    return (dt, production)
class CamLoadMode(BaseMode):
    """Mode that shows camera save-slot buttons and loads the hovered slot on key release."""
    name = Mode.cam_load
    keymap = {Action.quit: False, Action.cam_load: True}
    def enter(self):
        """Show the slot menu and start polling for the '2' key release."""
        # Avoid returning into aim mode directly; fall back to view instead.
        if (Global.mode_mgr.last_mode == Mode.aim):
            Global.mode_mgr.last_mode = Mode.view
        mouse.mode(MouseMode.ABSOLUTE)
        self.selection = None
        self.register_keymap_event('escape', Action.quit, True)
        self.register_keymap_event('2', Action.cam_load, True)
        self.register_keymap_event('2-up', Action.cam_load, False)
        self.render_camera_load_buttons()
        tasks.add(self.cam_load_task, 'cam_load_task')
        tasks.add(self.shared_task, 'shared_task')
    def exit(self):
        """Load the selected slot (if any), stop tasks, and hide the menu."""
        if self.selection:
            cam.load_saved_state(name=f'save_{self.selection}', ok_if_not_exists=True)
        tasks.remove('cam_load_task')
        tasks.remove('shared_task')
        mouse.touch()
        self.cam_load_slots.hide()
    def render_camera_load_buttons(self):
        """Build one button per save slot (1-9); occupied slots are tinted."""
        self.cam_load_slots = GenericMenu(title='Release key with moused hovered over desired save slot', frame_color=(0, 0, 0, 0.2), title_pos=(0, 0, 0.45))
        pos = (- 1.2)
        for slot in range(1, 10):
            exists = (f'save_{slot}' in cam.states)
            # The 4-tuple text supplies the button's per-state labels.
            button = self.cam_load_slots.add_button(text=(f'{slot}', f'{slot}', ('load' if exists else 'empty'), f'{slot}'), command=(lambda : None), scale=0.1, text_scale=0.6, frameSize=((- 1.2), 1.2, (- 1.2), 1.2), frameColor=((0.3, 0.6, 0.6, 1.0) if exists else (0.8, 0.8, 0.8, 1.0)))
            button.setPos((pos, 0, 0.25))
            # Track which slot the mouse is hovering via WITHIN/WITHOUT events.
            button.bind(DGG.WITHIN, self.update_load_selection, extraArgs=[slot])
            button.bind(DGG.WITHOUT, self.update_load_selection, extraArgs=[None])
            pos += 0.3
        self.cam_load_slots.show()
    def update_load_selection(self, state, coords):
        # state is the slot number (or None) passed through extraArgs.
        self.selection = state
    def cam_load_task(self, task):
        """Per-frame task: leave this mode once the cam_load key is released."""
        if (not self.keymap[Action.cam_load]):
            enter_kwargs = (dict(load_prev_cam=True) if (Global.mode_mgr.last_mode == Mode.aim) else dict())
            Global.mode_mgr.change_mode(Global.mode_mgr.last_mode, enter_kwargs=enter_kwargs)
        return task.cont
def test_trigger_transform(dash_duo):
    """End-to-end check that Trigger inputs fire the callback without
    contributing arguments, while Input fires AND contributes."""
    app = DashProxy(prevent_initial_callbacks=True, transforms=[TriggerTransform()])
    app.layout = html.Div([html.Button(id='btn1'), html.Button(id='btn2'), html.Button(id='btn3'), html.Button(id='btn4'), html.Div(id='log')])

    # NOTE(review): the original had a bare tuple here instead of a decorator,
    # so the callback was never registered; restored as an app.callback
    # registration — confirm against version control.
    @app.callback(Output('log', 'children'), Trigger('btn1', 'n_clicks'), Input('btn2', 'n_clicks'), Trigger('btn3', 'n_clicks'), State('btn4', 'n_clicks'))
    def update(n_clicks2, n_clicks4):
        return f'{str(n_clicks2)}-{str(n_clicks4)}'
    dash_duo.start_server(app)
    log = dash_duo.find_element('#log')
    assert (log.text == '')
    # btn1 is a Trigger: fires the callback but passes no value.
    dash_duo.find_element('#btn1').click()
    time.sleep(0.1)
    assert (log.text == 'None-None')
    # btn2 is an Input: fires and contributes its n_clicks.
    dash_duo.find_element('#btn2').click()
    time.sleep(0.1)
    assert (log.text == '1-None')
    # btn4 is a State: does not fire the callback on its own.
    dash_duo.find_element('#btn4').click()
    time.sleep(0.1)
    assert (log.text == '1-None')
    # btn3 (Trigger) fires again; the State value from btn4 is now visible.
    dash_duo.find_element('#btn3').click()
    time.sleep(0.1)
    assert (log.text == '1-1')
class Draw_grab_width(QLabel):
    """Overlay label drawing a width gauge: two green horizontal rails plus a
    red crosshair (center vertical line and middle horizontal line)."""

    def __init__(self, parent):
        super(Draw_grab_width, self).__init__(parent)
        self.h = 18  # gauge height in pixels

    def paintEvent(self, event):
        # Bug fix: the original constructed a single QPainter(self) in
        # __init__ and called painter.end() here, leaving a dead painter for
        # every repaint after the first. A QPainter must be created (and
        # ended) within paintEvent.
        painter = QPainter(self)
        painter.setPen(QPen(Qt.green, 1, Qt.SolidLine))
        painter.drawLine(0, 0, self.width(), 0)
        painter.drawLine(0, self.h, self.width(), self.h)
        painter.setPen(QPen(Qt.red, 1, Qt.SolidLine))
        # Bug fix: drawLine's int overload rejects float args, so use floor
        # division for the midpoints.
        painter.drawLine(self.width() // 2, 0, self.width() // 2, self.h)
        painter.drawLine(0, self.h // 2, self.width(), self.h // 2)
        painter.end()
        super().paintEvent(event)
def parse_obj_with_group(obj_name):
    """Parse a Wavefront .obj file into vertex, normal, and per-group face lines.

    Parameters:
        obj_name: path to the .obj file.

    Returns (vlines, vnlines, glines): raw 'v ' lines, raw 'vn ' lines, and a
    dict mapping group name (last whitespace token of each 'g' line) to that
    group's face lines. Face lines are attributed to the most recent group; a
    face before any group raises NameError (matching the original contract).
    """
    vlines = []
    vnlines = []
    glines = {}
    with open(obj_name) as f:
        for line in f:
            if line.startswith('v '):
                vlines.append(line)
            elif line.startswith('vn '):
                vnlines.append(line)
            elif line.startswith('g'):
                g_name = line.strip().split(' ')[-1]
                if g_name in glines:
                    # Group re-entered: report how many faces it already has
                    # (diagnostic output, preserved from the original).
                    print(g_name, len(glines[g_name]))
                else:
                    glines[g_name] = []
            elif line.startswith('f'):
                glines[g_name].append(line)
    return (vlines, vnlines, glines)
def extractChibitranslationWordpressCom(item):
    """Map a Chibitranslation Wordpress post to a release message.

    Returns None for previews or posts without chapter/volume info, a built
    release message when a known series tag matches, and False otherwise.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    if (not (chp or vol)) or ('preview' in title.lower()):
        return None
    known_series = [('Food Chain', 'The Man Standing on Top of the Food Chain', 'translated')]
    for tag, series_name, tl_type in known_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def get_flet_server_job_ids():
    """Populate the module-level ``build_jobs`` dict with name -> jobId for
    every job of the current AppVeyor build, read from the REST API."""
    account_name = os.environ.get('APPVEYOR_ACCOUNT_NAME')
    project_slug = os.environ.get('APPVEYOR_PROJECT_SLUG')
    build_id = os.environ.get('APPVEYOR_BUILD_ID')
    # NOTE(review): the original URL literal was truncated in this source;
    # reconstructed from the AppVeyor REST API ("get build by id") — confirm
    # against version control.
    url = f'https://ci.appveyor.com/api/projects/{account_name}/{project_slug}/builds/{build_id}'
    print(f'Fetching build details at {url}')
    req = urllib.request.Request(url)
    req.add_header('Content-type', 'application/json')
    project = json.loads(urllib.request.urlopen(req).read().decode())
    for job in project['build']['jobs']:
        build_jobs[job['name']] = job['jobId']
def test_cli_record_output():
    """Run the fastavro CLI on weather.avro and check the JSON-lines output.

    NOTE(review): the numeric 'time' literals were stripped to '(- )' in this
    source; restored from the canonical Avro weather.avro test dataset —
    confirm against version control.
    """
    given_avro_input = os.path.join(data_dir, 'weather.avro')
    given_cmd_args = [sys.executable, '-m', 'fastavro', given_avro_input]
    expected_data = [
        {'station': '011990-99999', 'time': -619524000000, 'temp': 0},
        {'station': '011990-99999', 'time': -619506000000, 'temp': 22},
        {'station': '011990-99999', 'time': -619484400000, 'temp': -11},
        {'station': '012650-99999', 'time': -655531200000, 'temp': 111},
        {'station': '012650-99999', 'time': -655509600000, 'temp': 78},
    ]
    result_output = subprocess.check_output(given_cmd_args).decode().splitlines()
    data = [json.loads(result_line_out) for result_line_out in result_output]
    assert (data == expected_data)
def get_sha_functions(bv_size_in, bv_size_out=256):
    """Build an uninterpreted SHA function and its inverse as z3 functions.

    The forward function maps a bv_size_in-bit vector to a bv_size_out-bit
    vector; the inverse maps the other way. Returned as a 2-tuple.
    """
    in_sort = z3.BitVecSort(bv_size_in)
    out_sort = z3.BitVecSort(bv_size_out)
    label = 'sha{}'.format(bv_size_in)
    sha_func = z3.Function(label, in_sort, out_sort)
    sha_func_inv = z3.Function(label + '_inv', out_sort, in_sort)
    return (sha_func, sha_func_inv)
def test_should_generate_simple_policy():
    """generate_policy should emit one Allow statement covering all recorded actions."""
    records = [
        Record('autoscaling.amazonaws.com', 'DescribeLaunchConfigurations'),
        Record('sts.amazonaws.com', 'AssumeRole'),
    ]
    expected = PolicyDocument(
        Version='2012-10-17',
        Statement=[
            Statement(
                Effect='Allow',
                Action=[
                    Action('autoscaling', 'DescribeLaunchConfigurations'),
                    Action('sts', 'AssumeRole'),
                ],
                Resource=['*'],
            )
        ],
    )
    assert (generate_policy(records) == expected)
@pytest.fixture(scope='function')
def custom_fields_system(db, system, system_third_party_sharing, custom_field_definition_system, custom_field_definition_system_2, custom_field_definition_system_disabled):
    """Yield five CustomField rows (incl. one from a disabled definition),
    then delete them on teardown.

    NOTE(review): the decorator line was mangled to a bare ``(scope='function')``
    in this source; restored as ``@pytest.fixture(scope='function')`` — confirm
    against version control.
    """
    field_1 = sql_models.CustomField.create(db=db, data={'resource_type': custom_field_definition_system.resource_type, 'resource_id': system.fides_key, 'custom_field_definition_id': custom_field_definition_system.id, 'value': ['Test value 1']})
    field_2 = sql_models.CustomField.create(db=db, data={'resource_type': custom_field_definition_system.resource_type, 'resource_id': system.fides_key, 'custom_field_definition_id': custom_field_definition_system.id, 'value': ['Test value 2']})
    field_3 = sql_models.CustomField.create(db=db, data={'resource_type': custom_field_definition_system_2.resource_type, 'resource_id': system.fides_key, 'custom_field_definition_id': custom_field_definition_system_2.id, 'value': ['Test value 3']})
    field_4 = sql_models.CustomField.create(db=db, data={'resource_type': custom_field_definition_system_2.resource_type, 'resource_id': system_third_party_sharing.fides_key, 'custom_field_definition_id': custom_field_definition_system_2.id, 'value': ['Test value 4']})
    field_5 = sql_models.CustomField.create(db=db, data={'resource_type': custom_field_definition_system_disabled.resource_type, 'resource_id': system_third_party_sharing.fides_key, 'custom_field_definition_id': custom_field_definition_system_disabled.id, 'value': ['Disabled value, should be filtered out!']})
    yield (field_1, field_2, field_3, field_4, field_5)
    db.delete(field_1)
    db.delete(field_2)
    db.delete(field_3)
    db.delete(field_4)
    db.delete(field_5)
class Recursion(TestCase):
    """Tests for cache.Recursion: resumable generators backed by an on-disk
    history cache of the last ``length`` yielded items."""
    def test_nocache(self):
        """Without a cache dir, resume() always starts from empty history."""
        untouched = object()
        class R(cache.Recursion, length=1):
            def resume(R_self, history):
                nonlocal received_history
                received_history = tuple(history)
                (yield from range((0 if (not history) else (history[(- 1)] + 1)), 10))
        received_history = untouched
        self.assertEqual(tuple(R()), tuple(range(10)))
        self.assertEqual(received_history, ())
    def test_cache(self):
        """Partial reads persist; later reads resume from the cached tail."""
        # read(it, n): consume at most n items from the iterable.
        read = (lambda iterable, n: tuple((item for (i, item) in zip(range(n), iterable))))
        untouched = object()
        for length in (1, 2, 3):
            with self.subTest(length=length), tmpcache():
                class R(cache.Recursion, length=length):
                    def resume(R_self, history):
                        nonlocal received_history
                        received_history = tuple(history)
                        (yield from range((0 if (not history) else (history[(- 1)] + 1)), 10))
                # First read: nothing cached yet, so history is empty.
                received_history = untouched
                self.assertEqual(read(R(), 4), tuple(range(4)))
                self.assertEqual(received_history, ())
                # Shorter read is served from cache; resume() is not called.
                received_history = untouched
                self.assertEqual(read(R(), 3), tuple(range(3)))
                self.assertEqual(received_history, untouched)
                # Longer read resumes with the last `length` cached items.
                received_history = untouched
                self.assertEqual(read(R(), 6), tuple(range(6)))
                self.assertEqual(received_history, tuple(range((4 - length), 4)))
                received_history = untouched
                self.assertEqual(read(R(), 12), tuple(range(10)))
                self.assertEqual(received_history, tuple(range((6 - length), 6)))
                # Fully cached sequence: no resume needed even for a long read.
                received_history = untouched
                self.assertEqual(read(R(), 12), tuple(range(10)))
                self.assertEqual(received_history, untouched)
    def test_cache_exception(self):
        """Items yielded before an exception are cached; the error re-raises on resume."""
        read = (lambda iterable, n: tuple((item for (i, item) in zip(range(n), iterable))))
        untouched = object()
        class R(cache.Recursion, length=1):
            def resume(R_self, history):
                nonlocal received_history
                received_history = tuple(history)
                (yield from range((0 if (not history) else (history[(- 1)] + 1)), 2))
                raise TestException('spam')
        with tmpcache():
            received_history = untouched
            with self.assertRaises(TestException) as cm:
                read(R(), 3)
            self.assertEqual(cm.exception.args[0], 'spam')
            self.assertEqual(received_history, ())
            # The two successfully yielded items are still served from cache.
            received_history = untouched
            self.assertEqual(read(R(), 2), tuple(range(2)))
            self.assertEqual(received_history, untouched)
            # Reading past the cache resumes (history=(1,)) and hits the error again.
            received_history = untouched
            with self.assertRaises(TestException) as cm:
                read(R(), 4)
            self.assertEqual(cm.exception.args[0], 'spam')
            self.assertEqual(received_history, (1,))
    def test_corruption(self):
        """A truncated or bogus cache entry falls back to resuming before it."""
        read = (lambda iterable, n: tuple((item for (i, item) in zip(range(n), iterable))))
        untouched = object()
        class R(cache.Recursion, length=1):
            def resume(R_self, history):
                nonlocal received_history
                received_history = tuple(history)
                (yield from range((0 if (not history) else (history[(- 1)] + 1)), 10))
        for icorrupted in range(3):
            for corruption in ('', 'bogus'):
                with self.subTest(corruption=corruption, icorrupted=icorrupted), tmpcache() as cachedir:
                    received_history = untouched
                    self.assertEqual(read(R(), 4), tuple(range(4)))
                    self.assertEqual(received_history, ())
                    cache_files = tuple(cachedir.iterdir())
                    self.assertEqual(len(cache_files), 1)
                    (cache_file,) = cache_files
                    # Overwrite one numbered entry with corrupt bytes.
                    self.assertTrue((cache_file / '{:04d}'.format(icorrupted)).exists())
                    with (cache_file / '{:04d}'.format(icorrupted)).open('wb') as f:
                        f.write(corruption.encode())
                    received_history = untouched
                    self.assertEqual(read(R(), 6), tuple(range(6)))
                    # Resume history ends just before the corrupted entry.
                    self.assertEqual(received_history, (((icorrupted - 1),) if icorrupted else ()))
    # NOTE(review): the line below looks like a stripped decorator, most
    # likely `@unittest.skipUnless(cache._lock_file is not
    # cache._lock_file_fallback, ...)` (or skipIf of the identity test);
    # as written it is a bare tuple expression. Confirm against VCS.
    ((cache._lock_file is cache._lock_file_fallback), 'platform does not support file locks')
    def test_concurrent_access(self):
        """A reader blocks on a locked cache entry until the lock is released."""
        read = (lambda iterable, n: tuple((item for (i, item) in zip(range(n), iterable))))
        untouched = object()
        class R(cache.Recursion, length=1):
            def resume(R_self, history):
                nonlocal received_history
                received_history = tuple(history)
                (yield from range((0 if (not history) else (history[(- 1)] + 1)), 10))
        def wrapper(n):
            nonlocal nsuccess
            assert (read(R(), n) == tuple(range(n)))
            nsuccess += 1
        for ilock in range(3):
            with self.subTest(ilock=ilock), tmpcache() as cachedir:
                nsuccess = 0
                received_history = untouched
                wrapper(4)
                self.assertEqual(received_history, ())
                self.assertEqual(nsuccess, 1)
                cache_files = tuple(cachedir.iterdir())
                self.assertEqual(len(cache_files), 1)
                cache_file = (cache_files[0] / '{:04d}'.format(ilock))
                assert cache_file.exists()
                with cache_file.open('r+b') as f:
                    # Hold the file lock so the background reader must wait.
                    cache._lock_file(f)
                    received_history = untouched
                    t = threading.Thread(target=(lambda : wrapper(5)), daemon=True)
                    t.start()
                    t.join(timeout=1)
                    # While locked, the thread has made no progress.
                    self.assertEqual(received_history, untouched)
                    self.assertEqual(nsuccess, 1)
                # Lock released: the thread completes and resumes from cache.
                t.join(timeout=5)
                self.assertFalse(t.is_alive())
                self.assertEqual(received_history, (3,))
                self.assertEqual(nsuccess, 2)
class ImageSerializer(s.ConditionalDCBoundSerializer):
    """Serializer for Image objects, including dc-bound ownership handling
    and alias/version uniqueness validation."""
    # Maps serializer attribute names to the model attribute captured in backups.
    _backup_attrs_map_ = {'owner': 'owner_id', 'dc_bound': 'dc_bound_id'}
    _model_ = Image
    _update_fields_ = ('alias', 'version', 'dc_bound', 'owner', 'access', 'desc', 'resize', 'deploy', 'tags')
    _default_fields_ = ('name', 'alias', 'owner')
    name = s.RegexField('^[A-Za-z0-9][A-Za-z0-9\\._-]*$', max_length=32)
    uuid = s.CharField(read_only=True)
    alias = s.SafeCharField(max_length=32)
    version = s.SafeCharField(max_length=16, default='1.0')
    owner = s.SlugRelatedField(slug_field='username', queryset=User.objects)
    access = s.IntegerChoiceField(choices=Image.ACCESS, default=Image.PRIVATE)
    desc = s.SafeCharField(max_length=128, required=False)
    ostype = s.IntegerChoiceField(choices=Image.OSTYPE, read_only=True)
    size = s.IntegerField(read_only=True)
    resize = s.BooleanField(default=False)
    deploy = s.BooleanField(default=False)
    tags = s.TagField(required=False, default=[])
    status = s.IntegerChoiceField(choices=Image.STATUS, read_only=True, required=False)
    created = s.DateTimeField(read_only=True, required=False)
    def __init__(self, request, img, *args, **kwargs):
        """Bind the serializer to one image; restrict the owner choices to
        users visible to the requester (single-object mode only)."""
        super(ImageSerializer, self).__init__(request, img, *args, **kwargs)
        if (not kwargs.get('many', False)):
            self.update_manifest = True
            self._dc_bound = img.dc_bound
            self.fields['owner'].queryset = get_owners(request, all=True)
    def create_img_backup(self):
        """Snapshot the updatable attributes (using backup attr names) so a
        failed update can be rolled back."""
        items = self._backup_attrs_map_
        return {items.get(attr, attr): getattr(self.object, items.get(attr, attr)) for attr in self._update_fields_}
    def _normalize(self, attr, value):
        # dc_bound is normalized to the datacenter captured at bind time.
        if (attr == 'dc_bound'):
            return self._dc_bound
        return super(ImageSerializer, self)._normalize(attr, value)
    def validate_owner(self, attrs, source):
        validate_owner(self.object, attrs.get(source, None), _('Image'))
        return attrs
    def validate(self, attrs):
        """Cross-field validation: skip manifest updates for DB-only changes,
        enforce alias+version uniqueness, and apply the per-DC image limit."""
        # When only DB-side fields change, the image manifest need not be rebuilt.
        db_only_manifest_keys = {'dc_bound', 'dc_bound_bool', 'owner'}
        if db_only_manifest_keys.issuperset(attrs.keys()):
            self.update_manifest = False
        try:
            alias = attrs['alias']
        except KeyError:
            alias = self.object.alias
        try:
            version = attrs['version']
        except KeyError:
            version = self.object.version
        qs = Image.objects
        if self.object.pk:
            qs = qs.exclude(pk=self.object.pk)
        # Alias is compared case-insensitively; the (alias, version) pair must be unique.
        if qs.filter(alias__iexact=alias, version=version).exists():
            self._errors['alias'] = s.ErrorList([_('This alias is already in use. Please supply a different alias or version.')])
        # Creation only: enforce the datacenter's image quota if configured.
        if ((self.request.method == 'POST') and self._dc_bound):
            limit = self._dc_bound.settings.VMS_IMAGE_LIMIT
            if (limit is not None):
                if (Image.objects.filter(dc_bound=self._dc_bound).count() >= int(limit)):
                    raise s.ValidationError(_('Maximum number of server disk images reached'))
        return super(ImageSerializer, self).validate(attrs)
def main(argv=sys.argv):
    """CLI entry point: dispatch on the parsed option flags.

    Supported actions: view/tail screen logs, view ROS logs, print log
    paths, delete a node's log/pid files, run a node, kill a PID by force,
    or print a package path. Falls through to printing help when no action
    matched. Errors are reported on stderr.
    """
    try:
        print_help = True
        (options, args) = parse_options(argv)
        if options['show_screen_log']:
            # Open the node's screen log in the configured log viewer.
            logfile = screen.get_logfile(node=options['show_screen_log'])
            if (not os.path.isfile(logfile)):
                raise Exception(('screen logfile not found for: %s' % options['show_screen_log']))
            cmd = ' '.join([Settings.LOG_VIEWER, str(logfile)])
            print(cmd)
            p = subprocess.Popen(shlex.split(cmd))
            p.wait()
            print_help = False
        if options['tail_screen_log']:
            # Follow the screen log with `tail -f`.
            logfile = screen.get_logfile(node=options['tail_screen_log'])
            if (not os.path.isfile(logfile)):
                raise Exception(('screen logfile not found for: %s' % options['tail_screen_log']))
            cmd = ' '.join(['tail', '-f', '-n', '25', str(logfile)])
            print(cmd)
            p = subprocess.Popen(shlex.split(cmd))
            p.wait()
            print_help = False
        elif options['show_ros_log']:
            # Open the node's ROS log in the configured log viewer.
            logfile = screen.get_ros_logfile(node=options['show_ros_log'])
            if (not os.path.isfile(logfile)):
                raise Exception(('ros logfile not found for: %s' % options['show_ros_log']))
            cmd = ' '.join([Settings.LOG_VIEWER, str(logfile)])
            print(cmd)
            p = subprocess.Popen(shlex.split(cmd))
            p.wait()
            print_help = False
        elif options['ros_log_path']:
            # '[]' means "no node given": print the ROS home instead.
            if (options['ros_log_path'] == '[]'):
                print(get_ros_home())
            else:
                print(screen.get_logfile(node=options['ros_log_path']))
            print_help = False
        elif options['delete_logs']:
            # Remove screen log, pid file and ROS log for the node, if present.
            logfile = screen.get_logfile(node=options['delete_logs'])
            pidfile = screen.get_pidfile(node=options['delete_logs'])
            roslog = screen.get_ros_logfile(node=options['delete_logs'])
            if os.path.isfile(logfile):
                os.remove(logfile)
            if os.path.isfile(pidfile):
                os.remove(pidfile)
            if os.path.isfile(roslog):
                os.remove(roslog)
            print_help = False
        elif (options['node_type'] and options['package'] and options['node_name']):
            # All three node options present: launch the node.
            runNode(options['package'], options['node_type'], options['node_name'], args, options['prefix'], options['node_respawn'], options['masteruri'], loglevel=options['loglevel'])
            print_help = False
        elif options['pidkill']:
            import signal
            os.kill(int(options['pidkill']), signal.SIGKILL)
            print_help = False
        elif options['package']:
            print(roslib.packages.get_pkg_dir(options['package']))
            print_help = False
        if print_help:
            # No recognized action: show usage, briefly keep it on screen.
            parser = _get_optparse()
            parser.print_help()
            time.sleep(3)
    except Exception as e:
        sys.stderr.write(('%s\n' % e))
class DrillStand(Boxes):
    """boxes.py generator: a stand with angled rows of compartments for drill bits.

    The compartment floors rise from left to right by ``extra_height`` so the
    bits lean back and their sizes are easy to read.  ``sx`` are the
    compartment widths, ``sy`` the row depths (front to back) and ``sh`` the
    row heights on the left side.
    """
    description = 'Note: `sh` gives the height of the rows front to back. It though should have the same number of entries as `sy`. These heights are the one on the left side and increase throughout the row. To have each compartment a bit higher than the previous one the steps in `sh` should be a bit bigger than `extra_height`.\n\nAssembly:\n\n\n\nStart with putting the slots of the inner walls together. Be especially careful with adding the bottom. It is always asymmetrical and flush with the right/lower side while being a little short on the left/higher side to not protrude into the side wall.\n\n| | |\n| ---- | ---- |\n|  |  |\n| Then add the front and the back wall. | Add the very left and right walls last. |\n|  |  |\n'
    ui_group = 'Misc'
    def __init__(self) -> None:
        Boxes.__init__(self)
        self.addSettingsArgs(edges.StackableSettings, height=1.0, width=3)
        self.addSettingsArgs(edges.FingerJointSettings)
        # sx: compartment widths, sy: row depths, sh: left-side row heights.
        self.buildArgParser(sx='25*6', sy='10:20:30', sh='25:40:60')
        self.argparser.add_argument('--extra_height', action='store', type=float, default=15.0, help='height difference left to right')
    def yWall(self, nr, move=None):
        """Draw inner wall number *nr* running front to back (y direction).

        :param nr: wall index counted from the left (determines its extra height)
        :param move: passed through to Boxes.move()
        """
        t = self.thickness
        (x, sx, y, sy, sh) = (self.x, self.sx, self.y, self.sy, self.sh)
        # Extra height at this wall's x position, interpolated across the width.
        eh = ((self.extra_height * ((sum(sx[:nr]) + (nr * t)) - t)) / x)
        (tw, th) = (((sum(sy) + (t * len(sy))) + t), (max(sh) + eh))
        if self.move(tw, th, move, True):
            return
        self.moveTo(t)
        self.polyline(y, 90)
        self.edges['f']((sh[(- 1)] + eh))
        self.corner(90)
        # Step down row by row from the back towards the front.
        for i in range((len(sy) - 1), 0, (- 1)):
            s1 = (max((sh[i] - sh[(i - 1)]), 0) + (4 * t))
            s2 = (max((sh[(i - 1)] - sh[i]), 0) + (4 * t))
            self.polyline(sy[i], 90, s1, (- 90), t, (- 90), s2, 90)
        self.polyline(sy[0], 90)
        self.edges['f']((sh[0] + eh))
        self.corner(90)
        self.move(tw, th, move)
    def sideWall(self, extra_height=0.0, foot_height=0.0, edges='sFf', move=None):
        """Draw an outer left/right side wall.

        :param extra_height: additional height (right wall gets self.extra_height)
        :param foot_height: height of the foot below the bottom
        :param edges: three edge types: bottom, back, front
        :param move: passed through to Boxes.move()
        """
        t = self.thickness
        (x, sx, y, sy, sh) = (self.x, self.sx, self.y, self.sy, self.sh)
        eh = extra_height
        fh = foot_height
        edges = [self.edges.get(e, e) for e in edges]
        tw = ((sum(sy) + (t * len(sy))) + t)
        th = (((max(sh) + eh) + fh) + edges[0].spacing())
        if self.move(tw, th, move, True):
            return
        self.moveTo(edges[0].margin())
        edges[0]((y + (2 * t)))
        self.edgeCorner(edges[0], 'e')
        self.edge(fh)
        self.step((edges[1].startwidth() - t))
        edges[1]((sh[(- 1)] + eh))
        self.edgeCorner(edges[1], 'e')
        # Walk the stepped top contour back to front, placing finger holes
        # for the inner walls at each height change.
        for i in range((len(sy) - 1), 0, (- 1)):
            self.edge(sy[i])
            if (sh[i] > sh[(i - 1)]):
                self.fingerHolesAt((0.5 * t), self.burn, (sh[i] + eh), 90)
                self.polyline(t, 90, (sh[i] - sh[(i - 1)]), (- 90))
            else:
                self.polyline(0, (- 90), (sh[(i - 1)] - sh[i]), 90, t)
                self.fingerHolesAt(((- 0.5) * t), self.burn, (sh[(i - 1)] + eh))
        self.polyline(sy[0])
        self.edgeCorner('e', edges[2])
        edges[2]((sh[0] + eh))
        self.step((t - edges[2].endwidth()))
        self.polyline(fh)
        self.edgeCorner('e', edges[0])
        self.move(tw, th, move)
    def xWall(self, nr, move=None):
        """Draw inner wall number *nr* running left to right (x direction).

        The bottom edge is slanted by the angle implied by extra_height/x.
        """
        t = self.thickness
        (x, sx, y, sy, sh) = (self.x, self.sx, self.y, self.sy, self.sh)
        eh = self.extra_height
        (tw, th) = ((x + (2 * t)), ((sh[nr] + eh) + t))
        # a: slant angle of the bottom; fa: length correction along the slant.
        a = math.degrees(math.atan((eh / x)))
        fa = (1 / math.cos(math.radians(a)))
        if self.move(tw, th, move, True):
            return
        self.moveTo(t, (eh + t), (- a))
        for i in range((len(sx) - 1)):
            self.edges['f']((fa * sx[i]))
            h = min(sh[(nr - 1)], sh[nr])
            s1 = ((h - (3.95 * t)) + ((self.extra_height * (sum(sx[:(i + 1)]) + (i * t))) / x))
            s2 = ((h - (3.95 * t)) + ((self.extra_height * ((sum(sx[:(i + 1)]) + (i * t)) + t)) / x))
            self.polyline(0, (90 + a), s1, (- 90), t, (- 90), s2, (90 - a))
        self.edges['f']((fa * sx[(- 1)]))
        self.polyline(0, (90 + a))
        self.edges['f']((sh[nr] + eh))
        self.polyline(0, 90, x, 90)
        self.edges['f'](sh[nr])
        self.polyline(0, (90 + a))
        self.move(tw, th, move)
    def xOutsideWall(self, h, edges='fFeF', move=None):
        """Draw the front or back outer wall spanning the full width.

        :param h: base wall height (left side)
        :param edges: four edge types: bottom, right, top, left
        :param move: passed through to Boxes.move()
        """
        t = self.thickness
        (x, sx, y, sy, sh) = (self.x, self.sx, self.y, self.sy, self.sh)
        edges = [self.edges.get(e, e) for e in edges]
        eh = self.extra_height
        tw = ((x + edges[1].spacing()) + edges[3].spacing())
        th = (((h + eh) + edges[0].spacing()) + edges[2].spacing())
        a = math.degrees(math.atan((eh / x)))
        fa = (1 / math.cos(math.radians(a)))
        if self.move(tw, th, move, True):
            return
        self.moveTo(edges[3].spacing(), (eh + edges[0].margin()), (- a))
        self.edge((t * math.tan(math.radians(a))))
        # The slanted bottom edge only supports finger-hole ('h') or
        # finger-joint ('f') edges.
        if isinstance(edges[0], boxes.edges.FingerHoleEdge):
            with self.saved_context():
                self.moveTo(0, 0, a)
                self.fingerHolesAt(0, (1.5 * t), ((x * fa) - (t * math.tan(math.radians(a)))), (- a))
            self.edge(((x * fa) - (t * math.tan(math.radians(a)))))
        elif isinstance(edges[0], boxes.edges.FingerJointEdge):
            edges[0](((x * fa) - (t * math.tan(math.radians(a)))))
        else:
            raise ValueError('Only edges h and f supported: ')
        self.corner(a)
        self.edgeCorner(edges[0], 'e', 90)
        self.corner((- 90))
        self.edgeCorner('e', edges[1], 90)
        edges[1]((eh + h))
        self.edgeCorner(edges[1], edges[2], 90)
        edges[2](x)
        self.edgeCorner(edges[2], edges[3], 90)
        edges[3](h)
        self.edgeCorner(edges[3], 'e', 90)
        self.corner((- 90))
        self.edgeCorner('e', edges[0], 90)
        self.moveTo(0, (self.burn + edges[0].startwidth()), 0)
        # Finger holes for the inner y walls.
        for i in range(1, len(sx)):
            posx = ((sum(sx[:i]) + (i * t)) - (0.5 * t))
            length = (h + ((self.extra_height * ((sum(sx[:i]) + (i * t)) - t)) / x))
            self.fingerHolesAt(posx, h, length, (- 90))
        self.move(tw, th, move)
    def bottomCB(self):
        """Callback: finger holes in the bottom for the inner x walls."""
        t = self.thickness
        (x, sx, y, sy, sh) = (self.x, self.sx, self.y, self.sy, self.sh)
        eh = self.extra_height
        a = math.degrees(math.atan((eh / x)))
        fa = (1 / math.cos(math.radians(a)))
        posy = ((- 0.5) * t)
        for i in range((len(sy) - 1)):
            posy += (sy[i] + t)
            posx = ((- t) * math.tan(math.radians(a)))
            for j in range(len(sx)):
                self.fingerHolesAt(posx, posy, (fa * sx[j]), 0)
                posx += ((fa * sx[j]) + (fa * t))
    def render(self):
        """Lay out all parts: front/back walls, inner walls, bottom and sides."""
        t = self.thickness
        (sx, sy, sh) = (self.sx, self.sy, self.sh)
        self.x = x = ((sum(sx) + (len(sx) * t)) - t)
        self.y = y = ((sum(sy) + (len(sy) * t)) - t)
        bottom_angle = math.atan((self.extra_height / x))
        self.xOutsideWall(sh[0], 'hFeF', move='up')
        for i in range(1, len(sy)):
            self.xWall(i, move='up')
        self.xOutsideWall(sh[(- 1)], 'hfef', move='up')
        self.rectangularWall(((x / math.cos(bottom_angle)) - (t * math.tan(bottom_angle))), y, 'fefe', callback=[self.bottomCB], move='up')
        self.sideWall(foot_height=(self.extra_height + (2 * t)), move='right')
        for i in range(1, len(sx)):
            self.yWall(i, move='right')
        self.sideWall(self.extra_height, (2 * t), move='right')
# NOTE(review): the two lines below look like decorator arguments whose
# '@pytest.mark' and '@patch' (or '@mock.patch') prefixes were lost in
# extraction -- confirm against the original test module.
.parametrize('model,full_refresh,dbt_vars', [('m1', True, None), ('m1', False, {'key1': 'x', 'key2': 'y', 'key3': 'z'}), (None, True, {'key1': 'x', 'key2': 'y', 'key3': 'z'}), (None, False, None)])
('subprocess.run')
def test_dbt_runner_run(mock_subprocess_run, model, full_refresh, dbt_vars):
    """Verify DbtRunner.run builds the dbt CLI invocation correctly.

    Checks that the mocked subprocess call includes/excludes the model name,
    the --full-refresh flag, and --vars (as a JSON blob) depending on the
    parametrized inputs.
    """
    project_dir = 'proj_dir'
    profiles_dir = 'prof_dir'
    expanded_dbt_vars = json.dumps(dbt_vars)
    dbt_runner = DbtRunner(project_dir=project_dir, profiles_dir=profiles_dir)
    dbt_runner.run(model, full_refresh=full_refresh, vars=dbt_vars)
    mock_subprocess_run.assert_called()
    if (model is not None):
        assert (model in mock_subprocess_run.call_args[0][0])
    if full_refresh:
        assert ('--full-refresh' in mock_subprocess_run.call_args[0][0])
    if (dbt_vars is None):
        assert ('--vars' not in mock_subprocess_run.call_args[0][0])
        assert (expanded_dbt_vars not in mock_subprocess_run.call_args[0][0])
    if (dbt_vars is not None):
        assert ('--vars' in mock_subprocess_run.call_args[0][0])
        assert (expanded_dbt_vars in mock_subprocess_run.call_args[0][0])
class OptionSeriesBulletSonification(Options):
    """Config wrapper for Highcharts ``series.bullet.sonification`` options."""
    # NOTE(review): these accessors read like @property / @x.setter pairs whose
    # decorators were lost in extraction -- as written, each later `def` with
    # the same name (notably the second `enabled`) silently shadows the earlier
    # one.  Confirm against the original module before relying on this block.
    def contextTracks(self) -> 'OptionSeriesBulletSonificationContexttracks':
        """Sub-configuration for context tracks."""
        return self._config_sub_data('contextTracks', OptionSeriesBulletSonificationContexttracks)
    def defaultInstrumentOptions(self) -> 'OptionSeriesBulletSonificationDefaultinstrumentoptions':
        """Sub-configuration for default instrument options."""
        return self._config_sub_data('defaultInstrumentOptions', OptionSeriesBulletSonificationDefaultinstrumentoptions)
    def defaultSpeechOptions(self) -> 'OptionSeriesBulletSonificationDefaultspeechoptions':
        """Sub-configuration for default speech options."""
        return self._config_sub_data('defaultSpeechOptions', OptionSeriesBulletSonificationDefaultspeechoptions)
    def enabled(self):
        """Getter: whether sonification is enabled (defaults to True)."""
        return self._config_get(True)
    def enabled(self, flag: bool):
        """Setter: enable/disable sonification (shadows the getter above as written)."""
        self._config(flag, js_type=False)
    def pointGrouping(self) -> 'OptionSeriesBulletSonificationPointgrouping':
        """Sub-configuration for point grouping."""
        return self._config_sub_data('pointGrouping', OptionSeriesBulletSonificationPointgrouping)
    def tracks(self) -> 'OptionSeriesBulletSonificationTracks':
        """Sub-configuration for sonification tracks."""
        return self._config_sub_data('tracks', OptionSeriesBulletSonificationTracks)
# NOTE(review): the line below looks like a decorator whose '@pytest.mark'
# prefix was lost in extraction -- confirm against the original test module.
.parametrize('xml', ['<root><element key="value">text</element><element>text</element>tail<empty-element/></root>', '<root>\n  <element key="value">text</element>\n  <element>text</element>tail\n  <empty-element/>\n</root>', '<axis default="400" maximum="1000" minimum="1" name="weight" tag="wght"><labelname xml:lang="fa-IR"></labelname></axis>'], ids=['simple_xml_no_indent', 'simple_xml_indent', 'xml_ns_attrib_utf_8'])
def test_roundtrip_string(xml):
    """Parse an XML string and serialize it back; the result must be identical."""
    root = etree.fromstring(xml.encode('utf-8'))
    result = etree.tostring(root, encoding='utf-8').decode('utf-8')
    assert (result == xml)
# NOTE(review): the line below looks like a decorator whose '@pytest.mark'
# prefix was lost in extraction -- confirm against the original test module.
.usefixtures('prepare_shared_client_config')
class TestAsyncClientDeviceGeneration():
    """Tests for async FL client creation: event timing and training schedules.

    Relies on the 'prepare_shared_client_config' fixture to set
    ``self.shared_client_config``; ``assertEqual`` is presumably imported at
    module level (not visible in this chunk) -- confirm.
    """
    def _verify_event(self, client: AsyncClientDevice, expected_start_time: int, expected_end_time: int) -> None:
        """Assert the client's training window matches the expected start/end."""
        assertEqual(client.training_schedule.start_time, expected_start_time)
        assertEqual(client.training_schedule.end_time, expected_end_time)
    def test_provide_client_event_generation(self) -> None:
        """Clients created from an explicit event list get exactly those start/end times."""
        event_list = [EventTimingInfo(prev_event_start_to_current_start=1, duration=3), EventTimingInfo(prev_event_start_to_current_start=2, duration=5), EventTimingInfo(prev_event_start_to_current_start=2, duration=1), EventTimingInfo(prev_event_start_to_current_start=10, duration=10)]
        # Convert relative start gaps into absolute start/end times.
        start_times_gaps = [val.prev_event_start_to_current_start for val in event_list]
        start_times = [sum(start_times_gaps[0:(x + 1)]) for x in range(0, len(start_times_gaps))]
        durations = [d.duration for d in event_list]
        end_times = [(t[0] + t[1]) for t in zip(start_times, durations)]
        event_generator = AsyncTrainingEventGeneratorFromList(**OmegaConf.structured(AsyncTrainingEventGeneratorFromListConfig(training_events=event_list)))
        num_users = len(event_list)
        data_provider = create_mock_data_provider(num_users=num_users, examples_per_user=1)
        user_selector = RandomAsyncUserSelector(data_provider=data_provider)
        current_time = 0
        for (start, end) in zip(start_times, end_times):
            client = AsyncClientFactory.create(current_time=current_time, event_generator=event_generator, user_selector=user_selector, client_config=self.shared_client_config)
            self._verify_event(client, expected_start_time=start, expected_end_time=end)
            current_time = client.next_event_time()
    def test_sequential_client_training_schedule(self) -> None:
        """With zero training duration, clients train sequentially and instantaneously."""
        num_users = 100
        training_start_time_distr = PoissonAsyncTrainingStartTimeDistrConfig(training_rate=10)
        duration_distr = PerExampleGaussianDurationDistributionConfig(training_duration_mean=0, training_duration_sd=0)
        event_generator = AsyncTrainingEventGenerator(**OmegaConf.structured(AsyncTrainingEventGeneratorConfig(training_start_time_distribution=training_start_time_distr, duration_distribution_generator=duration_distr)))
        examples_per_user = 1
        data_provider = create_mock_data_provider(num_users=num_users, examples_per_user=examples_per_user)
        user_selector = RandomAsyncUserSelector(data_provider=data_provider)
        current_time = 0
        clients = []
        for _ in range(num_users):
            client = AsyncClientFactory.create(current_time=current_time, event_generator=event_generator, user_selector=user_selector, client_config=self.shared_client_config)
            # Zero duration => start == end for every client.
            assertEqual(client.training_schedule.start_time, client.training_schedule.end_time)
            current_time = client.next_event_time()
            clients.append(client)
        for (client_1, client_2) in zip(clients, clients[1:]):
            assert (client_1.training_schedule.end_time <= client_2.training_schedule.start_time)
            assert (client_1.training_schedule.start_time < client_2.training_schedule.start_time)
    def _build_clients_training_duration_dist(self, duration_distr_config: Type[DurationDistributionConfig], num_users: int) -> List[AsyncClientDevice]:
        """Create clients whose users have strictly decreasing example counts.

        Used to compare per-example vs per-user duration distributions.
        """
        training_start_time_distr = PoissonAsyncTrainingStartTimeDistrConfig(training_rate=1000)
        duration_distr = duration_distr_config(training_duration_mean=1000, training_duration_sd=0)
        event_generator = AsyncTrainingEventGenerator(**OmegaConf.structured(AsyncTrainingEventGeneratorConfig(training_start_time_distribution=training_start_time_distr, duration_distribution_generator=duration_distr)))
        # User k gets (num_users - k) examples: counts decrease round-robin.
        num_examples_per_user = list(reversed(range(1, (num_users + 1))))
        data = [([1] * num_example) for (num_example, _) in zip(num_examples_per_user, range(num_users))]
        data_provider = FLDataProviderFromList(train_user_list=data, eval_user_list=data, test_user_list=data, model=MockFLModel())
        user_selector = RoundRobinAsyncUserSelector(data_provider=data_provider)
        clients = []
        current_time = 0
        for _ in range(num_users):
            client = AsyncClientFactory.create(current_time=current_time, event_generator=event_generator, user_selector=user_selector, client_config=self.shared_client_config)
            current_time = client.next_event_time()
            clients.append(client)
        return clients
    def test_training_duration_per_example_gaussian(self) -> None:
        """Per-example duration: more examples => later end time, so end times decrease."""
        num_users = 50
        clients = self._build_clients_training_duration_dist(duration_distr_config=PerExampleGaussianDurationDistributionConfig, num_users=num_users)
        for (client_1, client_2) in zip(clients, clients[1:]):
            assert (client_1.training_schedule.end_time > client_2.training_schedule.end_time)
    def test_training_duration_per_user_gaussian(self) -> None:
        """Per-user duration: example count is irrelevant, so end times increase."""
        num_users = 50
        clients = self._build_clients_training_duration_dist(duration_distr_config=PerUserGaussianDurationDistributionConfig, num_users=num_users)
        for (client_1, client_2) in zip(clients, clients[1:]):
            assert (client_1.training_schedule.end_time < client_2.training_schedule.end_time)
class TestPCPreValidationCLI(TestCase):
    """Tests for the pc_pre_validation CLI argument parsing and dispatch."""
    # NOTE(review): the bare string-tuple lines before each test method look
    # like '@patch(...)' decorators whose '@patch' prefix was lost in
    # extraction -- as written this block will not parse; confirm against the
    # original test module.
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.print')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.InputDataValidator')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.BinaryFileValidator')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.run_validators')
    def test_parsing_required_args(self, run_validators_mock: Mock, binary_file_validator_mock: Mock, input_data_validator_mock: Mock, _print_mock: Mock) -> None:
        """With only required args, validators get defaults for all optional settings."""
        aggregated_result = ValidationResult.SUCCESS
        aggregated_report = 'Aggregated report...'
        run_validators_mock.side_effect = [[aggregated_result, aggregated_report]]
        # NOTE(review): the string literal below is unterminated -- the original
        # value (an input file path/URL) was lost in extraction; confirm.
        expected_input_file_path = '
        cloud_provider_str = 'AWS'
        expected_cloud_provider = CloudProvider.AWS
        expected_region = 'region1'
        argv = [f'--input-file-path={expected_input_file_path}', f'--cloud-provider={cloud_provider_str}', f'--region={expected_region}']
        validation_cli.main(argv)
        input_data_validator_mock.assert_called_with(input_file_path=expected_input_file_path, cloud_provider=expected_cloud_provider, region=expected_region, stream_file=False, publisher_pc_pre_validation=False, partner_pc_pre_validation=False, enable_for_tee=False, private_computation_role=None, start_timestamp=None, end_timestamp=None, access_key_id=None, access_key_data=None, tee_local_file_path=None)
        binary_file_validator_mock.assert_called_with(region=expected_region, access_key_id=None, access_key_data=None, binary_version=None)
        run_validators_mock.assert_called_with([input_data_validator_mock(), binary_file_validator_mock()])
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.print')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.InputDataValidator')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.BinaryFileValidator')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.run_validators')
    def test_parsing_all_args(self, run_validators_mock: Mock, binary_file_validator_mock: Mock, input_data_validator_mock: Mock, _print_mock: Mock) -> None:
        """With every optional arg supplied, validators receive all of them."""
        aggregated_result = ValidationResult.SUCCESS
        aggregated_report = 'Aggregated report...'
        run_validators_mock.side_effect = [[aggregated_result, aggregated_report]]
        # NOTE(review): the string literal below is unterminated -- the original
        # value was lost in extraction; confirm.
        expected_input_file_path = '
        cloud_provider_str = 'AWS'
        expected_cloud_provider = CloudProvider.AWS
        expected_region = 'region1'
        # NOTE(review): these two expected timestamps look emptied by
        # extraction (originally numeric strings?) -- confirm.
        expected_start_timestamp = ''
        expected_end_timestamp = ''
        expected_access_key_id = 'access_key_id2'
        expected_access_key_data = 'access_key_data3'
        expected_binary_version = 'binary_version'
        expected_pc_computation_role: PrivateComputationRole = PrivateComputationRole.PARTNER.name
        expected_tee_local_file_path = '/tmp/local_file_path'
        argv = [f'--input-file-path={expected_input_file_path}', f'--cloud-provider={cloud_provider_str}', f'--region={expected_region}', f'--start-timestamp={expected_start_timestamp}', f'--end-timestamp={expected_end_timestamp}', f'--access-key-id={expected_access_key_id}', f'--access-key-data={expected_access_key_data}', f'--binary-version={expected_binary_version}', f'--private-computation-role={expected_pc_computation_role}', '--pre-validation-file-stream=enabled', '--publisher-pc-pre-validation=enabled', '--partner-pc-pre-validation=enabled', '--enable-for-tee=enabled', f'--tee-local-file-path={expected_tee_local_file_path}']
        validation_cli.main(argv)
        input_data_validator_mock.assert_called_with(input_file_path=expected_input_file_path, cloud_provider=expected_cloud_provider, region=expected_region, stream_file=True, publisher_pc_pre_validation=True, partner_pc_pre_validation=True, enable_for_tee=True, private_computation_role=PrivateComputationRole.PARTNER.name, start_timestamp=expected_start_timestamp, end_timestamp=expected_end_timestamp, access_key_id=expected_access_key_id, access_key_data=expected_access_key_data, tee_local_file_path=expected_tee_local_file_path)
        binary_file_validator_mock.assert_called_with(region=expected_region, access_key_id=expected_access_key_id, access_key_data=expected_access_key_data, binary_version=expected_binary_version)
        run_validators_mock.assert_called_with([input_data_validator_mock(), binary_file_validator_mock()])
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.print')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.InputDataValidator')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.BinaryFileValidator')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.run_validators')
    def test_it_includes_the_overall_result_when_failed(self, run_validators_mock: Mock, binary_file_validator_mock: Mock, input_data_validator_mock: Mock, _print_mock: Mock) -> None:
        """A FAILED aggregated result makes main() raise with the overall result string."""
        aggregated_result = ValidationResult.FAILED
        aggregated_report = 'Aggregated report...'
        run_validators_mock.side_effect = [[aggregated_result, aggregated_report]]
        argv = ['--input-file-path=test-path', '--cloud-provider=AWS', '--region=test-region']
        expected_overall_result_str = f'Overall Validation Result: {aggregated_result.value}'
        with self.assertRaisesRegex(Exception, expected_overall_result_str):
            validation_cli.main(argv)
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.print')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.InputDataValidator')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.BinaryFileValidator')
    ('fbpcs.pc_pre_validation.pc_pre_validation_cli.run_validators')
    def test_it_includes_the_overall_result_when_success(self, run_validators_mock: Mock, binary_file_validator_mock: Mock, input_data_validator_mock: Mock, print_mock: Mock) -> None:
        """A SUCCESS aggregated result is printed (not raised)."""
        aggregated_result = ValidationResult.SUCCESS
        aggregated_report = 'Aggregated report...'
        run_validators_mock.side_effect = [[aggregated_result, aggregated_report]]
        argv = ['--input-file-path=test-path', '--cloud-provider=AWS', '--region=test-region']
        expected_overall_result_str = f'Overall Validation Result: {aggregated_result.value}'
        validation_cli.main(argv)
        print_str = str(print_mock.call_args[0])
        self.assertRegex(print_str, expected_overall_result_str)
def faba_with_toptier_agencies(award_count_sub_schedule, award_count_submission, defc_codes):
    """Fixture data: awards funded by toptier agencies plus matching File C
    (Federal Account by Award) records carrying COVID obligations/outlays.
    """
    # Agency 1 funds an award with an 8 obligation and no outlay.
    toptier_agency(1)
    first_award = award_with_toptier_agency(1, 8, 0)
    # Agency 2 funds an award with a 7 outlay and no obligation.
    toptier_agency(2)
    second_award = award_with_toptier_agency(2, 0, 7)
    # A pre-built AwardSearch row with COVID DEFC 'M' spending.
    # (The 'obligaton' key is reproduced verbatim from the source data.)
    third_award = baker.make('search.AwardSearch', award_id=1, type='A', funding_agency_id=Agency.objects.first().id, funding_toptier_agency_code='001', total_loan_value=0, disaster_emergency_fund_codes=['M'], total_covid_obligation=8, total_covid_outlay=0, covid_spending_by_defc=[{'defc': 'M', 'outlay': 0, 'obligaton': 8}], action_date='2020-10-01')
    # File C records mirroring each award's obligation/outlay amounts.
    faba_for_award(first_award, 8, 0)
    faba_for_award(second_award, 0, 7)
    faba_for_award(third_award, 8, 0)
def uploaded(textpath):
    """Read an uploaded text file and keep only the lines we understand.

    A line is kept (stripped) when it starts with a supported link prefix,
    a '!' command, a wildcard-command prefix, or a '#' comment marker.

    :param textpath: uploaded file object with a ``.name`` path, or None
    :return: the kept lines joined via list_to_text(), or None when no
        file was supplied
    """
    if textpath is None:
        return None
    print(textpath)
    file_paths = textpath.name
    print(file_paths)
    # All accepted line prefixes, checked in a single startswith() call
    # (startswith accepts a tuple of alternatives).
    prefixes = tuple(supportedlinks) + ('!',) + tuple(wildcardcommand) + ('#',)
    links = []
    with open(file_paths, 'r') as file:
        for line in file:
            if line.startswith(prefixes):
                links.append(line.strip())
    return list_to_text(links)
class Bed2SidesEdge(edges.BaseEdge):
    """Edge drawing the profile of a bed side: foot, flat middle, and head.

    The radii of the three rounded corners are fixed; the straight segment
    lengths are derived from the configured bed length, full head length
    and full foot height.
    """

    def __init__(self, boxes, bed_length, full_head_length, full_foot_height) -> None:
        super().__init__(boxes, None)
        self.bed_length = bed_length
        self.full_head_length = full_head_length
        self.full_foot_height = full_foot_height

    def __call__(self, bed_height, **kw):
        # Fixed corner radii at the foot, middle and head of the profile.
        foot_corner, middle_corner, head_corner = 6, 3, 10
        corners = foot_corner + middle_corner + head_corner
        # Straight segment lengths, corrected for material thickness and radii.
        foot_height = self.full_foot_height - self.thickness - foot_corner
        head_length = self.full_head_length - head_corner - self.thickness
        head_height = bed_height - foot_height - corners
        middle_length = self.bed_length - head_length - corners
        self.polyline(foot_height, (90, foot_corner), middle_length, ((- 90), middle_corner), head_height, (90, head_corner), head_length)
def check_password_match(moderator: ModeratorModel, password: str):
    """Raise ArgumentError unless *password* is well-formed and matches the
    moderator's stored bcrypt hash.

    :raises ArgumentError: with MESSAGE_INVALID_PASSWORD when the password
        fails validity checks, or MESSAGE_PASSWORD_INCORRECT when it does
        not match the stored hash.
    """
    if not validation.check_password_validity(password):
        raise ArgumentError(MESSAGE_INVALID_PASSWORD)
    # Fetch the stored hash inside a session; commit before comparing.
    with session() as s:
        stored_hash = s.query(ModeratorOrmModel).filter_by(id=moderator.id).one().password
        s.commit()
    if not bcrypt.checkpw(password.encode(), stored_hash):
        raise ArgumentError(MESSAGE_PASSWORD_INCORRECT)
def print_perf_results(results):
    """Print per-run wall times plus a speedup/efficiency summary table.

    :param results: mapping of (pal, mem) keys -- parallelism (cores) and
        memory (MB) -- to lists of timedelta run durations.
    """
    pal_avgs = dict()
    # Smallest core count seen; serves as the speedup/efficiency baseline.
    # (Reconstructed initializer: the original line lost its value; float
    # infinity makes the first min() pick the first pal.)
    pal_min = float('inf')
    for (i, (key, tds)) in enumerate(sorted(results.items())):
        (pal, mem) = key
        pal_min = min(pal, pal_min)
        print(highlight_text(f'{i:03d}: pal={pal}, mem={mem} MB'))
        for (j, td) in enumerate(tds):
            print(f'Run {j:03d}: {td} h')
        avg = (sum(tds, timedelta(0)) / len(tds))
        pal_avgs[pal] = avg
        print(f' Avg: {avg} h')
        print()
    avg_min = pal_avgs[pal_min]
    print(f' pal_min: {pal_min} core(s)')
    print('efficiency: speedup / (pal / pal_min)\n')
    # Typo fix: header previously read 'efficieny'.
    header = ('pal', 'avg. / h', 'speedup', 'efficiency')
    col_fmts = ('int', 'str', 'float', 'float')
    table = TablePrinter(header, col_fmts, width=20)
    table.print_header()
    for (pal, avg) in pal_avgs.items():
        speedup = (avg_min / avg)
        speedup_per_pal = (speedup / (pal / pal_min))
        table.print_row((pal, str(avg), speedup, speedup_per_pal))
def test():
    """Grade a spaCy Doc-construction exercise (Spanish-language course).

    Relies on grader-injected globals: __solution__ (submitted source code),
    spaces and doc (from the learner's namespace), and __msg__ (feedback
    channel).  Assertion messages are shown to the learner and must stay
    in Spanish.
    """
    assert ('from spacy.tokens import Doc' in __solution__), 'Estas importando la clase Doc correctamente?'
    assert (len(spaces) == 5), 'Parece que el numero de espacios no concuerda con el numero de palabras.'
    assert all((isinstance(s, bool) for s in spaces)), 'Los espacios tienen que ser booleanos.'
    assert ([int(s) for s in spaces] == [0, 0, 1, 0, 0]), 'Estan correctos los espacios?'
    assert (doc.text == 'Vamos, empieza!'), 'Creaste el Doc correctamente?'
    __msg__.good('Bien!')
# NOTE(review): the line below looks like a decorator whose '@pytest.mark'
# prefix was lost in extraction -- confirm against the original test module.
.usefixtures('use_tmpdir')
def test_copy_directory_error(shell):
    """copy_directory reports 'existing directory' errors on stderr for both
    a missing source and a source that is a plain file."""
    assert (b'existing directory' in shell.copy_directory('does/not/exist', 'target').stderr)
    with open('file', 'w', encoding='utf-8') as f:
        f.write('hei')
    assert (b'existing directory' in shell.copy_directory('hei', 'target').stderr)
class DiagResult():
    """Accumulates diagnostics (clusters, routes, active elements, resources)
    for a single Ambassador diagnostic request, then renders them via
    as_dict() after finalize().
    """
    def __init__(self, diag: 'Diagnostics', estat: EnvoyStats, request) -> None:
        """Snapshot cluster stats and request host/scheme for this result."""
        self.diag = diag
        self.logger = self.diag.logger
        self.estat = estat
        # Per-cluster Envoy stats, keyed by cluster name.
        self.cstats = {cluster.name: self.estat.cluster_stats(cluster.stats_name) for cluster in self.diag.clusters.values()}
        self.request_host = request.headers.get('Host', '*')
        # NOTE(review): the string literal below is unterminated -- the default
        # scheme (presumably 'http') was lost in extraction; confirm.
        self.request_scheme = request.headers.get('X-Forwarded-Proto', '
        self.clusters: Dict[(str, DiagCluster)] = {}
        self.routes: List[dict] = []
        self.element_keys: Dict[(str, bool)] = {}
        self.ambassador_resources: Dict[(str, str)] = {}
        self.envoy_resources: Dict[(str, dict)] = {}
    def as_dict(self) -> Dict[(str, Any)]:
        """Render the accumulated diagnostics as a plain dict."""
        return {'cluster_stats': self.cstats, 'cluster_info': self.clusters, 'route_info': self.routes, 'active_elements': sorted(self.element_keys.keys()), 'ambassador_resources': self.ambassador_resources, 'envoy_resources': self.envoy_resources}
    def include_element(self, key: str) -> None:
        "\n Note that a particular key is something relevant to this result -- e.g.\n 'oh, the key foo-mapping.1 is active here'.\n\n One problem here is that we don't currently cycle over to make sure that\n all the requisite higher-level objects are brought in when we mark an\n element active. This needs fixing.\n\n :param key: the key we want to remember as being active.\n "
        self.element_keys[key] = True
    def include_referenced_elements(self, obj: dict) -> None:
        "\n Include all of the elements in the given object's _referenced_by\n array.\n\n :param obj: object for which to include referencing keys\n "
        for element_key in obj['_referenced_by']:
            self.include_element(element_key)
    def include_cluster(self, cluster: dict) -> DiagCluster:
        "\n Note that a particular cluster and everything that references it are\n relevant to this result. If the cluster has related health information in\n our cstats, fold that in too.\n\n Don't pass an IRCluster here -- turn it into a dict with as_dict()\n first.\n\n Returns the DiagCluster that we actually use to hold everything.\n\n :param cluster: dictionary version of a cluster to mark as active.\n :return: the DiagCluster for this cluster\n "
        c_name = cluster['name']
        if (c_name not in self.clusters):
            self.clusters[c_name] = DiagCluster(cluster)
        if (c_name in self.cstats):
            self.clusters[c_name].update_health(self.cstats[c_name])
        self.include_referenced_elements(cluster)
        return self.clusters[c_name]
    # NOTE(review): the def line below is garbled -- the method name and the
    # 'self' parameter were lost in extraction (likely originally
    # 'def include_group(self, group: IRHTTPMappingGroup) -> None:'); the
    # body builds a route_info entry for a mapping group.  Confirm against
    # the original module.
    def include_ group: IRHTTPMappingGroup) -> None:
        prefix = (group['prefix'] if ('prefix' in group) else group['regex'])
        rewrite = group.get('rewrite', '/')
        method = '*'
        host = None
        route_clusters: List[DiagCluster] = []
        # Include every mapping's cluster, carrying its traffic weight.
        for mapping in group.get('mappings', []):
            cluster = mapping['cluster']
            mapping_cluster = self.include_cluster(cluster.as_dict())
            mapping_cluster.update({'weight': mapping.get('weight', 100)})
            route_clusters.append(mapping_cluster)
        # host_redirect groups get a synthetic 'redirect' cluster.
        host_redir = group.get('host_redirect', None)
        if host_redir:
            redirect_cluster = self.include_cluster({'name': host_redir['name'], 'service': host_redir['service'], 'weight': 100, 'type_label': 'redirect', '_referenced_by': [host_redir['rkey']]})
            route_clusters.append(redirect_cluster)
            self.logger.debug(('host_redirect route: %s' % group))
            self.logger.debug(('host_redirect cluster: %s' % redirect_cluster))
        # Shadow (traffic-mirroring) clusters are labeled as such.
        shadows = group.get('shadows', [])
        for shadow in shadows:
            shadow_dict = shadow['cluster'].as_dict()
            shadow_dict['type_label'] = 'shadow'
            shadow_cluster = self.include_cluster(shadow_dict)
            route_clusters.append(shadow_cluster)
            self.logger.debug(('shadow route: %s' % group))
            self.logger.debug(('shadow cluster: %s' % shadow_cluster))
        # Pull :authority/:method pseudo-headers out of the header matches.
        headers = []
        for header in group.get('headers', []):
            hdr_name = header.get('name', None)
            hdr_value = header.get('value', None)
            if (hdr_name == ':authority'):
                host = hdr_value
            elif (hdr_name == ':method'):
                method = hdr_value
            else:
                headers.append(header)
        sep = ('' if prefix.startswith('/') else '/')
        route_key = ('%s://%s%s%s' % (self.request_scheme, (host if host else self.request_host), sep, prefix))
        route_info = {'_route': group.as_dict(), '_source': group['location'], '_group_id': group['group_id'], 'key': route_key, 'prefix': prefix, 'rewrite': rewrite, 'method': method, 'headers': headers, 'clusters': [x.default_missing() for x in route_clusters], 'host': (host if host else '*')}
        if ('precedence' in group):
            route_info['precedence'] = group['precedence']
        metadata_labels = (group.get('metadata_labels') or {})
        diag_class = (metadata_labels.get('ambassador_diag_class') or None)
        if diag_class:
            route_info['diag_class'] = diag_class
        self.routes.append(route_info)
        self.include_referenced_elements(group)
    def finalize(self) -> None:
        """Resolve the active element keys into Ambassador and Envoy resources."""
        for key in self.element_keys.keys():
            amb_el_info = self.diag.ambassador_elements.get(key, None)
            if amb_el_info:
                serialization = amb_el_info.get('serialization', None)
                if serialization:
                    self.ambassador_resources[key] = serialization
            envoy_el_info = self.diag.envoy_elements.get(key, None)
            if envoy_el_info:
                self.envoy_resources[key] = envoy_el_info
# NOTE(review): the two lines below look like markers/decorators whose
# '@pytest.mark' prefix was lost in extraction -- confirm against the
# original test module.
.external
.skipif((has_openai_key is False), reason='OpenAI API key not available')
def test_add_label():
    """Labels added to the llm textcat pipe after initialization take effect:
    the doc gets one category score per registered label."""
    nlp = spacy.blank('en')
    llm = nlp.add_pipe('llm', config={'task': {'_tasks': 'spacy.TextCat.v3'}, 'model': {'_models': 'spacy.GPT-3-5.v1'}})
    nlp.initialize()
    text = 'I am feeling great.'
    doc = nlp(text)
    # No labels registered yet, so no categories are produced.
    assert (len(doc.cats) == 0)
    for label in ['HAPPY', 'SAD']:
        llm.add_label(label)
    doc = nlp(text)
    assert (len(doc.cats) == 2)
    assert (set(doc.cats.keys()) == {'HAPPY', 'SAD'})
def generate_training_data_for_source_filename(source_filename: str, output_path: str, sciencebeam_parser: ScienceBeamParser, use_model: bool, use_directory_structure: bool, gzip_enabled: bool):
    """Parse *source_filename* into a layout document and emit training data for it.

    :param source_filename: input document to parse
    :param output_path: where training data is written
    :param sciencebeam_parser: parser providing features context and fulltext models
    :param use_model: whether to use the trained model when generating data
    :param use_directory_structure: whether to mirror a directory layout in the output
    :param gzip_enabled: whether to gzip the generated files
    """
    LOGGER.debug('use_model: %r', use_model)
    layout_document = get_layout_document_for_source_filename(source_filename, sciencebeam_parser=sciencebeam_parser)
    features_context = DocumentFeaturesContext(sciencebeam_parser.app_features_context)
    generate_training_data_for_layout_document(
        layout_document=layout_document,
        output_path=output_path,
        source_filename=source_filename,
        document_features_context=features_context,
        fulltext_models=sciencebeam_parser.fulltext_models,
        use_model=use_model,
        use_directory_structure=use_directory_structure,
        gzip_enabled=gzip_enabled,
    )
def test_arg_range6():
    """Range analysis of size args constrained by asserts with huge bounds.

    The expected results suggest the analysis saturates very large bounds
    (N > 2**32 reported as lower bound 2**15, K < 2**32 as (1, None)) --
    presumably by design; confirm against arg_range_analysis.
    """
    val = (2 ** 32)
    # NOTE(review): `foo` accesses `._loopir_proc` below, so it most likely
    # carried a compiler decorator (e.g. '@proc') that was lost in
    # extraction -- confirm against the original test module.
    def foo(N: size, K: size):
        assert (N > val)
        assert (K < val)
        pass
    assert (arg_range_analysis(foo._loopir_proc, foo._loopir_proc.args[0], fast=False) == ((2 ** 15), None))
    assert (arg_range_analysis(foo._loopir_proc, foo._loopir_proc.args[1], fast=False) == (1, None))
def _remove_empty_fields(initial: Dict) -> Dict:
result = {}
for (key, value) in initial.items():
if isinstance(initial[key], dict):
value = _remove_empty_fields(value)
if isinstance(initial[key], list):
value = [i for i in initial[key] if (i is not None)]
if (value not in (None, {}, [], '')):
result[key] = value
return result |
# NOTE(review): the line below looks like a decorator whose '@' prefix was
# lost in extraction -- confirm against the original test module.
_only_with_numba
def test_equivalent_sources_spherical_parallel():
    """Serial and parallel EquivalentSourcesSph fits must produce matching grids."""
    region = ((- 70), (- 60), (- 40), (- 30))
    radius = 6400000.0
    # Point sources 500 km below the observation radius.
    points = vd.grid_coordinates(region=region, shape=(6, 6), extra_coords=(radius - 500000.0))
    # NOTE(review): amplitude=.0 yields an all-zero checkerboard, which would
    # make this comparison trivially pass -- the original amplitude value may
    # have been lost in extraction; confirm.
    masses = vd.synthetic.CheckerBoard(amplitude=.0, region=region).predict(points)
    coordinates = vd.grid_coordinates(region=region, shape=(40, 40), extra_coords=radius)
    data = point_gravity(coordinates, points, masses, field='g_z', coordinate_system='spherical')
    relative_depth = 500000.0
    eqs_serial = EquivalentSourcesSph(relative_depth=relative_depth, parallel=False)
    eqs_serial.fit(coordinates, data)
    eqs_parallel = EquivalentSourcesSph(relative_depth=relative_depth, parallel=True)
    eqs_parallel.fit(coordinates, data)
    upward = radius
    shape = (60, 60)
    grid_coords = vd.grid_coordinates(region=region, shape=shape, extra_coords=upward)
    grid_serial = eqs_serial.grid(grid_coords)
    grid_parallel = eqs_parallel.grid(grid_coords)
    npt.assert_allclose(grid_serial.scalars, grid_parallel.scalars, rtol=1e-07)
# NOTE(review): the five lines below look like markers/decorators whose
# '@pytest.mark' prefix was lost in extraction -- confirm against the
# original test module.
.skip(reason='Global index not supported.')
.long_test
.download
.parametrize('baseurl', CML_BASEURLS)
.parametrize('source_name', ['indexed-url', 'indexed-url-with-json-index'])
def test_global_index(source_name, baseurl):
    """Load GRIB fields through a global index and check metadata and mean.

    NOTE(review): several expected string values below are empty (date='',
    str(date) == '') -- they look emptied by extraction (originally concrete
    dates); confirm against the original test module.
    """
    print(f'{baseurl}/test-data/input/indexed-urls/global_index.index')
    s = cml.load_source(source_name, f'{baseurl}/test-data/input/indexed-urls/global_index.index', param='r', time=['1000', '1200', '1800'], date='')
    assert (len(s) == 3), len(s)
    assert (s[0].metadata('short_name') == 'r')
    date = s[0].metadata('date')
    assert (str(date) == ''), date
    mean = float(s.to_xarray()['r'].mean())
    assert (abs((mean - 70.)) < 1e-07), mean
class MatchRequest():
    """Stub standing in for a ``requests`` response object in tests.

    ``json()`` returns canned fixture data chosen by substrings of the
    requested URL; setting the class attribute ``error`` truthy makes
    construction raise a connection error instead.
    """
    status_code = 200
    error = False

    def __init__(self, url):
        self.url = url
        if self.error:
            raise requests.exceptions.ConnectionError

    def json(self):
        """Return the fixture payload matching this request's URL."""
        url = self.url
        if ('chelsea' in url) or ('burnley' in url):
            return lfs_data.CHELSEA
        if 'liverpool' in url:
            return lfs_data.LIVERPOOL
        if ('premier-league' in url) or ('tournament/full-priority' in url):
            return lfs_data.PREMIER_LEAGUE
        return lfs_data.NO_MATCHES
# NOTE(review): the decorator head is missing here — `(scope='function')` is
# almost certainly the argument list of a stripped `@pytest.fixture(...)`;
# this line is not valid standalone Python, restore the decorator before use.
(scope='function')
def privacy_notice_us_co_provide_service_operations(db: Session) -> Generator:
    """Yield a sample opt-in us_co PrivacyNotice persisted via the given session."""
    privacy_notice = PrivacyNotice.create(db=db, data={'name': 'example privacy notice us_co provide.service.operations', 'notice_key': 'example_privacy_notice_us_co_provide.service.operations', 'description': 'a sample privacy notice configuration', 'regions': [PrivacyNoticeRegion.us_co], 'consent_mechanism': ConsentMechanism.opt_in, 'data_uses': ['essential.service.operations'], 'enforcement_level': EnforcementLevel.system_wide, 'displayed_in_privacy_center': False, 'displayed_in_overlay': True, 'displayed_in_api': False})
    (yield privacy_notice)
def get_audio_length(file_path) -> float:
    """Return the duration of the audio file at *file_path*, in seconds.

    ``.wav`` files are measured with WavInfoReader; ``.wma`` files try
    mutagen first and fall back to parsing the stream as AAC; everything
    else (including ``.opus``) relies on mutagen's generic probing.

    Raises:
        InvalidAudioFile: if the file cannot be parsed as audio.
    """
    # Note: the original annotated the return as `int`, but every branch
    # returns a float duration; the annotation is corrected here. The
    # separate `.opus` branch duplicated the generic fallback verbatim and
    # has been folded into it.
    try:
        lowered = file_path.casefold()
        if lowered.endswith('.wav'):
            info = WavInfoReader(file_path)
            return (info.data.frame_count / info.fmt.sample_rate)
        if lowered.endswith('.wma'):
            try:
                return mutagen.File(file_path).info.length
            except AttributeError:
                # Some .wma files are really AAC streams mutagen can't probe.
                return AAC(file_path).info.length
        # Generic path: any container mutagen understands (.opus, .mp3, ...).
        return mutagen.File(file_path).info.length
    except (AttributeError, HeaderNotFoundError, MutagenError, WavInfoEOFError, StopIteration) as e:
        raise InvalidAudioFile(f'{file_path} is an invalid audio file') from e
class FaucetTaggedIPv4RouteTest(FaucetTaggedTest):
    """Tagged-VLAN IPv4 static-routing test.

    Verifies that routing between two hosts keeps working across host MAC
    swaps and controller cold starts, that a warm native-VLAN change on an
    unused port does not cold start, and that L3 learning events for both
    gateways appear in the event log.
    """
    # VLAN 100 carries three static routes via two gateways on 10.0.0.0/24;
    # VLANs 200/300 exist only so port 4's native VLAN can be flipped.
    CONFIG_GLOBAL = '\nvlans:\n    100:\n        description: "tagged"\n        faucet_vips: ["10.0.0.254/24"]\n        routes:\n            - route:\n                ip_dst: "10.0.1.0/24"\n                ip_gw: "10.0.0.1"\n            - route:\n                ip_dst: "10.0.2.0/24"\n                ip_gw: "10.0.0.2"\n            - route:\n                ip_dst: "10.0.3.0/24"\n                ip_gw: "10.0.0.2"\n    200:\n        description: "not used"\n    300:\n        description: "not used"\n'
    # Ports 1-3 tagged on VLAN 100; port 4 native on the unused VLAN 200.
    CONFIG = '\n        arp_neighbor_timeout: 2\n        max_resolve_backoff_time: 1\n        interfaces:\n            %(port_1)d:\n                tagged_vlans: [100]\n            %(port_2)d:\n                tagged_vlans: [100]\n            %(port_3)d:\n                tagged_vlans: [100]\n            %(port_4)d:\n                native_vlan: 200\n'
    def test_tagged(self):
        """Route between two hosts, swapping MACs and cold-starting in between."""
        self._enable_event_log()
        host_pair = self.hosts_name_ordered()[:2]
        (first_host, second_host) = host_pair
        first_host_routed_ip = ipaddress.ip_interface('10.0.1.1/24')
        second_host_routed_ip = ipaddress.ip_interface('10.0.2.1/24')
        # Two cold-start rounds, each with three MAC-swap routing checks.
        for _coldstart in range(2):
            for _swaps in range(3):
                self.verify_ipv4_routing(first_host, first_host_routed_ip, second_host, second_host_routed_ip)
                self.swap_host_macs(first_host, second_host)
            self.coldstart_conf()
        # Flip port 4's native VLAN warm; cold_start=False asserts no cold start.
        for vid in (300, 200):
            self.change_port_config(self.port_map['port_4'], 'native_vlan', vid, restart=True, cold_start=False)
        # Both gateways (10.0.0.1 / 10.0.0.2) must have produced L3_LEARN events.
        self.wait_until_matching_lines_from_file('.+L3_LEARN.+10.0.0.[12].+', self.event_log)
# NOTE(review): the next three lines are mangled decorators — almost certainly
# two `@app.route(...)` registrations and something like `@login_required`
# (heads stripped by extraction). Restore before use.
('/search/', methods=['GET'])
('/search/<int:page>', methods=['GET'])
_required
def search(page=1):
    """Render search results for the requested scope/query.

    Optionally restricts the search to a set of source sites passed in the
    `source-site` query parameter as URL-encoded JSON.
    """
    scope = request.args.get('scope')
    query = request.args.get('query')
    # Without both a scope and a query, show the empty search page.
    if (not (scope and query)):
        return render_search_page()
    if ('source-site' in request.args):
        try:
            request_str = request.args['source-site']
            request_str = urllib.parse.unquote(request_str)
            sources = json.loads(request_str)
        # Malformed JSON (json.JSONDecodeError is a ValueError subclass)
        # falls back to searching all sources.
        except ValueError:
            sources = None
    else:
        sources = None
    return render_search(query, scope, sources, page)
class ServiceObj(metaclass=ServiceObjMeta):
    """Base class for named service components with a logger and event loop.

    NOTE(review): `loop`/`objname`/`service`/`logger` are read as plain
    attributes (see `create_logger`'s use of `self.objname` and
    `inc_counter`'s truth test of `self.service`), so they must be
    properties; the `@property` decorators had been stripped and are
    restored here. Likewise `register_counters` takes `cls` and is restored
    as a classmethod.
    """

    def __init__(self, service, name=None):
        self._service = service
        # Fall back to the global event loop when not attached to a service.
        self._loop = (service.loop if service else asyncio.get_event_loop())
        self._objname = (name or self.__class__.__name__)
        self._logger = self.create_logger()

    @property
    def loop(self):
        return self._loop

    @property
    def objname(self):
        return self._objname

    @property
    def service(self):
        return self._service

    @property
    def logger(self):
        return self._logger

    def create_logger(self):
        # Loggers are namespaced under the 'fcr.' hierarchy by object name.
        return logging.getLogger(('fcr.' + self.objname))

    def inc_counter(self, counter):
        # Best-effort: counters are recorded only when a stats manager exists.
        if (self.service and self.service.stats_mgr):
            self.service.stats_mgr.incrementCounter(counter)

    @classmethod
    def register_counters(cls, stats_mgr):
        # Subclasses override to declare their counters; the base registers none.
        pass
class DockableListElement(DockableViewElement):
    """Adapts notebook list items so closing/activating their dock tabs
    stays in sync with the owning NotebookEditor's value and selection."""
    # The NotebookEditor that owns the pages managed by this element.
    editor = Instance(NotebookEditor)
    def dockable_close(self, dock_control, force):
        """Forward a dock-close request; returning False lets the close proceed."""
        return self.close_dock_control(dock_control, force)
    def close_dock_control(self, dock_control, abort):
        # On abort, defer to the base implementation (with force=False).
        if abort:
            return super().close_dock_control(dock_control, False)
        # Otherwise remove the object backing this tab from the editor's list.
        view_object = self.ui.context['object']
        for (i, value) in enumerate(self.editor._uis):
            # NOTE(review): assumes each _uis entry is a tuple with the view
            # object at index 2 — confirm against NotebookEditor; also note the
            # loop keeps iterating after the del (no break), so _uis and value
            # may go out of step if duplicates exist.
            if (view_object is value[2]):
                del self.editor.value[i]
        return False
    def dockable_tab_activated(self, dock_control, activated):
        """Mirror tab activation into the editor's `selected` trait."""
        for (i, value) in enumerate(self.editor._uis):
            # Entry layout: index 0 is the dock control, index 1 the selected value.
            if ((dock_control is value[0]) and activated):
                self.editor.selected = value[1]
                break
class TestComposerThread_send_testing_digest(ComposerThreadBaseTestCase):
    """Tests for ComposerThread.send_testing_digest().

    NOTE(review): the bare string lines before each test method are mangled
    decorators — almost certainly `@mock.patch('bodhi.server.mail.smtplib.SMTP')`
    and `@mock.patch('bodhi.server.tasks.composer.log.warning')`; the
    `@mock.patch` heads were stripped. Restore them before running (the
    methods' extra parameters expect the patched objects).
    """
    ('bodhi.server.mail.smtplib.SMTP')
    def test_critpath_updates(self, SMTP):
        """Critpath updates in testing should be announced in the digest mail."""
        t = ComposerThread(self.semmock, self._make_task()['composes'][0], 'bowlofeggs', self.Session, self.tempdir)
        t.compose = self.db.query(Compose).one()
        update = t.compose.updates[0]
        # Mark the update as a critpath update sitting in testing.
        update.critpath = True
        update.request = None
        update.status = UpdateStatus.testing
        t.testing_digest = {'Fedora 17': {'fake': 'content'}}
        t._checkpoints = {}
        t.db = self.Session
        self.db.flush()
        self.db.info['messages'] = []
        config['smtp_server'] = 'smtp.example.com'
        t.send_testing_digest()
        # Exactly one mail goes out, from bodhi to the test-announce list.
        SMTP.assert_called_once_with('smtp.example.com')
        sendmail = SMTP.return_value.sendmail
        assert (sendmail.call_count == 1)
        args = sendmail.mock_calls[0][1]
        assert (args[0] == config['bodhi_email'])
        assert (args[1] == [config['fedora_test_announce_list']])
        # The body must include the critpath section and the update details.
        assert ('The following Fedora 17 Critical Path updates have yet to be approved:\n Age URL\n' in args[2].decode('utf-8'))
        assert (str(update.days_in_testing) in args[2].decode('utf-8'))
        assert (update.abs_url() in args[2].decode('utf-8'))
        assert (update.title in args[2].decode('utf-8'))
    ('bodhi.server.mail.smtplib.SMTP')
    def test_security_updates(self, SMTP):
        """Security updates in testing should be announced in the digest mail."""
        t = ComposerThread(self.semmock, self._make_task()['composes'][0], 'bowlofeggs', self.Session, self.tempdir)
        t.compose = self.db.query(Compose).one()
        update = t.compose.updates[0]
        # Mark the update as a security update sitting in testing.
        update.type = UpdateType.security
        update.request = None
        update.status = UpdateStatus.testing
        t.testing_digest = {'Fedora 17': {'fake': 'content'}}
        t._checkpoints = {}
        t.db = self.Session
        self.db.flush()
        self.db.info['messages'] = []
        config['smtp_server'] = 'smtp.example.com'
        t.send_testing_digest()
        SMTP.assert_called_once_with('smtp.example.com')
        sendmail = SMTP.return_value.sendmail
        assert (sendmail.call_count == 1)
        args = sendmail.mock_calls[0][1]
        assert (args[0] == config['bodhi_email'])
        assert (args[1] == [config['fedora_test_announce_list']])
        # The body must include the security section and the update details.
        assert ('The following Fedora 17 Security updates need testing:\n Age URL\n' in args[2].decode('utf-8'))
        assert (str(update.days_in_testing) in args[2].decode('utf-8'))
        assert (update.abs_url() in args[2].decode('utf-8'))
        assert (update.title in args[2].decode('utf-8'))
    ('bodhi.server.tasks.composer.log.warning')
    def test_test_list_not_configured(self, warning):
        """Without a configured announce list, only a warning is logged."""
        t = ComposerThread(self.semmock, self._make_task()['composes'][0], 'bowlofeggs', self.Session, self.tempdir)
        t.compose = self.db.query(Compose).one()
        t.testing_digest = {'Fedora 17': {'fake': 'content'}}
        t._checkpoints = {}
        t.db = self.Session
        config['fedora_test_announce_list'] = None
        t.send_testing_digest()
        warning.assert_called_once_with('%r undefined. Not sending updates-testing digest', 'fedora_test_announce_list')
class USDDRPHY(Module, AutoCSR):
    """Xilinx UltraScale / UltraScale+ DDR3/DDR4 PHY (Migen/LiteDRAM).

    Builds the full DFI-facing datapath from OSERDESE3/ISERDESE3 serializers
    plus ODELAYE3/IDELAYE3 delay lines: clock, command/address, DQS, DM and
    DQ paths, with CSRs for software read/write leveling and calibration.

    NOTE(review): several numeric literals in this block appear mangled to
    `.0` by extraction (the `sys_clk_freq`/`iodelay_clk_freq` defaults, the
    two `iodelay_clk_freq >= .0` assertions — presumably minimum frequencies
    like 200e6/300e6 — and the `tck * .0` in the DQS ODELAYE3 delay value).
    Confirm against the upstream source before relying on them.
    """
    def __init__(self, pads, memtype='DDR3', sys_clk_freq=.0, iodelay_clk_freq=.0, cl=None, cwl=None, cmd_latency=0, cmd_delay=None, is_rdimm=False, is_clam_shell=False):
        # ---- Parameters & geometry -------------------------------------
        phytype = self.__class__.__name__
        # The same code serves both families; the primitive SIM_DEVICE differs.
        device = {'USDDRPHY': 'ULTRASCALE', 'USPDDRPHY': 'ULTRASCALE_PLUS'}[phytype]
        pads = PHYPadsCombiner(pads)
        # DRAM clock period: sys clock is 1/4 of DRAM clock (4 phases, DDR).
        tck = (2 / ((2 * 4) * sys_clk_freq))
        addressbits = len(pads.a)
        if (memtype == 'DDR4'):
            # DDR4 multiplexes RAS/CAS/WE onto address lines (via DDR4DFIMux).
            addressbits += 3
        bankbits = (len(pads.ba) if (memtype == 'DDR3') else (len(pads.ba) + len(pads.bg)))
        nranks = (1 if (not hasattr(pads, 'cs_n')) else len(pads.cs_n))
        databits = len(pads.dq)
        nphases = 4
        assert ((databits % 8) == 0)
        # x4 devices have two DQS pairs per byte lane.
        x4_dimm_mode = ((databits / len(pads.dqs_p)) == 4)
        if (phytype == 'USDDRPHY'):
            assert (iodelay_clk_freq >= .0)
        if (phytype == 'USPDDRPHY'):
            assert (iodelay_clk_freq >= .0)
        # ---- Latencies --------------------------------------------------
        cl = (get_default_cl(memtype, tck) if (cl is None) else cl)
        cwl = (get_default_cwl(memtype, tck) if (cwl is None) else cwl)
        cl_sys_latency = get_sys_latency(nphases, cl)
        cwl_sys_latency = get_sys_latency(nphases, cwl)
        rdphase = get_sys_phase(nphases, cl_sys_latency, (cl + cmd_latency))
        wrphase = get_sys_phase(nphases, cwl_sys_latency, (cwl + cmd_latency))
        # ---- CSRs for software leveling/calibration ---------------------
        self._rst = CSRStorage()
        self._en_vtc = CSRStorage(reset=1)
        self._half_sys8x_taps = CSRStatus(9)
        self._wlevel_en = CSRStorage()
        self._wlevel_strobe = CSR()
        self._cdly_rst = CSR()
        self._cdly_inc = CSR()
        self._cdly_value = CSRStatus(9)
        # One select bit per byte lane for the rdly/wdly controls below.
        self._dly_sel = CSRStorage((databits // 8))
        self._rdly_dq_rst = CSR()
        self._rdly_dq_inc = CSR()
        self._rdly_dq_bitslip_rst = CSR()
        self._rdly_dq_bitslip = CSR()
        self._wdly_dq_rst = CSR()
        self._wdly_dq_inc = CSR()
        self._wdly_dqs_rst = CSR()
        self._wdly_dqs_inc = CSR()
        self._wdly_dqs_inc_count = CSRStatus(9)
        self._wdly_dq_bitslip_rst = CSR()
        self._wdly_dq_bitslip = CSR()
        self._rdphase = CSRStorage(2, reset=rdphase)
        self._wrphase = CSRStorage(2, reset=wrphase)
        # ---- PHY settings exposed to the memory controller --------------
        self.settings = PhySettings(phytype=phytype, memtype=memtype, databits=databits, dfi_databits=(2 * databits), nranks=((nranks // 2) if is_clam_shell else nranks), nphases=nphases, rdphase=self._rdphase.storage, wrphase=self._wrphase.storage, cl=cl, cwl=cwl, read_latency=(cl_sys_latency + 5), write_latency=(cwl_sys_latency - 1), cmd_latency=cmd_latency, cmd_delay=cmd_delay, write_leveling=True, write_latency_calibration=True, read_leveling=True, delays=512, bitslips=8, is_clam_shell=is_clam_shell)
        if is_rdimm:
            self.settings.set_rdimm(tck=tck, rcd_pll_bypass=False, rcd_ca_cs_drive=5, rcd_odt_cke_drive=5, rcd_clk_drive=5)
        # ---- DFI interface (with DDR4 command remux when needed) --------
        self.dfi = dfi = Interface(addressbits, bankbits, nranks, (2 * databits), nphases)
        if (memtype == 'DDR4'):
            dfi = Interface(addressbits, bankbits, nranks, (2 * databits), nphases)
            self.submodules += DDR4DFIMux(self.dfi, dfi)
        # ---- Clock pads: serialized 10101010 pattern + ODELAY + OBUFDS --
        for pads_group in range(len(pads.groups)):
            pads.sel_group(pads_group)
            for i in range(len(pads.clk_p)):
                clk_o_nodelay = Signal()
                clk_o_delayed = Signal()
                self.specials += [Instance('OSERDESE3', p_SIM_DEVICE=device, p_DATA_WIDTH=8, p_INIT=0, p_IS_RST_INVERTED=0, p_IS_CLK_INVERTED=0, p_IS_CLKDIV_INVERTED=0, i_RST=(ResetSignal('ic') | self._rst.storage), i_CLK=ClockSignal('sys4x'), i_CLKDIV=ClockSignal('sys'), i_D=170, o_OQ=clk_o_nodelay), Instance('ODELAYE3', p_SIM_DEVICE=device, p_CASCADE='NONE', p_UPDATE_MODE='ASYNC', p_REFCLK_FREQUENCY=(iodelay_clk_freq / 1000000.0), p_DELAY_FORMAT='TIME', p_DELAY_TYPE='VARIABLE', p_DELAY_VALUE=0, i_RST=((ResetSignal('ic') | self._cdly_rst.re) | self._rst.storage), i_CLK=ClockSignal('sys'), i_EN_VTC=self._en_vtc.storage, i_CE=self._cdly_inc.re, i_INC=1, o_CNTVALUEOUT=(self._cdly_value.status if (i == 0) else Signal()), i_ODATAIN=clk_o_nodelay, o_DATAOUT=clk_o_delayed), Instance('OBUFDS', i_I=clk_o_delayed, o_O=pads.clk_p[i], o_OB=pads.clk_n[i])]
            # ---- Command/address pads: one OSERDES+ODELAY per bit -------
            pads_ba = Signal(bankbits)
            # Map pad name (or internal bank signal) -> (DFI field, required?).
            commands = {'reset_n': ('reset_n', 'optional'), 'cs_n': ('cs_n', 'optional'), 'a': ('address', 'required'), pads_ba: ('bank', 'required'), 'ras_n': ('ras_n', 'required'), 'cas_n': ('cas_n', 'required'), 'we_n': ('we_n', 'required'), 'cke': ('cke', 'optional'), 'odt': ('odt', 'optional'), 'act_n': ('act_n', 'optional')}
            for (pad_name, (dfi_name, pad_type)) in commands.items():
                pad = (pad_name if isinstance(pad_name, Signal) else getattr(pads, pad_name, None))
                if (pad is None):
                    if (pad_type == 'required'):
                        raise ValueError(f'DRAM pad {pad_name} required but not found in pads.')
                    continue
                for i in range(len(pad)):
                    o_nodelay = Signal()
                    self.specials += [Instance('OSERDESE3', p_SIM_DEVICE=device, p_DATA_WIDTH=8, p_INIT=0, p_IS_RST_INVERTED=0, p_IS_CLK_INVERTED=0, p_IS_CLKDIV_INVERTED=0, i_RST=(ResetSignal('ic') | self._rst.storage), i_CLK=ClockSignal('sys4x'), i_CLKDIV=ClockSignal('sys'), i_D=Cat(*[getattr(dfi.phases[(n // 2)], dfi_name)[i] for n in range(8)]), o_OQ=o_nodelay), Instance('ODELAYE3', p_SIM_DEVICE=device, p_CASCADE='NONE', p_UPDATE_MODE='ASYNC', p_REFCLK_FREQUENCY=(iodelay_clk_freq / 1000000.0), p_DELAY_FORMAT='TIME', p_DELAY_TYPE='VARIABLE', p_DELAY_VALUE=0, i_RST=((ResetSignal('ic') | self._cdly_rst.re) | self._rst.storage), i_CLK=ClockSignal('sys'), i_EN_VTC=self._en_vtc.storage, i_CE=self._cdly_inc.re, i_INC=1, i_ODATAIN=o_nodelay, o_DATAOUT=pad[i])]
        # Split the internal bank signal back onto BA (and BG for DDR4).
        self.comb += pads.ba.eq(pads_ba)
        if hasattr(pads, 'bg'):
            self.comb += pads.bg.eq(pads_ba[len(pads.ba):])
        if hasattr(pads, 'ten'):
            # Connectivity-test input must be tied low in normal operation.
            self.comb += pads.ten.eq(0)
        # ---- DQS path: pattern generator, bitslip, OSERDES + ODELAY -----
        dqs_oe = Signal()
        dqs_preamble = Signal()
        dqs_postamble = Signal()
        dqs_oe_delay = TappedDelayLine(ntaps=1)
        dqs_pattern = DQSPattern(wlevel_en=self._wlevel_en.storage, wlevel_strobe=self._wlevel_strobe.re)
        self.submodules += (dqs_oe_delay, dqs_pattern)
        self.comb += dqs_oe_delay.input.eq(((dqs_preamble | dqs_oe) | dqs_postamble))
        for i in range((databits // 8)):
            dqs_bitslip = BitSlip(8, i=dqs_pattern.o, rst=((self._dly_sel.storage[i] & self._wdly_dq_bitslip_rst.re) | self._rst.storage), slp=(self._dly_sel.storage[i] & self._wdly_dq_bitslip.re), cycles=1)
            self.submodules += dqs_bitslip
            if x4_dimm_mode:
                # x4 devices: two DQS pairs per byte lane.
                dqs_pads = ((pads.dqs_p[(i * 2)], pads.dqs_n[(i * 2)]), (pads.dqs_p[((i * 2) + 1)], pads.dqs_n[((i * 2) + 1)]))
            else:
                dqs_pads = ((pads.dqs_p[i], pads.dqs_n[i]),)
            for (j, (dqs_p, dqs_n)) in enumerate(dqs_pads):
                dqs_nodelay = Signal()
                dqs_delayed = Signal()
                dqs_t = Signal()
                self.specials += [Instance('OSERDESE3', p_SIM_DEVICE=device, p_DATA_WIDTH=8, p_INIT=0, p_IS_RST_INVERTED=0, p_IS_CLK_INVERTED=0, p_IS_CLKDIV_INVERTED=0, i_RST=(ResetSignal('ic') | self._rst.storage), i_CLK=ClockSignal('sys4x'), i_CLKDIV=ClockSignal('sys'), i_T=(~ dqs_oe_delay.output), i_D=dqs_bitslip.o, o_OQ=dqs_nodelay, o_T_OUT=dqs_t), Instance('ODELAYE3', p_SIM_DEVICE=device, p_CASCADE='NONE', p_UPDATE_MODE='ASYNC', p_REFCLK_FREQUENCY=(iodelay_clk_freq / 1000000.0), p_IS_CLK_INVERTED=0, p_IS_RST_INVERTED=0, p_DELAY_FORMAT='TIME', p_DELAY_TYPE='VARIABLE', p_DELAY_VALUE=int(((tck * .0) / 4)), o_CNTVALUEOUT=(self._half_sys8x_taps.status if ((i == 0) and (j == 0)) else Signal()), i_RST=ResetSignal('ic'), i_CLK=ClockSignal('sys'), i_EN_VTC=self._en_vtc.storage, i_CE=(self._dly_sel.storage[i] & self._wdly_dqs_inc.re), i_INC=1, i_ODATAIN=dqs_nodelay, o_DATAOUT=dqs_delayed), Instance('IOBUFDSE3', i_I=dqs_delayed, i_T=dqs_t, io_IO=dqs_p, io_IOB=dqs_n)]
            # Software-readable count of DQS delay increments per lane.
            wdly_dqs_inc_count = Signal(9)
            self.sync += If((self._dly_sel.storage[i] & self._wdly_dqs_inc.re), wdly_dqs_inc_count.eq((wdly_dqs_inc_count + 1)))
            self.comb += If(self._dly_sel.storage[i], self._wdly_dqs_inc_count.status.eq(wdly_dqs_inc_count))
        # ---- DM path: per-lane write-mask serialization -----------------
        for i in range((databits // 8)):
            if hasattr(pads, 'dm'):
                dm_i = Cat(*[dfi.phases[(n // 2)].wrdata_mask[((((n % 2) * databits) // 8) + i)] for n in range(8)])
                if (memtype == 'DDR4'):
                    # DDR4's DM is active low (DM_n).
                    dm_i = (~ dm_i)
                dm_o_nodelay = Signal()
                dm_o_bitslip = BitSlip(8, i=dm_i, rst=((self._dly_sel.storage[i] & self._wdly_dq_bitslip_rst.re) | self._rst.storage), slp=(self._dly_sel.storage[i] & self._wdly_dq_bitslip.re), cycles=1)
                self.submodules += dm_o_bitslip
                self.specials += [Instance('OSERDESE3', p_SIM_DEVICE=device, p_DATA_WIDTH=8, p_INIT=0, p_IS_RST_INVERTED=0, p_IS_CLK_INVERTED=0, p_IS_CLKDIV_INVERTED=0, i_RST=(ResetSignal('ic') | self._rst.storage), i_CLK=ClockSignal('sys4x'), i_CLKDIV=ClockSignal('sys'), i_D=dm_o_bitslip.o, o_OQ=dm_o_nodelay), Instance('ODELAYE3', p_SIM_DEVICE=device, p_CASCADE='NONE', p_UPDATE_MODE='ASYNC', p_REFCLK_FREQUENCY=(iodelay_clk_freq / 1000000.0), p_IS_CLK_INVERTED=0, p_IS_RST_INVERTED=0, p_DELAY_FORMAT='TIME', p_DELAY_TYPE='VARIABLE', p_DELAY_VALUE=0, i_RST=((ResetSignal('ic') | (self._dly_sel.storage[i] & self._wdly_dq_rst.re)) | self._rst.storage), i_EN_VTC=self._en_vtc.storage, i_CLK=ClockSignal('sys'), i_CE=(self._dly_sel.storage[i] & self._wdly_dq_inc.re), i_INC=1, i_ODATAIN=dm_o_nodelay, o_DATAOUT=pads.dm[i])]
        # ---- DQ path: bidirectional, bitslips + delays both directions --
        dq_oe = Signal()
        dq_oe_delay = TappedDelayLine(ntaps=1)
        self.submodules += dq_oe_delay
        self.comb += dq_oe_delay.input.eq(((dqs_preamble | dq_oe) | dqs_postamble))
        for i in range(databits):
            dq_o_nodelay = Signal()
            dq_o_delayed = Signal()
            dq_i_nodelay = Signal()
            dq_i_delayed = Signal()
            dq_t = Signal()
            dq_o_bitslip = BitSlip(8, i=Cat(*[dfi.phases[(n // 2)].wrdata[(((n % 2) * databits) + i)] for n in range(8)]), rst=((self._dly_sel.storage[(i // 8)] & self._wdly_dq_bitslip_rst.re) | self._rst.storage), slp=(self._dly_sel.storage[(i // 8)] & self._wdly_dq_bitslip.re), cycles=1)
            self.submodules += dq_o_bitslip
            self.specials += Instance('OSERDESE3', p_SIM_DEVICE=device, p_DATA_WIDTH=8, p_INIT=0, p_IS_RST_INVERTED=0, p_IS_CLK_INVERTED=0, p_IS_CLKDIV_INVERTED=0, i_RST=(ResetSignal('ic') | self._rst.storage), i_CLK=ClockSignal('sys4x'), i_CLKDIV=ClockSignal('sys'), i_D=dq_o_bitslip.o, i_T=(~ dq_oe_delay.output), o_OQ=dq_o_nodelay, o_T_OUT=dq_t)
            dq_i_bitslip = BitSlip(8, rst=((self._dly_sel.storage[(i // 8)] & self._rdly_dq_bitslip_rst.re) | self._rst.storage), slp=(self._dly_sel.storage[(i // 8)] & self._rdly_dq_bitslip.re), cycles=1)
            self.submodules += dq_i_bitslip
            self.specials += Instance('ISERDESE3', p_SIM_DEVICE=device, p_IS_CLK_INVERTED=0, p_IS_CLK_B_INVERTED=1, p_DATA_WIDTH=8, i_RST=(ResetSignal('ic') | self._rst.storage), i_CLK=ClockSignal('sys4x'), i_CLK_B=ClockSignal('sys4x'), i_CLKDIV=ClockSignal('sys'), i_D=dq_i_delayed, i_FIFO_RD_EN=0, o_Q=dq_i_bitslip.i)
            for n in range(8):
                self.comb += dfi.phases[(n // 2)].rddata[(((n % 2) * databits) + i)].eq(dq_i_bitslip.o[n])
            self.specials += Instance('ODELAYE3', p_SIM_DEVICE=device, p_CASCADE='NONE', p_UPDATE_MODE='ASYNC', p_REFCLK_FREQUENCY=(iodelay_clk_freq / 1000000.0), p_IS_CLK_INVERTED=0, p_IS_RST_INVERTED=0, p_DELAY_FORMAT='TIME', p_DELAY_TYPE='VARIABLE', p_DELAY_VALUE=0, i_RST=((ResetSignal('ic') | (self._dly_sel.storage[(i // 8)] & self._wdly_dq_rst.re)) | self._rst.storage), i_CLK=ClockSignal('sys'), i_EN_VTC=self._en_vtc.storage, i_CE=(self._dly_sel.storage[(i // 8)] & self._wdly_dq_inc.re), i_INC=1, i_ODATAIN=dq_o_nodelay, o_DATAOUT=dq_o_delayed)
            self.specials += Instance('IDELAYE3', p_SIM_DEVICE=device, p_CASCADE='NONE', p_UPDATE_MODE='ASYNC', p_REFCLK_FREQUENCY=(iodelay_clk_freq / 1000000.0), p_IS_CLK_INVERTED=0, p_IS_RST_INVERTED=0, p_DELAY_FORMAT='TIME', p_DELAY_SRC='IDATAIN', p_DELAY_TYPE='VARIABLE', p_DELAY_VALUE=0, i_RST=((ResetSignal('ic') | (self._dly_sel.storage[(i // 8)] & self._rdly_dq_rst.re)) | self._rst.storage), i_CLK=ClockSignal('sys'), i_EN_VTC=self._en_vtc.storage, i_CE=(self._dly_sel.storage[(i // 8)] & self._rdly_dq_inc.re), i_INC=1, i_IDATAIN=dq_i_nodelay, o_DATAOUT=dq_i_delayed)
            self.specials += Instance('IOBUF', i_I=dq_o_delayed, o_O=dq_i_nodelay, i_T=dq_t, io_IO=pads.dq[i])
        # ---- Read/write datapath timing ---------------------------------
        # Read valid: delay any phase's rddata_en by the configured read latency.
        rddata_en = TappedDelayLine(signal=reduce(or_, [dfi.phases[i].rddata_en for i in range(nphases)]), ntaps=self.settings.read_latency)
        self.submodules += rddata_en
        self.comb += [phase.rddata_valid.eq((rddata_en.output | self._wlevel_en.storage)) for phase in dfi.phases]
        # Write OE window plus DQS pre/postamble derived from wrdata_en taps.
        wrtap = (cwl_sys_latency - 1)
        wrdata_en = TappedDelayLine(signal=reduce(or_, [dfi.phases[i].wrdata_en for i in range(nphases)]), ntaps=(wrtap + 2))
        self.submodules += wrdata_en
        self.comb += dq_oe.eq(wrdata_en.taps[wrtap])
        self.comb += If(self._wlevel_en.storage, dqs_oe.eq(1)).Else(dqs_oe.eq(dq_oe))
        self.comb += dqs_preamble.eq((wrdata_en.taps[(wrtap - 1)] & (~ wrdata_en.taps[(wrtap + 0)])))
        self.comb += dqs_postamble.eq((wrdata_en.taps[(wrtap + 1)] & (~ wrdata_en.taps[(wrtap + 0)])))
def _chunk_actions(actions: Iterable[_TYPE_BULK_ACTION_HEADER_AND_BODY], chunk_size: int, max_chunk_bytes: int, serializer: Serializer) -> Iterable[Tuple[(List[Union[(Tuple[_TYPE_BULK_ACTION_HEADER], Tuple[(_TYPE_BULK_ACTION_HEADER, _TYPE_BULK_ACTION_BODY)])]], List[bytes])]]:
    """Group serialized bulk actions into chunks bounded by action count
    and total byte size, yielding each chunk as it becomes full and a
    final partial chunk at the end."""
    chunker = _ActionChunker(chunk_size=chunk_size, max_chunk_bytes=max_chunk_bytes, serializer=serializer)
    for action, data in actions:
        # feed() returns a completed chunk once either limit is reached.
        chunk = chunker.feed(action, data)
        if chunk:
            yield chunk
    # Emit whatever remains buffered after the input is exhausted.
    final = chunker.flush()
    if final:
        yield final
class TestRegistry(unittest.TestCase):
    """Tests for the Registry container: registration, lookup and iteration."""

    def test_registry(self) -> None:
        OBJECT_REGISTRY = Registry('OBJECT')

        # NOTE(review): the original had a bare `_REGISTRY.register()` statement
        # here — evidently a mangled `@OBJECT_REGISTRY.register()` decorator;
        # restored so that Object1 is actually registered (otherwise the
        # duplicate-registration and lookup assertions below cannot pass).
        @OBJECT_REGISTRY.register()
        class Object1():
            pass
        # Registering the same name twice must fail loudly.
        with self.assertRaises(AssertionError) as err:
            OBJECT_REGISTRY.register(Object1)
        self.assertTrue(("An object named 'Object1' was already registered in 'OBJECT' registry!" in str(err.exception)))
        self.assertEqual(OBJECT_REGISTRY.get('Object1'), Object1)
        # Lookup of an unknown name raises KeyError with a helpful message.
        with self.assertRaises(KeyError) as err:
            OBJECT_REGISTRY.get('Object2')
        self.assertTrue(("No object named 'Object2' found in 'OBJECT' registry!" in str(err.exception)))
        # Iterating the registry yields (name, object) pairs.
        items = list(OBJECT_REGISTRY)
        self.assertListEqual(items, [('Object1', Object1)], 'Registry iterable contains valid item')
def test_observation_normalization_manual_stats():
    """Manually supplied normalization statistics must behave identically
    whether given per-observation (manual_config) or as defaults."""
    env = GymMazeEnv('CartPole-v0')
    # Config 1: identity statistics provided via manual_config for 'observation'.
    normalization_config_1 = {'default_strategy': 'maze.normalization_strategies.MeanZeroStdOneObservationNormalizationStrategy', 'default_strategy_config': {'clip_range': (None, None), 'axis': None}, 'default_statistics': None, 'statistics_dump': 'statistics.pkl', 'sampling_policy': RandomPolicy(env.action_spaces_dict), 'exclude': None, 'manual_config': {'observation': {'strategy': 'maze.normalization_strategies.MeanZeroStdOneObservationNormalizationStrategy', 'strategy_config': {'clip_range': (0, 1), 'axis': 0}, 'statistics': {'mean': [0, 0, 0, 0], 'std': [1, 1, 1, 1]}}}}
    # Config 2: the same statistics provided as defaults, no manual_config.
    normalization_config_2 = {'default_strategy': 'maze.normalization_strategies.MeanZeroStdOneObservationNormalizationStrategy', 'default_strategy_config': {'clip_range': (0, 1), 'axis': 0}, 'default_statistics': {'mean': [0, 0, 0, 0], 'std': [1, 1, 1, 1]}, 'statistics_dump': 'statistics.pkl', 'sampling_policy': RandomPolicy(env.action_spaces_dict), 'exclude': None, 'manual_config': None}
    def test_config(normalization_config):
        # Wrap a fresh env with the given normalization settings.
        env = GymMazeEnv('CartPole-v0')
        env = ObservationNormalizationWrapper(env, default_strategy=normalization_config['default_strategy'], default_strategy_config=normalization_config['default_strategy_config'], default_statistics=normalization_config['default_statistics'], statistics_dump=normalization_config['statistics_dump'], sampling_policy=normalization_config['sampling_policy'], exclude=normalization_config['exclude'], manual_config=normalization_config['manual_config'])
        # Clip range (0, 1) must be reflected in the observation space bounds.
        assert np.alltrue((env.observation_space['observation'].high <= 1.0))
        assert np.alltrue((env.observation_space['observation'].low >= 0.0))
        # The identity statistics must be reported back unchanged.
        statistics = env.get_statistics()
        assert np.all((statistics['observation']['mean'] == np.zeros(shape=4)))
        assert np.all((statistics['observation']['std'] == np.ones(shape=4)))
        # Observations produced by actual steps must stay inside the clip range.
        obs = random_env_steps(env, steps=100)
        assert ((np.min(obs) >= 0) and (np.max(obs) <= 1))
    test_config(normalization_config_1)
    test_config(normalization_config_2)
def marshal(data, fields, envelope=None):
    """Filter *data* through a dict of field definitions.

    Each value in *fields* is either a field class/instance (its
    ``output(key, data)`` produces the value) or a nested dict of fields,
    which is marshalled recursively against the same *data*. Lists and
    tuples are marshalled element-wise. When *envelope* is given, the
    result is wrapped in an OrderedDict under that single key.
    """
    def instantiate(field):
        # Field entries may be classes or ready-made instances.
        return field() if isinstance(field, type) else field

    if isinstance(data, (list, tuple)):
        marshalled = [marshal(item, fields) for item in data]
        if envelope:
            return OrderedDict([(envelope, marshalled)])
        return marshalled

    pairs = ((key, (marshal(data, spec) if isinstance(spec, dict) else instantiate(spec).output(key, data))) for (key, spec) in fields.items())
    if envelope:
        return OrderedDict([(envelope, OrderedDict(pairs))])
    return OrderedDict(pairs)
class Vars(_coconut.object):
    """Coconut-compiled namespace helper: exposes class attributes as named
    variables that can be injected into (and later removed from) a globals
    dict.

    NOTE(review): several decorators appear to have been stripped by
    extraction — `items`/`add_to`/`use_in` take `cls` (likely @classmethod),
    `use_in` yields mid-function (likely also @contextmanager), and the bare
    `_coconut_tco` line before __hash__ is a mangled tail-call-optimization
    decorator. Restore them before use.
    """
    def items(cls):
        # Yield (name, value) for every non-underscore class attribute.
        for name in dir(cls):
            if (not name.startswith('_')):
                var = getattr(cls, name)
                (yield (name, var))
    def add_to(cls, globs):
        """Permanently install all public attributes into *globs*."""
        for (name, var) in cls.items():
            globs[name] = var
    use = add_to
    def use_in(cls, globs):
        """Temporarily install attributes into *globs*, restoring prior
        bindings (or deleting added ones) when the context exits."""
        prevars = _coconut.dict()
        for (name, var) in cls.items():
            if (name in globs):
                # Remember the shadowed binding so it can be restored.
                prevars[name] = globs[name]
            globs[name] = var
        try:
            (yield)
        finally:
            for (name, var) in cls.items():
                if (name in prevars):
                    globs[name] = prevars[name]
                else:
                    del globs[name]
    using = use_in
    _coconut_tco
    def __hash__(self):
        # Hash/ordering delegate to the string representation.
        return _coconut_tail_call(hash, str(self))
    def __lt__(self, other):
        return (str(self) < str(other))
    def __gt__(self, other):
        return (str(self) > str(other))
    def __ge__(self, other):
        return (str(self) >= str(other))
    def __le__(self, other):
        return (str(self) <= str(other))
class ModelBasedAgent(core.Actor):
    """Actor wrapper that additionally steps a learner at episode ends.

    All acting calls are delegated to the wrapped actor; the last observed
    timestep is tracked so that `update` can trigger one learner step per
    completed episode.
    """

    def __init__(self, actor: core.Actor, learner: core.Learner):
        self._actor = actor
        self._learner = learner
        self._last_timestep = None

    def select_action(self, observation: np.ndarray):
        """Delegate action selection to the wrapped actor."""
        return self._actor.select_action(observation)

    def observe_first(self, timestep: dm_env.TimeStep):
        """Forward the first timestep and remember it."""
        self._actor.observe_first(timestep)
        self._last_timestep = timestep

    def observe(self, action: np.ndarray, next_timestep: dm_env.TimeStep):
        """Forward a transition and remember its resulting timestep."""
        self._actor.observe(action, next_timestep)
        self._last_timestep = next_timestep

    def update(self, wait=True):
        """Step the learner once when an episode just ended, then update the actor."""
        if self._last_timestep.last():
            self._learner.step()
        self._actor.update(wait)

    def update_goal(self, goal):
        """Delegate goal updates to the wrapped actor."""
        self._actor.update_goal(goal)
class AggregatableEvent(Event):
    """Event subclass whose rows (as a DataFrame view) can be merged into one.

    NOTE(review): these helpers take a DataFrame view instead of `self` and
    are invoked through the class (see `_aggregated_fields`), so they are
    static methods; the `@staticmethod` decorators had been stripped and are
    restored here — this also makes instance-level calls safe.
    """

    @staticmethod
    def _unique_zone_key(df_view: pd.DataFrame) -> ZoneKey:
        """Return the single zone key shared by all rows, or raise."""
        zone_key = df_view['zoneKey'].unique()
        if (len(zone_key) > 1):
            raise ValueError(f'Cannot merge events from different zones: {zone_key}')
        return zone_key[0]

    @staticmethod
    def _sources(df_view: pd.DataFrame) -> str:
        """Combine the distinct sources into one comma-separated string."""
        sources = df_view['source'].unique()
        return ', '.join(sources)

    @staticmethod
    def _unique_source_type(df_view: pd.DataFrame) -> EventSourceType:
        """Return the single source type shared by all rows, or raise."""
        source_type = df_view['sourceType'].unique()
        if (len(source_type) > 1):
            raise ValueError(f'Cannot merge events from different source types: {source_type}')
        return source_type[0]

    @staticmethod
    def _unique_datetime(df_view: pd.DataFrame) -> datetime:
        """Return the single datetime shared by all rows, or raise.

        NOTE(review): relies on the unique() elements exposing
        `.to_pydatetime()` (pandas Timestamps) — confirm the column dtype.
        """
        target_datetime = df_view.datetime.unique()
        if (len(target_datetime) > 1):
            raise ValueError(f'Cannot merge events from different datetimes: {target_datetime}')
        return target_datetime[0].to_pydatetime()

    @staticmethod
    def _aggregated_fields(df_view: pd.DataFrame) -> tuple[(ZoneKey, str, EventSourceType, datetime)]:
        """Return (zone_key, sources, source_type, datetime) for the merged view."""
        return (AggregatableEvent._unique_zone_key(df_view), AggregatableEvent._sources(df_view), AggregatableEvent._unique_source_type(df_view), AggregatableEvent._unique_datetime(df_view))

    @staticmethod
    def aggregate(events: list['AggregatableEvent']) -> 'AggregatableEvent':
        """Merge several events into one; concrete subclasses implement this."""
        pass
# NOTE(review): `_deserializable` is a mangled decorator (the `@…` head was
# stripped — presumably `@register_deserializable`); restore before use.
_deserializable
class JinaLlm(BaseLlm):
    """LLM backend using JinaChat; requires the JINACHAT_API_KEY env var."""
    def __init__(self, config: Optional[BaseLlmConfig]=None):
        # Fail fast if the API key is missing rather than at first request.
        if ('JINACHAT_API_KEY' not in os.environ):
            raise ValueError('Please set the JINACHAT_API_KEY environment variable.')
        super().__init__(config=config)
    def get_llm_model_answer(self, prompt):
        """Return the chat completion for *prompt* using this instance's config."""
        response = JinaLlm._get_answer(prompt, self.config)
        return response
    # NOTE(review): called via the class with explicit args — likely a stripped
    # @staticmethod; it works as-is only because it is never called on `self`.
    def _get_answer(prompt: str, config: BaseLlmConfig) -> str:
        """Build the message list from config + prompt and query JinaChat."""
        messages = []
        if config.system_prompt:
            messages.append(SystemMessage(content=config.system_prompt))
        messages.append(HumanMessage(content=prompt))
        kwargs = {'temperature': config.temperature, 'max_tokens': config.max_tokens, 'model_kwargs': {}}
        if config.top_p:
            kwargs['model_kwargs']['top_p'] = config.top_p
        if config.stream:
            # Streaming mode echoes tokens to stdout as they arrive.
            from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
            chat = JinaChat(**kwargs, streaming=config.stream, callbacks=[StreamingStdOutCallbackHandler()])
        else:
            chat = JinaChat(**kwargs)
        return chat(messages).content
class BaseEmbeddingProvider():
    """Abstract interface that turns a text string into an embedding vector."""

    def get(self, text: str) -> np.ndarray:
        """Return the embedding for *text*; subclasses must override."""
        raise NotImplementedError()

    def __call__(self, text: str):
        # Allow providers to be used directly as callables.
        return self.get(text)

    def config(self):
        """Return a serializable description of this provider.

        NOTE(review): may have been a @property originally — confirm call
        sites before changing; left as a plain method here.
        """
        return {'class': self.__class__.__name__, 'type': 'embedding_provider'}

    @classmethod
    def from_config(cls, config):
        """Rebuild a provider from its config dict (currently unused).

        Restored as a @classmethod: the signature takes `cls`, but the
        decorator was missing, so `instance.from_config(cfg)` misbound the
        instance as `cls`.
        """
        return cls()
def verify_required_paths(volume_map, output_dir):
    """Assert that the docker socket and *output_dir* are both mounted
    read-write at their own paths in *volume_map*."""
    # Docker socket must be bind-mounted onto itself with read-write access.
    assert constants.DOCKER_SOCKET_PATH in volume_map
    socket_entry = volume_map[constants.DOCKER_SOCKET_PATH]
    assert 'bind' in socket_entry
    assert socket_entry['bind'] == constants.DOCKER_SOCKET_PATH
    assert socket_entry['mode'] == constants.MOUNT_READ_WRITE
    # The output directory must be mounted at the same path, read-write.
    assert output_dir in volume_map
    output_entry = volume_map[output_dir]
    assert output_entry['bind'] == output_dir
    assert output_entry['mode'] == constants.MOUNT_READ_WRITE
class SynchronousLogstashHandler(Handler):
    """Logging handler that ships records to Logstash synchronously.

    Each emitted record is formatted (LogstashFormatter by default) and sent
    immediately over the configured transport, which may be given as an
    import path string, a class/factory callable, or a ready instance.
    """
    def __init__(self, host, port, transport='logstash_async.transport.TcpTransport', ssl_enable=False, ssl_verify=True, keyfile=None, certfile=None, ca_certs=None, enable=True, encoding='utf-8', **kwargs):
        super().__init__()
        self._host = host
        self._port = port
        self._transport_path = transport
        self._ssl_enable = ssl_enable
        self._ssl_verify = ssl_verify
        self._keyfile = keyfile
        self._certfile = certfile
        self._ca_certs = ca_certs
        # When False, emit() becomes a no-op (handler disabled).
        self._enable = enable
        self._transport = None
        self._encoding = encoding
        # Extra kwargs are forwarded to the transport constructor.
        self._setup_transport(**kwargs)
    def emit(self, record):
        """Format *record* and send it; delegate errors to handleError()."""
        if (not self._enable):
            return
        # No-op if already set up; ensures a transport exists before sending.
        self._setup_transport()
        try:
            data = self._format_record(record)
            # use_logging=False avoids recursive logging from the transport.
            self._transport.send([data], use_logging=False)
        except Exception:
            self.handleError(record)
    def _setup_transport(self, **kwargs):
        # Idempotent: keep the existing transport once created.
        if (self._transport is not None):
            return
        transport_args = dict(host=self._host, port=self._port, timeout=constants.SOCKET_TIMEOUT, ssl_enable=self._ssl_enable, ssl_verify=self._ssl_verify, keyfile=self._keyfile, certfile=self._certfile, ca_certs=self._ca_certs, **kwargs)
        # Accept an import path, a class/factory, or a pre-built instance.
        if isinstance(self._transport_path, str):
            transport_class = import_string(self._transport_path)
            self._transport = transport_class(**transport_args)
        elif callable(self._transport_path):
            self._transport = self._transport_path(**transport_args)
        elif hasattr(self._transport_path, 'send'):
            self._transport = self._transport_path
        else:
            raise RuntimeError('Invalid transport path: must be an importable module path, a class or factory function or an instance.')
    def _format_record(self, record):
        """Return the record formatted as newline-terminated bytes."""
        self._create_formatter_if_necessary()
        formatted = self.formatter.format(record)
        if isinstance(formatted, str):
            formatted = formatted.encode(self._encoding)
        return (formatted + b'\n')
    def _create_formatter_if_necessary(self):
        # Lazily default to LogstashFormatter if none was assigned.
        if (self.formatter is None):
            self.formatter = LogstashFormatter()
    def close(self):
        """Shut down the transport under the handler lock, then close."""
        self.acquire()
        try:
            self.shutdown()
        finally:
            self.release()
        super().close()
    def shutdown(self):
        self._close_transport()
    def _close_transport(self):
        # Best-effort close; failures are reported without raising, since
        # this can run during interpreter shutdown when logging is unsafe.
        try:
            if (self._transport is not None):
                self._transport.close()
        except Exception as exc:
            safe_log_via_print('error', f'Error on closing transport: {exc}')
class ZipFileReference(ResourceReference):
    """Resource reference to an image stored inside a zip file volume.

    On first load the image bytes are read from the zip; if they cannot be
    decoded directly, they are spilled to an on-disk cache file and loaded
    from there. Subsequent loads go straight to the cache file.
    """
    # Zip archive holding the image (released once the cache file exists).
    zip_file = Instance(FastZipFile)
    # Volume name used as the cache subdirectory.
    volume_name = Str()
    # Name of the image file inside the zip.
    file_name = Str()
    # Path of the on-disk cache copy ('' until first spill).
    cache_file = File()
    # Computed: the usable filename (forces a load/spill if needed).
    filename = Property
    def load(self):
        """Return the image, decoding from the zip or the cache file."""
        cache_file = self.cache_file
        if (cache_file == ''):
            data = self.zip_file.read(self.file_name)
            # Try decoding in-memory first; fall back to a cache file.
            image = self.resource_factory.image_from_data(data, Undefined)
            if (image is not None):
                return image
            cache_dir = join(image_cache_path, self.volume_name)
            if (not exists(cache_dir)):
                makedirs(cache_dir)
            cache_file = join(cache_dir, self.file_name)
            with open(cache_file, 'wb') as fh:
                fh.write(data)
            self.cache_file = cache_file
            # The zip is no longer needed once the bytes are cached on disk.
            self.zip_file = None
        return self.resource_factory.image_from_file(cache_file)
    def _get_filename(self):
        # Property getter for `filename`: ensure the cache file exists first.
        if (self.cache_file == ''):
            self.load()
        return self.cache_file
class ONFFlowMonitorRequest(StringifyMixin):
    """ONF flow-monitor request body (OpenFlow experimenter extension).

    Serializes as the ONF_FLOW_MONITOR_REQUEST header followed by the
    OXM payload of the match (its 4-byte ofp_match header stripped),
    padded to an 8-byte boundary.
    """
    def __init__(self, id_, flags, match=OFPMatch(), out_port=ofproto.OFPP_ANY, table_id=ofproto.OFPTT_ALL, match_len=None):
        self.id = id_
        self.flags = flags
        # match_len is normally computed during serialize().
        self.match_len = match_len
        self.out_port = out_port
        self.table_id = table_id
        self.match = match
    def serialize(self):
        """Return the wire-format request as a bytearray."""
        match = self.match
        # Serialize the full ofp_match (header + OXM fields + padding).
        bin_match = bytearray()
        ofp_match_len = match.serialize(bin_match, 0)
        assert (len(bin_match) == ofp_match_len)
        # Keep only the OXM payload: drop the 4-byte ofp_match header and
        # the trailing pad (match.length excludes that pad).
        match_len = match.length
        match_hdr_len = (ofproto.OFP_MATCH_SIZE - 4)
        bin_match = bytearray(bin_match)[match_hdr_len:match_len]
        self.match_len = len(bin_match)
        # Fixed-size request header, then the OXM payload.
        buf = bytearray()
        msg_pack_into(ofproto.ONF_FLOW_MONITOR_REQUEST_PACK_STR, buf, 0, self.id, self.flags, self.match_len, self.out_port, self.table_id)
        buf += bin_match
        # Pad the payload to the next 8-byte boundary, as OpenFlow requires.
        pad_len = (utils.round_up(self.match_len, 8) - self.match_len)
        buf += (pad_len * b'\x00')
        return buf
# NOTE(review): the following lines are non-code residue from a dataset
# viewer page, preserved as comments so the file stays parseable:
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.