code stringlengths 281 23.7M |
|---|
class SetCI(SimpleTool):
    """Tool that stores the project's CI commands (lint / test / run) on the project."""
    name = 'SetCI'
    description = 'Configure the commands to run, lint, test the project or lint a file (`{command} {file}` will be used). Input format: `lint: "command", lintfile: "command", test: "command", run: "command"`'
    structured_desc = 'Configure the commands to run, lint, test the project or lint a file. '

    def __init__(self, project: Project):
        self.project = project
        super().__init__()

    def structured_func(self, lint: str='', lintfile: str='', test: str='', run: str='', **kwargs):
        """Persist the CI commands on the project and remember run/test commands.

        Extra keyword arguments are stored verbatim alongside the known keys.
        """
        self.project.ci_commands = {'lint': lint, 'lintfile': lintfile, 'test': test, 'run': run, **kwargs}
        if run:
            self.project.memories.append(f'The command to run the project: `{run}`')
        if test:
            self.project.memories.append(f'The command to test the project: `{test}`')
        # Plain literal: the original used an f-string with no placeholders.
        return 'CI set up.'

    def func(self, args: str):
        """Parse `key: "command", ...` free-text input and delegate to structured_func."""
        parts = args.strip().strip('`').split('", ')
        # BUGFIX: split(':', 1) — commands frequently contain colons
        # (URLs, `cmd arg:value`); the original split on every colon and
        # truncated the command at the second one.
        parsed = {
            part.split(':', 1)[0].strip(): part.split(':', 1)[1].strip().removeprefix('"').removesuffix('"')
            for part in parts
        }
        return self.structured_func(**parsed)
def parse(text, encoding='utf-8', handler=None, **defaults):
    """Split *text* into (metadata, content).

    Front matter is detected (or parsed with the explicitly supplied
    *handler*); *defaults* seed the metadata dict and are overridden by
    any loaded front-matter keys.  When no handler applies or the text
    cannot be split, the defaults and the whole text are returned.
    """
    stripped = u(text, encoding).strip()
    meta = defaults.copy()
    fmt = handler or detect_format(stripped, handlers)
    if fmt is None:
        return (meta, stripped)
    try:
        raw_fm, body = fmt.split(stripped)
    except ValueError:
        # No front-matter delimiter present — treat everything as content.
        return (meta, stripped)
    loaded = fmt.load(raw_fm)
    # Only merge when the handler produced a mapping.
    if isinstance(loaded, dict):
        meta.update(loaded)
    return (meta, body.strip())
def test_setGlyphOrder_also_updates_glyf_glyphOrder():
    """setGlyphOrder must keep the glyf table's glyphOrder in sync."""
    font = TTFont()
    font.importXML(os.path.join(DATA_DIR, 'TestTTF-Regular.ttx'))
    original_order = font.getGlyphOrder()
    assert original_order == font['glyf'].glyphOrder
    shuffled = list(original_order)
    # Keep shuffling until the order actually differs from the original.
    while shuffled == original_order:
        random.shuffle(shuffled)
    font.setGlyphOrder(shuffled)
    assert font.getGlyphOrder() == shuffled
    assert font['glyf'].glyphOrder == shuffled
class LinkerEntryOffset(LinkerEntry):
    """Pseudo linker entry that emits a `<segment>_OFFSET` symbol instead of
    referencing an input object file."""

    def __init__(self, segment: Segment):
        # Empty source list and empty Path: this entry exists only to emit
        # a symbol, never to link any input.
        super().__init__(segment, [], Path(), 'linker_offset', 'linker_offset', False)
        self.object_path = None

    def emit_entry(self, linker_writer: LinkerWriter):
        # '.' is the linker-script current-address counter, so the symbol
        # captures the address at this point in the script.
        linker_writer._write_symbol(f'{self.segment.get_cname()}_OFFSET', '.')
def filter_endpoint_control_forticlient_ems_data(json):
    """Return only the recognised, non-None FortiClient EMS option fields.

    The input dict is first passed through remove_invalid_fields; then only
    the whitelisted keys below are copied into the result.
    """
    # BUGFIX: the original list contained `' 'listen_port'` — implicit string
    # concatenation producing ' listen_port' (leading space), so the
    # listen_port option could never be matched.
    option_list = ['address', 'admin_password', 'admin_type', 'admin_username',
                   'listen_port', 'name', 'rest_api_auth', 'serial_number',
                   'upload_port']
    json = remove_invalid_fields(json)
    dictionary = {}
    for attribute in option_list:
        if attribute in json and json[attribute] is not None:
            dictionary[attribute] = json[attribute]
    return dictionary
def sharpe_iid_adjusted(rtns, bench=0, factor=1, log=True):
    """Skewness/kurtosis-adjusted Sharpe ratio for i.i.d. returns.

    NOTE: the inner sharpe_iid call deliberately passes factor=1; the
    annualisation factor is applied exactly once, via sqrt(factor), after
    the moment adjustment on the last line.
    """
    sr = sharpe_iid(rtns, bench=bench, factor=1, log=log)
    if _is_pandas(rtns):
        skew = rtns.skew()
        excess_kurt = rtns.kurtosis()
    else:
        # scipy path: bias-corrected, NaN-tolerant moments; fisher=True
        # yields *excess* kurtosis, matching pandas' kurtosis().
        skew = ss.skew(rtns, bias=False, nan_policy='omit')
        excess_kurt = ss.kurtosis(rtns, bias=False, fisher=True, nan_policy='omit')
    return (adjusted_sharpe(sr, skew, excess_kurt) * np.sqrt(factor))
def prepare_query(query: str) -> Tuple[(str, List[str])]:
    """Rewrite a single SELECT statement's named placeholders into positional
    `?N` markers.

    Returns (rewritten_sql, parameter_names), where parameter_names are the
    placeholder names (leading sigil stripped) ordered by first appearance.

    Raises ValueError for multi-statement input or non-SELECT statements.
    """
    statements = sqlparse.parse(query)
    if (len(statements) != 1):
        raise ValueError('Only one SQL statement is allowed.')
    statement: sqlparse.sql.Statement = statements[0]
    if (statement.get_type() != 'SELECT'):
        raise ValueError('Only SELECT statements are supported.')
    new_tokens: List[str] = []
    # Maps placeholder token text -> first-appearance index.
    params_map: Dict[(str, int)] = {}
    for token in statement.flatten():
        if (token.ttype in sqlparse.tokens.Name.Placeholder):
            # setdefault reuses the same number for a repeated placeholder.
            index = params_map.setdefault(token.value, len(params_map))
            new_tokens.append(('?' + str((index + 1))))
        else:
            new_tokens.append(str(token))
    # k[1:] drops the placeholder sigil (e.g. ':name' -> 'name'); sorting by
    # the stored index restores first-appearance order.
    params_list = [k[1:] for (_, k) in sorted(((v, k) for (k, v) in params_map.items()))]
    return (''.join(new_tokens), params_list)
def _start():
    """Initialise GPIO software-PWM control and trigger threads from the
    patch configuration.

    Relies on the module-level globals (patch, monitor, wiringpi,
    TriggerThread, ...) being set up by the hosting module beforehand.
    """
    global patch, name, path, monitor
    global pin, delay, scale_duration, offset_duration, lock, trigger
    # Map config section pin names to WiringPi pin numbers.
    pin = {'gpio0': 0, 'gpio1': 1, 'gpio2': 2, 'gpio3': 3, 'gpio4': 4, 'gpio5': 5, 'gpio6': 6, 'gpio7': 7, 'gpio21': 21, 'gpio22': 22, 'gpio23': 23, 'gpio24': 24, 'gpio25': 25, 'gpio26': 26, 'gpio27': 27, 'gpio28': 28, 'gpio29': 29}
    delay = patch.getfloat('general', 'delay')
    scale_duration = patch.getfloat('scale', 'duration', default=1)
    offset_duration = patch.getfloat('offset', 'duration', default=0)
    lock = threading.Lock()
    wiringpi.wiringPiSetup()
    previous_val = {}
    for (gpio, channel) in patch.config.items('control'):
        monitor.info(((('control ' + channel) + ' ') + gpio))
        # Software PWM with a 0..100 duty-cycle range per controlled pin.
        wiringpi.softPwmCreate(pin[gpio], 0, 100)
        previous_val[gpio] = None
    trigger = []
    for (gpio, channel) in patch.config.items('trigger'):
        # Mode 1 = output pin.
        wiringpi.pinMode(pin[gpio], 1)
        duration = patch.getstring('duration', gpio)
        trigger.append(TriggerThread(channel, gpio, duration))
        monitor.info(((('trigger ' + channel) + ' ') + gpio))
    for thread in trigger:
        thread.start()
    # Debug aid: dump the names of any locals remaining after setup.
    if len(locals()):
        print(('LOCALS: ' + ', '.join(locals().keys())))
class OptionSeriesBarLabel(Options):
    """Configuration options for Highcharts series bar labels.

    NOTE(review): every option is a getter/setter pair with the same name
    and no decorators; presumably these were ``@property`` /
    ``@<name>.setter`` pairs whose decorators were stripped from this copy —
    as written, the second ``def`` silently shadows the first.  Confirm
    against the original generator output.
    """

    def boxesToAvoid(self):
        return self._config_get(None)

    def boxesToAvoid(self, value: Any):
        self._config(value, js_type=False)

    def connectorAllowed(self):
        return self._config_get(False)

    def connectorAllowed(self, flag: bool):
        self._config(flag, js_type=False)

    def connectorNeighbourDistance(self):
        # Default: 24 pixels.
        return self._config_get(24)

    def connectorNeighbourDistance(self, num: float):
        self._config(num, js_type=False)

    def enabled(self):
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def format(self):
        return self._config_get('undefined')

    def format(self, text: str):
        self._config(text, js_type=False)

    def formatter(self):
        return self._config_get('undefined')

    def formatter(self, value: Any):
        self._config(value, js_type=False)

    def maxFontSize(self):
        return self._config_get(None)

    def maxFontSize(self, num: float):
        self._config(num, js_type=False)

    def minFontSize(self):
        return self._config_get(None)

    def minFontSize(self, num: float):
        self._config(num, js_type=False)

    def onArea(self):
        return self._config_get(None)

    def onArea(self, flag: bool):
        self._config(flag, js_type=False)

    def style(self) -> 'OptionSeriesBarLabelStyle':
        # Sub-options object, created lazily.
        return self._config_sub_data('style', OptionSeriesBarLabelStyle)

    def useHTML(self):
        return self._config_get(False)

    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
# NOTE(review): the leading `.parametrize(...)` line appears to be a
# ``@pytest.mark.parametrize`` decorator whose ``@pytest.mark`` prefix was
# stripped from this copy — it is not valid on its own; confirm against the
# original file.
.parametrize('type_sep, counter_sep', [('', ''), ('_', '_')])
def test_hungarian_notation_separators(type_sep: str, counter_sep: str):
    """Renaming must honour the configured hungarian-notation separators."""
    true_value = LogicCondition.initialize_true(LogicCondition.generate_new_context())
    ast = AbstractSyntaxTree(CodeNode(Assignment((var := Variable('var_0', I32)), Constant(0)), true_value), {})
    _run_vng(ast, _generate_options(type_sep=type_sep, counter_sep=counter_sep))
    # 'i' prefix = hungarian type tag for a 32-bit int variable.
    assert (var.name == f'i{type_sep}Var{counter_sep}0')
class OptionPlotoptionsCylinderStatesHover(Options):
    """Hover-state options for Highcharts cylinder plot series.

    NOTE(review): getter/setter pairs share a name with no decorators;
    presumably stripped ``@property``/``@<name>.setter`` pairs — as written
    the setter shadows the getter.  Confirm against the original source.
    """

    def animation(self) -> 'OptionPlotoptionsCylinderStatesHoverAnimation':
        # Sub-options object, created lazily.
        return self._config_sub_data('animation', OptionPlotoptionsCylinderStatesHoverAnimation)

    def borderColor(self):
        return self._config_get(None)

    def borderColor(self, text: str):
        self._config(text, js_type=False)

    def brightness(self):
        # Default brightness boost on hover.
        return self._config_get(0.1)

    def brightness(self, num: float):
        self._config(num, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
def describe_symbol(sym):
    """Print a symtable.Symbol's name followed by each scope/usage property
    (referenced, imported, parameter, ...) that holds for it.

    Raises AssertionError when *sym* is not a symtable.Symbol.
    """
    # isinstance is the idiomatic type check; the original's exact-type
    # `type(sym) == symtable.Symbol` rejected subclasses for no benefit.
    assert isinstance(sym, symtable.Symbol)
    print('Symbol:', sym.get_name())
    for prop in ['referenced', 'imported', 'parameter', 'global', 'declared_global', 'local', 'free', 'assigned', 'namespace']:
        # Each property maps to an is_<prop>() predicate on the Symbol.
        if getattr(sym, ('is_' + prop))():
            print('    is', prop)
def extractPassivetranslationsCom(item):
    """Map a PassiveTranslations feed item to a release message.

    Returns None for previews or items without a chapter/volume, False when
    no known series tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    known_series = (
        ('Hermione and the Genius Magic Formula', 'Hermione and the Genius Magic Formula', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in known_series:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class ShapeStar():
    """A star polygon normalised to the unit square.

    `points` is the number of star tips; `indent` controls how deep the
    inner vertices sit (larger indent = pointier star).
    """

    def __init__(self, points, indent=0.61):
        # Pass the inner-vertex radius (1 - indent) to the generator.
        self.coords = self._calc_coords(points, (1 - indent))

    def _calc_coords(self, points, indent):
        """Return 2*points (x, y) pairs in [0, 1], alternating inner/outer."""
        from math import cos, pi, sin
        step = pi / points
        result = []
        for i in range(2 * points):
            # Odd indices are outer tips (radius 1), even ones are pulled
            # toward the centre by `indent`.
            radius = 1.0 if i % 2 else indent
            x = (sin(step * i) * radius + 1) * 0.5
            y = (cos(step * i) * radius + 1) * 0.5
            result.append((x, y))
        return result

    def layout(self, cr, x, y, w, h):
        """Trace the star as a closed path on the cairo context *cr*,
        scaled to the (x, y, w, h) rectangle."""
        scaled = [(sx * w + x, sy * h + y) for sx, sy in self.coords]
        cr.move_to(*scaled[0])
        for px, py in scaled[1:]:
            cr.line_to(px, py)
        cr.close_path()
def map_func(context):
    """Echo loop for a Java<->Python bridge: read records and write them
    straight back, logging each round trip, until reading raises.

    The infinite loop is intentionally terminated by an exception (e.g.
    end-of-stream from read_record), which is printed as a traceback.
    """
    json_recorder = JsonRecorder(context.from_java(), context.to_java())
    try:
        while True:
            data = json_recorder.read_record()
            print(context.index, 'data:', data)
            # Flush so output interleaves correctly with the Java side.
            sys.stdout.flush()
            res = json_recorder.write_record(data)
            print(context.index, 'res:', res)
            sys.stdout.flush()
    except Exception as e:
        # Broad catch is deliberate: any failure ends the echo loop and is
        # reported rather than propagated to the bridge.
        msg = traceback.format_exc()
        print(msg)
def format_custom_ner_examples(example: Dict):
    """Render one NER example as a prompt section: the text, its unique
    entity categories, and the expected JSON output listing each entity.

    *example* must have a 'text' key and an 'entities' list of dicts with
    'entity' and 'category' keys.
    """
    text = example['text']
    entities = example['entities']
    extracted_entities = [
        {'entity': entity['entity'], 'category': entity['category']}
        for entity in entities
    ]
    # BUGFIX: dict.fromkeys deduplicates while preserving first-seen order;
    # the original used set(), which made the category list order
    # nondeterministic between runs.
    unique_categories = list(dict.fromkeys(e['category'] for e in extracted_entities))
    entity_items = ', '.join(
        f'{{"entity":"{e["entity"]}", "category":"{e["category"]}"}}'
        for e in extracted_entities
    )
    result = f'''
Text:
{text}
Entities:
{', '.join(unique_categories)}
Output:
{{
"items":[
{entity_items}
]
}}
'''
    return result
class Cell():
    """A value holder that recomputes its dependent cells whenever its value
    is set.

    NOTE(review): `value` is defined twice (getter then setter) with no
    decorators; presumably a stripped ``@property``/``@value.setter`` pair —
    as written the second definition shadows the first.  Confirm against the
    original source.
    """

    def __init__(self):
        self._watchers = []   # cells whose compute() runs when our value changes
        self._value = None
        self.counter = 0      # number of times the value has been set

    def add_watcher(self, cell):
        # Register a dependent cell to be recomputed on every set.
        self._watchers.append(cell)

    def value(self):
        return self._value

    def value(self, new_value):
        self._value = new_value
        self.counter += 1
        # Propagate the change to every dependent cell.
        for cell in self._watchers:
            cell.compute()
def serialize_to_folder(pkgs: typing.List[str], settings: SerializationSettings, local_source_root: typing.Optional[str]=None, folder: str='.', options: typing.Optional[Options]=None):
    """Serialize the entities found in *pkgs* and persist them under *folder*.

    A ``None`` folder is treated the same as the default current directory.
    """
    target_dir = '.' if folder is None else folder
    entities = serialize(pkgs, settings, local_source_root, options=options)
    persist_registrable_entities(entities, target_dir)
# NOTE(review): the leading `.parametrize(...)` line appears to be a
# ``@pytest.mark.parametrize`` decorator whose prefix was stripped from this
# copy — it is not valid on its own; confirm against the original file.
.parametrize(('input_data', 'expected_result'), [([('NX enabled', 1696, 0.89122), ('NX disabled', 207, 0.10878), ('Canary enabled', 9, 0.00473)], {'labels': ['NX enabled', 'NX disabled', 'Canary enabled'], 'datasets': [{'data': [1696, 207, 9], 'percentage': [0.89122, 0.10878, 0.00473], 'backgroundColor': ['#4062fa', '#f4c069', '#4062fa'], 'borderWidth': 0, 'links': 'null'}]}), ([()], None)])
def test_data_to_chart_with_value_percentage_pairs(input_data, expected_result):
    """(label, value, percentage) triples must map to a chart.js dataset;
    degenerate input yields None."""
    assert (flt.data_to_chart_with_value_percentage_pairs(input_data) == expected_result)
def test_machine_should_activate_initial_state():
    """A freshly constructed state machine must start in (and activate) the
    state declared with initial=True, regardless of declaration order."""
    class CampaignMachine(StateMachine):
        producing = State()
        closed = State(final=True)
        # Declared last on purpose: initial-state selection must not depend
        # on declaration order.
        draft = State(initial=True)
        add_job = (draft.to(draft) | producing.to(producing))
        produce = draft.to(producing)
        deliver = producing.to(closed)
    sm = CampaignMachine()
    assert (sm.current_state == sm.draft)
    assert sm.current_state.is_active
class SurfaceConfig(ParameterConfig):
    """Parameter configuration for a 2D regular surface stored in irap_ascii
    format; geometry fields mirror an xtgeo RegularSurface header.

    NOTE(review): ``from_config_list`` takes ``cls`` but carries no
    decorator; presumably ``@classmethod`` (and likely a dataclass decorator
    for the annotated fields) were stripped from this copy — confirm against
    the original source.
    """
    ncol: int                # number of surface columns
    nrow: int                # number of surface rows
    xori: float              # x origin
    yori: float              # y origin
    xinc: float              # x increment
    yinc: float              # y increment
    rotation: float          # surface rotation
    yflip: int               # y-axis flip flag
    forward_init_file: str   # per-realization init-file template ('%d' = realization)
    output_file: Path        # file written into each runpath
    base_surface_path: str   # surface file supplying the geometry header

    def from_config_list(cls, surface: List[str]) -> Self:
        """Build a SurfaceConfig from one SURFACE config line, collecting all
        validation errors before raising."""
        options = option_dict(surface, 1)
        name = surface[0]
        init_file = options.get('INIT_FILES')
        out_file = options.get('OUTPUT_FILE')
        base_surface = options.get('BASE_SURFACE')
        forward_init = str_to_bool(options.get('FORWARD_INIT', 'FALSE'))
        errors = []
        if (not out_file):
            errors.append(ErrorInfo('Missing required OUTPUT_FILE').set_context(surface))
        if (not init_file):
            errors.append(ErrorInfo('Missing required INIT_FILES').set_context(surface))
        elif ((not forward_init) and ('%d' not in init_file)):
            # Without FORWARD_INIT the init file must be per-realization.
            errors.append(ErrorInfo('INIT_FILES must contain %d when FORWARD_INIT:FALSE').set_context(surface))
        if (not base_surface):
            errors.append(ErrorInfo('Missing required BASE_SURFACE').set_context(surface))
        elif (not Path(base_surface).exists()):
            errors.append(ErrorInfo(f'BASE_SURFACE:{base_surface} not found').set_context(surface))
        if errors:
            raise ConfigValidationError.from_collected(errors)
        # All three validated non-empty above; asserts narrow the Optionals.
        assert (init_file is not None)
        assert (out_file is not None)
        assert (base_surface is not None)
        try:
            # Only the geometry header of the base surface is used below.
            surf = xtgeo.surface_from_file(base_surface, fformat='irap_ascii', dtype=np.float32)
        except Exception as err:
            raise ConfigValidationError.with_context(f'Could not load surface {base_surface!r}', surface) from err
        return cls(ncol=surf.ncol, nrow=surf.nrow, xori=surf.xori, yori=surf.yori, xinc=surf.xinc, yinc=surf.yinc, rotation=surf.rotation, yflip=surf.yflip, name=name, forward_init=forward_init, forward_init_file=init_file, output_file=Path(out_file), base_surface_path=base_surface)

    def __len__(self) -> int:
        # One parameter value per grid cell.
        return (self.ncol * self.nrow)

    def read_from_runpath(self, run_path: Path, real_nr: int) -> xr.Dataset:
        """Load this realization's surface from the runpath as an xr.Dataset.

        Raises ValueError when the (optionally %d-expanded) file is missing.
        """
        file_name = self.forward_init_file
        if ('%d' in file_name):
            file_name = (file_name % real_nr)
        file_path = (run_path / file_name)
        if (not file_path.exists()):
            raise ValueError(f'''Failed to initialize parameter '{self.name}' in file {file_name}: File not found
''')
        surface = xtgeo.surface_from_file(file_path, fformat='irap_ascii', dtype=np.float32)
        da = xr.DataArray(surface.values, name='values', dims=['x', 'y'])
        return da.to_dataset()

    def write_to_runpath(self, run_path: Path, real_nr: int, ensemble: EnsembleReader) -> None:
        """Write the stored parameter values to the runpath as irap_ascii,
        using this config's geometry header."""
        data = ensemble.load_parameters(self.name, real_nr)['values']
        surf = xtgeo.RegularSurface(ncol=self.ncol, nrow=self.nrow, xori=self.xori, yori=self.yori, xinc=self.xinc, yinc=self.yinc, rotation=self.rotation, yflip=self.yflip, values=data.values)
        file_path = (run_path / self.output_file)
        file_path.parent.mkdir(exist_ok=True, parents=True)
        surf.to_file(file_path, fformat='irap_ascii')
def assert_decoder_with_cache_output_equals_hf(orig_model: DecoderModule, hf_model: 'transformers.AutoModel', torch_device: torch.device, atol: float, rtol: float, jit_method: JITMethod, with_torch_sdp=False):
    """Check that a JIT-converted decoder using a key-value cache matches the
    Hugging Face reference model's hidden states on a two-step generation.
    """
    # Small tracing inputs used only to drive the JIT conversion.
    X_jit = torch.randint(0, hf_model.config.vocab_size, (3, 5), device=torch_device)
    mask_jit = torch.ones_like(X_jit, dtype=torch.bool)
    with torch.no_grad():
        cache_jit = orig_model(X_jit, AttentionMask(mask_jit), store_cache=True).cache
    (model, output) = jit_method.convert(DecoderWithCache(orig_model), with_torch_sdp, X_jit, AttentionMask(torch.concat([mask_jit, mask_jit], dim=1)), cache_jit)
    # Build an empty (zero-length) cache with the traced cache's head shape.
    (_, n_heads, _, head_width) = cache_jit[0].key.shape
    empty_kv_jit = torch.zeros((2, n_heads, 0, head_width), dtype=cache_jit[0].key.dtype, device=torch_device)
    empty_cache_jit = ([KeyValueCache(empty_kv_jit, empty_kv_jit)] * hf_model.config.num_hidden_layers)
    # First chunk, then a continuation whose mask covers both chunks.
    X = torch.randint(0, hf_model.config.vocab_size, (2, 10), device=torch_device)
    mask = torch.ones_like(X, dtype=torch.bool)
    X_rest = torch.randint(0, hf_model.config.vocab_size, (2, 10), device=torch_device)
    mask_rest = torch.cat([mask, torch.ones_like(X_rest, dtype=torch.bool)], dim=1)
    with torch.no_grad():
        Y = model(X, AttentionMask(mask), empty_cache_jit)
        Y_hf = hf_model(X, use_cache=True)
        # Feed the first step's cache into the second step; [-1] selects the
        # final hidden states from the (output, cache) pair.
        Y = output(model(X_rest, AttentionMask(mask_rest), cache=output(Y)[1]))[0][(- 1)]
        Y_hf = hf_model(X_rest, past_key_values=Y_hf.past_key_values).last_hidden_state
    torch_assertclose(Y, Y_hf, atol=atol, rtol=rtol)
class DateEditorDemo(HasTraits):
    """Demo of the Traits DateEditor: default, custom, read-only and
    multi-select configurations."""
    single_date = Date()
    multi_date = List(Date)
    info_string = Str('The editors for Traits Date objects. Showing both the defaults, and one with alternate options.')
    multi_select_editor = DateEditor(allow_future=False, multi_select=True, shift_to_select=False, on_mixed_select='max_change', months=2, padding=30)
    traits_view = View(Item('info_string', show_label=False, style='readonly'), Group(Item('single_date', label='Simple date editor'), Item('single_date', style='custom', label='Default custom editor'), Item('single_date', style='readonly', editor=DateEditor(strftime='You picked %B %d %Y', message='Click a date above.'), label='ReadOnly editor'), label='Default settings for editors'), Group(Item('multi_date', editor=multi_select_editor, style='custom', label='Multi-select custom editor'), label='More customized editor: multi-select; disallow future; selection style; etc.'), resizable=True)

    def _multi_date_changed(self):
        """Static change handler for the multi_date trait."""
        print(self.multi_date)

    def _simple_date_changed(self):
        """Inert handler: there is no `simple_date` trait, so Traits never
        calls this automatically.  BUGFIX: the original body also read the
        nonexistent `self.simple_date`, raising AttributeError if invoked;
        it now prints the real trait.  Kept for backward compatibility."""
        print(self.single_date)

    def _single_date_changed(self):
        """Static change handler for the single_date trait."""
        print(self.single_date)
class Link(object):
    """Swagger-generated model for a HAL-style hypermedia link.

    NOTE(review): every attribute has a getter/setter pair with the same
    name and no decorators; presumably ``@property``/``@<name>.setter``
    pairs whose decorators were stripped from this copy — as written the
    setter shadows the getter.  Confirm against the code generator output.
    """
    # attribute name -> swagger type, and attribute name -> JSON key.
    swagger_types = {'deprecation': 'AlertEndPosition', 'href': 'Url', 'hreflang': 'str', 'name': 'str', 'profile': 'str', 'templated': 'bool', 'title': 'str', 'type': 'str'}
    attribute_map = {'deprecation': 'deprecation', 'href': 'href', 'hreflang': 'hreflang', 'name': 'name', 'profile': 'profile', 'templated': 'templated', 'title': 'title', 'type': 'type'}

    def __init__(self, deprecation=None, href=None, hreflang=None, name=None, profile=None, templated=None, title=None, type=None):
        """All fields are optional except `href`, which is assigned
        unconditionally (its setter rejects None)."""
        self._deprecation = None
        self._href = None
        self._hreflang = None
        self._name = None
        self._profile = None
        self._templated = None
        self._title = None
        self._type = None
        self.discriminator = None
        if (deprecation is not None):
            self.deprecation = deprecation
        self.href = href
        if (hreflang is not None):
            self.hreflang = hreflang
        if (name is not None):
            self.name = name
        if (profile is not None):
            self.profile = profile
        if (templated is not None):
            self.templated = templated
        if (title is not None):
            self.title = title
        if (type is not None):
            self.type = type

    def deprecation(self):
        return self._deprecation

    def deprecation(self, deprecation):
        self._deprecation = deprecation

    def href(self):
        return self._href

    def href(self, href):
        # href is the only required field of the model.
        if (href is None):
            raise ValueError('Invalid value for `href`, must not be `None`')
        self._href = href

    def hreflang(self):
        return self._hreflang

    def hreflang(self, hreflang):
        self._hreflang = hreflang

    def name(self):
        return self._name

    def name(self, name):
        self._name = name

    def profile(self):
        return self._profile

    def profile(self, profile):
        self._profile = profile

    def templated(self):
        return self._templated

    def templated(self, templated):
        self._templated = templated

    def title(self):
        return self._title

    def title(self, title):
        self._title = title

    def type(self):
        return self._type

    def type(self, type):
        self._type = type

    def to_dict(self):
        """Recursively convert the model (including nested models inside
        lists and dicts) into a plain dict."""
        result = {}
        for (attr, _) in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        # Swagger codegen boilerplate: merge dict items when the model
        # itself subclasses dict.
        if issubclass(Link, dict):
            for (key, value) in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Pretty-printed string form of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if (not isinstance(other, Link)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return (not (self == other))
def main():
    """Application entry point: parse options, wire the dependency graph,
    run the app, and notify GTK startup completion once running.

    Any exception is logged with its traceback and then re-raised.
    """
    try:
        options = parse_options()
        setup_logging(options)
        scan_to_graph(['tomate'], graph)
        app = Application.from_graph(graph)
        app.Run()
        if app.IsRunning():
            Gdk.notify_startup_complete()
    except Exception as ex:
        logger.error(ex, exc_info=True)
        # Bare `raise` is the idiom for re-raising the active exception;
        # the original's `raise ex` re-raised by name unnecessarily.
        raise
def instantiate_keystore(derivation_type: DerivationType, data: Dict[(str, Any)], parent_keystore: Optional[KeyStore]=None, row: Optional[MasterKeyRow]=None) -> KeyStore:
    """Create the concrete KeyStore for the given derivation type.

    Only BIP32 keystores may have a parent (multisig cosigners); the other
    types assert parent_keystore is None.  Raises for unknown types.
    """
    keystore: KeyStore
    if (derivation_type == DerivationType.BIP32):
        keystore = BIP32_KeyStore(data, row, parent_keystore)
    elif (derivation_type == DerivationType.HARDWARE):
        assert (parent_keystore is None)
        # Hardware keystores are created through the device manager, which
        # picks the plugin for the connected device.
        keystore = app_state.device_manager.create_keystore(data, row)
    elif (derivation_type == DerivationType.ELECTRUM_MULTISIG):
        assert (parent_keystore is None)
        keystore = Multisig_KeyStore(data, row)
    elif (derivation_type == DerivationType.ELECTRUM_OLD):
        assert (parent_keystore is None)
        keystore = Old_KeyStore(data, row)
    else:
        raise Exception(_('unknown masterkey type {}:{}').format((row.masterkey_id if (row is not None) else None), derivation_type))
    return keystore
def moods(request):
    """Django view: paginated list of moods, optionally filtered by the
    `key` query parameter (substring match on content), newest first.

    NOTE(review): rendering with locals() exposes every local to the
    template; the template presumably uses avatar_list/mood_list/pager/key.
    """
    avatar_list = Avatars.objects.all()
    key = request.GET.get('key', '')
    mood_list = Moods.objects.filter(content__contains=key).order_by('-create_date')
    # Copy so the paginator can manipulate query params without mutating
    # the request's own GET dict.
    query_params = request.GET.copy()
    pager = Pagination(current_page=request.GET.get('page'), all_count=mood_list.count(), base_url=request.path_info, query_params=query_params, per_page=5, pager_page_count=7)
    # Slice the queryset to the current page.
    mood_list = mood_list[pager.start:pager.end]
    return render(request, 'moods.html', locals())
class DomainSerializer(s.ConditionalDCBoundSerializer):
    """Serializer for DNS Domain objects, with dc-bound awareness and
    lowercase-normalised, validated domain names."""
    _model_ = Domain
    _update_fields_ = ('owner', 'access', 'desc', 'dc_bound', 'type')
    _default_fields_ = ('name', 'owner', 'type')
    _blank_fields_ = frozenset({'desc'})
    # Set to the previous name when validate_name detects a rename.
    name_changed = None
    name = s.RegexField('^[A-Za-z0-9][A-Za-z0-9\\._/-]*$', max_length=253, min_length=3)
    type = s.ChoiceField(choices=Domain.TYPE_MASTER, default=Domain.MASTER)
    owner = s.SlugRelatedField(slug_field='username', queryset=User.objects)
    access = s.IntegerChoiceField(choices=Domain.ACCESS, default=Domain.PRIVATE)
    desc = s.SafeCharField(max_length=128, required=False)
    created = s.DateTimeField(read_only=True, required=False)

    def __init__(self, request, domain, *args, **kwargs):
        super(DomainSerializer, self).__init__(request, domain, *args, **kwargs)
        # Cache dc_bound only for single-object serialization.
        if (not kwargs.get('many', False)):
            self._dc_bound = domain.dc_bound

    def _normalize(self, attr, value):
        if (attr == 'dc_bound'):
            # Normalise a Dc instance to its primary key.
            if isinstance(self._dc_bound, Dc):
                self._dc_bound = self._dc_bound.id
            return self._dc_bound
        return super(DomainSerializer, self)._normalize(attr, value)

    def validate_name(self, attrs, source):
        """Lowercase the name, track renames, and run DNS-name validation.

        Skips validation when the name was not submitted or is unchanged.
        """
        try:
            value = attrs[source].lower()
        except KeyError:
            # Name not part of this update — nothing to validate.
            pass
        else:
            attrs[source] = value
            if self.object.pk:
                if (self.object.name == value):
                    return attrs
                else:
                    # Remember the old name so callers can react to renames.
                    self.name_changed = self.object.name
            validate_dns_name(value)
        return attrs
class Crawler(object):
    """Abstract crawler interface.

    Concrete crawlers must override every method below; each base
    implementation only raises NotImplementedError.
    """

    def run(self, resource):
        """Crawl *resource*; must be overridden."""
        raise NotImplementedError('The run function of the crawler')

    def visit(self, resource):
        """Visit a single *resource*; must be overridden."""
        raise NotImplementedError('The visit function of the crawler')

    def dispatch(self, callback):
        """Dispatch results through *callback*; must be overridden."""
        raise NotImplementedError('The dispatch function of the crawler')

    def get_client(self):
        """Return the client used for crawling; must be overridden."""
        raise NotImplementedError('The get_client function of the crawler')
class OptionSeriesWindbarbSonificationTracksMappingVolume(Options):
    """Volume-mapping options for windbarb series sonification tracks.

    NOTE(review): getter/setter pairs share a name with no decorators;
    presumably stripped ``@property``/``@<name>.setter`` pairs — as written
    the setter shadows the getter.  Confirm against the original source.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def handle_pandas(values: List[Dict[(str, Union[(int, str, NotebookNode)])]]) -> List[Tuple[(int, str)]]:
    """Convert notebook HTML-table outputs into (index, markdown) pairs.

    Each value dict provides an 'index' and HTML 'data'; the first table in
    the HTML is rendered as markdown, with pandas' auto-generated
    "Unnamed: N" column headers blanked out.

    NOTE: pd.read_html with flavor='lxml' requires the lxml package.
    """
    output = []
    for value in values:
        index = int(value['index'])
        data = str(value['data'])
        df = pd.read_html(data, flavor='lxml')
        md_df = df[0]
        # Blank out pandas' placeholder headers for unnamed columns.
        for column in md_df.columns:
            if column.startswith('Unnamed'):
                md_df.rename(columns={column: ''}, inplace=True)
        # A RangeIndex carries no information, so omit it from the markdown.
        # (The original duplicated this test in an elif; it is a plain else.)
        if isinstance(md_df.index, pd.RangeIndex):
            mdx = md_df.to_markdown(index=False)
        else:
            mdx = md_df.to_markdown()
        output.append((index, f'\n{mdx}\n'))
    return output
def extractJawzPublications(item):
    """Map a Jawz Publications feed item to a release message.

    Returns None for previews or items with no chapter/volume/fragment,
    False when no known series tag (with vol and chp) matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol or frag):
        return None
    tags = item['tags']
    # Both series require an explicit volume and chapter.
    if vol and chp:
        if 'Zectas' in tags:
            return buildReleaseMessageWithType(item, 'Zectas', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
        if 'LMS' in tags:
            return buildReleaseMessageWithType(item, 'Legendary Moonlight Sculptor', vol, chp, frag=frag, postfix=postfix)
    return False
class BookmarksManager():
    """Track, persist and expose playback bookmarks through GTK menus."""

    # Bookmarks are persisted as JSON in the user's XDG data directory.
    __PATH = os.path.join(xdg.get_data_dirs()[0], 'bookmarklist.dat')

    def __init__(self):
        self.__db_file_lock = threading.RLock()
        self.__bookmarks = []
        self.menu = None
        self.delete_menu = None
        self.__setup_menu()
        self.__load_db()

    def __setup_menu(self):
        """Build the bookmark menu: add, delete (submenu), clear, separator."""
        self.menu = menu.Menu(self)
        self.delete_menu = menu.Menu(self)

        def factory_factory(display_name, icon_name, callback=None, submenu=None):
            # Returns a menu-item factory; items are greyed out while there
            # are no bookmarks.
            def factory(_menu, _parent, _context):
                item = Gtk.ImageMenuItem.new_with_mnemonic(display_name)
                image = Gtk.Image.new_from_icon_name(icon_name, size=Gtk.IconSize.MENU)
                item.set_image(image)
                if callback is not None:
                    item.connect('activate', callback)
                if submenu is not None:
                    item.set_submenu(submenu)
                if len(self.__bookmarks) == 0:
                    item.set_sensitive(False)
                return item
            return factory

        items = []
        items.append(_smi('bookmark', [], _('_Bookmark This Track'), 'bookmark-new', self.__on_add_bookmark))
        delete_cb = factory_factory(_('_Delete Bookmark'), 'gtk-close', submenu=self.delete_menu)
        items.append(menu.MenuItem('delete', delete_cb, ['bookmark']))
        clear_cb = factory_factory(_('_Clear Bookmarks'), 'gtk-clear', callback=self.__clear_bookmarks)
        items.append(menu.MenuItem('clear', clear_cb, ['delete']))
        items.append(_sep('sep', ['clear']))
        for item in items:
            self.menu.add_item(item)

    def __on_add_bookmark(self, _widget, _name, _foo, _bookmarks_manager):
        # Menu-activation adapter for __add_bookmark.
        self.__add_bookmark()

    def __add_bookmark(self, path=None, time=None, save_db=True):
        """Create a Bookmark (current track/position when path/time are None)
        and optionally persist the database."""
        if not self.menu:
            return
        bookmark = Bookmark(self.menu, self.delete_menu, self.__delete_bookmark, path, time)
        self.__bookmarks.append(bookmark)
        if save_db:
            self.__save_db()

    def __clear_bookmarks(self, _widget):
        """Remove every bookmark from both menus and persist the empty list."""
        for bookmark in self.__bookmarks:
            self.delete_menu.remove_item(bookmark.get_menu_item())
            self.menu.remove_item(bookmark.get_menu_item())
        self.__bookmarks = []
        self.__save_db()

    def __delete_bookmark(self, _widget, bookmark):
        """Remove a single bookmark from both menus and persist."""
        self.__bookmarks.remove(bookmark)
        self.delete_menu.remove_item(bookmark.get_menu_item())
        self.menu.remove_item(bookmark.get_menu_item())
        self.__save_db()

    def __load_db(self):
        """Load bookmarks from disk; a missing file is not an error."""
        with self.__db_file_lock:
            if not os.path.exists(self.__PATH):
                LOGGER.info('Bookmarks file does not exist yet.')
                return
            try:
                with open(self.__PATH, 'r') as bm_file:
                    bookmarks = json.load(bm_file)
                self.__load_db_callback(bookmarks)
            except IOError as err:
                # BUGFIX: the original also called the undefined name
                # `_add()` here, raising NameError on any read failure;
                # logging the error is the intended handling.
                LOGGER.error('BM: could not open file: %s', err.strerror)

    def __load_db_callback(self, loaded_bookmarks):
        # Rebuild in-memory bookmarks without re-saving the file.
        if not self.menu:
            return
        for (key, pos) in loaded_bookmarks:
            self.__add_bookmark(key, pos, save_db=False)

    def __save_db(self):
        """Persist asynchronously: snapshot the list and write on a thread."""
        bookmarks = copy.copy(self.__bookmarks)
        thread = threading.Thread(target=self.__do_save_db, args=[bookmarks])
        thread.daemon = False
        thread.start()

    def __do_save_db(self, bookmarks):
        # Runs on the save thread; the lock serialises file access.
        with self.__db_file_lock:
            with open(self.__PATH, 'w') as bm_file:
                json.dump(bookmarks, bm_file, indent=2, default=Bookmark.serialize_bookmark)
            LOGGER.debug('saved %d bookmarks', len(bookmarks))
class ConvRecordEntry():
    """Record of one convolution kernel execution/benchmark entry.

    NOTE(review): bare annotated fields with no assignments — presumably a
    stripped ``@dataclass`` decorator; confirm against the original source.
    """
    exec_entry: str        # executable/kernel entry identifier
    exec_entry_sha1: str   # sha1 of the entry
    dtype_a: int
    dtype_b: int
    dtype_c: int
    dtype_acc: int         # accumulator dtype
    major_a: int           # layout/major order codes — TODO confirm semantics
    major_b: int
    major_c: int
    kh: int                # kernel height
    kw: int                # kernel width
    co: int                # output channels
    strideh: int
    stridew: int
    padh: int
    padw: int
    dilateh: int
    dilatew: int
    op_type: str
    epilogue: int
    device: str
    algo: str
    workspace: int         # workspace size (presumably bytes — confirm)
    split_k: int
# NOTE(review): the leading tuple of Output/Input/State looks like a stripped
# Dash ``@app.callback(...)`` decorator — it is not valid on its own; confirm
# against the original file.
([Output('modal-power-curve-chart', 'figure'), Output('modal-power-curve-card', 'style')], [Input('modal-activity-id-type-metric', 'children')], [State('activity-modal', 'is_open')])
def modal_power_curve(activity, is_open):
    """Dash callback: show the power curve in the activity modal.

    *activity* is a 'id|type|metric' string; the chart is only rendered for
    the power_zone metric while the modal is open, otherwise the card is
    hidden.
    """
    if (activity and is_open):
        activity_id = activity.split('|')[0]
        activity_type = activity.split('|')[1]
        metric = activity.split('|')[2]
        if (metric == 'power_zone'):
            figure = power_curve(last_id=activity_id, activity_type=activity_type)
            return (figure, {'height': '100%'})
        else:
            # Wrong metric: hide the card.
            return ({}, {'display': 'None'})
    else:
        # Modal closed or no activity selected: hide the card.
        return ({}, {'display': 'None'})
class DynamicLaunchPlanCommand(click.RichCommand):
    """Click command whose parameters are derived at runtime from a remote
    Flyte launch plan's interface."""

    def __init__(self, name: str, h: str, lp_name: str, **kwargs):
        super().__init__(name=name, help=h, **kwargs)
        self._lp_name = lp_name
        self._lp = None  # lazily fetched launch plan, cached per command

    def _fetch_launch_plan(self, ctx: click.Context) -> FlyteLaunchPlan:
        """Fetch (once) and cache the remote launch plan."""
        if self._lp:
            return self._lp
        run_level_params: RunLevelParams = ctx.obj
        r = run_level_params.remote_instance()
        self._lp = r.fetch_launch_plan(run_level_params.project, run_level_params.domain, self._lp_name)
        return self._lp

    def _get_params(self, ctx: click.Context, inputs: typing.Dict[(str, Variable)], native_inputs: typing.Dict[(str, type)], fixed: typing.Dict[(str, Literal)], defaults: typing.Dict[(str, Parameter)]) -> typing.List['click.Parameter']:
        """Convert launch-plan inputs to click options; fixed inputs are
        skipped and defaulted inputs become optional."""
        params = []
        flyte_ctx = context_manager.FlyteContextManager.current_context()
        for (name, var) in inputs.items():
            if (fixed and (name in fixed)):
                # Fixed inputs cannot be overridden from the CLI.
                continue
            required = True
            if (defaults and (name in defaults)):
                required = False
            params.append(to_click_option(ctx, flyte_ctx, name, var, native_inputs[name], None, required))
        return params

    def get_params(self, ctx: click.Context) -> typing.List['click.Parameter']:
        # Populate params lazily on first access, from the remote interface.
        if (not self.params):
            self.params = []
            lp = self._fetch_launch_plan(ctx)
            if lp.interface:
                if lp.interface.inputs:
                    types = TypeEngine.guess_python_types(lp.interface.inputs)
                    self.params = self._get_params(ctx, lp.interface.inputs, types, lp.fixed_inputs.literals, lp.default_inputs.parameters)
        return super().get_params(ctx)

    def invoke(self, ctx: click.Context) -> typing.Any:
        """Execute the launch plan remotely with the parsed CLI parameters."""
        run_level_params: RunLevelParams = ctx.obj
        r = run_level_params.remote_instance()
        lp = self._fetch_launch_plan(ctx)
        run_remote(r, lp, run_level_params.project, run_level_params.domain, ctx.params, run_level_params, type_hints=(lp.python_interface.inputs if lp.python_interface else None))
class AMX_TILE(StaticMemory):
    """Static allocator for the 8 hardware AMX tile registers.

    NOTE(review): methods take ``cls`` but carry no decorators; presumably
    ``@classmethod`` decorators were stripped from this copy — confirm
    against the original source.
    """
    NUM_AMX_TILES = 8
    # Class-level allocation bitmap shared by all users of this memory.
    StaticMemory.init_state(NUM_AMX_TILES)
    tile_dict = {}  # allocated name -> tile number

    def reset_allocations(cls):
        # Drop every allocation and start from a clean bitmap.
        cls.init_state(cls.NUM_AMX_TILES)
        cls.tile_dict = {}

    def global_(cls):
        # Header required by the generated C code for AMX intrinsics.
        return '#include <immintrin.h>'

    def can_read(cls):
        # Tile registers cannot be read directly.
        return False

    def alloc(cls, new_name, prim_type, shape, srcinfo):
        """Reserve a free tile for `new_name`; returns a C #define binding
        the name to the tile number.

        AMX tiles hold at most 16 rows of at most 64 bytes each; both
        dimensions must be compile-time constants.
        """
        if (not (shape[0].isdecimal() and (int(shape[0]) <= 16))):
            raise MemGenError('Number of tile rows must be a constant and <= 16.')
        ctype_size = {'float': 4, 'double': 8, 'int8_t': 1, 'int32_t': 4, 'int_fast32_t': 4}
        if (not (shape[1].isdecimal() and ((int(shape[1]) * ctype_size[prim_type]) <= 64))):
            raise MemGenError(f'Number of bytes per row must be a constant and <= 64, currently trying to allocate {(int(shape[1]) * ctype_size[prim_type])} bytes per row.')
        tile_num = cls.find_free_chunk()
        cls.mark(tile_num)
        cls.tile_dict[new_name] = tile_num
        return f'#define {new_name} {tile_num}'

    def free(cls, new_name, prim_type, shape, srcinfo):
        """Release the tile bound to `new_name`; returns the matching #undef."""
        tile_num = cls.tile_dict[new_name]
        del cls.tile_dict[new_name]
        cls.unmark(tile_num)
        return f'#undef {new_name}'
class EventTrigger(_typing.TypedDict):
    """Typed payload describing an event trigger configuration; only
    eventType and retry are required keys."""
    # Exact-match attribute filters.
    eventFilters: _typing_extensions.NotRequired[dict[(str, (str | _params.Expression[str]))]]
    # Path-pattern attribute filters.
    eventFilterPathPatterns: _typing_extensions.NotRequired[dict[(str, (str | _params.Expression[str]))]]
    # Optional delivery channel.
    channel: _typing_extensions.NotRequired[str]
    eventType: _typing_extensions.Required[str]
    # Whether failed deliveries are retried; may be an expression or sentinel.
    retry: _typing_extensions.Required[((bool | _params.Expression[bool]) | _util.Sentinel)]
class DataView(OverlayPlotContainer):
    """Chaco-style container that maps a 2D data space onto screen space.

    Bundles the index/value mappers, ranges, axes and grids that renderers
    share, and keeps them consistent when orientation, origin, bounds or
    ranges change. The ``_xxx_changed``/``_get_xxx``/``_set_xxx`` methods
    are Traits static notification handlers and Property accessors invoked
    automatically by the Traits machinery.
    """

    # Screen direction of the index axis: 'h' (index along x) or 'v'.
    orientation = Enum('h', 'v')
    # Which corner of the component the data origin maps to.
    default_origin = Enum('bottom left', 'top left', 'bottom right', 'top right')
    # Read-only view of default_origin (see _get_origin).
    origin = Property(observe='default_origin')
    # Mapper for the index dimension (x when orientation is 'h').
    index_mapper = Instance(Base1DMapper)
    # Mapper for the value dimension (y when orientation is 'h').
    value_mapper = Instance(Base1DMapper)
    # Scale type used when auto-creating the mappers in _init_components.
    index_scale = Enum('linear', 'log')
    value_scale = Enum('linear', 'log')
    # Convenience Properties delegating to range2d's sub-ranges.
    index_range = Property
    value_range = Property
    # The shared 2D data range backing both mappers.
    range2d = Instance(DataRange2D)
    # Orientation-aware aliases for the mappers (x == index when 'h').
    x_mapper = OrientedMapperProperty
    y_mapper = OrientedMapperProperty
    # Axis and grid overlays; auto-created when auto_axis/auto_grid are set.
    x_axis = Instance(AbstractOverlay)
    y_axis = Instance(AbstractOverlay)
    x_grid = Instance(PlotGrid)
    y_grid = Instance(PlotGrid)
    auto_axis = Bool(True)
    auto_grid = Bool(True)
    # Orientation-aware aliases for axes and grids.
    index_axis = AxisProperty
    value_axis = AxisProperty
    index_grid = GridProperty
    value_grid = GridProperty
    bgcolor = 'white'
    # Padding Properties: an explicit user value wins; otherwise computed
    # from which side carries a titled axis (see _find_padding).
    padding_top = Property(observe='y_axis.[title,orientation], x_axis.[title,orientation]')
    padding_bottom = Property(observe='y_axis.[title,orientation], x_axis.[title,orientation]')
    padding_left = Property(observe='y_axis.[title,orientation], x_axis.[title,orientation]')
    padding_right = Property(observe='y_axis.[title,orientation], x_axis.[title,orientation]')
    # Backing storage for user-specified padding (None means "auto").
    _padding_top = Optional(Int())
    _padding_bottom = Optional(Int())
    _padding_left = Optional(Int())
    _padding_right = Optional(Int())

    def _find_padding(self, side):
        """Padding for ``side``: the explicit value when set, 80 when a
        titled axis occupies that side, else 50."""
        SIDE_TO_TRAIT_MAP = {'top': '_padding_top', 'bottom': '_padding_bottom', 'left': '_padding_left', 'right': '_padding_right'}
        if (getattr(self, SIDE_TO_TRAIT_MAP[side]) is not None):
            return getattr(self, SIDE_TO_TRAIT_MAP[side])
        else:
            if self.y_axis:
                if (self.y_axis.title and (self.y_axis.orientation == side)):
                    return 80
            if self.x_axis:
                if (self.x_axis.title and (self.x_axis.orientation == side)):
                    return 80
            return 50

    def _get_padding_top(self):
        return self._find_padding('top')

    def _get_padding_bottom(self):
        return self._find_padding('bottom')

    def _get_padding_left(self):
        return self._find_padding('left')

    def _get_padding_right(self):
        return self._find_padding('right')

    def _set_padding_top(self, value):
        self._padding_top = value

    def _set_padding_bottom(self, value):
        self._padding_bottom = value

    def _set_padding_left(self, value):
        self._padding_left = value

    def _set_padding_right(self, value):
        self._padding_right = value

    border_visible = True

    def __init__(self, **kwtraits):
        super().__init__(**kwtraits)
        self._init_components()
        # With a fixed size (not resizable) the bounds/position change
        # notifications never fire, so sync the mappers once up front.
        if (self.resizable == ''):
            self._update_mappers()

    def map_screen(self, data_array):
        """Map an Nx2 array of (index, value) points to screen (x, y)."""
        if (len(data_array) == 0):
            return empty(shape=(0, 2))
        (x_ary, y_ary) = transpose(data_array)
        sx = self.index_mapper.map_screen(x_ary)
        sy = self.value_mapper.map_screen(y_ary)
        # In vertical orientation the index runs along y: swap columns.
        if (self.orientation == 'h'):
            return transpose(array((sx, sy)))
        else:
            return transpose(array((sy, sx)))

    def map_data(self, screen_pt):
        """Map a single screen (x, y) point back to (index, value) data."""
        (x, y) = screen_pt
        return array((self.index_mapper.map_data(x), self.value_mapper.map_data(y)))

    def _init_components(self):
        """Create any missing range, mappers, grids and axes."""
        if (not self.range2d):
            self.range2d = DataRange2D()
        if (not self.index_mapper):
            if (self.index_scale == 'linear'):
                imap = LinearMapper(range=self.range2d.x_range)
            else:
                imap = LogMapper(range=self.range2d.x_range)
            self.index_mapper = imap
        if (not self.value_mapper):
            if (self.value_scale == 'linear'):
                vmap = LinearMapper(range=self.range2d.y_range)
            else:
                vmap = LogMapper(range=self.range2d.y_range)
            self.value_mapper = vmap
        # Keep the grid visible against the background colour.
        grid_color = 'lightgray'
        if (color_table[self.bgcolor] == color_table[grid_color]):
            grid_color = 'white'
        if ((not self.x_grid) and self.auto_grid):
            self.x_grid = PlotGrid(mapper=self.x_mapper, orientation='vertical', line_color=grid_color, line_style='dot', component=self)
        if ((not self.y_grid) and self.auto_grid):
            self.y_grid = PlotGrid(mapper=self.y_mapper, orientation='horizontal', line_color=grid_color, line_style='dot', component=self)
        if ((not self.x_axis) and self.auto_axis):
            self.x_axis = PlotAxis(mapper=self.x_mapper, orientation='bottom', component=self)
        if ((not self.y_axis) and self.auto_axis):
            self.y_axis = PlotAxis(mapper=self.y_mapper, orientation='left', component=self)

    def _update_mappers(self):
        """Push our screen bounds into the mappers, honouring the origin."""
        x = self.x
        x2 = self.x2
        y = self.y
        y2 = self.y2
        if (self.x_mapper is not None):
            if ('left' in self.origin):
                self.x_mapper.low_pos = x
                self.x_mapper.high_pos = x2
            else:
                # Right-hand origin: screen x decreases with data.
                self.x_mapper.low_pos = x2
                self.x_mapper.high_pos = x
        if (self.y_mapper is not None):
            if ('bottom' in self.origin):
                self.y_mapper.low_pos = y
                self.y_mapper.high_pos = y2
            else:
                # Top origin: screen y decreases with data.
                self.y_mapper.low_pos = y2
                self.y_mapper.high_pos = y
        self.invalidate_draw()

    def _bounds_changed(self, old, new):
        super()._bounds_changed(old, new)
        self._update_mappers()

    def _bounds_items_changed(self, event):
        super()._bounds_items_changed(event)
        self._update_mappers()

    def _position_changed(self, old, new):
        super()._position_changed(old, new)
        self._update_mappers()

    def _position_items_changed(self, event):
        super()._position_items_changed(event)
        self._update_mappers()

    def _origin_changed(self):
        self._update_mappers()

    def _orientation_changed(self):
        """Re-point grids/axes at the (now-swapped) oriented mappers and
        propagate the new orientation to contained renderers."""
        if (self.x_grid is not None):
            self.x_grid.mapper = self.x_mapper
        if (self.y_grid is not None):
            self.y_grid.mapper = self.y_mapper
        if (self.x_axis is not None):
            self.x_axis.mapper = self.x_mapper
        if (self.y_axis is not None):
            self.y_axis.mapper = self.y_mapper
        self._update_mappers()
        for renderer in self.components:
            if hasattr(renderer, 'orientation'):
                renderer.orientation = self.orientation

    def _index_mapper_changed(self, old, new):
        # Adopt the new mapper's data sources into our index range, make
        # the mapper use our range, then rewire the axis and grid.
        if (new is not None):
            if (new.range is not None):
                for source in new.range.sources:
                    self.index_range.add(source)
            new.range = self.index_range
        if self.index_axis:
            self.index_axis.mapper = new
        if self.index_grid:
            self.index_grid.mapper = new

    def _value_mapper_changed(self, old, new):
        # Mirror of _index_mapper_changed for the value dimension.
        if (new is not None):
            if (new.range is not None):
                for source in new.range.sources:
                    self.value_range.add(source)
            new.range = self.value_range
        if self.value_axis:
            self.value_axis.mapper = new
        if self.value_grid:
            self.value_grid.mapper = new

    def _bgcolor_changed(self):
        self.invalidate_draw()

    def _x_grid_changed(self, old, new):
        self._underlay_change_helper(old, new)

    def _y_grid_changed(self, old, new):
        self._underlay_change_helper(old, new)

    def _x_axis_changed(self, old, new):
        self._underlay_change_helper(old, new)

    def _y_axis_changed(self, old, new):
        self._underlay_change_helper(old, new)

    def _underlay_change_helper(self, old, new):
        """Replace ``old`` with ``new`` in the underlays list."""
        if (old in self.underlays):
            self.underlays.remove(old)
        if (new is not None):
            self.underlays.append(new)

    def _overlay_change_helper(self, old, new):
        """Replace ``old`` with ``new`` in the overlays list."""
        if (old in self.overlays):
            self.overlays.remove(old)
        if (new is not None):
            self.overlays.append(new)

    def _range2d_changed(self, old, new):
        if (new is not None):
            if (self.index_mapper is not None):
                self.index_mapper.range = new.x_range
            if (self.value_mapper is not None):
                self.value_mapper.range = new.y_range
        else:
            # NOTE(review): unlike the branch above, this assumes both
            # mappers exist -- AttributeError if either is still None.
            self.index_mapper.range = None
            self.value_mapper.range = None
        if (old is not None):
            # Migrate data sources from the old range into the new one.
            for datasource in old.sources[:]:
                old.remove(datasource)
                if (new is not None):
                    new.add(datasource)
        for renderer in self.components:
            if hasattr(renderer, 'range2d'):
                renderer.range2d = new
            elif isinstance(renderer, Base2DPlot):
                renderer.index_range = new
            else:
                if hasattr(renderer, 'index_range'):
                    setattr(renderer, 'index_range', self.index_range)
                if hasattr(renderer, 'value_range'):
                    setattr(renderer, 'value_range', self.value_range)

    def _range2d_default(self):
        return DataRange2D()

    def _get_index_range(self):
        return self.range2d.x_range

    def _set_index_range(self, newrange):
        self._handle_range_changed('index', self.range2d.x_range, newrange)
        self.range2d.x_range = newrange

    def _get_value_range(self):
        return self.range2d.y_range

    def _set_value_range(self, newrange):
        self._handle_range_changed('value', self.range2d.y_range, newrange)
        self.range2d.y_range = newrange

    def _handle_range_changed(self, name, old, new):
        """Re-point the named mapper at ``new``, migrate data sources, and
        update every renderer exposing a matching ``<name>_range``."""
        mapper = getattr(self, (name + '_mapper'))
        if (mapper.range == old):
            mapper.range = new
        if (old is not None):
            for datasource in old.sources[:]:
                old.remove(datasource)
                if (new is not None):
                    new.add(datasource)
        range_name = (name + '_range')
        for renderer in self.components:
            if hasattr(renderer, range_name):
                setattr(renderer, range_name, new)

    def _get_origin(self):
        # Hook for subclasses that want a dynamically computed origin.
        return self.default_origin
class Test_UBIFS_Unpacker(TestUnpackerBase):
    """Integration tests for the UBIFS filesystem unpacker plugin."""

    def test_unpacker_selection_generic(self):
        """The 'filesystem/ubifs' MIME type must select the UBIFS plugin."""
        self.check_unpacker_selection('filesystem/ubifs', 'UBIFS')

    def test_extraction(self):
        """Unpacking the reference image yields the standard file set."""
        self.check_unpacking_of_standard_unpack_set(os.path.join(TEST_DATA_DIR, 'test.ubifs'), additional_prefix_folder='')
class OptionSeriesNetworkgraphSonificationTracksMappingVolume(Options):
    """Highcharts sonification volume-mapping options wrapper.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the @property / @<name>.setter decorators look stripped by
    extraction -- confirm against the original file. As written, each
    second definition shadows the first.
    """

    def mapFunction(self):
        # No default value.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
@pytest.fixture(scope='function')
def access_manual_webhook(db, integration_manual_webhook_config) -> ConnectionConfig:
    """Yield an AccessManualWebhook bound to the manual-webhook connection
    config, deleting it again on teardown.

    NOTE(review): the original first line, `(scope='function')`, is not
    valid Python; it looks like a stripped `@pytest.fixture(scope='function')`
    decorator, restored here -- confirm against the project's other fixtures.
    """
    manual_webhook = AccessManualWebhook.create(
        db=db,
        data={
            'connection_config_id': integration_manual_webhook_config.id,
            'fields': [
                {'pii_field': 'email', 'dsr_package_label': 'email', 'data_categories': ['user.contact.email']},
                {'pii_field': 'Last Name', 'dsr_package_label': 'last_name', 'data_categories': ['user.name']},
            ],
        },
    )
    yield manual_webhook
    try:
        manual_webhook.delete(db)
    except ObjectDeletedError:
        # The test may already have deleted the webhook; teardown is
        # best-effort, so a stale object is fine.
        pass
class TestDocClassificationDataModule(testslide.TestCase):
    """Tests building a DocClassificationDataModule over SST2 and checking
    the shapes of one training batch."""

    def setUp(self) -> None:
        super().setUp()
        # Bypass torchdata's cache hash verification so the locally bundled
        # assets are accepted without re-downloading.
        self.patcher = patch('torchdata.datapipes.iter.util.cacheholder._hash_check', return_value=True)
        self.patcher.start()

    def tearDown(self) -> None:
        self.patcher.stop()
        super().tearDown()

    def get_datamodule(self) -> DocClassificationDataModule:
        """Build a datamodule from SST2 with example sentencepiece/vocab
        assets. Note: test_dataset is intentionally unused -- the module is
        constructed with val_dataset for both val and test splits."""
        (train_dataset, val_dataset, test_dataset) = SST2(root=_DATA_DIR_PATH)
        text_transform = DocClassificationTextTransform(vocab_path=get_asset_path('vocab_example.pt'), spm_model_path=get_asset_path('spm_example.model'))
        label_transform = LabelToIndex(label_names=['0', '1'])
        return DocClassificationDataModule(train_dataset=train_dataset, val_dataset=val_dataset, test_dataset=val_dataset, transform=text_transform, label_transform=label_transform, columns=['text', 'label'], label_column='label', batch_size=8)

    def test_doc_classification_datamodule(self) -> None:
        """One training batch must produce tensors with batch size 8 and
        (for this fixture) sequence length 35."""
        datamodule = self.get_datamodule()
        self.assertIsInstance(datamodule, DocClassificationDataModule)
        dataloader = datamodule.train_dataloader()
        batch = next(iter(dataloader))
        self.assertTrue(torch.is_tensor(batch['label_ids']))
        self.assertTrue(torch.is_tensor(batch['token_ids']))
        self.assertEqual(batch['label_ids'].size(), torch.Size([8]))
        self.assertEqual(batch['token_ids'].size(), torch.Size([8, 35]))
class TestParseAmass():
    """Tests for the ParseAmassOutput luigi task using a canned amass JSON
    file and a temporary sqlite results database."""

    def setup_method(self):
        self.tmp_path = Path(tempfile.mkdtemp())
        self.scan = ParseAmassOutput(target_file=__file__, results_dir=str(self.tmp_path), db_location=str((self.tmp_path / 'testing.sqlite')))
        # Stub the task's input so run() parses the fixture JSON instead of
        # a real upstream AmassScan output.
        self.scan.input = (lambda : luigi.LocalTarget(amass_json))
        self.scan.run()

    def teardown_method(self):
        shutil.rmtree(self.tmp_path)

    def test_scan_requires(self):
        """The task must depend on AmassScan."""
        with patch('pipeline.recon.AmassScan'):
            retval = self.scan.requires()
        assert isinstance(retval, AmassScan)

    def test_scan_results(self):
        assert self.scan.output().exists()

    def test_scan_creates_results_dir(self):
        assert self.scan.results_subfolder.exists()

    def test_scan_creates_database(self):
        assert self.scan.db_mgr.location.exists()
        assert ((self.tmp_path / 'testing.sqlite') == self.scan.db_mgr.location)
class ClientKey(ModelNormal):
    """Generated OpenAPI model for a client key (client_key, signature,
    expires_at).

    NOTE(review): the bare `_property` / `_js_args_to_python_args` lines
    below look like stripped decorators (likely `@cached_property` and
    `@convert_js_args_to_python_args` in the original generated code) --
    confirm against the generator template. Code is left byte-identical.
    """

    allowed_values = {}
    validations = {}

    # NOTE(review): presumably a stripped @cached_property decorator.
    _property
    def additional_properties_type():
        # Any of these types may appear as an undeclared property value.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False

    # NOTE(review): presumably a stripped @cached_property decorator.
    _property
    def openapi_types():
        # Declared property name -> tuple of accepted types.
        return {'client_key': (str,), 'signature': (str,), 'expires_at': (datetime, none_type)}

    # NOTE(review): presumably a stripped @cached_property decorator.
    _property
    def discriminator():
        return None

    # Python attribute name -> JSON field name (identity mapping here).
    attribute_map = {'client_key': 'client_key', 'signature': 'signature', 'expires_at': 'expires_at'}
    read_only_vars = {'expires_at'}
    _composed_schemas = {}

    # NOTE(review): presumably a stripped @convert_js_args_to_python_args
    # decorator; the method also looks like it should be a @classmethod.
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from raw API data, allowing read-only vars."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Drop unknown keys when configured to discard them and the
            # model declares no additional-properties type.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    # NOTE(review): presumably a stripped @convert_js_args_to_python_args
    # decorator.
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class _S3STSToken(BaseModel):
    """Short-lived STS credential bundle granting access to one S3 path."""

    # Full s3://bucket/key URL this token is scoped to.
    cloud_path: str = Field(alias='cloudpath')
    # Temporary AWS credentials issued by STS.
    user_credential: _UserCredential = Field(alias='userCredentials')

    def get_bucket(self) -> str:
        """Bucket name: the netloc of the s3:// URL."""
        r = urllib.parse.urlparse(self.cloud_path)
        return r.netloc

    def get_s3_key(self) -> str:
        """Object key: the URL path with its leading '/' stripped."""
        r = urllib.parse.urlparse(self.cloud_path)
        return r.path[1:]

    def get_client(self) -> boto3.client:
        """Build an S3 client authenticated with the temporary credentials."""
        return boto3.client('s3', region_name=Env.current.s3_region, aws_access_key_id=self.user_credential.access_key_id, aws_secret_access_key=self.user_credential.secret_access_key, aws_session_token=self.user_credential.session_token, verify=Env.current.ssl_verify)

    def is_expired(self) -> bool:
        """True when fewer than 300 seconds of validity remain."""
        return ((self.user_credential.expiration - datetime.now(tz=self.user_credential.expiration.tzinfo)).total_seconds() < 300)
class TestHistoryBuffer(unittest.TestCase):
    """Randomized tests of HistoryBuffer: windowed values, averages, median."""

    def setUp(self) -> None:
        super().setUp()
        # Fixed seed so the random buffer contents are reproducible.
        np.random.seed(42)

    # NOTE(review): called below via the class (works as a plain function
    # attribute); probably a stripped @staticmethod decorator -- confirm.
    def create_buffer_with_init(num_values: int, buffer_len: int=1000000) -> typing.Callable[([], typing.Union[(object, np.ndarray)])]:
        """Return a factory that builds a HistoryBuffer of capacity
        ``buffer_len`` pre-filled with ``num_values`` random ints, returning
        (buffer, ground-truth values)."""
        max_value = 1000
        values: np.ndarray = np.random.randint(max_value, size=num_values)

        def create_buffer() -> typing.Union[(object, np.ndarray)]:
            buf = HistoryBuffer(buffer_len)
            for v in values:
                buf.update(v)
            # Returns (buffer, values) despite the declared annotation.
            return (buf, values)
        return create_buffer

    def test_buffer(self) -> None:
        """Buffers shorter than the stream keep only the newest values, with
        correct iteration counters, global/windowed averages and median."""
        num_iters = 100
        for _ in range(num_iters):
            gt_len = 1000
            buffer_len = np.random.randint(1, gt_len)
            create_buffer = TestHistoryBuffer.create_buffer_with_init(gt_len, buffer_len)
            (buf, gt) = create_buffer()
            (values, iterations) = zip(*buf.values())
            self.assertEqual(len(values), buffer_len)
            self.assertEqual(len(iterations), buffer_len)
            # Only the most recent buffer_len values are retained.
            self.assertTrue((values == gt[(- buffer_len):]).all())
            iterations_gt = np.arange((gt_len - buffer_len), gt_len)
            self.assertTrue((iterations == iterations_gt).all(), ', '.join((str(x) for x in iterations)))
            # Global average reflects ALL values ever pushed, not the window.
            self.assertAlmostEqual(buf.global_avg(), gt.mean())
            w = 100
            # Window cannot exceed what the buffer actually holds.
            effective_w = min(w, buffer_len)
            self.assertAlmostEqual(buf.median(w), np.median(gt[(- effective_w):]), None, ' '.join((str(x) for x in gt[(- effective_w):])))
            self.assertAlmostEqual(buf.avg(w), np.mean(gt[(- effective_w):]), None, ' '.join((str(x) for x in gt[(- effective_w):])))
def test_lower_dimension_custom_medium_to_gds(tmp_path):
    """Export a y=0 slice of a custom-medium structure to GDS and check the
    thresholded polygon area against the analytic expectation (~pi/2)."""
    box = td.Box(size=(2, 0, 2))
    num_x, num_z = 100, 80
    xs = np.linspace(0, 2, num_x)
    ys = np.array([0.0])
    zs = np.linspace(-1, 1, num_z)
    freqs = np.array([td.C_0])
    grid_x, _, grid_z, _ = np.meshgrid(xs, ys, zs, freqs, indexing='ij', sparse=True)
    # Lorentzian-style permittivity bump centred at (x, z) = (1, 0).
    eps_values = 1 + 1 / (1 + (grid_x - 1) ** 2 + grid_z ** 2)
    scalar_field = td.ScalarFieldDataArray(eps_values, coords=dict(x=xs, y=ys, z=zs, f=freqs))
    # Same field on all three diagonal permittivity components.
    dataset = td.PermittivityDataset(eps_xx=scalar_field, eps_yy=scalar_field, eps_zz=scalar_field)
    medium = td.CustomMedium(eps_dataset=dataset, name='my_medium')
    structure = td.Structure(geometry=box, medium=medium)
    out_file = str(tmp_path / 'structure-custom-y.gds')
    structure.to_gds_file(out_file, y=0, permittivity_threshold=1.5, frequency=td.C_0)
    cell = gdstk.read_gds(out_file).cells[0]
    assert np.allclose(cell.area(), np.pi / 2, atol=0.03)
def get_user(username, password):
    """Authenticate ``username``/``password`` against the user table.

    On an empty table, first bootstraps the admin account from the config.
    Returns the (detached) User on success, None on unknown user or bad
    password.
    """
    with session_scope() as session:
        # Bootstrap: seed the configured admin when no users exist yet.
        if session.query(User).count() == 0:
            bootstrap_user = User(username=config.username, is_admin=True)
            bootstrap_user.set_password(password=config.password)
            session.add(bootstrap_user)
        user = session.query(User).filter(User.username == username).one_or_none()
        if user is None or not user.check_password(password=password):
            return None
        # Detach so the returned object is usable after the session closes.
        session.expunge_all()
        return user
class OptionPlotoptionsArearangeSonificationPointgrouping(Options):
    """Highcharts arearange sonification point-grouping options wrapper.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the @property / @<name>.setter decorators look stripped by
    extraction -- confirm against the original file. As written, each
    second definition shadows the first.
    """

    def algorithm(self):
        # Default grouping algorithm.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Grouping is on by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Default grouping window, in milliseconds.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property the grouping operates on.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
def get_response_with_params(zone_key: str, url, session: (Session | None)=None, params=None):
    """GET ``url`` with optional query params.

    Uses the given session or a throwaway one. Raises ParserException
    (tagged with ``zone_key``) on any non-200 status; otherwise returns the
    Response.
    """
    http = session or Session()
    response: Response = http.get(url, params=params)
    if response.status_code == 200:
        return response
    raise ParserException(zone_key, f'Response code: {response.status_code}')
def comparetime(tstr):
    """Compare a ``left==right`` (or ``left=right``) time-spec string field-wise.

    Both sides are ':'-separated field lists (e.g. ``'Mon:10:30'``). The
    first field of each side is compared case-insensitively; comparison
    stops at the length of the shorter side. A right-hand field containing
    ``'all'`` or ``'**'`` is a wildcard and matches anything.

    Returns True when every compared field matches, False on any mismatch
    or on malformed input (missing '=', non-string, ...).
    """
    try:
        # Normalise: fields become comma-separated, and an optional '=='
        # divider collapses to a single '='.
        sides = tstr.replace(':', ',').replace('==', '=').split('=')
        left = sides[0].split(',')
        right = sides[1].split(',')
        # Only the first field (e.g. the day name) is case-insensitive.
        left[0] = left[0].lower()
        right[0] = right[0].lower()
        # zip() stops at the shorter side, matching the original behaviour.
        for lhs, rhs in zip(left, right):
            if ('all' in rhs) or ('**' in rhs):
                continue  # right-hand wildcard matches anything
            if str(rhs).strip() != str(lhs).strip():
                return False
    except Exception:
        # Was a bare `except:`; Exception keeps the "malformed input means
        # no match" contract without swallowing KeyboardInterrupt/SystemExit.
        return False
    return True
class ArrayAccessDetection(PipelineStage):
    """Decompiler pipeline stage that finds dereferences looking like array
    element accesses (base pointer + scaled index) and annotates them.

    NOTE(review): the helper methods from ``_is_pointer_variable`` down take
    no ``self``/``cls`` parameter -- they look like @staticmethod definitions
    whose decorators were stripped by extraction; confirm against the
    original file. Code is left byte-identical.
    """

    name = 'array-access-detection'

    def __init__(self):
        # Bare annotations only: actual containers are (re)created in run().
        # base pointer -> candidate dereference sites.
        self._candidates: DefaultDict[(Variable[Pointer], List[Candidate])]
        # base pointer -> observed offset-shape info (mul/const/var sets).
        self._candidate_offset_classes: Dict[(Variable, OffsetInfo)]

    def run(self, task: DecompilerTask) -> None:
        """Collect candidates from every instruction, then mark the ones
        whose offsets are consistent with a fixed element size."""
        if (not task.options.getboolean('array-access-detection.enabled')):
            return
        self._candidates = defaultdict(list)
        self._candidate_offset_classes = defaultdict(OffsetInfo)
        for instr in task.graph.instructions:
            for dereference in self._find_dereference_subexpressions(instr):
                self._add_possible_array_element_access_candidates(dereference)
        self._mark_candidates_as_array_element_accesses()

    def _add_possible_array_element_access_candidates(self, candidate: UnaryOperation) -> None:
        """Record ``*(base + offset)`` when offset parses as const/var/mul."""
        operand = candidate.operand
        if self._is_addition(operand):
            (base, offset) = self._get_base_and_offset(operand)
            if (base and offset):
                if ((offset_details := self._parse_offset(offset)) is not None):
                    (offset_class, index, element_size) = offset_details
                    self._candidates[base].append(Candidate(candidate, array_index=index, array_base=base))
                    self._update_candidate_offsets(base, offset_class, element_size)

    def _mark_candidates_as_array_element_accesses(self) -> None:
        for (base, offset_class) in self._candidate_offset_classes.items():
            array_type_size = self._get_array_type_size(base)
            self._mark_candidates_if_consistent_offsets(base, offset_class, array_type_size)

    def _get_array_type_size(self, base: Variable) -> int:
        """Pointee size in bytes, or 0 when the pointee type is unknown."""
        array_type = base.type
        if ((array_type.type == CustomType.void()) or (array_type.type == UnknownType()) or (array_type.size == 0)):
            return 0
        return self._size_in_bytes(array_type.type.size)

    def _mark_candidates_if_consistent_offsets(self, base: Variable, offset_class: OffsetInfo, available_array_type_size: Optional[int]=None) -> None:
        """Mark all candidates for ``base`` if exactly one element size was
        observed and every constant offset is a multiple of it."""
        mul = offset_class.mul
        const = offset_class.const
        var = offset_class.var
        # Exactly one of {var, mul} must hold a single element size.
        if (not self._is_valid_offset_class_combination(var, mul)):
            return
        if (len(var) == 1):
            computed_element_size = var.pop()
        else:
            computed_element_size = mul.pop()
        # When the declared pointee size is known it must agree.
        if (available_array_type_size and (computed_element_size != available_array_type_size)):
            return
        if (not const):
            self._set_array_element_access_attributes(base, computed_element_size, available_array_type_size)
            return
        # Constant offsets must land on element boundaries.
        if all([((constant % computed_element_size) == 0) for constant in const]):
            self._set_array_element_access_attributes(base, computed_element_size, available_array_type_size)

    def _set_array_element_access_attributes(self, base_variable: Variable, element_size: int, available_array_type_size: Optional[int]) -> None:
        """Attach ArrayInfo to every candidate dereference of the base."""
        for candidate in self._candidates[base_variable]:
            # confidence is True only when the element size was corroborated
            # by the declared pointee type.
            confidence = False
            index = candidate.array_index
            if isinstance(index, int):
                # Constant byte offset -> element index.
                index = int((index / element_size))
                if available_array_type_size:
                    confidence = True
            array_info = ArrayInfo(base_variable, index, confidence)
            candidate.dereference.array_info = array_info

    def _get_base_and_offset(self, operand: BinaryOperation) -> Tuple[(Optional[Variable[Pointer]], Optional[Expression])]:
        """Split ``a + b`` into (pointer base, offset), either order."""
        left = operand.left
        right = operand.right
        base = None
        offset = None
        if self._is_pointer_variable(left):
            base = left
            offset = right
        elif self._is_pointer_variable(right):
            base = right
            offset = left
        return (base, offset)

    def _parse_offset(self, offset: Expression) -> Optional[Tuple[(str, Variable, int)]]:
        """Classify an offset expression.

        Returns (class, index, element_size) where class is 'const'
        (constant byte offset), 'var' (plain index variable, size 1) or
        'mul' (variable scaled by a constant / left shift); None when the
        shape is not recognised.
        """
        if isinstance(offset, Constant):
            return ('const', offset.value, offset.value)
        if (isinstance(offset, Variable) and (not isinstance(offset.type, Pointer))):
            return ('var', offset, 1)
        if self._is_variable_cast(offset):
            return ('var', offset.operand, 1)
        if (not isinstance(offset, BinaryOperation)):
            return None
        constants = [expr for expr in offset if isinstance(expr, Constant)]
        if (len(constants) != 1):
            return None
        constant = constants[0]
        vars = [expr for expr in offset if (isinstance(expr, Variable) or self._is_variable_cast(expr))]
        if (len(vars) == 0):
            return None
        var = (vars[0] if isinstance(vars[0], Variable) else vars[0].operand)
        if (self._is_left_shift(offset) and (offset.right == constant)):
            # var << c scales by 2**c.
            return ('mul', var, (2 ** constant.value))
        if (self._is_multiplication(offset) and ((constant.value % 2) == 0)):
            return ('mul', var, constant.value)
        return None

    def _update_candidate_offsets(self, base: Variable, offset_class: str, offset_value: int) -> None:
        """Record the observed offset shape/size for ``base``."""
        if (base not in self._candidate_offset_classes):
            self._candidate_offset_classes[base] = OffsetInfo()
        if (offset_class == 'mul'):
            self._candidate_offset_classes[base].mul.add(offset_value)
        elif (offset_class == 'const'):
            self._candidate_offset_classes[base].const.add(offset_value)
        elif (offset_class == 'var'):
            self._candidate_offset_classes[base].var.add(offset_value)
        else:
            logging.warning(f'Unknown offset class {offset_class}')

    def _is_pointer_variable(expression: Expression) -> bool:
        return (isinstance(expression, Variable) and isinstance(expression.type, Pointer))

    def _find_dereference_subexpressions(expression: DataflowObject) -> Iterator[UnaryOperation]:
        """Yield every dereference nested anywhere inside ``expression``."""
        all_subexpressions = [expression]
        while (all_subexpressions and (subexpression := all_subexpressions.pop())):
            all_subexpressions.extend(subexpression)
            if (isinstance(subexpression, UnaryOperation) and (subexpression.operation == OperationType.dereference)):
                (yield subexpression)

    def _is_addition(expression: Expression) -> bool:
        return (isinstance(expression, BinaryOperation) and (expression.operation == OperationType.plus))

    def _is_multiplication(expression: Expression) -> bool:
        return (isinstance(expression, BinaryOperation) and (expression.operation in {OperationType.multiply_us, OperationType.multiply}))

    def _is_left_shift(expression: Expression) -> bool:
        return (isinstance(expression, BinaryOperation) and (expression.operation == OperationType.left_shift))

    def _size_in_bytes(size: int) -> int:
        """Convert a bit size to bytes; single-bit types are unexpected."""
        if (size == 1):
            raise RuntimeError(f'Unexpected size {size}')
        return int((size / BYTE_SIZE))

    def _is_valid_offset_class_combination(var_offsets: Set[int], mul_offsets: Set[int]) -> bool:
        # Exactly one of the two sets holds exactly one element size.
        return (((len(var_offsets) == 1) and (not mul_offsets)) ^ ((len(mul_offsets) == 1) and (not var_offsets)))

    def _is_variable_cast(expression: Expression) -> bool:
        # A 32/64-bit integer cast directly wrapping a variable.
        return (isinstance(expression, UnaryOperation) and (expression.operation == OperationType.cast) and (expression.type in {Integer.int32_t(), Integer.uint32_t(), Integer.int64_t(), Integer.uint64_t()}) and isinstance(expression.operand, Variable))
def test_approval_request():
    """Submit an approval request against the mocked Salesforce API and
    check status, HTTP method and the returned instance status."""
    testutil.add_response('login_response_200')
    testutil.add_response('api_version_response_200')
    testutil.add_response('approval_request_response_200')
    client = testutil.get_client()
    body = {'requests': [{'actionType': 'Submit', 'contextId': 'ueBV', 'nextApproverIds': ['j3h2'], 'comments': 'this is a test', 'contextActorId': 'SlY', 'processDefinitionNameOrId': 'test_account', 'skipEntryCriteria': 'true'}]}
    ar = client.approvals(body)
    req_response = ar[0]
    request_status = req_response[0].get('instanceStatus', None)
    assert (ar[1].status == 200)
    # NOTE(review): the original assertion was garbled (`ar[1]. is 'POST'`
    # -- invalid syntax). Asserting the request method looks intended;
    # confirm the attribute name on the response object.
    assert (ar[1].method == 'POST')
    assert (request_status == 'Pending')
class PandasFrameWrapper(Wrapper):
    """Wrapper exposing a pandas DataFrame of points to the map-plotting API.

    Detects latitude/longitude and time columns at construction and can
    compute a dateline-aware bounding box.
    """

    def __init__(self, frame, **kwargs):
        self.frame = frame
        # Sentinel column names: left in place when no LATLON pair matches.
        self.lat = 'cannot-find-latitude-column'
        self.lon = 'cannot-find-longitude-column'
        # Time column: prefer 'time', fall back to 'date'.
        self.time = 'time'
        if ('time' not in self.frame):
            self.time = 'date'
        # Take the first known (lat, lon) column-name pair present.
        for (lat, lon) in LATLON:
            if (lat in self.frame):
                (self.lat, self.lon) = (lat, lon)
                break

    def plot_map(self, backend):
        """Plot the frame on ``backend``, colouring by the configured column
        (defaults to the latitude column)."""
        column = backend.option('column', self.lat)
        (north, west, south, east) = self.bounding_box()
        backend.bounding_box(north=north, south=south, west=west, east=east)
        backend.plot_pandas(self.frame, self.lat, self.lon, column)

    def bounding_box(self):
        """Return [north, west, south, east] for the points.

        Longitudes are evaluated both as-is and modulo 360, and whichever
        representation yields the narrower extent wins -- this handles data
        that crosses the antimeridian.
        """
        north = self.frame[self.lat].max()
        south = self.frame[self.lat].min()
        lons1 = self.frame[self.lon]
        east1 = lons1.max()
        west1 = lons1.min()
        lons2 = (self.frame[self.lon] % 360)
        east2 = lons2.max()
        west2 = lons2.min()
        if (abs((east1 - west1)) <= abs((east2 - west2))):
            (east, west) = (east1, west1)
        else:
            (east, west) = (east2, west2)
        return [north, west, south, east]

    def to_datetime_list(self):
        """Sorted unique values of the time column."""
        return sorted(set(self.frame[self.time].values))

    def to_bounding_box(self):
        return self.bounding_box()
class reiterable(_coconut_has_iter):
    """Coconut runtime wrapper making a one-shot iterator safely re-iterable.

    Each consumption tees the underlying iterator (``_coconut_tee``) so the
    original stream is never exhausted; wrapping an already-reiterable
    object returns it unchanged.
    """
    __slots__ = ()

    def __new__(cls, iterable):
        # Idempotent: reiterable objects pass through untouched.
        if _coconut.isinstance(iterable, _coconut.reiterables):
            return iterable
        return _coconut.super(_coconut_reiterable, cls).__new__(cls, iterable)

    def get_new_iter(self):
        """Tee the stored iterator; keep one branch, hand out the other."""
        (self.iter, new_iter) = _coconut_tee(self.iter)
        return new_iter

    def __iter__(self):
        return _coconut.iter(self.get_new_iter())

    def __repr__(self):
        return ('reiterable(%s)' % (_coconut.repr(self.get_new_iter()),))

    def __reduce__(self):
        # Pickle via the underlying iterator.
        return (self.__class__, (self.iter,))

    def __copy__(self):
        return self.__class__(self.get_new_iter())

    def __getitem__(self, index):
        return _coconut_iter_getitem(self.get_new_iter(), index)

    def __reversed__(self):
        return _coconut_reversed(self.get_new_iter())

    def __len__(self):
        # Only sized underlying iterables support len().
        if (not _coconut.isinstance(self.iter, _coconut.abc.Sized)):
            return _coconut.NotImplemented
        return _coconut.len(self.get_new_iter())

    def __contains__(self, elem):
        return (elem in self.get_new_iter())

    def count(self, elem):
        return self.get_new_iter().count(elem)

    def index(self, elem):
        return self.get_new_iter().index(elem)
def test_award_endpoint_parent_award_no_submissions(client, awards_and_transactions):
    """Each award's `parent_award` payload matches the expected (slug-less)
    structure; IDV-less awards report None."""
    # (award id, lazily-evaluated expected parent_award payload)
    cases = [
        ('7', lambda: expected_contract_award_parent(include_slug=False)),
        ('10', lambda: None),
        ('8', lambda: expected_idv_award_parent(include_slug=False)),
        ('9', lambda: None),
    ]
    for award_id, expected in cases:
        resp = client.get(f'/api/v2/awards/{award_id}/')
        assert resp.status_code == status.HTTP_200_OK
        parent = json.loads(resp.content.decode('utf-8'))['parent_award']
        assert parent == expected()
@pytest.mark.parametrize('language, setup_commands', [('py', ['pip install -r requirements.txt', 'python main.py', 'pytest']), ('js', ['npm install', 'node app.js', 'npm test'])])
def test_getting_started_with_language_setup(language, setup_commands, config, config_helper):
    """getting_started() should emit the per-language setup command tuple.

    NOTE(review): the original first line began with a bare `.parametrize(`,
    which is not valid Python; it looks like a stripped
    `@pytest.mark.parametrize` decorator, restored here -- confirm against
    the project's other tests.
    """
    deps = ['pytest', 'tensorflow', 'python']
    summaries = [(f'module.{language}', 'summary')]
    result = getting_started(config, config_helper, deps, summaries)
    assert (result == tuple(setup_commands))
def test_import():
    """Smoke test: every submodule listed in proteus.__all__ must import.

    Failures are reported per-module (with a traceback) but the loop keeps
    going, so one broken submodule doesn't hide the others.
    """
    import proteus
    successful_import = True
    for m in proteus.__all__:
        try:
            # Return value intentionally discarded -- we only care that the
            # import succeeds.
            __import__(('proteus.' + m), fromlist=['proteus'])
        except Exception:
            # Was a bare `except:`; Exception avoids swallowing
            # KeyboardInterrupt/SystemExit while still catching any
            # import-time error.
            (_, _, exc_traceback) = sys.exc_info()
            print(repr(traceback.extract_tb(exc_traceback)))
            print('Failed to import proteus.', m)
            successful_import = False
    assert successful_import
def build_pdf_report(firmware: Firmware, folder: Path) -> Path:
    """Render a PDF report for ``firmware`` via the fkiecad/fact_pdf_report
    docker image and return the path of the generated PDF.

    The interface folder is bind-mounted into the container at
    /tmp/interface/. Raises RuntimeError when the container cannot be run
    (docker error / timeout) or exits with a non-zero status.
    """
    _initialize_subfolder(folder, firmware)
    try:
        result = run_docker_container('fkiecad/fact_pdf_report', combine_stderr_stdout=True, mem_limit='512m', mounts=[Mount('/tmp/interface/', str(folder), type='bind')])
    except (DockerException, TimeoutError):
        logging.error('Failed to execute pdf generator.')
        raise RuntimeError('Could not create PDF report')
    try:
        result.check_returncode()
    except CalledProcessError as err:
        # Container ran but failed: log its exit code and combined output.
        logging.error(f'''Failed to execute pdf generator with code {err.returncode}:
{result.stdout}''')
        raise RuntimeError('Could not create PDF report')
    return _find_pdf(folder)
class OptionPlotoptionsVectorClusterZonesMarker(Options):
    """Highcharts vector cluster-zone marker options wrapper.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the @property / @<name>.setter decorators look stripped by
    extraction -- confirm against the original file. As written, each
    second definition shadows the first.
    """

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Default marker outline colour.
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        return self._config_get(15)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def symbol(self):
        return self._config_get('cluster')

    def symbol(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
def test_list_processes():
    """DockerImage.list_processes should return the names of running containers."""
    expected_name_list = ['prefix/image_1', 'prefix/image_2', 'prefix/image_3']
    fake_containers = []
    for container_name in expected_name_list:
        # Mock(name=...) would set the mock's own name, so assign afterwards.
        fake = mock.Mock()
        fake.name = container_name
        fake_containers.append(fake)
    image_filter = {'status': 'running'}
    new_docker_image = docker_image.DockerImage()
    patcher = mock.patch(
        'docker.models.containers.ContainerCollection.list',
        mock.MagicMock(return_value=fake_containers),
    )
    with patcher as image_list_mock:
        image_list = new_docker_image.list_processes()
        image_list_mock.assert_called_once_with(filters=image_filter)
        assert image_list == expected_name_list
def extractNanashitranslationsWordpressCom(item):
    """Parse a nanashitranslations.wordpress.com feed item into a release.

    Returns None for previews or items without volume/chapter information,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [
        ('Miniature Medical Goddess', 'Miniature Medical Goddess', 'translated'),
        ("The Wolf Lord's Lady", "The Wolf Lord's Lady", 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def configure_connection_params(arg_parser, args, cfg):
    """Copy target-host and client-option CLI arguments into the config."""
    hosts = opts.TargetHosts(args.target_hosts)
    cfg.add(config.Scope.applicationOverride, 'client', 'hosts', hosts)
    options = opts.ClientOptions(args.client_options, target_hosts=hosts)
    cfg.add(config.Scope.applicationOverride, 'client', 'options', options)
    # Multi-cluster setups must define client options for exactly the same
    # cluster keys as the target hosts.
    if set(hosts.all_hosts) != set(options.all_client_options):
        arg_parser.error('--target-hosts and --client-options must define the same keys for multi cluster setups.')
# NOTE(review): the bare `.django_db` below is a stripped
# `@pytest.mark.django_db` decorator — confirm against the original file.
.django_db
def test_federal_account_count_specific(client, agency_account_data):
    """Agency 008 should report one federal and one treasury account in FY2017 and FY2018."""
    resp = client.get(url.format(code='008', filter='?fiscal_year=2017'))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.data['federal_account_count'] == 1)
    assert (resp.data['treasury_account_count'] == 1)
    resp = client.get(url.format(code='008', filter='?fiscal_year=2018'))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.data['federal_account_count'] == 1)
    assert (resp.data['treasury_account_count'] == 1) |
class Store(StoreT[(KT, VT)], Service):
    """Base class for table storage drivers (in-memory semantics).

    Handles key/value (de)serialization through the app's registered codecs
    and provides no-op defaults for the persistence-related hooks.
    """
    def __init__(self, url: Union[(str, URL)], app: AppT, table: CollectionT, *, table_name: str='', key_type: ModelArg=None, value_type: ModelArg=None, key_serializer: CodecArg=None, value_serializer: CodecArg=None, options: Optional[Mapping[(str, Any)]]=None, **kwargs: Any) -> None:
        Service.__init__(self, **kwargs)
        self.url = URL(url)
        self.app = app
        self.table = table
        # Fall back to the table's own name when no explicit name is given.
        self.table_name = (table_name or self.table.name)
        self.key_type = key_type
        self.value_type = value_type
        self.key_serializer = key_serializer
        self.value_serializer = value_serializer
        self.options = options
    def __hash__(self) -> int:
        # Identity hash: stores are unique service objects, not value objects.
        return object.__hash__(self)
    def persisted_offset(self, tp: TP) -> Optional[int]:
        """Return the last persisted offset for *tp* (unsupported here)."""
        raise NotImplementedError('In-memory store only, does not persist.')
    def set_persisted_offset(self, tp: TP, offset: int) -> None:
        """No-op: this store does not persist offsets."""
        ...
    async def need_active_standby_for(self, tp: TP) -> bool:
        """Always True: an in-memory store needs an active standby to recover."""
        return True
    async def on_rebalance(self, assigned: Set[TP], revoked: Set[TP], newly_assigned: Set[TP], generation_id: int=0) -> None:
        """Called on cluster rebalance; nothing to do for this store."""
        ...
    async def on_recovery_completed(self, active_tps: Set[TP], standby_tps: Set[TP]) -> None:
        """Called when recovery completes; nothing to do for this store."""
        ...
    def _encode_key(self, key: KT) -> bytes:
        """Serialize *key* to bytes; raise TypeError if it serializes to None."""
        key_bytes = self.app.serializers.dumps_key(self.key_type, key, serializer=self.key_serializer)
        if (key_bytes is None):
            raise TypeError('Table key cannot be None')
        return key_bytes
    def _encode_value(self, value: VT) -> Optional[bytes]:
        """Serialize *value* to bytes (may legitimately be None)."""
        return self.app.serializers.dumps_value(self.value_type, value, serializer=self.value_serializer)
    def _decode_key(self, key: Optional[bytes]) -> KT:
        """Deserialize a table key from bytes."""
        return cast(KT, self.app.serializers.loads_key(self.key_type, key, serializer=self.key_serializer))
    def _decode_value(self, value: Optional[bytes]) -> VT:
        """Deserialize a table value from bytes."""
        return self.app.serializers.loads_value(self.value_type, value, serializer=self.value_serializer)
    def _repr_info(self) -> str:
        # Extra info included in the service repr.
        return f'table_name={self.table_name} url={self.url}'
    def label(self) -> str:
        # NOTE(review): upstream this is likely a @property; the decorator may
        # have been stripped during extraction — confirm.
        return f'{type(self).__name__}: {self.url}' |
class TestCubicBezier(unittest.TestCase):
    """Tests for the cubic_bezier easing-curve factory."""
    def test_x_range(self):
        # Control-point x coordinates must be within [0, 1].
        with self.assertRaises(ValueError):
            cubic_bezier(1.2, 0, 1, 1)
        with self.assertRaises(ValueError):
            cubic_bezier((- 0.2), 0, 1, 1)
        with self.assertRaises(ValueError):
            cubic_bezier(0, 0, 1.2, 1)
        with self.assertRaises(ValueError):
            cubic_bezier(0, 0, (- 0.2), 1)
    def test_bisect(self):
        # NOTE(review): exact float equality against 0.0 assumes the bisection
        # lands on an exact value at t=0.43 — confirm this is intentional.
        ease_in_out_expo = cubic_bezier(1.0, 0.0, 0.0, 1.0)
        self.assertEqual(ease_in_out_expo(0.43), 0.) |
class ZK_helper(object):
    """Connectivity helper for a ZK attendance device at (ip, port)."""
    def __init__(self, ip, port=4370):
        self.address = (ip, port)
        self.ip = ip
        self.port = port
    def test_ping(self):
        """Return True if the device answers a single ICMP ping."""
        import subprocess, platform
        # Windows uses '-n <count>'; POSIX uses '-c <count>' plus a 5 s timeout.
        ping_str = ('-n 1' if (platform.system().lower() == 'windows') else '-c 1 -W 5')
        args = (((('ping ' + ' ') + ping_str) + ' ') + self.ip)
        # On POSIX the command string must go through the shell.
        need_sh = (False if (platform.system().lower() == 'windows') else True)
        return (subprocess.call(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=need_sh) == 0)
    def test_tcp(self):
        """Attempt a TCP connection; return connect_ex's errno (0 on success)."""
        self.client = socket(AF_INET, SOCK_STREAM)
        self.client.settimeout(10)
        res = self.client.connect_ex(self.address)
        self.client.close()
        return res
    def test_udp(self):
        # NOTE(review): this method appears truncated here — a socket is
        # created and a timeout set, but nothing is sent or returned.
        # Confirm against the upstream source.
        self.client = socket(AF_INET, SOCK_DGRAM)
        self.client.settimeout(10) |
# NOTE(review): '.parametrize' below is a stripped '@pytest.mark.parametrize'
# decorator — confirm against the original file.
.parametrize('catalog_instance_no_server_process', [KFP_COMPONENT_CACHE_INSTANCE], indirect=True)
def test_export_kubeflow_format_option(jp_environ, kubeflow_pipelines_runtime_instance, catalog_instance_no_server_process):
    """`pipeline export --format` accepts yaml/py (default yaml) and rejects other values."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        cwd = Path.cwd().resolve()
        prepare_export_work_dir(str(cwd), ((Path(__file__).parent / 'resources') / 'pipelines'))
        pipeline_file = 'kfp_3_node_custom.pipeline'
        pipeline_file_path = (cwd / pipeline_file)
        assert (pipeline_file_path.is_file() is True)
        # None exercises the default output format (yaml).
        for supported_export_format_value in [None, 'py', 'yaml']:
            if supported_export_format_value:
                expected_output_file = pipeline_file_path.with_suffix(f'.{supported_export_format_value}')
            else:
                expected_output_file = pipeline_file_path.with_suffix('.yaml')
            # Remove any leftover output so the export result is unambiguous.
            if expected_output_file.is_file():
                expected_output_file.unlink()
            options = ['export', str(pipeline_file_path), '--runtime-config', kubeflow_pipelines_runtime_instance]
            if supported_export_format_value:
                options.append('--format')
                options.append(supported_export_format_value)
            result = runner.invoke(pipeline, options)
            assert (result.exit_code == 0), result.output
            assert (f"was exported to '{str(expected_output_file)}" in result.output), result.output
        # Invalid formats must fail CLI argument validation (exit code 2).
        for invalid_export_format_value in ['humpty', 'dumpty']:
            options = ['export', str(pipeline_file_path), '--runtime-config', kubeflow_pipelines_runtime_instance, '--format', invalid_export_format_value]
            result = runner.invoke(pipeline, options)
            assert (result.exit_code == 2), result.output
            assert ("Invalid value for --format: Valid export formats are ['yaml', 'py']." in result.output), result.output |
class PluginInfo():
    """Metadata parsed from an errbot `.plug` plugin configuration file.

    NOTE(review): upstream this is a dataclass, and load/load_file/parse take
    no `self` (likely @staticmethod) — the decorators appear to have been
    stripped during extraction.  Confirm before modifying.
    """
    name: str
    module: str
    doc: str
    core: bool
    python_version: VersionType
    errbot_minversion: VersionType
    errbot_maxversion: VersionType
    dependencies: List[str]
    location: Path = None
    def load(plugfile_path: Path) -> 'PluginInfo':
        """Read a .plug file from disk and return the parsed PluginInfo."""
        with plugfile_path.open(encoding='utf-8') as plugfile:
            return PluginInfo.load_file(plugfile, plugfile_path)
    def load_file(plugfile, location: Path) -> 'PluginInfo':
        """Parse an open .plug file object, recording *location* on the result."""
        cp = ConfigParser()
        cp.read_file(plugfile)
        pi = PluginInfo.parse(cp)
        pi.location = location
        return pi
    def parse(config: ConfigParser) -> 'PluginInfo':
        """Build a PluginInfo from a parsed .plug ConfigParser.

        Raises ConfigParserError for malformed version strings.
        """
        name = config.get('Core', 'Name')
        module = config.get('Core', 'Module')
        core = (config.get('Core', 'Core', fallback='false').lower() == 'true')
        doc = config.get('Documentation', 'Description', fallback=None)
        python_version = config.get('Python', 'Version', fallback=None)
        if python_version:
            # '2+' and '3' both normalize to 3.0.0; plain '2' to 2.0.0.
            if (python_version in ('2+', '3')):
                python_version = (3, 0, 0)
            elif (python_version == '2'):
                python_version = (2, 0, 0)
            else:
                try:
                    python_version = tuple(version2tuple(python_version)[0:3])
                except ValueError as ve:
                    raise ConfigParserError(f'Invalid Python Version format: {python_version} ({ve})')
        min_version = config.get('Errbot', 'Min', fallback=None)
        max_version = config.get('Errbot', 'Max', fallback=None)
        try:
            if min_version:
                min_version = version2tuple(min_version)
        except ValueError as ve:
            raise ConfigParserError(f'Invalid Errbot min version format: {min_version} ({ve})')
        try:
            if max_version:
                max_version = version2tuple(max_version)
        except ValueError as ve:
            raise ConfigParserError(f'Invalid Errbot max version format: {max_version} ({ve})')
        depends_on = config.get('Core', 'DependsOn', fallback=None)
        deps = ([name.strip() for name in depends_on.split(',')] if depends_on else [])
        return PluginInfo(name, module, doc, core, python_version, min_version, max_version, deps)
    def load_plugin_classes(self, base_module_name: str, baseclass: Type):
        """Import this plugin's module and return (name, class) pairs of *baseclass* subclasses."""
        module_name = ((base_module_name + '.') + self.module)
        spec = spec_from_file_location(module_name, (self.location.parent / (self.module + '.py')))
        # 'modu1e' (with a digit) — presumably named to avoid confusion with
        # the 'module' attribute; keep as-is.
        modu1e = module_from_spec(spec)
        spec.loader.exec_module(modu1e)
        sys.modules[module_name] = modu1e
        def is_plugin(member):
            # A plugin class derives from baseclass but is not baseclass itself.
            return (inspect.isclass(member) and issubclass(member, baseclass) and (member != baseclass))
        plugin_classes = inspect.getmembers(modu1e, is_plugin)
        return plugin_classes |
class AbstractStateBackend():
    """Base class for persisting a boolean state as the string '0' or '1'.

    NOTE(review): the from_* converters take no `self`/`cls`; upstream they
    are probably @staticmethod — decorators appear stripped here.
    """
    def from_bool_to_str_value(value):
        """Convert a boolean to '0'/'1', validating the result."""
        value = str(int(value))
        if (value not in ['0', '1']):
            raise ValueError('state value is not 0|1')
        return value
    def from_str_to_bool_value(value):
        """Convert a '0'/'1' string (surrounding whitespace tolerated) to bool."""
        value = value.strip()
        if (value not in ['0', '1']):
            raise ValueError('state value is not 0|1')
        value = bool(int(value))
        return value
    def get_value(self):
        """Return the stored state; must be implemented by subclasses."""
        raise NotImplementedError()
    def set_value(self, value):
        """Store the state; must be implemented by subclasses."""
        raise NotImplementedError() |
class TLSHInterface(ReadOnlyDbInterface):
    """Read-only database access for TLSH fuzzy-hash lookups."""
    def get_all_tlsh_hashes(self) -> list[tuple[(str, str)]]:
        """Return (uid, tlsh) rows for all 'file_hashes' results with a TLSH value."""
        with self.get_read_only_session() as session:
            # `!= None` is intentional: SQLAlchemy renders it as `IS NOT NULL`.
            # Do not "fix" it to `is not None`, which would break the query.
            query = select(AnalysisEntry.uid, AnalysisEntry.result['tlsh']).filter((AnalysisEntry.plugin == 'file_hashes')).filter((AnalysisEntry.result['tlsh'] != None))
            return list(session.execute(query)) |
def boolean(**kwargs):
    """Build an Elasticsearch `bool` query clause, flattening nested booleans.

    Expects exactly one keyword argument whose name is the occurrence type
    ('must', 'filter', 'should', 'must_not', ...) and whose value is a child
    clause dict or a list of them.  Nested `bool` children are merged into
    the parent wherever that preserves semantics.
    """
    assert (len(kwargs) == 1)
    [(boolean_type, children)] = kwargs.items()
    if (not isinstance(children, list)):
        children = [children]
    dsl = defaultdict(list)
    if (boolean_type in ('must', 'filter')):
        for child in children:
            if (list(child) == ['bool']):
                # The child is itself a bool query: merge its clauses upward.
                for (child_type, child_terms) in child['bool'].items():
                    if (child_type in ('must', 'filter')):
                        dsl[child_type].extend(child_terms)
                    elif (child_type == 'should'):
                        if ('should' not in dsl):
                            # First should-group can be inlined directly.
                            dsl[child_type].extend(child_terms)
                        else:
                            # A second should-group must stay wrapped so the
                            # groups' minimum_should_match don't mix.
                            dsl[boolean_type].append(boolean(should=child_terms))
                    elif (child_type == 'must_not'):
                        dsl[child_type].extend(child_terms)
                    elif (child_type != 'minimum_should_match'):
                        raise ValueError('Unknown term {}: {}'.format(child_type, child_terms))
            else:
                dsl[boolean_type].append(child)
    elif (boolean_type == 'should'):
        for child in children:
            # should-of-should flattens as long as the child carries nothing
            # besides 'should'/'minimum_should_match'.
            if ((list(child) == ['bool']) and set(child['bool']).issubset({'should', 'minimum_should_match'})):
                dsl['should'].extend(child['bool']['should'])
            else:
                dsl[boolean_type].append(child)
    elif ((boolean_type == 'must_not') and (len(children) == 1)):
        child = children[0]
        if ((list(child) == ['bool']) and (list(child['bool']) in (['filter'], ['must']))):
            # must_not(bool(filter/must(X))) simplifies to must_not(X).
            (negated,) = child['bool'].values()
            dsl = {'must_not': negated}
        else:
            dsl = {'must_not': children}
    else:
        dsl = dict(kwargs)
    if ('should' in dsl):
        # Require at least one should-clause to match.
        dsl.update(minimum_should_match=1)
    dsl = {'bool': dict(dsl)}
    return dsl |
class OptionSeriesVectorSonificationContexttracksMappingVolume(Options):
    """Accessors for series.vector.sonification.contextTracks.mapping.volume options.

    NOTE(review): each option appears as two consecutive same-named defs
    (getter then setter); upstream these are almost certainly
    @property/@<name>.setter pairs whose decorators were stripped during
    extraction — confirm against the generated original before editing.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False) |
def test_path_curve():
    """Draw two mirrored Bezier path segments and spot-check pixel colours."""
    white = Color('WHITE')
    red = Color('RED')
    blue = Color('BLUE')
    with Image(width=50, height=50, background=white) as img:
        with Drawing() as draw:
            draw.fill_color = blue
            draw.stroke_color = red
            draw.path_start()
            # Relative move to the midpoint of the left edge.
            draw.path_move(to=(0, 25), relative=True)
            draw.path_curve(to=(25, 25), controls=((0, 0), (25, 0)))
            draw.path_curve(to=(25, 0), controls=((0, 25), (25, 25)), relative=True)
            draw.path_finish()
            draw.draw(img)
            assert (img[(25, 25)] == red)
            # NOTE(review): comparing img[(35, 35)] with itself is redundant —
            # one operand was probably meant to be a different pixel (e.g.
            # (15, 15)); confirm against the upstream test before changing.
            assert (img[(35, 35)] == img[(35, 35)] == blue)
            assert (img[(35, 15)] == img[(15, 35)] == white) |
def device_copy(dst_tensor: Tensor, src_tensor: Tensor, dst_idx: int) -> str:
    """Emit a C++ statement copying src_tensor's buffer into params_[dst_idx].

    The byte count is rendered symbolically: static dims become literals,
    dynamic dims keep their names, all joined with '*'.
    """
    src_name = src_tensor._attrs['name']
    dst_ptr = f'params_[{dst_idx}].ptr'
    # Seed with '1' so the product expression is never empty.
    dims = ['1']
    for dim in dst_tensor._attrs['shape']:
        if isinstance(dim, IntImm):
            dims.append(str(dim._attrs['values'][0]))
        else:
            dims.append(dim._attrs['name'])
    numel = '*'.join(dims)
    size = f"{numel} * {get_dtype_size(dst_tensor._attrs['dtype'])}"
    return f'DEVICE_CHECK(DeviceToDeviceCopy({dst_ptr}, {src_name}, {size}, stream));'
def init_app():
    """Create and configure the uchan Flask app, celery worker, and services.

    Sets the module-level `app`, `celery` and `config` globals and returns
    the configured Flask app.  Intended to be called once at startup.

    :raises Exception: if an unknown file CDN type is configured.
    """
    global app, celery, config
    config = UchanConfig()
    setup_logging()
    import uchan.lib.database as database
    database.init_db()
    celery = Celery('uchan', loader=CustomCeleryLoader)
    # BUGFIX: the broker URL was missing the '@' separating the credentials
    # from the host (amqp://user:password@host:port/).
    celery.config_from_object({'result_backend': 'rpc://', 'task_serializer': 'pickle', 'accept_content': ['pickle'], 'result_serializer': 'pickle', 'broker_url': f'amqp://{config.broker_user}:{config.broker_password}@{config.broker_host}:{config.broker_port}/'})
    from uchan.flask import CustomFlaskApp, create_web_app
    app = CustomFlaskApp(__name__, template_folder='view/templates', static_folder=None)
    create_web_app(config, app)
    database.register_teardown(app)
    # Sessions are stored in the cache rather than client-side cookies.
    from uchan.flask.custom_session import CustomSessionInterface
    from uchan.lib.cache import cache
    app.session_interface = CustomSessionInterface(cache)
    import uchan.view
    from uchan.view import assets
    assets.setup_assets(app, config.asset_watch_for_changes)
    import uchan.filter.app_filters
    from uchan.view.mod import mod
    app.register_blueprint(mod)
    from uchan.view.api import api
    app.register_blueprint(api)
    from uchan.lib.service.file_service import LocalCdn
    if (config.file_cdn_type == 'local'):
        cdn = LocalCdn(config.local_cdn_path, config.local_cdn_web_path)
    else:
        raise Exception('Unknown file cdn type')
    from uchan.lib.service import file_service
    file_service.init(config.upload_queue_path, cdn)
    import uchan.lib.tasks
    from uchan.lib import plugin_manager
    plugin_manager.load_plugins(['captcha2'])
    return app
def test_param_convention_mars_1():
    """The mars convention should normalize parameter names (e.g. t2m -> 2t)."""
    # NOTE(review): the bare call-arguments line below is residue of a stripped
    # decorator on `values_mars` (the `convention=` keyword is not valid inside
    # a plain tuple literal) — restore the decorator from the original file.
    ('parameter', 'variable-list', convention='mars')
    def values_mars(parameter):
        return parameter
    assert (values_mars(parameter='tp') == ['tp'])
    assert (values_mars(parameter='2t') == ['2t'])
    assert (values_mars(parameter='t2m') == ['2t'])
    assert (values_mars(parameter=['t2m', 'tp']) == ['2t', 'tp'])
    assert (values_mars(parameter='whatever') == ['whatever']) |
def _lua_to_python(lval, return_status=False):
    """Convert a Lunatic-python Lua value into its Python equivalent.

    Tables become lists (with special handling for redis-style {'ok': ...}
    / {'err': ...} status tables when *return_status* is set), userdata
    becomes str, and scalar types map directly.

    NOTE(review): this references Script._lua_to_python, so upstream it is
    probably a @staticmethod of Script whose decorator was stripped — confirm.
    """
    import lua
    lua_globals = lua.globals()
    if (lval is None):
        return None
    if (lua_globals.type(lval) == 'table'):
        pval = []
        for i in lval:
            if return_status:
                if (i == 'ok'):
                    # {'ok': status} represents a redis status reply.
                    return lval[i]
                if (i == 'err'):
                    # {'err': message} represents a redis error reply.
                    raise ResponseError(lval[i])
            pval.append(Script._lua_to_python(lval[i]))
        return pval
    elif isinstance(lval, six.integer_types):
        # Normalize to the widest integer type (long on py2, int on py3).
        return six.integer_types[(- 1)](lval)
    elif isinstance(lval, float):
        return float(lval)
    elif (lua_globals.type(lval) == 'userdata'):
        return str(lval)
    elif (lua_globals.type(lval) == 'string'):
        return lval
    elif (lua_globals.type(lval) == 'boolean'):
        return bool(lval)
    raise RuntimeError(('Invalid Lua type: ' + str(lua_globals.type(lval)))) |
class Frontend(Common):
    """Pydantic model for the frontend section of the configuration."""
    model_config = ConfigDict(extra='forbid')
    class Authentication(BaseModel):
        """Authentication-related frontend settings."""
        model_config = ConfigDict(extra='forbid')
        enabled: bool
        user_database: str
        password_salt: str
        communication_timeout: int = 60
    authentication: Frontend.Authentication
    results_per_page: int
    # How many of the most recently added firmwares the start page lists.
    number_of_latest_firmwares_to_display: int = 10
    ajax_stats_reload_time: int
    max_elements_per_chart: int = 10
    radare2_url: str |
def get_dbt_results(project_dir: str, config: RuntimeConfig) -> Optional[RunResultsArtifact]:
    """Load dbt's run_results.json artifact from the configured target path.

    Returns None when the artifact cannot be read (DbtRuntimeError);
    re-raises IncompatibleSchemaError with the file name attached.
    """
    results_path = os.path.join(config.target_path, 'run_results.json')
    try:
        return RunResultsArtifact.read_and_check_versions(results_path)
    except IncompatibleSchemaError as exc:
        exc.add_filename(results_path)
        raise
    except DbtRuntimeError:
        # Logger.warn is a deprecated alias of Logger.warning — use the
        # canonical method.
        LOGGER.warning('Could not read dbt run_results artifact')
        return None
def _compare_config(ref, other, path=None):
    """Yield a _Difference for every key where *other* diverges from *ref*.

    Recurses into nested DictConfig values; keys present only in *other*
    are reported with the NotThere sentinel as the reference value.
    The *path* list tracks the current key path during recursion.
    """
    # BUGFIX: `path=[]` was a mutable default argument shared across calls;
    # use None and create a fresh list per top-level invocation.
    if path is None:
        path = []
    keys = sorted(ref.keys())
    remaining = sorted(set(other.keys()) - set(ref.keys()))
    path.append(None)
    for key in keys:
        path[-1] = key
        ref_value = ref[key]
        assert key in other, f"XP config shouldn't be missing any key. Missing key {key}"
        other_value = other[key]
        if isinstance(ref_value, DictConfig):
            assert isinstance(other_value, DictConfig), f'Structure of config should be identical between XPs. Wrong type for {key}, expected DictConfig, got {type(other_value)}.'
            yield from _compare_config(ref_value, other_value, path)
        elif other_value != ref_value:
            yield _Difference(list(path), key, ref, other, ref_value, other_value)
    for key in remaining:
        path[-1] = key
        other_value = other[key]
        # Key only exists in `other`: report NotThere for the reference side.
        yield _Difference(list(path), key, ref, other, NotThere, other_value)
    path.pop(-1)
def extractBoredtransWordpressCom(item):
    """Parse a boredtrans.wordpress.com feed item into a release message.

    Returns None for previews or items without volume/chapter info, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_tags = {'PRC': ('PRC', 'translated'), 'Loiterous': ('Loiterous', 'oel')}
    for tagname, (name, tl_type) in known_tags.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_table_multiline(header):
    """Multiline (list-valued) cells are rendered row-by-row with padding."""
    rows = [
        ('hello', ['foo', 'bar', 'baz'], 'world'),
        ('hello', 'world', ['world 1', 'world 2']),
    ]
    rendered = table(rows, header=header, divider=True, multiline=True)
    expected = '\nCOL A COL B COL 3 \n----- ----- -------\nhello foo world \n bar \n baz \n \nhello world world 1\n world 2\n'
    assert rendered == expected
def run_component_modeler(monkeypatch, modeler: ComponentModeler):
    """Run *modeler* with its simulation batch replaced by emulated runs.

    Patches ComponentModeler._run_sims to return pre-computed emulated
    results, then returns the resulting scattering matrix.
    """
    emulated_batch = {name: run_emulated(sim) for name, sim in modeler.sim_dict.items()}
    monkeypatch.setattr(ComponentModeler, '_run_sims', lambda self, path_dir: emulated_batch)
    return modeler.run(path_dir=modeler.path_dir)
class ExceptionsTestCase(unittest.TestCase):
    """String-representation tests for the webdav client exception types."""
    def test_option_not_valid(self):
        exception = OptionNotValid('Name', 'Value', 'Namespace/')
        self.assertEqual('Option (Namespace/Name=Value) have invalid name or value', exception.__str__())
    def test_local_resource_not_found(self):
        exception = LocalResourceNotFound('Path')
        self.assertEqual('Local file: Path not found', exception.__str__())
    def test_remote_resource_not_found(self):
        exception = RemoteResourceNotFound('Path')
        self.assertEqual('Remote resource: Path not found', exception.__str__())
    def test_remote_parent_not_found(self):
        exception = RemoteParentNotFound('Path')
        self.assertEqual('Remote parent for: Path not found', exception.__str__())
    def test_method_not_supported(self):
        exception = MethodNotSupported('HEAD', 'Server')
        self.assertEqual("Method 'HEAD' not supported for Server", exception.__str__())
    def test_connection_exception(self):
        exception = ConnectionException(MethodNotSupported('HEAD', 'Server'))
        self.assertEqual("Method 'HEAD' not supported for Server", exception.__str__())
    def test_no_connection(self):
        exception = NoConnection('Server')
        self.assertEqual('No connection with Server', exception.__str__())
    def test_not_connection_legacy(self):
        exception = NotConnection('Server')
        self.assertEqual('No connection with Server', exception.__str__())
    def test_response_error_code(self):
        # BUGFIX: the original call `ResponseErrorCode(' 502, 'Service
        # Unavailable')` was a syntax error; reconstructed as
        # (url='', code=502, message='Service Unavailable') to match the
        # expected string asserted below — confirm against upstream.
        exception = ResponseErrorCode('', 502, 'Service Unavailable')
        self.assertEqual('Request to failed with code 502 and message: Service Unavailable', exception.__str__())
    def test_not_enough_space(self):
        exception = NotEnoughSpace()
        self.assertEqual('Not enough space on the server', exception.__str__())
class TestUserRecord():
    """Tests for firebase auth.UserRecord, UserMetadata and ExportedUserRecord.

    NOTE(review): the bare '.parametrize(...)' lines are stripped
    '@pytest.mark.parametrize' decorators — confirm against the original file.
    """
    .parametrize('data', (INVALID_DICTS + [{}, {'foo': 'bar'}]))
    def test_invalid_record(self, data):
        with pytest.raises(ValueError):
            auth.UserRecord(data)
    def test_metadata(self):
        metadata = auth.UserMetadata(10, 20)
        assert (metadata.creation_timestamp == 10)
        assert (metadata.last_sign_in_timestamp == 20)
        metadata = auth.UserMetadata()
        assert (metadata.creation_timestamp is None)
        assert (metadata.last_sign_in_timestamp is None)
    def test_exported_record(self):
        user = auth.ExportedUserRecord({'localId': 'user', 'passwordHash': 'passwordHash', 'salt': 'passwordSalt'})
        assert (user.uid == 'user')
        assert (user.password_hash == 'passwordHash')
        assert (user.password_salt == 'passwordSalt')
    def test_exported_record_no_password(self):
        user = auth.ExportedUserRecord({'localId': 'user'})
        assert (user.uid == 'user')
        assert (user.password_hash is None)
        assert (user.password_salt is None)
    def test_exported_record_empty_password(self):
        user = auth.ExportedUserRecord({'localId': 'user', 'passwordHash': '', 'salt': ''})
        assert (user.uid == 'user')
        assert (user.password_hash == '')
        assert (user.password_salt == '')
    def test_redacted_passwords_cleared(self):
        # A base64-encoded 'REDACTED' hash should be exposed as None.
        user = auth.ExportedUserRecord({'localId': 'user', 'passwordHash': base64.b64encode(b'REDACTED')})
        assert (user.password_hash is None)
    def test_custom_claims(self):
        user = auth.UserRecord({'localId': 'user', 'customAttributes': '{"admin": true, "package": "gold"}'})
        assert (user.custom_claims == {'admin': True, 'package': 'gold'})
    def test_no_custom_claims(self):
        user = auth.UserRecord({'localId': 'user'})
        assert (user.custom_claims is None)
    def test_empty_custom_claims(self):
        user = auth.UserRecord({'localId': 'user', 'customAttributes': '{}'})
        assert (user.custom_claims is None)
    .parametrize('data', (INVALID_DICTS + [{}, {'foo': 'bar'}]))
    def test_invalid_provider(self, data):
        with pytest.raises(ValueError):
            _user_mgt.ProviderUserInfo(data)
    def test_tokens_valid_after_time(self):
        # validSince is in seconds; the property is exposed in milliseconds.
        user = auth.UserRecord({'localId': 'user', 'validSince': 100})
        assert (user.tokens_valid_after_timestamp == 100000)
    def test_no_tokens_valid_after_time(self):
        user = auth.UserRecord({'localId': 'user'})
        assert (user.tokens_valid_after_timestamp == 0)
    def test_tenant_id(self):
        user = auth.UserRecord({'localId': 'user', 'tenantId': 'test-tenant'})
        assert (user.tenant_id == 'test-tenant') |
class UrlSdkLoader(BaseSdkLoader):
    """SDK loader that fetches the SDK archive from a direct URL.

    NOTE(review): metadata_to_init_kwargs, args_namespace_to_metadata and
    add_args_to_mode_group take `cls` — upstream they are probably
    @classmethod; decorators appear stripped here.
    """
    LOADER_MODE_KEY = 'url'
    def __init__(self, download_dir: str, url: str):
        super().__init__(download_dir)
        self.url = url
    def get_sdk_component(self, target: str) -> str:
        """Download and return the local path of the SDK file (*target* unused)."""
        log.info(f'Fetching SDK from {self.url}')
        return self._fetch_file(self.url)
    def get_metadata(self) -> Dict[(str, str)]:
        """Describe this loader for persistence; version is unknown for raw URLs."""
        return {'mode': self.LOADER_MODE_KEY, 'url': self.url, 'version': self.VERSION_UNKNOWN}
    def metadata_to_init_kwargs(cls, metadata: dict) -> Dict[(str, str)]:
        """Map stored metadata back to __init__ keyword arguments."""
        return {'url': metadata['url']}
    def args_namespace_to_metadata(cls, args: argparse.Namespace) -> Dict[(str, str)]:
        """Build metadata from CLI args; a raw URL requires an explicit HW target."""
        if (args.url and (not args.hw_target)):
            raise ValueError('HW target must be specified when using direct SDK URL')
        return {'url': args.url}
    def add_args_to_mode_group(cls, mode_group):
        """Register this loader's CLI options on the mode argument group."""
        mode_group.add_argument('--url', '-u', type=str, help='Direct URL to load SDK from') |
class MultiResolutionDiscriminator(torch.nn.Module):
    """Wraps one DiscriminatorR per STFT resolution and aggregates outputs."""

    def __init__(self, resolutions: list[tuple[int]]):
        super(MultiResolutionDiscriminator, self).__init__()
        # One sub-discriminator per (n_fft, hop_length, win_length) triple.
        discriminators = [
            DiscriminatorR(n_fft=n_fft, hop_length=hop_length, win_length=win_length)
            for n_fft, hop_length, win_length in resolutions
        ]
        self.discriminators = nn.ModuleList(discriminators)

    def forward(self, x: torch.Tensor) -> tuple[(list[torch.Tensor], list[list[torch.Tensor]])]:
        """Return per-resolution discriminator outputs and feature maps."""
        outputs = []
        feature_maps = []
        for discriminator in self.discriminators:
            out, fmap = discriminator(x)
            outputs.append(out)
            feature_maps.append(fmap)
        return (outputs, feature_maps)
def test_errors():
    """Each API error class should carry the expected HTTP status code."""
    cases = [
        (ForbiddenError({'source': ''}, 'Super admin access is required'), 403),
        (NotFoundError({'source': ''}, 'Object not found.'), 404),
        (ServerError({'source': ''}, 'Internal Server Error'), 500),
        (UnprocessableEntityError({'source': ''}, 'Entity cannot be processed'), 422),
        (BadRequestError({'source': ''}, 'Request cannot be served'), 400),
    ]
    for error, expected_status in cases:
        assert error.status == expected_status
# NOTE(review): the bare '()' below looks like a stripped decorator
# (probably '@pytest.fixture()') — confirm against the original file.
()
def setup_to_pass():
    """Install audit rules via rules file and auditctl, yield, then clean up.

    NOTE(review): every rule contains '-F auid!=' with no value; the excluded
    auid (e.g. 'unset' / 4294967295) appears lost in extraction — confirm.
    """
    file_rules = ['-a always,exit -F arch=b64 -S chmod -S fchmod -S fchmodat -F auid>=1000 -F auid!= -k perm_mod', '-a always,exit -F arch=b32 -S chmod -S fchmod -S fchmodat -F auid>=1000 -F auid!= -k perm_mod', '-a always,exit -F arch=b64 -S chown -S fchown -S fchownat -S lchown -F auid>=1000 -F auid!= -k perm_mod', '-a always,exit -F arch=b32 -S chown -S fchown -S fchownat -S lchown -F auid>=1000 -F auid!= -k perm_mod', '-a always,exit -F arch=b64 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F auid>=1000 -F auid!= -k perm_mod', '-a always,exit -F arch=b32 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F auid>=1000 -F auid!= -k perm_mod']
    auditctl_rules = ["-a always,exit -F arch=b64 -S chmod -S fchmod -S fchmodat -F 'auid>=1000' -F 'auid!=' -k perm_mod", "-a always,exit -F arch=b32 -S chmod -S fchmod -S fchmodat -F 'auid>=1000' -F 'auid!=' -k perm_mod", "-a always,exit -F arch=b64 -S chown -S fchown -S fchownat -S lchown -F 'auid>=1000' -F 'auid!=' -k perm_mod", "-a always,exit -F arch=b32 -S chown -S fchown -S fchownat -S lchown -F 'auid>=1000' -F 'auid!=' -k perm_mod", "-a always,exit -F arch=b64 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F 'auid>=1000' -F 'auid!=' -k perm_mod", "-a always,exit -F arch=b32 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F 'auid>=1000' -F 'auid!=' -k perm_mod"]
    for rule in file_rules:
        shellexec(f'echo "{rule}" >> /etc/audit/rules.d/pytest.rules')
    for rule in auditctl_rules:
        shellexec(f'auditctl {rule}')
    # Hand control to the test; teardown runs after it finishes.
    (yield None)
    print(shellexec('cat /etc/audit/rules.d/pytest.rules'))
    print(shellexec('auditctl -l'))
    os.remove('/etc/audit/rules.d/pytest.rules')
    shellexec('auditctl -D') |
def fastq_parser(infile):
    """Yield Fastq(name, seq, name2, qual) records from a FASTQ file.

    Records are read four lines at a time; iteration stops at the first
    empty header line (end of file).
    """
    with myopen(infile) as handle:
        while True:
            header = handle.readline().strip()
            if not header:
                # Empty header line marks end of input.
                break
            sequence = handle.readline().strip()
            plus_line = handle.readline().strip()
            quality = handle.readline().strip()
            yield Fastq(header, sequence, plus_line, quality)
class ColumnRegExpMetric(Metric[DataIntegrityValueByRegexpMetricResult]):
    """Metric: count how many values of a column match a regular expression.

    Reports matched/not-matched counts plus the `top` most frequent values
    of each kind, for the current and (when present) reference datasets.
    """
    column_name: str  # column to evaluate
    reg_exp: str  # regular expression, applied with re.match (anchored at start)
    top: int  # number of most-frequent values kept per frequency table
    _reg_exp_compiled: Pattern  # compiled once in __init__ for reuse
    def __init__(self, column_name: str, reg_exp: str, top: int=10, options: AnyOptions=None):
        self.top = top
        self.reg_exp = reg_exp
        self.column_name = column_name
        self._reg_exp_compiled = re.compile(reg_exp)
        super().__init__(options=options)
    def _calculate_stats_by_regexp(self, column: pd.Series) -> DataIntegrityValueByRegexpStat:
        """Count matches/non-matches in *column*; NaN values are skipped."""
        number_of_matched = 0
        number_of_na = 0
        number_of_not_matched = 0
        table_of_matched: Dict[(str, int)] = collections.defaultdict(int)
        table_of_not_matched: Dict[(str, int)] = collections.defaultdict(int)
        for item in column:
            if pd.isna(item):
                number_of_na += 1
                continue
            # Values are compared as strings regardless of the column dtype.
            item = str(item)
            if bool(self._reg_exp_compiled.match(str(item))):
                number_of_matched += 1
                table_of_matched[item] += 1
            else:
                number_of_not_matched += 1
                table_of_not_matched[item] += 1
        # Keep only the `top` most frequent values in each frequency table.
        matched = sorted(table_of_matched.items(), key=(lambda x: x[1]), reverse=True)
        table_of_matched = {k: v for (k, v) in matched[:self.top]}
        not_matched = sorted(table_of_not_matched.items(), key=(lambda x: x[1]), reverse=True)
        table_of_not_matched = {k: v for (k, v) in not_matched[:self.top]}
        return DataIntegrityValueByRegexpStat(number_of_matched=number_of_matched, number_of_not_matched=number_of_not_matched, number_of_rows=get_rows_count(column), table_of_matched=table_of_matched, table_of_not_matched=table_of_not_matched)
    def calculate(self, data: InputData) -> DataIntegrityValueByRegexpMetricResult:
        """Validate parameters and compute regexp stats for current/reference data.

        Raises ValueError on invalid parameters or a missing column.
        """
        if (self.top < 1):
            raise ValueError('Parameter top must be >= 1')
        if (not self.reg_exp):
            raise ValueError('Parameter reg_exp must be not empty for ColumnRegExpMetric')
        if (self.column_name not in data.current_data):
            raise ValueError(f'Column {self.column_name} not found in current dataset.')
        current = self._calculate_stats_by_regexp(data.current_data[self.column_name])
        reference = None
        if (data.reference_data is not None):
            if (self.column_name not in data.reference_data):
                raise ValueError(f'Column {self.column_name} was not found in reference dataset.')
            reference = self._calculate_stats_by_regexp(data.reference_data[self.column_name])
        return DataIntegrityValueByRegexpMetricResult(column_name=self.column_name, reg_exp=self.reg_exp, top=self.top, current=current, reference=reference) |
# NOTE(review): '.skipif' below is a stripped '@pytest.mark.skipif'
# decorator — confirm against the original file.
.skipif((not utils.complex_mode), reason='Test specific to complex mode')
def test_assign_complex_value(cg1):
    """Function.assign should accept complex scalars and scaled expressions."""
    f = Function(cg1)
    g = Function(cg1)
    f.assign((1 + 1j))
    assert np.allclose(f.dat.data_ro, (1 + 1j))
    f.assign(1j)
    assert np.allclose(f.dat.data_ro, 1j)
    g.assign(2.0)
    # Assigning an expression: f = (1+1j) * g elementwise.
    f.assign(((1 + 1j) * g))
    assert np.allclose(f.dat.data_ro, (2 + 2j)) |
# NOTE(review): the bare string-call line below is the argument of a stripped
# registry decorator (e.g. '@registry.reg(...)') — confirm against upstream.
('cuda.perm102_bmm_rcr.gen_function')
def gen_function(func_attrs, exec_cond_template, dim_info_dict):
    """Render the CUDA source for the perm102_bmm_rcr op via bmm_common."""
    bmm_problem_info = _get_strided_problem_info(func_attrs)
    problem_args = bmm_common.PROBLEM_ARGS_TEMPLATE.render(mm_info=bmm_problem_info)
    # CUTLASS 3.x needs element types attached to the problem description.
    problem_args_cutlass_3x = bmm_common.PROBLEM_ARGS_TEMPLATE_CUTLASS_3X.render(mm_info=bmm_common.add_elem_types_to_mm_info(mm_info=bmm_problem_info, func_attrs=func_attrs))
    return bmm_common.gen_function(func_attrs=func_attrs, exec_cond_template=exec_cond_template, problem_args=problem_args, problem_args_cutlass_3x=problem_args_cutlass_3x, dim_info_dict=dim_info_dict, input_addr_calculator='', output_addr_calculator=get_output_addr_calculator(func_attrs)) |
class ELFStructs(object):
    """Container of Construct structs for parsing an ELF file.

    Build in two phases: `create_basic_structs()` (endianness/class-dependent
    primitives and the ELF header), then `create_advanced_structs()` once the
    header has been read, so machine/OSABI-specific enum tables can be chosen.

    Attributes set here (Elf_Ehdr, Elf_Phdr, Elf_Shdr, Elf_Sym, ...) are the
    public interface used by the rest of the library.
    """

    def __init__(self, little_endian=True, elfclass=32):
        # Only ELFCLASS32 / ELFCLASS64 exist.
        assert ((elfclass == 32) or (elfclass == 64))
        self.little_endian = little_endian
        self.elfclass = elfclass
        # Filled in by create_advanced_structs() after the header is parsed.
        self.e_type = None
        self.e_machine = None
        self.e_ident_osabi = None

    def __getstate__(self):
        # Pickle only the configuration; the Construct objects are rebuilt.
        return (self.little_endian, self.elfclass, self.e_type, self.e_machine, self.e_ident_osabi)

    def __setstate__(self, state):
        (self.little_endian, self.elfclass, e_type, e_machine, e_osabi) = state
        # Recreate all structs from the restored configuration.
        self.create_basic_structs()
        self.create_advanced_structs(e_type, e_machine, e_osabi)

    def create_basic_structs(self):
        """Create the basic primitive structs (endianness/word-size aware)."""
        if self.little_endian:
            self.Elf_byte = ULInt8
            self.Elf_half = ULInt16
            self.Elf_word = ULInt32
            self.Elf_word64 = ULInt64
            self.Elf_addr = (ULInt32 if (self.elfclass == 32) else ULInt64)
            self.Elf_offset = self.Elf_addr
            self.Elf_sword = SLInt32
            self.Elf_xword = (ULInt32 if (self.elfclass == 32) else ULInt64)
            self.Elf_sxword = (SLInt32 if (self.elfclass == 32) else SLInt64)
        else:
            self.Elf_byte = UBInt8
            self.Elf_half = UBInt16
            self.Elf_word = UBInt32
            self.Elf_word64 = UBInt64
            self.Elf_addr = (UBInt32 if (self.elfclass == 32) else UBInt64)
            self.Elf_offset = self.Elf_addr
            self.Elf_sword = SBInt32
            self.Elf_xword = (UBInt32 if (self.elfclass == 32) else UBInt64)
            self.Elf_sxword = (SBInt32 if (self.elfclass == 32) else SBInt64)
        self._create_ehdr()
        self._create_leb128()
        self._create_ntbs()

    def create_advanced_structs(self, e_type=None, e_machine=None, e_ident_osabi=None):
        """Create all the non-header structs; depends on machine/type/OSABI."""
        self.e_type = e_type
        self.e_machine = e_machine
        self.e_ident_osabi = e_ident_osabi
        self._create_phdr()
        self._create_shdr()
        self._create_chdr()
        self._create_sym()
        self._create_rel()
        self._create_dyn()
        self._create_sunw_syminfo()
        self._create_gnu_verneed()
        self._create_gnu_verdef()
        self._create_gnu_versym()
        self._create_gnu_abi()
        self._create_gnu_property()
        self._create_note(e_type)
        self._create_stabs()
        self._create_attributes_subsection()
        self._create_arm_attributes()
        self._create_riscv_attributes()
        self._create_elf_hash()
        self._create_gnu_hash()

    def _create_ehdr(self):
        # ELF file header; e_ident is machine-independent so it can be built
        # before the machine is known.
        self.Elf_Ehdr = Struct('Elf_Ehdr', Struct('e_ident', Array(4, self.Elf_byte('EI_MAG')), Enum(self.Elf_byte('EI_CLASS'), **ENUM_EI_CLASS), Enum(self.Elf_byte('EI_DATA'), **ENUM_EI_DATA), Enum(self.Elf_byte('EI_VERSION'), **ENUM_E_VERSION), Enum(self.Elf_byte('EI_OSABI'), **ENUM_EI_OSABI), self.Elf_byte('EI_ABIVERSION'), Padding(7)), Enum(self.Elf_half('e_type'), **ENUM_E_TYPE), Enum(self.Elf_half('e_machine'), **ENUM_E_MACHINE), Enum(self.Elf_word('e_version'), **ENUM_E_VERSION), self.Elf_addr('e_entry'), self.Elf_offset('e_phoff'), self.Elf_offset('e_shoff'), self.Elf_word('e_flags'), self.Elf_half('e_ehsize'), self.Elf_half('e_phentsize'), self.Elf_half('e_phnum'), self.Elf_half('e_shentsize'), self.Elf_half('e_shnum'), self.Elf_half('e_shstrndx'))

    def _create_leb128(self):
        self.Elf_uleb128 = ULEB128

    def _create_ntbs(self):
        # Null-terminated byte string.
        self.Elf_ntbs = CString

    def _create_phdr(self):
        # Program header; p_type enum has machine-specific extensions.
        p_type_dict = ENUM_P_TYPE_BASE
        if (self.e_machine == 'EM_ARM'):
            p_type_dict = ENUM_P_TYPE_ARM
        elif (self.e_machine == 'EM_AARCH64'):
            p_type_dict = ENUM_P_TYPE_AARCH64
        elif (self.e_machine == 'EM_MIPS'):
            p_type_dict = ENUM_P_TYPE_MIPS
        elif (self.e_machine == 'EM_RISCV'):
            p_type_dict = ENUM_P_TYPE_RISCV
        # Field order differs between ELF32 and ELF64 (p_flags moves).
        if (self.elfclass == 32):
            self.Elf_Phdr = Struct('Elf_Phdr', Enum(self.Elf_word('p_type'), **p_type_dict), self.Elf_offset('p_offset'), self.Elf_addr('p_vaddr'), self.Elf_addr('p_paddr'), self.Elf_word('p_filesz'), self.Elf_word('p_memsz'), self.Elf_word('p_flags'), self.Elf_word('p_align'))
        else:
            self.Elf_Phdr = Struct('Elf_Phdr', Enum(self.Elf_word('p_type'), **p_type_dict), self.Elf_word('p_flags'), self.Elf_offset('p_offset'), self.Elf_addr('p_vaddr'), self.Elf_addr('p_paddr'), self.Elf_xword('p_filesz'), self.Elf_xword('p_memsz'), self.Elf_xword('p_align'))

    def _create_shdr(self):
        # Section header; sh_type enum has machine-specific extensions.
        sh_type_dict = ENUM_SH_TYPE_BASE
        if (self.e_machine == 'EM_ARM'):
            sh_type_dict = ENUM_SH_TYPE_ARM
        elif (self.e_machine == 'EM_X86_64'):
            sh_type_dict = ENUM_SH_TYPE_AMD64
        elif (self.e_machine == 'EM_MIPS'):
            sh_type_dict = ENUM_SH_TYPE_MIPS
        elif (self.e_machine == 'EM_RISCV'):
            # Was a bare `if`; made `elif` for consistency with the chain
            # (behavior unchanged: e_machine matches at most one branch).
            sh_type_dict = ENUM_SH_TYPE_RISCV
        self.Elf_Shdr = Struct('Elf_Shdr', self.Elf_word('sh_name'), Enum(self.Elf_word('sh_type'), **sh_type_dict), self.Elf_xword('sh_flags'), self.Elf_addr('sh_addr'), self.Elf_offset('sh_offset'), self.Elf_xword('sh_size'), self.Elf_word('sh_link'), self.Elf_word('sh_info'), self.Elf_xword('sh_addralign'), self.Elf_xword('sh_entsize'))

    def _create_chdr(self):
        # Compressed-section header; ELF64 inserts a reserved word.
        fields = [Enum(self.Elf_word('ch_type'), **ENUM_ELFCOMPRESS_TYPE), self.Elf_xword('ch_size'), self.Elf_xword('ch_addralign')]
        if (self.elfclass == 64):
            fields.insert(1, self.Elf_word('ch_reserved'))
        self.Elf_Chdr = Struct('Elf_Chdr', *fields)

    def _create_rel(self):
        # r_info packs symbol index and relocation type; the split differs
        # between ELF32 (sym = info >> 8, type = info & 0xFF) and ELF64
        # (sym = info >> 32, type = info & 0xFFFFFFFF), and MIPS64 uses its
        # own 5-field layout.
        # FIX: restored the masks (0xFFFFFF / 0xFFFFFFFF) that were missing
        # after the '&' operators, which made this a syntax error.
        if (self.elfclass == 32):
            fields = [self.Elf_xword('r_info'), Value('r_info_sym', (lambda ctx: ((ctx['r_info'] >> 8) & 0xFFFFFF))), Value('r_info_type', (lambda ctx: (ctx['r_info'] & 255)))]
        elif (self.e_machine == 'EM_MIPS'):
            fields = [self.Elf_word('r_sym'), self.Elf_byte('r_ssym'), self.Elf_byte('r_type3'), self.Elf_byte('r_type2'), self.Elf_byte('r_type'), Value('r_info_sym', (lambda ctx: ctx['r_sym'])), Value('r_info_ssym', (lambda ctx: ctx['r_ssym'])), Value('r_info_type', (lambda ctx: ctx['r_type'])), Value('r_info_type2', (lambda ctx: ctx['r_type2'])), Value('r_info_type3', (lambda ctx: ctx['r_type3'])), Value('r_info', (lambda ctx: (((((ctx['r_sym'] << 32) | (ctx['r_ssym'] << 24)) | (ctx['r_type3'] << 16)) | (ctx['r_type2'] << 8)) | ctx['r_type'])))]
        else:
            fields = [self.Elf_xword('r_info'), Value('r_info_sym', (lambda ctx: ((ctx['r_info'] >> 32) & 0xFFFFFFFF))), Value('r_info_type', (lambda ctx: (ctx['r_info'] & 0xFFFFFFFF)))]
        self.Elf_Rel = Struct('Elf_Rel', self.Elf_addr('r_offset'), *fields)
        # Rela is Rel plus an explicit addend.
        fields_and_addend = (fields + [self.Elf_sxword('r_addend')])
        self.Elf_Rela = Struct('Elf_Rela', self.Elf_addr('r_offset'), *fields_and_addend)
        self.Elf_Relr = Struct('Elf_Relr', self.Elf_addr('r_offset'))

    def _create_dyn(self):
        # Dynamic-section entry; d_tag enum extended per machine or Solaris OSABI.
        d_tag_dict = dict(ENUM_D_TAG_COMMON)
        if (self.e_machine in ENUMMAP_EXTRA_D_TAG_MACHINE):
            d_tag_dict.update(ENUMMAP_EXTRA_D_TAG_MACHINE[self.e_machine])
        elif (self.e_ident_osabi == 'ELFOSABI_SOLARIS'):
            d_tag_dict.update(ENUM_D_TAG_SOLARIS)
        # d_ptr is an alias for d_val (they share storage in the ELF union).
        self.Elf_Dyn = Struct('Elf_Dyn', Enum(self.Elf_sxword('d_tag'), **d_tag_dict), self.Elf_xword('d_val'), Value('d_ptr', (lambda ctx: ctx['d_val'])))

    def _create_sym(self):
        # st_info packs binding (high nibble) and type (low nibble).
        st_info_struct = BitStruct('st_info', Enum(BitField('bind', 4), **ENUM_ST_INFO_BIND), Enum(BitField('type', 4), **ENUM_ST_INFO_TYPE))
        st_other_struct = BitStruct('st_other', Enum(BitField('local', 3), **ENUM_ST_LOCAL), Padding(2), Enum(BitField('visibility', 3), **ENUM_ST_VISIBILITY))
        # Field order differs between ELF32 and ELF64.
        if (self.elfclass == 32):
            self.Elf_Sym = Struct('Elf_Sym', self.Elf_word('st_name'), self.Elf_addr('st_value'), self.Elf_word('st_size'), st_info_struct, st_other_struct, Enum(self.Elf_half('st_shndx'), **ENUM_ST_SHNDX))
        else:
            self.Elf_Sym = Struct('Elf_Sym', self.Elf_word('st_name'), st_info_struct, st_other_struct, Enum(self.Elf_half('st_shndx'), **ENUM_ST_SHNDX), self.Elf_addr('st_value'), self.Elf_xword('st_size'))

    def _create_sunw_syminfo(self):
        self.Elf_Sunw_Syminfo = Struct('Elf_Sunw_Syminfo', Enum(self.Elf_half('si_boundto'), **ENUM_SUNW_SYMINFO_BOUNDTO), self.Elf_half('si_flags'))

    def _create_gnu_verneed(self):
        # GNU version-needs (.gnu.version_r) entries and their aux records.
        self.Elf_Verneed = Struct('Elf_Verneed', self.Elf_half('vn_version'), self.Elf_half('vn_cnt'), self.Elf_word('vn_file'), self.Elf_word('vn_aux'), self.Elf_word('vn_next'))
        self.Elf_Vernaux = Struct('Elf_Vernaux', self.Elf_word('vna_hash'), self.Elf_half('vna_flags'), self.Elf_half('vna_other'), self.Elf_word('vna_name'), self.Elf_word('vna_next'))

    def _create_gnu_verdef(self):
        # GNU version-definition (.gnu.version_d) entries and their aux records.
        self.Elf_Verdef = Struct('Elf_Verdef', self.Elf_half('vd_version'), self.Elf_half('vd_flags'), self.Elf_half('vd_ndx'), self.Elf_half('vd_cnt'), self.Elf_word('vd_hash'), self.Elf_word('vd_aux'), self.Elf_word('vd_next'))
        self.Elf_Verdaux = Struct('Elf_Verdaux', self.Elf_word('vda_name'), self.Elf_word('vda_next'))

    def _create_gnu_versym(self):
        self.Elf_Versym = Struct('Elf_Versym', Enum(self.Elf_half('ndx'), **ENUM_VERSYM))

    def _create_gnu_abi(self):
        # NT_GNU_ABI_TAG note payload: OS id + three version components.
        self.Elf_abi = Struct('Elf_abi', Enum(self.Elf_word('abi_os'), **ENUM_NOTE_ABI_TAG_OS), self.Elf_word('abi_major'), self.Elf_word('abi_minor'), self.Elf_word('abi_tiny'))

    def _create_gnu_debugaltlink(self):
        # .gnu_debugaltlink: filename + 20-byte checksum.
        self.Elf_debugaltlink = Struct('Elf_debugaltlink', CString('sup_filename'), String('sup_checksum', length=20))

    def _create_gnu_property(self):
        # NT_GNU_PROPERTY_TYPE_0 property entries; pr_data is padded to the
        # class's natural alignment (4 bytes for ELF32, 8 for ELF64).
        def roundup_padding(ctx):
            if (self.elfclass == 32):
                return (roundup(ctx.pr_datasz, 2) - ctx.pr_datasz)
            return (roundup(ctx.pr_datasz, 3) - ctx.pr_datasz)

        def classify_pr_data(ctx):
            # Map the parsed pr_type to a Switch key; whole families of x86
            # and AArch64 properties share one 4-byte-word layout.
            if (type(ctx.pr_type) is not str):
                return None
            if ctx.pr_type.startswith('GNU_PROPERTY_X86_'):
                return ('GNU_PROPERTY_X86_*', 4, 0)
            elif ctx.pr_type.startswith('GNU_PROPERTY_AARCH64_'):
                return ('GNU_PROPERTY_AARCH64_*', 4, 0)
            return (ctx.pr_type, ctx.pr_datasz, self.elfclass)
        self.Elf_Prop = Struct('Elf_Prop', Enum(self.Elf_word('pr_type'), **ENUM_NOTE_GNU_PROPERTY_TYPE), self.Elf_word('pr_datasz'), Switch('pr_data', classify_pr_data, {('GNU_PROPERTY_STACK_SIZE', 4, 32): self.Elf_word('pr_data'), ('GNU_PROPERTY_STACK_SIZE', 8, 64): self.Elf_word64('pr_data'), ('GNU_PROPERTY_X86_*', 4, 0): self.Elf_word('pr_data'), ('GNU_PROPERTY_AARCH64_*', 4, 0): self.Elf_word('pr_data')}, default=Field('pr_data', (lambda ctx: ctx.pr_datasz))), Padding(roundup_padding))

    def _create_note(self, e_type=None):
        # Note structs; core files (ET_CORE) use a different n_type enum, and a
        # few 32-bit machines store uid/gid as 16-bit values.
        self.Elf_ugid = (self.Elf_half if ((self.elfclass == 32) and (self.e_machine in {'EM_MN10300', 'EM_ARM', 'EM_CRIS', 'EM_CYGNUS_FRV', 'EM_386', 'EM_M32R', 'EM_68K', 'EM_S390', 'EM_SH', 'EM_SPARC'})) else self.Elf_word)
        self.Elf_Nhdr = Struct('Elf_Nhdr', self.Elf_word('n_namesz'), self.Elf_word('n_descsz'), Enum(self.Elf_word('n_type'), **(ENUM_NOTE_N_TYPE if (e_type != 'ET_CORE') else ENUM_CORE_NOTE_N_TYPE)))
        if (self.elfclass == 32):
            self.Elf_Prpsinfo = Struct('Elf_Prpsinfo', self.Elf_byte('pr_state'), String('pr_sname', 1), self.Elf_byte('pr_zomb'), self.Elf_byte('pr_nice'), self.Elf_xword('pr_flag'), self.Elf_ugid('pr_uid'), self.Elf_ugid('pr_gid'), self.Elf_word('pr_pid'), self.Elf_word('pr_ppid'), self.Elf_word('pr_pgrp'), self.Elf_word('pr_sid'), String('pr_fname', 16), String('pr_psargs', 80))
        else:
            self.Elf_Prpsinfo = Struct('Elf_Prpsinfo', self.Elf_byte('pr_state'), String('pr_sname', 1), self.Elf_byte('pr_zomb'), self.Elf_byte('pr_nice'), Padding(4), self.Elf_xword('pr_flag'), self.Elf_ugid('pr_uid'), self.Elf_ugid('pr_gid'), self.Elf_word('pr_pid'), self.Elf_word('pr_ppid'), self.Elf_word('pr_pgrp'), self.Elf_word('pr_sid'), String('pr_fname', 16), String('pr_psargs', 80))
        self.Elf_Nt_File = Struct('Elf_Nt_File', self.Elf_xword('num_map_entries'), self.Elf_xword('page_size'), Array((lambda ctx: ctx.num_map_entries), Struct('Elf_Nt_File_Entry', self.Elf_addr('vm_start'), self.Elf_addr('vm_end'), self.Elf_offset('page_offset'))), Array((lambda ctx: ctx.num_map_entries), CString('filename')))

    def _create_stabs(self):
        self.Elf_Stabs = Struct('Elf_Stabs', self.Elf_word('n_strx'), self.Elf_byte('n_type'), self.Elf_byte('n_other'), self.Elf_half('n_desc'), self.Elf_word('n_value'))

    def _create_attributes_subsection(self):
        self.Elf_Attr_Subsection_Header = Struct('Elf_Attr_Subsection', self.Elf_word('length'), self.Elf_ntbs('vendor_name', encoding='utf-8'))

    def _create_arm_attributes(self):
        self.Elf_Arm_Attribute_Tag = Struct('Elf_Arm_Attribute_Tag', Enum(self.Elf_uleb128('tag'), **ENUM_ATTR_TAG_ARM))

    def _create_riscv_attributes(self):
        self.Elf_RiscV_Attribute_Tag = Struct('Elf_RiscV_Attribute_Tag', Enum(self.Elf_uleb128('tag'), **ENUM_ATTR_TAG_RISCV))

    def _create_elf_hash(self):
        # Classic SysV .hash section: bucket and chain arrays.
        self.Elf_Hash = Struct('Elf_Hash', self.Elf_word('nbuckets'), self.Elf_word('nchains'), Array((lambda ctx: ctx['nbuckets']), self.Elf_word('buckets')), Array((lambda ctx: ctx['nchains']), self.Elf_word('chains')))

    def _create_gnu_hash(self):
        # .gnu.hash section header + bloom filter + buckets (chains follow,
        # but their length is not encoded in the header so they are read
        # separately by the caller).
        self.Gnu_Hash = Struct('Gnu_Hash', self.Elf_word('nbuckets'), self.Elf_word('symoffset'), self.Elf_word('bloom_size'), self.Elf_word('bloom_shift'), Array((lambda ctx: ctx['bloom_size']), self.Elf_xword('bloom')), Array((lambda ctx: ctx['nbuckets']), self.Elf_word('buckets')))
def _create_sales_invoices(unicommerce_order, sales_order, client: UnicommerceAPIClient):
    """Create a Sales Invoice for each shipping package of a Unicommerce order.

    For every shipping package, fetch its invoice data from Unicommerce and
    create the corresponding Sales Invoice, skipping packages whose invoice
    already exists. Success/failure of each package is recorded via
    Unicommerce logs; a failure rolls back and continues with the next package.
    """
    from ecommerce_integrations.unicommerce.invoice import create_sales_invoice
    facility_code = sales_order.get(FACILITY_CODE_FIELD)
    shipping_packages = unicommerce_order['shippingPackages']
    for package in shipping_packages:
        # FIX: reset per package so the except-handler never references a
        # stale value from a previous iteration (or an unbound name on the
        # first iteration) when get_sales_invoice itself raises.
        invoice_data = None
        try:
            invoice_data = client.get_sales_invoice(shipping_package_code=package['code'], facility_code=facility_code)
            # Skip packages whose invoice was already synced.
            existing_si = frappe.db.get_value('Sales Invoice', {INVOICE_CODE_FIELD: invoice_data['invoice']['code']})
            if existing_si:
                continue
            log = create_unicommerce_log(method='create_sales_invoice', make_new=True)
            frappe.flags.request_id = log.name
            warehouse_allocations = _get_warehouse_allocations(sales_order)
            create_sales_invoice(invoice_data['invoice'], sales_order.name, update_stock=1, so_data=unicommerce_order, warehouse_allocations=warehouse_allocations)
        except Exception as e:
            # Best-effort per package: log the failure, roll back, move on.
            create_unicommerce_log(status='Error', exception=e, rollback=True, request_data=invoice_data)
            frappe.flags.request_id = None
        else:
            create_unicommerce_log(status='Success', request_data=invoice_data)
            frappe.flags.request_id = None
def organize_response_per_pages(original_response: Dict[(str, Any)]) -> List[Dict[(str, Any)]]:
    """Reorganize a document-AI response into one dict per page.

    Each page dict carries the page's entities (from the sibling
    ``extract_entities_for_page`` helper) plus its tables under ``'items'``.
    Pages are numbered from 1 up to the page count reported in the response
    metadata.
    """
    total_pages = original_response['metadata']['documents'][0]['pages']
    organized_pages = []
    for page_number in range(1, total_pages + 1):
        # 'items' is seeded first so the key order matches prior behavior;
        # it is then replaced with this page's tables.
        page_data = {'items': []}
        page_data.update(extract_entities_for_page(original_response['entities'], page_number))
        page_data['items'] = extract_tables_for_page(original_response['tables'], page_number)
        organized_pages.append(page_data)
    return organized_pages
def compress_bytes(data, offset, dictionary):
    """Compress `data` (from `offset`) chunk-by-chunk against a dictionary.

    Each 32-byte chunk is emitted either as a 1-byte dictionary index, as a
    length-prefixed zero-stripped form (prefix 224-len for leading zeros,
    256-len for trailing zeros), or verbatim. A bitfield records which chunks
    were compressed. Output layout:
        2-byte little-endian total length
        | 1-byte bitfield size | bitfield (little-endian) | chunk payloads.
    """
    chunks = split32(data, offset)
    compressed_mask = 0
    pieces = []
    for index, chunk in enumerate(chunks):
        if chunk in dictionary:
            # Replace a known chunk with its 1-byte dictionary index.
            pieces.append(bytes([dictionary.index(chunk)]))
            compressed_mask |= 1 << index
        elif len(chunk) == 32 and chunk[0] == 0:
            # Leading zeros: prefix encodes how many were stripped.
            trimmed = chunk.lstrip(b'\x00')
            assert trimmed != b''
            pieces.append(bytes([224 - len(trimmed)]) + trimmed)
            compressed_mask |= 1 << index
        elif len(chunk) == 32 and chunk[-1] == 0:
            # Trailing zeros: different prefix range distinguishes the case.
            trimmed = chunk.rstrip(b'\x00')
            assert trimmed != b''
            pieces.append(bytes([256 - len(trimmed)]) + trimmed)
            compressed_mask |= 1 << index
        else:
            # Incompressible: emit verbatim, bit stays 0.
            pieces.append(chunk)
    bitfield_bytes = (len(chunks) + 7) // 8
    header = bytes([bitfield_bytes]) + compressed_mask.to_bytes(bitfield_bytes, 'little')
    payload = header + b''.join(pieces)
    return len(payload).to_bytes(2, 'little') + payload
class TestConvTransducer(unittest.TestCase):
    """Unit tests for the 1-D convolutional transducer helpers."""

    def test_kernel_graph(self):
        """Check make_kernel_graph against a hand-built reference graph."""

        def get_graph(l1, l2, add_skip=False):
            # Reference: two token segments (labels l1, l2) separated and
            # surrounded by blank (label 2) self-loops; optional skip arc
            # lets l2 follow l1 without an intervening blank.
            g = gtn.Graph()
            g.add_node(True)
            g.add_node(True)
            g.add_node()
            g.add_node(False, True)
            g.add_node(False, True)
            g.add_arc(0, 0, 2)
            g.add_arc(0, 1, l1)
            g.add_arc(1, 1, l1)
            g.add_arc(1, 2, 2)
            g.add_arc(2, 2, 2)
            g.add_arc(2, 3, l2)
            g.add_arc(3, 3, l2)
            g.add_arc(3, 4, 2)
            g.add_arc(4, 4, 2)
            if add_skip:
                g.add_arc(1, 3, l2)
            return g
        # FIX: the results of gtn.equal were previously discarded, so these
        # comparisons asserted nothing; wrap them in assertTrue.
        graph = transducer.make_kernel_graph([0, 0], 2, True)
        self.assertTrue(gtn.equal(graph, get_graph(0, 0, False)))
        graph = transducer.make_kernel_graph([0, 1], 2, False)
        self.assertTrue(gtn.equal(graph, get_graph(0, 1, False)))
        graph = transducer.make_kernel_graph([0, 1], 2, True)
        self.assertTrue(gtn.equal(graph, get_graph(0, 1, True)))

    def test_fwd(self):
        """Forward pass: empty input raises; output length follows the stride."""
        lexicon = [(0, 0), (0, 1), (1, 0), (1, 1)]
        blank_idx = 2
        kernel_size = 5
        stride = 3
        convTrans = transducer.ConvTransduce1D(lexicon, kernel_size, stride, blank_idx)
        B = 2
        C = 3
        # Zero-length time dimension must be rejected.
        inputs = torch.randn(B, 0, C)
        with self.assertRaises(ValueError):
            convTrans(inputs)
        # Any positive length should be accepted.
        for Tin in [1, 2, 3, 4]:
            inputs = torch.randn(B, Tin, C)
            convTrans(inputs)
        # Expected output lengths for stride 3.
        Tin = (1, 3, 4, 6, 7, 8)
        Tout = (1, 1, 2, 2, 3, 3)
        for (Ti, To) in zip(Tin, Tout):
            inputs = torch.randn(B, Ti, C)
            outputs = convTrans(inputs)
            self.assertEqual(outputs.shape, (B, To, len(lexicon)))

    def test_bwd(self):
        """Backward pass runs without error for a range of input lengths."""
        lexicon = [(0, 0), (0, 1), (1, 0), (1, 1)]
        blank_idx = 2
        kernel_size = 5
        stride = 3
        convTrans = transducer.ConvTransduce1D(lexicon, kernel_size, stride, blank_idx)
        B = 2
        C = 3
        Tin = (1, 3, 4, 6, 7, 8)
        Tout = (1, 1, 2, 2, 3, 3)
        for (Ti, To) in zip(Tin, Tout):
            inputs = torch.randn(B, Ti, C, requires_grad=True)
            outputs = convTrans(inputs)
            outputs.backward(torch.ones_like(outputs))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.