code stringlengths 281 23.7M |
|---|
((MAGICK_VERSION_NUMBER < 1802), reason='Convex Hull requires ImageMagick-7.0.10.')
def test_convex_hull(fx_asset):
    """The convex hull of a bordered JPEG asset is a non-empty point list."""
    image_path = str(fx_asset.joinpath('horizon_sunset_border2.jpg'))
    with Image(filename=image_path) as img:
        hull_points = img.convex_hull(background='black')
        assert len(hull_points) > 0
def fortios_firewall(data, fos, check_mode):
    """Dispatch the firewall/addrgrp task body and normalize the response.

    Returns `resp` directly in check mode, otherwise a
    ``(failed, changed, resp, {})`` tuple derived from the response status.
    """
    fos.do_member_operation('firewall', 'addrgrp')
    if data['firewall_addrgrp']:
        resp = firewall_addrgrp(data, fos, check_mode)
    else:
        fos._module.fail_json(msg=('missing task body: %s' % 'firewall_addrgrp'))
    if check_mode:
        return resp
    succeeded = is_successful_status(resp)
    # 'revision_changed' is authoritative when present; otherwise assume changed.
    changed = succeeded and (resp['revision_changed'] if 'revision_changed' in resp else True)
    return (not succeeded), changed, resp, {}
class ScrubSummary():
    """Summary of a text-scrubbing pass: the scrubbed text plus substitution counts.

    NOTE(review): annotated attributes with no __init__ — this was presumably a
    @dataclass whose decorator was stripped in this dump; confirm against the
    original module.
    """

    # The input text after all substitutions were applied.
    scrubbed_output: str
    # Total substitutions across every scrubbed name.
    total_substitutions: int
    # Per-name substitution counts.
    name_to_num_subs: Dict[(str, int)]

    def get_report(self) -> str:
        """Render a human-readable, line-per-name substitution report."""
        report = 'Scrub Summary\n'
        for (name, num_subs) in self.name_to_num_subs.items():
            report += f'''
* {name}: {num_subs} substitutions'''
        report += f'''
* Total substitutions: {self.total_substitutions}'''
        return report
def extractKapteynstarWordpressCom(item):
    """Build a release message for a kapteynstar.wordpress.com feed item.

    Returns None for previews / items without a volume or chapter, False when
    no known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    matched = next((entry for entry in tagmap if entry[0] in item['tags']), None)
    if matched is None:
        return False
    _, name, tl_type = matched
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
class Field():
    """A named node in a parsed-field tree.

    NOTE(review): the attribute annotations and the ``cls('', None)`` calls
    strongly suggest this was a @dataclass whose decorator was stripped in
    this dump — confirm before relying on the constructor. Annotations are
    quoted so the forward references to ``Field``/``FieldData`` are not
    evaluated eagerly at class-creation time.
    """

    # Field name ('' for the empty placeholder, '<failed>' for errors).
    name: str
    # Child fields, or a non-list payload (e.g. an error object or raw value).
    content: "list[Field]"
    typename: "Optional[str]" = None
    position: "Optional[FieldData]" = None
    parent: "Optional[Field]" = None

    @classmethod
    def empty(cls):
        """Placeholder field with no name and no content."""
        # Was defined without @classmethod but takes `cls` — restored.
        return cls('', None)

    @classmethod
    def error(cls, err):
        """Sentinel field representing a parse failure; `err` stored as content."""
        return cls('<failed>', err)

    def walk_back_to(self, name) -> "Field | None":
        """Find the last child named `name`, searching ancestors if absent here.

        Returns None when no match exists anywhere up the parent chain.
        """
        # Guard: content may be None (see empty()); original crashed on reversed(None).
        children = self.content or []
        found_field = next((field for field in reversed(children) if field.name == name), None)
        if found_field is None and self.parent:
            found_field = self.parent.walk_back_to(name)
        return found_field

    def __str__(self) -> str:
        return self._pretty_str(indent_level=0)

    def _pretty_str(self, indent_level=0):
        """Recursive indented rendering; list content prints one child per line."""
        indent = (indent_level * ' ')
        if isinstance(self.content, list):
            content = ''.join([('\n' + f._pretty_str((indent_level + 1))) for f in self.content])
            content = f'[{content}]'
        else:
            content = str(self.content)
        return f'{indent}Field(name={self.name}, type={self.typename}, position={self.position}, content={content})'
class PipList(object):
    """Lazily loads and caches pip / pseudo-pip lists per tile type."""

    def __init__(self):
        self.piplist = {}
        self.ppiplist = {}

    def get_pip_and_ppip_list_for_tile_type(self, tile_type):
        """Return ``(pips, ppips)`` for `tile_type`, loading from disk on first use.

        Each entry is ``(src, dst, is_directional)``; ppips are always
        recorded as directional.
        """
        if tile_type not in self.piplist:
            pips = self.piplist.setdefault(tile_type, [])
            path = os.path.join(os.getenv('FUZDIR'), '..', 'piplist', 'build',
                                'cmt_top_lower', tile_type.lower() + '.txt')
            with open(path, 'r') as handle:
                for line in handle:
                    pip, is_directional = line.strip().split(' ')
                    tile, dst, src = pip.split('.')
                    if tile == tile_type:
                        pips.append((src, dst, bool(int(is_directional))))
        if tile_type not in self.ppiplist:
            ppips = self.ppiplist.setdefault(tile_type, [])
            path = os.path.join(os.getenv('FUZDIR'), '..', '071-ppips', 'build',
                                'ppips_' + tile_type.lower() + '.db')
            with open(path, 'r') as handle:
                for line in handle:
                    pip_data, pip_type = line.strip().split()
                    # Only "always" pseudo-pips are of interest.
                    if pip_type != 'always':
                        continue
                    tile, dst, src = pip_data.strip().split('.')
                    if tile == tile_type:
                        ppips.append((src, dst, True))
        return (self.piplist[tile_type], self.ppiplist[tile_type])
class OptionPlotoptionsArcdiagramSonificationTracksMappingLowpassFrequency(Options):
    """Lowpass-frequency mapping options for arcdiagram sonification tracks.

    NOTE(review): the dump contained duplicate getter/setter method names with
    no decorators, so each setter silently shadowed its getter. The
    @property/@setter pairs are restored here — confirm against the generated
    original.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class RelationshipsForTlsActivation(ModelComposed):
    """Composed OpenAPI model for a TLS activation's relationships.

    NOTE(review): this is generator-produced client code whose decorators were
    mangled in this dump. The bare ``_property`` and
    ``_js_args_to_python_args`` statements below are almost certainly residue
    of stripped ``@cached_property``-style and argument-conversion decorators,
    and ``_from_openapi_data`` was presumably a ``@classmethod`` — confirm
    against the generated original before relying on this block.
    """

    allowed_values = {}
    validations = {}

    _property  # NOTE(review): stripped decorator residue (see class docstring)
    def additional_properties_type():
        # Types accepted for properties not declared in openapi_types.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property  # NOTE(review): stripped decorator residue
    def openapi_types():
        # Declared property name -> (type,) mapping; lazy to avoid import cycles.
        lazy_import()
        return {'tls_certificate': (RelationshipTlsCertificateTlsCertificate,), 'tls_configuration': (RelationshipTlsConfigurationTlsConfiguration,), 'tls_domain': (RelationshipTlsDomainTlsDomain,)}

    _property  # NOTE(review): stripped decorator residue
    def discriminator():
        return None

    attribute_map = {'tls_certificate': 'tls_certificate', 'tls_configuration': 'tls_configuration', 'tls_domain': 'tls_domain'}
    read_only_vars = {}

    _js_args_to_python_args  # NOTE(review): stripped decorator residue
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from raw API data, bypassing read-only restrictions."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track the composition chain to detect cycles in allOf/anyOf/oneOf.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            # Skip args the composed schemas rejected when configuration says to discard.
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self

    # Attributes set directly on self rather than routed through __setattr__.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])

    _js_args_to_python_args  # NOTE(review): stripped decorator residue
    def __init__(self, *args, **kwargs):
        """Instantiate from keyword arguments; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')

    _property  # NOTE(review): stripped decorator residue
    def _composed_schemas():
        # allOf composition over the three relationship models.
        lazy_import()
        return {'anyOf': [], 'allOf': [RelationshipTlsCertificate, RelationshipTlsConfiguration, RelationshipTlsDomain], 'oneOf': []}
def do_target(bait_arr, annotate=None, do_short_names=False, do_split=False, avg_size=(200 / 0.75)):
    """Derive a target regions array from bait intervals.

    Optionally subdivides oversized intervals, applies gene annotations from
    `annotate`, and shortens interval labels. Returns the transformed copy.
    """
    targets = bait_arr.copy()
    # Drop zero-length intervals.
    targets = targets[targets.start != targets.end]
    if do_split:
        logging.info('Splitting large targets')
        targets = targets.subdivide(avg_size, 0)
    if annotate:
        logging.info('Applying annotations as target names')
        annotation = tabio.read_auto(annotate)
        antitarget.compare_chrom_names(targets, annotation)
        targets['gene'] = annotation.into_ranges(targets, 'gene', '-')
    if do_short_names:
        logging.info('Shortening target interval labels')
        targets['gene'] = list(shorten_labels(targets['gene']))
    return targets
def extractRookiemtlbltranslationsBlogspotCom(item):
    """Build a release message for a rookiemtlbltranslations.blogspot.com item.

    Returns None for previews / items without a volume or chapter, False when
    no known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    matched = next((entry for entry in tagmap if entry[0] in item['tags']), None)
    if matched is None:
        return False
    _, name, tl_type = matched
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
def check_configuration(options: "Options", pool_contract_address: typing.Optional[str]) -> None:
    """Verify the configured madMAx plotter supports pool plotting.

    When a pool contract address is supplied, runs ``<executable> --help`` and
    raises if the binary does not advertise the ``--contract`` option.
    Does nothing when no pool address is configured.

    Raises:
        subprocess.CalledProcessError: if the executable exits non-zero.
        Exception: if ``--contract`` is unsupported.
    """
    if pool_contract_address is None:
        return
    completed_process = subprocess.run(
        args=[options.executable, '--help'],
        capture_output=True,
        check=True,
        encoding='utf-8',
    )
    if '--contract' not in completed_process.stdout:
        # Plain string: the original used an f-prefix with no placeholders.
        raise Exception('found madMAx version does not support the `--contract` option for pools.')
def compute_preflop_lossless_abstraction(builder) -> Dict[Tuple[Card, Card], int]:
    """Map each short-deck starting hand to its lossless abstraction bucket.

    Only valid for a short deck with ranks {10, J, Q, K, A}; raises
    ValueError otherwise.
    """
    allowed_ranks = {10, 11, 12, 13, 14}
    found_ranks = {card.rank_int for card in builder._cards}
    if found_ranks != allowed_ranks:
        raise ValueError(f"Preflop lossless abstraction only works for a short deck with ranks [10, jack, queen, king, ace]. What was specified={found_ranks} doesn't equal what is allowed={allowed_ranks}")
    preflop_lossless: Dict[Tuple[Card, Card], int] = {}
    by_eval_card = operator.attrgetter('eval_card')
    for starting_hand in builder.starting_hands:
        # Highest card first, keyed as a tuple.
        hand = sorted(starting_hand, key=by_eval_card, reverse=True)
        preflop_lossless[tuple(hand)] = make_starting_hand_lossless(hand, builder)
    return preflop_lossless
def test_assign_events_on_transitions():
    """Events declared on transitions are delivered to the on_cycle callback."""

    class TrafficLightMachine(StateMachine):
        green = State(initial=True)
        yellow = State()
        red = State()
        green.to(yellow, event='cycle slowdown slowdown')
        yellow.to(red, event='cycle stop')
        red.to(green, event='cycle go')

        def on_cycle(self, event_data, event: str):
            assert (event_data.event == event)
            return f'Running {event} from {event_data.transition.source.id} to {event_data.transition.target.id}'

    machine = TrafficLightMachine()
    # One full lap around the light cycle.
    for source, target in (('green', 'yellow'), ('yellow', 'red'), ('red', 'green')):
        assert machine.send('cycle') == f'Running cycle from {source} to {target}'
def get_filesystem_create_file_mapping(io, metadata, event, details_io, extra_detail_io):
    """Decode CreateFileMapping details (sync type, page protection) into event.details."""
    details_io.seek(12, 1)  # skip 12 bytes of unparsed header
    sync_type = read_u32(details_io)
    page_protection = read_u32(details_io)
    event.details['SyncType'] = get_filesystem_createfilemapping_synctype(sync_type)
    # First matching base protection wins — same precedence as the original elif chain.
    for protection in (PageProtection.PAGE_READONLY,
                       PageProtection.PAGE_READWRITE,
                       PageProtection.PAGE_WRITECOPY,
                       PageProtection.PAGE_EXECUTE,
                       PageProtection.PAGE_EXECUTE_READ,
                       PageProtection.PAGE_EXECUTE_READWRITE):
        if page_protection & protection:
            event.details['PageProtection'] = protection.name
            break
    # PAGE_NOCACHE is a modifier appended to whichever base protection matched.
    if (page_protection & PageProtection.PAGE_NOCACHE) and ('PageProtection' in event.details):
        event.details['PageProtection'] += '|PAGE_NOCACHE'
def test_import_from_nothing(tmp_path, config):
    """Exported networks can be re-imported after every network is deleted."""
    export_path = tmp_path.joinpath('exported.yaml')
    # Snapshot the current network config to a standalone export file.
    with _get_data_folder().joinpath('network-config.yaml').open() as src:
        snapshot = yaml.safe_load(src)
    with export_path.open('w') as dst:
        yaml.dump(snapshot, dst)
    # Wipe every configured network.
    for network_id in config.networks.keys():
        cli_networks._delete(network_id)
    config.networks = {}
    # Re-import and verify the on-disk config matches the export.
    cli_networks._import(export_path.as_posix())
    with _get_data_folder().joinpath('network-config.yaml').open() as src:
        restored = yaml.safe_load(src)
    with export_path.open() as src:
        exported = yaml.safe_load(src)
    assert restored == exported
class OptionPlotoptionsTilemapDatalabels(Options):
    """Data-label options for tilemap plot options.

    NOTE(review): the dump contained duplicate getter/setter method names with
    no decorators, so each setter silently shadowed its getter. The
    @property/@setter pairs are restored here (sub-option accessors such as
    ``animation``/``filter``/``textPath`` are read-only properties) — confirm
    against the generated original.
    """

    @property
    def align(self):
        return self._config_get('center')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def allowOverlap(self):
        return self._config_get(False)

    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self) -> 'OptionPlotoptionsTilemapDatalabelsAnimation':
        return self._config_sub_data('animation', OptionPlotoptionsTilemapDatalabelsAnimation)

    @property
    def backgroundColor(self):
        return self._config_get(None)

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderColor(self):
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        return self._config_get(0)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def crop(self):
        return self._config_get(False)

    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def defer(self):
        return self._config_get(True)

    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabled(self):
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def filter(self) -> 'OptionPlotoptionsTilemapDatalabelsFilter':
        return self._config_sub_data('filter', OptionPlotoptionsTilemapDatalabelsFilter)

    @property
    def format(self):
        return self._config_get('point.value')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        return self._config_get(None)

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def inside(self):
        return self._config_get(True)

    @inside.setter
    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def overflow(self):
        return self._config_get('justify')

    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)

    @property
    def padding(self):
        return self._config_get(0)

    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)

    @property
    def position(self):
        return self._config_get('center')

    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shape(self):
        return self._config_get('square')

    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)

    @property
    def style(self):
        return self._config_get(None)

    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)

    @property
    def textPath(self) -> 'OptionPlotoptionsTilemapDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionPlotoptionsTilemapDatalabelsTextpath)

    @property
    def useHTML(self):
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        return self._config_get('middle')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(0)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(6)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): the original source had a bare `_json` statement here — residue
# of a stripped decorator (likely a dataclasses-json style decorator) that
# cannot be restored without a third-party dependency. Confirm against the
# original module.
class FedAcctResults():
    """Accumulates federal-account rows, rolls up child totals, sorts and pages.

    NOTE(review): ``field(default_factory=dict)`` implies this was also a
    @dataclass whose decorator was stripped — confirm.
    """

    _federal_accounts: "Dict[FedAccount, FedAccount]" = field(default_factory=dict)

    def __getitem__(self, key):
        """Return the stored account equal to `key`, inserting `key` on first sight."""
        return self._federal_accounts.setdefault(key, key)

    def __len__(self):
        return len(self._federal_accounts)

    def rollup(self):
        """Aggregate each row's children into the parent row's totals."""
        for row in self._federal_accounts:
            for child in row.children:
                row.outlay += child.outlay
                row.obligation += child.obligation
                # A single None child poisons the parent's total to None.
                row.total_budgetary_resources = (
                    (row.total_budgetary_resources + child.total_budgetary_resources)
                    if (child.total_budgetary_resources is not None) else None)
                if child.award_count is not None:
                    row.award_count += child.award_count
                else:
                    row.award_count = None

    def sort(self, field, direction):
        """Sort children of each row, then the rows themselves, by `field`."""
        for row in self._federal_accounts:
            row.children = self.sort_results(row.children, field, direction)
        self._federal_accounts = self.sort_results(self._federal_accounts, field, direction)

    def slice(self, start, end):
        """Return rows with index in [start, end)."""
        results = []
        for (i, fa) in enumerate(self._federal_accounts):
            if (i >= start) and (i < end):
                results.append(fa)
        return results

    def finalize(self, pagination: "Pagination"):
        """Roll up, sort, page, and serialize the accumulated accounts."""
        self.rollup()
        self.sort(pagination.sort_key, pagination.sort_order)
        return list((fa.to_dict() for fa in self.slice(pagination.lower_limit, pagination.upper_limit)))

    @staticmethod
    def sort_results(items, field, direction='desc'):
        """Sort a list or dict of rows by (`field`, id).

        Restored @staticmethod: the function has no `self` parameter but is
        invoked as ``self.sort_results(...)``, which would raise TypeError
        without the decorator.
        """
        reverse = (direction != 'asc')
        sort_key = (lambda x: (getattr(x, field), getattr(x, 'id')))
        if isinstance(items, list):
            return sorted(items, key=sort_key, reverse=reverse)
        else:
            return {k: items[k] for k in sorted(items, key=sort_key, reverse=reverse)}
class StarRenderer(ShapeStar):
    """Cairo renderer for a row of rating stars, with a global surface cache.

    Pre-renders "full" and "empty" star-row surfaces per (size, hints) and
    composites a fractional rating by clipping the two surfaces side by side.
    """

    def __init__(self):
        ShapeStar.__init__(self, 5, 0.6)
        self.size = StarSize.NORMAL          # logical size bucket
        self.n_stars = 5                     # stars drawn per row
        self.spacing = 1                     # px gap between stars
        self.rounded = True                  # use rounded line joins
        self.rating = 3                      # current rating, 0..n_stars
        self.hints = StarRenderHints.NORMAL
        self.pixel_value = None              # explicit px size for StarSize.PIXEL_VALUE
        # Maps a StarSize to a zero-arg callable returning star height in px.
        self._size_map = {StarSize.SMALL: small_em, StarSize.NORMAL: em, StarSize.BIG: big_em, StarSize.PIXEL_VALUE: self.get_pixel_size}

    def _get_mangled_keys(self, size):
        # Cache keys fold size, render hints and fill state into single values.
        keys = (((size * self.hints) + StarFillState.FULL), ((size * self.hints) + StarFillState.EMPTY))
        return keys

    def create_normal_surfaces(self, context, vis_width, vis_height, star_width):
        """Render (full, empty) star-row surfaces with a vertical gradient stroke."""
        rgba1 = context.get_border_color(Gtk.StateFlags.NORMAL)
        rgba0 = context.get_color(Gtk.StateFlags.ACTIVE)
        lin = cairo.LinearGradient(0, 0, 0, vis_height)
        lin.add_color_stop_rgb(0, rgba0.red, rgba0.green, rgba0.blue)
        lin.add_color_stop_rgb(1, rgba1.red, rgba1.green, rgba1.blue)
        full_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, vis_width, vis_height)
        cr = cairo.Context(full_surf)
        cr.set_source(lin)
        cr.set_line_width(1)
        if self.rounded:
            # NOTE(review): LINE_CAP_ROUND passed to set_line_join — presumably
            # LINE_JOIN_ROUND was intended; both constants happen to share the
            # value 1 in pycairo so behavior is unaffected. TODO confirm.
            cr.set_line_join(cairo.LINE_CAP_ROUND)
        for i in range(self.n_stars):
            x = (1 + (i * (star_width + self.spacing)))
            self.layout(cr, (x + 1), 1, (star_width - 2), (vis_height - 2))
            cr.stroke_preserve()
            cr.fill()
        del cr
        empty_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, vis_width, vis_height)
        cr = cairo.Context(empty_surf)
        cr.set_source(lin)
        cr.set_line_width(1)
        if self.rounded:
            # NOTE(review): see line-join note above.
            cr.set_line_join(cairo.LINE_CAP_ROUND)
        for i in range(self.n_stars):
            x = (1 + (i * (star_width + self.spacing)))
            self.layout(cr, (x + 1), 1, (star_width - 2), (vis_height - 2))
            # Outline only — "empty" stars are stroked, not filled.
            cr.stroke()
        del cr
        return (full_surf, empty_surf)

    def create_reactive_surfaces(self, context, vis_width, vis_height, star_width):
        """Render (full, empty) surfaces styled like clickable buttons."""
        full_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, vis_width, vis_height)
        cr = cairo.Context(full_surf)
        if self.rounded:
            # NOTE(review): see line-join note above.
            cr.set_line_join(cairo.LINE_CAP_ROUND)
        for i in range(self.n_stars):
            x = (1 + (i * (star_width + self.spacing)))
            self.layout(cr, (x + 2), 2, (star_width - 4), (vis_height - 4))
        line_color = context.get_border_color(Gtk.StateFlags.NORMAL)
        cr.set_source_rgb(line_color.red, line_color.green, line_color.blue)
        cr.set_line_width(3)
        cr.stroke_preserve()
        # Clip to the star outlines so the button background shows through them.
        cr.clip()
        context.save()
        context.add_class('button')
        context.set_state(Gtk.StateFlags.NORMAL)
        Gtk.render_background(context, cr, 0, 0, vis_width, vis_height)
        context.restore()
        for i in range(self.n_stars):
            x = (1 + (i * (star_width + self.spacing)))
            self.layout(cr, (x + 1.5), 1.5, (star_width - 3), (vis_height - 3))
            # Inner translucent white highlight.
            cr.set_source_rgba(1, 1, 1, 0.8)
            cr.set_line_width(1)
            cr.stroke()
        del cr
        empty_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, vis_width, vis_height)
        cr = cairo.Context(empty_surf)
        if self.rounded:
            # NOTE(review): see line-join note above.
            cr.set_line_join(cairo.LINE_CAP_ROUND)
        line_color = context.get_border_color(Gtk.StateFlags.NORMAL)
        cr.set_source_rgb(line_color.red, line_color.green, line_color.blue)
        for i in range(self.n_stars):
            x = (1 + (i * (star_width + self.spacing)))
            self.layout(cr, (x + 2), 2, (star_width - 4), (vis_height - 4))
            cr.set_line_width(3)
            cr.stroke()
        del cr
        return (full_surf, empty_surf)

    def update_cache_surfaces(self, context, size):
        """(Re)render the surfaces for `size` and store them in the global cache."""
        LOG.debug('update cache')
        global _star_surface_cache
        star_width = vis_height = self._size_map[size]()
        vis_width = ((star_width + self.spacing) * self.n_stars)
        # NOTE(review): `surfs` is unbound if hints is neither NORMAL nor
        # REACTIVE — presumably those are the only two values in practice;
        # confirm against StarRenderHints.
        if (self.hints == StarRenderHints.NORMAL):
            surfs = self.create_normal_surfaces(context, vis_width, vis_height, star_width)
        elif (self.hints == StarRenderHints.REACTIVE):
            surfs = self.create_reactive_surfaces(context, vis_width, vis_height, star_width)
        (full_key, empty_key) = self._get_mangled_keys(size)
        _star_surface_cache[full_key] = surfs[0]
        _star_surface_cache[empty_key] = surfs[1]
        return surfs

    def lookup_surfaces_for_size(self, size):
        """Return cached (full, empty) surfaces for `size`, or (None, None)."""
        (full_key, empty_key) = self._get_mangled_keys(size)
        if (full_key not in _star_surface_cache):
            return (None, None)
        full_surf = _star_surface_cache[full_key]
        empty_surf = _star_surface_cache[empty_key]
        return (full_surf, empty_surf)

    def render_star(self, context, cr, x, y):
        """Paint the star row at (x, y): full surface up to the rating fraction,
        empty surface for the remainder."""
        size = self.size
        (full, empty) = self.lookup_surfaces_for_size(size)
        if (full is None):
            (full, empty) = self.update_cache_surfaces(context, size)
        fraction = (self.rating / self.n_stars)
        stars_width = star_height = full.get_width()
        full_width = round((fraction * stars_width), 0)
        cr.rectangle(x, y, full_width, star_height)
        cr.clip()
        cr.set_source_surface(full, x, y)
        cr.paint()
        cr.reset_clip()
        if (fraction < 1.0):
            empty_width = (stars_width - full_width)
            cr.rectangle((x + full_width), y, empty_width, star_height)
            cr.clip()
            cr.set_source_surface(empty, x, y)
            cr.paint()
            cr.reset_clip()

    def get_pixel_size(self):
        # Size callable used for StarSize.PIXEL_VALUE (see _size_map).
        return self.pixel_value

    def get_visible_size(self, context):
        """Return (width, height) of the rendered row, rendering it if needed."""
        (surf, _) = self.lookup_surfaces_for_size(self.size)
        if (surf is None):
            (surf, _) = self.update_cache_surfaces(context, self.size)
        return (surf.get_width(), surf.get_height())
class OptionPlotoptionsDumbbellSonificationTracksMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options for dumbbell sonification tracks.

    NOTE(review): the dump contained duplicate getter/setter method names with
    no decorators, so each setter silently shadowed its getter. The
    @property/@setter pairs are restored here — confirm against the generated
    original.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class Delay(Op):
    """Op that re-emits values, errors and completion after a fixed delay."""

    __slots__ = ('_delay',)

    def __init__(self, delay, source=None):
        Op.__init__(self, source)
        self._delay = delay  # delay in seconds

    def on_source(self, *args):
        # Schedule the re-emission instead of emitting synchronously.
        get_event_loop().call_later(self._delay, self.emit, *args)

    def on_source_error(self, error):
        get_event_loop().call_later(self._delay, self.error_event.emit, error)

    def on_source_done(self, source):
        # Detach from the source immediately, but signal done only after the delay.
        if self._source is not None:
            self._disconnect_from(self._source)
            self._source = None
        get_event_loop().call_later(self._delay, self.set_done)
class SimpleEditor(Editor):
    """wx text-entry editor that keeps a trait value in sync with a TextCtrl."""

    # Extra wx style flags OR'd into the control; subclasses may override.
    base_style = 0
    # Background color shown when the entered value is valid.
    ok_color = OKColor
    # Callable used to convert the raw text into a value before mapping.
    evaluate = evaluate_trait

    def init(self, parent):
        """Create the wx.TextCtrl and wire up the relevant wx events."""
        factory = self.factory
        style = self.base_style
        self.evaluate = factory.evaluate
        self.sync_value(factory.evaluate_name, 'evaluate', 'from')
        # Passwords are always single-line.
        if ((not factory.multi_line) or factory.password):
            style &= (~ wx.TE_MULTILINE)
        if factory.password:
            style |= wx.TE_PASSWORD
        multi_line = ((style & wx.TE_MULTILINE) != 0)
        if multi_line:
            self.scrollable = True
        if (factory.enter_set and (not multi_line)):
            # Commit on Enter for single-line enter_set editors.
            control = wx.TextCtrl(parent, (- 1), self.str_value, style=(style | wx.TE_PROCESS_ENTER))
            parent.Bind(wx.EVT_TEXT_ENTER, self.update_object, id=control.GetId())
        else:
            control = wx.TextCtrl(parent, (- 1), self.str_value, style=style)
        # Commit when the control loses focus.
        control.Bind(wx.EVT_KILL_FOCUS, self.update_object)
        if control.IsSingleLine():
            control.SetHint(self.factory.placeholder)
        if factory.auto_set:
            # Commit on every keystroke.
            parent.Bind(wx.EVT_TEXT, self.update_object, id=control.GetId())
        self.control = control
        self.set_error_state(False)
        self.set_tooltip()

    def update_object(self, event):
        """Push the widget's current text into the trait value."""
        if isinstance(event, wx.FocusEvent):
            # Let wx continue normal focus processing.
            event.Skip()
        if ((not self._no_update) and (self.control is not None)):
            try:
                self.value = self._get_user_value()
                if (self._error is not None):
                    self._error = None
                    self.ui.errors -= 1
                self.set_error_state(False)
            except TraitError as excp:
                # NOTE(review): validation errors are silently ignored here
                # (`excp` unused) — error display is handled via error()/
                # set_error_state elsewhere; confirm this is intentional.
                pass

    def update_editor(self):
        """Refresh the widget from the trait value if they differ."""
        user_value = self._get_user_value()
        try:
            unequal = bool((user_value != self.value))
        except ValueError:
            # Comparison itself can fail (e.g. numpy arrays); treat as unequal.
            unequal = True
        if unequal:
            # _no_update suppresses the update_object feedback loop.
            self._no_update = True
            self.control.SetValue(self.str_value)
            self._no_update = False
        if (self._error is not None):
            self._error = None
            self.ui.errors -= 1
            self.set_error_state(False)

    def _get_user_value(self):
        """Return the widget text run through evaluate() and the factory mapping."""
        value = self.control.GetValue()
        try:
            value = self.evaluate(value)
        except:
            # Evaluation is best-effort; fall back to the raw text.
            pass
        try:
            ret = self.factory.mapping.get(value, value)
        except TypeError:
            # Unhashable values cannot be mapping keys.
            ret = value
        return ret

    def error(self, excp):
        """Flag this editor (once) as holding an invalid value."""
        if (self._error is None):
            self._error = True
            self.ui.errors += 1
        self.set_error_state(True)

    def in_error_state(self):
        return (self.invalid or self._error)
class BaseHandler(metaclass=abc.ABCMeta):
    """Abstract base for media (de)serialization handlers.

    Sync methods must be overridden by concrete handlers; the async variants
    default to delegating to the sync implementations.
    """

    _serialize_sync = None
    _deserialize_sync = None
    # Whether the framework should exhaust the request stream after handling.
    exhaust_stream = False

    def serialize(self, media: object, content_type: str) -> bytes:
        if (MEDIA_JSON not in content_type):
            raise NotImplementedError()
        raise NotImplementedError("The JSON media handler requires the sync interface to be implemented even in ASGI applications, because it's used internally by the Falcon framework.")

    async def serialize_async(self, media: object, content_type: str) -> bytes:
        # Default async behavior: delegate to the sync implementation.
        return self.serialize(media, content_type)

    def deserialize(self, stream: IO, content_type: str, content_length: Optional[int]) -> object:
        if (MEDIA_JSON not in content_type):
            raise NotImplementedError()
        raise NotImplementedError("The JSON media handler requires the sync interface to be implemented even in ASGI applications, because it's used internally by the Falcon framework.")

    async def deserialize_async(self, stream: IO, content_type: str, content_length: Optional[int]) -> object:
        # Buffer the async stream, then hand a sync file-like object down.
        data = (await stream.read())
        content_length = len(data)
        return self.deserialize(io.BytesIO(data), content_type, content_length)
def _single_document_split(document: Document, pre_separator: str) -> Iterable[Document]:
    """Yield one Document per `pre_separator`-delimited piece of `document`.

    Each piece gets a copy of the original metadata; when a 'source' entry
    exists it is suffixed with the piece index.
    """
    pieces = document.page_content.split(pre_separator)
    for index, piece in enumerate(pieces):
        piece_metadata = document.metadata.copy()
        if 'source' in piece_metadata:
            piece_metadata['source'] = ((piece_metadata['source'] + '_pre_split_') + str(index))
        yield Document(page_content=piece, metadata=piece_metadata)
def get_rarities(is_jp: bool) -> list[int]:
    """Return the rarity id table parsed from unitbuy.csv, or [] on failure."""
    raw = game_data_getter.get_file_latest('DataLocal', 'unitbuy.csv', is_jp)
    if raw is None:
        helper.error_text('Could not get unitbuy.csv')
        return []
    rows = helper.parse_int_list_list(csv_handler.parse_csv(raw.decode('utf-8')))
    return helper.copy_first_n(rows, 13)
class Sensor(Device):
    """Generic lego-sensor device with cached sysfs attribute access.

    NOTE(review): the dump contained undecorated accessor methods (with
    duplicate names for command/mode), so setters shadowed getters and
    attribute reads like ``self.decimals`` / ``self.bin_data_format`` /
    ``self.num_values`` (used without calls in _scale and bin_data) returned
    bound methods. The @property/@setter decorators are restored here.
    """

    SYSTEM_CLASS_NAME = 'lego-sensor'
    SYSTEM_DEVICE_NAME_CONVENTION = 'sensor*'
    __slots__ = ['_address', '_command', '_commands', '_decimals', '_driver_name', '_mode', '_modes', '_num_values', '_units', '_value', '_bin_data_format', '_bin_data_size', '_bin_data', '_mode_scale']

    def __init__(self, address=None, name_pattern=SYSTEM_DEVICE_NAME_CONVENTION, name_exact=False, **kwargs):
        if (address is not None):
            kwargs['address'] = address
        super(Sensor, self).__init__(self.SYSTEM_CLASS_NAME, name_pattern, name_exact, **kwargs)
        # Cached sysfs attribute handles, populated lazily on first access.
        self._address = None
        self._command = None
        self._commands = None
        self._decimals = None
        self._driver_name = None
        self._mode = None
        self._modes = None
        self._num_values = None
        self._units = None
        self._value = [None, None, None, None, None, None, None, None]
        self._bin_data_format = None
        self._bin_data_size = None
        self._bin_data = None
        self._mode_scale = {}

    def _scale(self, mode):
        """Return (and cache) the decimal scale factor for `mode`."""
        if (mode in self._mode_scale):
            scale = self._mode_scale[mode]
        else:
            scale = (10 ** (- self.decimals))
            self._mode_scale[mode] = scale
        return scale

    @property
    def address(self):
        """Port name this sensor is plugged into."""
        (self._address, value) = self.get_attr_string(self._address, 'address')
        return value

    @property
    def command(self):
        raise Exception('command is a write-only property!')

    @command.setter
    def command(self, value):
        self._command = self.set_attr_string(self._command, 'command', value)

    @property
    def commands(self):
        """Set of commands supported by this sensor."""
        (self._commands, value) = self.get_cached_attr_set(self._commands, 'commands')
        return value

    @property
    def decimals(self):
        """Number of decimal places in the values returned by value()."""
        (self._decimals, value) = self.get_attr_int(self._decimals, 'decimals')
        return value

    @property
    def driver_name(self):
        (self._driver_name, value) = self.get_cached_attr_string(self._driver_name, 'driver_name')
        return value

    @property
    def mode(self):
        """Currently selected sensor mode."""
        (self._mode, value) = self.get_attr_string(self._mode, 'mode')
        return value

    @mode.setter
    def mode(self, value):
        self._mode = self.set_attr_string(self._mode, 'mode', value)

    @property
    def modes(self):
        """Set of modes supported by this sensor."""
        (self._modes, value) = self.get_cached_attr_set(self._modes, 'modes')
        return value

    @property
    def num_values(self):
        """Number of value<N> attributes valid in the current mode."""
        (self._num_values, value) = self.get_attr_int(self._num_values, 'num_values')
        return value

    @property
    def units(self):
        """Measurement units for the current mode."""
        (self._units, value) = self.get_attr_string(self._units, 'units')
        return value

    def value(self, n=0):
        """Return the raw integer reading from the value<n> attribute."""
        n = int(n)
        (self._value[n], value) = self.get_attr_int(self._value[n], ('value' + str(n)))
        return value

    @property
    def bin_data_format(self):
        """Format code ('u8', 's16', 'float', ...) of the raw binary data."""
        (self._bin_data_format, value) = self.get_attr_string(self._bin_data_format, 'bin_data_format')
        return value

    def bin_data(self, fmt=None):
        """Return raw binary sensor data; unpack with `fmt` when given."""
        if (self._bin_data_size is None):
            # Bytes per value (by format code) times the number of values.
            self._bin_data_size = ({'u8': 1, 's8': 1, 'u16': 2, 's16': 2, 's16_be': 2, 's32': 4, 'float': 4}.get(self.bin_data_format, 1) * self.num_values)
        if (self._bin_data is None):
            self._bin_data = self._attribute_file_open('bin_data')
        self._bin_data.seek(0)
        raw = bytearray(self._bin_data.read(self._bin_data_size))
        if (fmt is None):
            return raw
        return unpack(fmt, raw)

    def _ensure_mode(self, mode):
        # Switch the sensor mode only when it actually differs.
        if (self.mode != mode):
            self.mode = mode
def test_nonintrinsic():
    """Opening test_nonintrinsic.f90 produces no diagnostics."""
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    file_path = str(test_dir / 'test_nonintrinsic.f90')
    request += write_rpc_notification('textDocument/didOpen', {'textDocument': {'uri': file_path}})
    errcode, results = run_request(request)
    assert errcode == 0
    assert results[1]['diagnostics'] == []
def main(page: Page):
    """Flet drag-and-drop demo: drag colored squares onto a matching target."""
    page.title = 'Drag and Drop example'

    def drag_will_accept(e):
        # Highlight the target: black border when the groups match, red otherwise.
        e.control.content.border = border.all(2, (colors.BLACK45 if (e.data == 'true') else colors.RED))
        e.control.update()

    def drag_accept(e: DragTargetAcceptEvent):
        # Copy the dragged square's color onto the target and clear the highlight.
        src = page.get_control(e.src_id)
        e.control.content.bgcolor = src.content.bgcolor
        e.control.content.border = None
        e.control.update()

    def drag_leave(e):
        # Remove the highlight when the drag exits the target without dropping.
        e.control.content.border = None
        e.control.update()

    # Left column: two draggables in group 'color' plus one in 'color1' (which
    # the target won't accept); right: the 'color' drop target.
    page.add(Row([Column([Draggable(group='color', content=Container(width=50, height=50, bgcolor=colors.CYAN, border_radius=5), content_feedback=Container(width=20, height=20, bgcolor=colors.CYAN, border_radius=3)), Draggable(group='color', content=Container(width=50, height=50, bgcolor=colors.YELLOW, border_radius=5)), Draggable(group='color1', content=Container(width=50, height=50, bgcolor=colors.GREEN, border_radius=5))]), Container(width=100), DragTarget(group='color', content=Container(width=50, height=50, bgcolor=colors.BLUE_GREY_100, border_radius=5), on_will_accept=drag_will_accept, on_accept=drag_accept, on_leave=drag_leave)]))
def parse_longer(tokens: Tokens, options: List[Option], argv: bool=False, more_magic: bool=False) -> List[Pattern]:
    """Parse a single ``--long[=<arg>]`` token into an Option.

    Mutates *options* when an unknown long option is encountered while
    parsing a usage pattern (i.e. tokens.error is not DocoptExit).  With
    *argv* True, unambiguous prefixes of known options are accepted; with
    *more_magic*, close misspellings are auto-corrected.
    """
    current_token = tokens.move()
    if ((current_token is None) or (not current_token.startswith('--'))):
        raise tokens.error(f'parse_longer got what appears to be an invalid token: {current_token}')
    (longer, maybe_eq, maybe_value) = current_token.partition('=')
    if (maybe_eq == maybe_value == ''):
        value = None  # no '=': an argument may follow as the next token
    else:
        value = maybe_value
    # Exact name matches first.
    similar = [o for o in options if (o.longer and (longer == o.longer))]
    # If more than one known option starts with this token, prefix expansion
    # would be ambiguous, so it is suppressed below.
    start_collision = (len([o for o in options if (o.longer and (longer in o.longer) and o.longer.startswith(longer))]) > 1)
    if (argv and (not len(similar)) and (not start_collision)):
        # Command-line parsing: accept an unambiguous prefix (e.g. --ver).
        similar = [o for o in options if (o.longer and (longer in o.longer) and o.longer.startswith(longer))]
    if (more_magic and (not similar)):
        # Fuzzy matching: accept options within normalized Levenshtein 0.25.
        corrected = [(longer, o) for o in options if (o.longer and (levenshtein_norm(longer, o.longer) < 0.25))]
        if corrected:
            print(f'NB: Corrected {corrected[0][0]} to {corrected[0][1].longer}')
        similar = [correct for (original, correct) in corrected]
    if (len(similar) > 1):
        raise tokens.error(f'{longer} is not a unique prefix: {similar}?')
    elif (len(similar) < 1):
        # Unknown option: infer argcount from the presence of '='.
        argcount = (1 if (maybe_eq == '=') else 0)
        o = Option(None, longer, argcount)
        # Remember newly-seen options while parsing the usage pattern.
        options.append(o)
        if (tokens.error is DocoptExit):
            # Parsing argv: carry the supplied value (or True for flags).
            o = Option(None, longer, argcount, (value if argcount else True))
    else:
        o = Option(similar[0].short, similar[0].longer, similar[0].argcount, similar[0].value)
        if (o.argcount == 0):
            if (value is not None):
                raise tokens.error(('%s must not have an argument' % o.longer))
        elif (value is None):
            if (tokens.current() in [None, '--']):
                raise tokens.error(('%s requires argument' % o.longer))
            value = tokens.move()
        if (tokens.error is DocoptExit):
            o.value = (value if (value is not None) else True)
    return [o]
class LiteSATAIdentify(Module):
    """Migen core that issues an ATA IDENTIFY command and buffers the reply.

    Pulse ``start`` to send the command; the response words are captured in
    a 512-deep internal FIFO exposed on ``source``.  ``done`` is asserted
    while the FSM sits in IDLE.
    """
    def __init__(self, user_port):
        self.start = Signal()
        self.done = Signal()
        self.data_width = user_port.dw
        # Response buffer; ResetInserter lets the FSM flush it per command.
        fifo = ResetInserter()(stream.SyncFIFO([('data', 32)], 512, buffered=True))
        self.submodules += fifo
        self.source = fifo.source
        (source, sink) = (user_port.sink, user_port.source)
        self.submodules.fsm = fsm = FSM(reset_state='IDLE')
        fsm.act('IDLE', self.done.eq(1), If(self.start, NextState('SEND-CMD')))
        # The command is a single-beat IDENTIFY request.
        self.comb += [source.last.eq(1), source.identify.eq(1)]
        fsm.act('SEND-CMD', fifo.reset.eq(1), source.valid.eq(1), If((source.valid & source.ready), NextState('WAIT-ACK')))
        fsm.act('WAIT-ACK', If((sink.valid & sink.identify), NextState('RECEIVE-DATA')))
        self.comb += fifo.sink.data.eq(sink.data)
        fsm.act('RECEIVE-DATA', sink.ready.eq(fifo.sink.ready), If(sink.valid, fifo.sink.valid.eq(1), If(sink.last, NextState('IDLE'))))
def complete_nusing_tasks(document):
    """Mark every mandatory, still-open Nursing Task referencing *document* as Completed.

    NOTE(review): the name reads like a typo for ``complete_nursing_tasks``;
    kept as-is since external callers may reference it.
    """
    filters = {'reference_name': document.name, 'mandatory': 1, 'status': ['not in', ['Completed', 'Cancelled']]}
    tasks = frappe.get_all('Nursing Task', filters=filters)
    # get_all returns dicts; NOTE(review): presumably intended
    # ``task_name.name`` — verify frappe.get_doc accepts the dict form here.
    for task_name in tasks:
        task = frappe.get_doc('Nursing Task', task_name)
        task.status = 'Completed'
        # Each completed task is linked to a fresh Vital Signs document.
        task.task_document_name = create_vital_signs(document.patient)
        task.save()
class QueryMethodTask(ITask):
    """Runs a workflow query method as an asyncio task, recording its outcome.

    NOTE(review): class-level ``= None`` defaults next to non-Optional hints
    suggest a stripped @dataclass decorator — confirm against upstream.
    """
    task_id: str = None
    workflow_instance: object = None
    query_name: str = None
    query_input: Payloads = None
    exception_thrown: BaseException = None
    ret_value: object = None
    data_converter: DataConverter = None
    def start(self):
        """Schedule query_main() on the current event loop."""
        logger.debug(f'[query-task-{self.task_id}-{self.query_name}] Created')
        self.task = asyncio.get_event_loop().create_task(self.query_main())
    async def query_main(self):
        """Look up and invoke the query method; store result or exception."""
        logger.debug(f'[query-task-{self.task_id}-{self.query_name}] Running')
        current_task.set(self)
        if (not (self.query_name in self.workflow_instance._query_methods)):
            # Unknown query: finish immediately with QueryNotFound recorded.
            self.status = Status.DONE
            self.exception_thrown = QueryNotFound(self.query_name)
            logger.error(f'Query not found: {self.query_name}')
            return
        query_proc = self.workflow_instance._query_methods[self.query_name]
        self.status = Status.RUNNING
        try:
            logger.info(f'Invoking query {self.query_name}')
            if (not self.query_input):
                query_input = []
            else:
                # Convert raw payloads using the query's argument type hints;
                # the first hint is dropped (receiver slot).
                hints = get_fn_args_type_hints(query_proc)
                hints.pop(0)
                query_input = self.data_converter.from_payloads(self.query_input, hints)
            self.ret_value = (await query_proc(self.workflow_instance, *query_input))
            logger.info(f'Query {self.query_name} returned {self.ret_value}')
        except CancelledError:
            # Cancellation is part of normal task teardown; not an error.
            logger.debug('Coroutine cancelled (expected)')
        except Exception as ex:
            logger.error(f'Query {self.query_name} failed', exc_info=1)
            self.exception_thrown = ex
        finally:
            self.status = Status.DONE
class WindowsRoute(Route):
    """Collect IPv4 routing entries by parsing Windows ``netstat -rn`` output."""

    # One route row: destination, netmask, gateway, interface, metric.
    PROG_ROW = re.compile('^\\s*([^\\s]+)\\s+([^\\s]+)\\s+([^\\s]+)\\s+([^\\s]+)\\s+([^\\s]+)$')

    def __init__(self, device, config):
        super().__init__(device, config)
        self._connector_helper = ConnectorHelper(self._device)

    def get_v4_routes(self):
        """Return a list of RouteEntry parsed from ``netstat -rn``."""
        output = self._connector_helper.check_command(['netstat', '-rn'])[0]
        parsed = []
        for row in output.splitlines():
            if 'Destination' in row:
                continue  # skip the header row
            hit = WindowsRoute.PROG_ROW.match(row)
            if hit is None:
                continue
            prefix_len = netaddr.IPAddress(hit.group(2)).netmask_bits()
            destination = '{}/{}'.format(hit.group(1), prefix_len)
            parsed.append(
                RouteEntry(dest=destination, gway=hit.group(3), flags='', iface=hit.group(4))
            )
        return parsed
class TestBot():
    """In-process Errbot instance for integration tests.

    Builds a throw-away configuration (temp data dir, in-memory storage),
    runs the bot's ``serve_forever`` loop in a daemon thread, and exposes
    push/pop helpers so tests can drive the message queues synchronously.
    """
    def __init__(self, extra_plugin_dir=None, loglevel=logging.DEBUG, extra_config=None):
        self.bot_thread = None
        self.setup(extra_plugin_dir=extra_plugin_dir, loglevel=loglevel, extra_config=extra_config)
    def setup(self, extra_plugin_dir: Optional[str]=None, loglevel=logging.DEBUG, extra_config=None):
        """Assemble the bot config from the packaged template plus overrides."""
        tempdir = mkdtemp()
        config = ShallowConfig()
        # Use errbot's stock config template as the baseline.
        config.__dict__.update(importlib.import_module('errbot.config-template').__dict__)
        config.BOT_DATA_DIR = tempdir
        config.BOT_LOG_FILE = ((tempdir + sep) + 'log.txt')
        config.STORAGE = 'Memory'
        if (extra_config is not None):
            log.debug('Merging %s to the bot config.', repr(extra_config))
            for (k, v) in extra_config.items():
                setattr(config, k, v)
        logging.basicConfig(format='%(levelname)s:%(message)s')
        file = logging.FileHandler(config.BOT_LOG_FILE, encoding='utf-8')
        self.logger = logging.getLogger('')
        self.logger.setLevel(loglevel)
        self.logger.addHandler(file)
        config.BOT_EXTRA_PLUGIN_DIR = extra_plugin_dir
        config.BOT_LOG_LEVEL = loglevel
        self.bot_config = config
    def start(self, timeout: int=2) -> None:
        """Start the bot thread and wait until it answers an ``!echo ready`` probe."""
        if (self.bot_thread is not None):
            raise Exception('Bot has already been started')
        self._bot = setup_bot('Test', self.logger, self.bot_config)
        self.bot_thread = Thread(target=self.bot.serve_forever, name='TestBot main thread', daemon=True)
        self.bot_thread.start()
        self.bot.push_message('!echo ready')
        try:
            # Drain anything a previous test left queued until the probe's
            # 'ready' reply comes back (bounded to 60 pops).
            for i in range(60):
                msg = self.bot.pop_message(timeout=timeout)
                if (msg == 'ready'):
                    break
                log.warning('Queue was not empty, the non-consumed message is:')
                log.warning(msg)
                log.warning('Check the previous test and remove spurrious messages.')
        except Empty:
            raise AssertionError('The "ready" message has not been received (timeout).')
    def bot(self) -> ErrBot:
        # NOTE(review): used elsewhere in this class as ``self.bot`` (an
        # attribute access), so this reads like a @property whose decorator
        # was stripped in this extract.
        return self._bot
    def stop(self) -> None:
        """Ask the bot to quit, join its thread, and reset queues/rooms."""
        if (self.bot_thread is None):
            raise Exception('Bot has not yet been started')
        self.bot.push_message(QUIT_MESSAGE)
        self.bot_thread.join()
        reset_app()
        log.info('Main bot thread quits')
        self.bot.zap_queues()
        self.bot.reset_rooms()
        self.bot_thread = None
    def pop_message(self, timeout: int=5, block: bool=True):
        return self.bot.pop_message(timeout, block)
    def push_message(self, msg: Message, extras=''):
        return self.bot.push_message(msg, extras=extras)
    def push_presence(self, presence: Presence):
        return self.bot.push_presence(presence)
    def exec_command(self, command, timeout: int=5):
        """Push *command* and return the first reply."""
        self.bot.push_message(command)
        return self.bot.pop_message(timeout)
    def zap_queues(self):
        return self.bot.zap_queues()
    def assertInCommand(self, command, response, timeout=5, dedent=False):
        """Assert that *response* is a substring of the reply to *command*."""
        if dedent:
            # Drop the leading blank line of a triple-quoted command, dedent the rest.
            command = '\n'.join(textwrap.dedent(command).splitlines()[1:])
        self.bot.push_message(command)
        msg = self.bot.pop_message(timeout)
        assert (response in msg), f'{response} not in {msg}.'
    (assertInCommand)
    # NOTE(review): the bare ``(assertInCommand)`` expression above and the
    # empty ``assertCommand`` below look like extraction residue of a
    # decorator-based deprecation alias — confirm against upstream errbot.
    def assertCommand(self, command, response, timeout=5, dedent=False):
        pass
    def assertCommandFound(self, command, timeout=5):
        """Assert that *command* is recognised (reply lacks 'not found')."""
        self.bot.push_message(command)
        assert ('not found' not in self.bot.pop_message(timeout))
    def inject_mocks(self, plugin_name: str, mock_dict: dict):
        """Replace attributes of a loaded plugin with mock objects."""
        plugin = self.bot.plugin_manager.get_plugin_obj_by_name(plugin_name)
        if (plugin is None):
            raise Exception(f'"{plugin_name}" is not loaded.')
        for (field, mock_obj) in mock_dict.items():
            if (not hasattr(plugin, field)):
                raise ValueError(f'No property/attribute named "{field}" attached.')
            setattr(plugin, field, mock_obj)
def test_explicit_instance_paths(modules_tmpdir):
    """A relative instance_path is rejected; an absolute one is stored verbatim."""
    with pytest.raises(ValueError) as excinfo:
        flask.Flask(__name__, instance_path='instance')
    assert 'must be absolute' in str(excinfo.value)

    application = flask.Flask(__name__, instance_path=str(modules_tmpdir))
    assert application.instance_path == str(modules_tmpdir)
class Availability(gh.ObjectType):
    """GraphQL type exposing availability checks for phone numbers and names."""
    phone = gh.Boolean(phone=gh.String(description=''), description='')
    name = gh.Boolean(name=gh.String(description=''), description='')
    def Field(cls, **kw):
        # NOTE(review): used as a field factory taking ``cls`` — looks like a
        # @classmethod whose decorator was stripped in this extract.
        return gh.Field(Availability, resolver=cls.resolve, **kw)
    def resolve(root, info):
        return Availability()
    def resolve_phone(root, info, phone):
        """True when *phone* is a valid number not already used by a PhoneLogin."""
        if (not models.is_phone_number(phone)):
            raise GraphQLError('')
        # Imported lazily; NOTE(review): presumably to avoid a circular import.
        from authext.models import PhoneLogin
        return (not PhoneLogin.objects.filter(phone=phone.strip()).exists())
    def resolve_name(root, info, name):
        """True when *name* is valid and not taken by an existing Player."""
        if (not models.is_name(name)):
            raise GraphQLError('')
        return (not models.Player.objects.filter(name=name.strip()).exists())
class PerspectiveMenuManager(MenuManager):
    """Workbench menu listing perspectives plus user/reset perspective actions."""
    groups = List(Group)
    id = 'PerspectivesMenu'
    name = 'Perspectives'
    window = Instance('pyface.workbench.api.WorkbenchWindow')
    def _groups_default(self):
        # Three sections: pick-a-perspective, user-perspective management, resets.
        groups = [self._create_perspective_group(self.window), self._create_user_perspective_group(self.window), self._create_reset_perspective_group(self.window)]
        return groups
    ('window.perspectives.items')
    # NOTE(review): the bare string expression above looks like residue of a
    # stripped trait-change decorator (e.g. @on_trait_change) wiring rebuild().
    def rebuild(self, event):
        """Recreate the menu after the window's perspective list changes."""
        self.destroy()
        self.reset_traits(['groups'])
        self.changed = True
        return
    def _create_perspective_group(self, window):
        """Group of SetActivePerspectiveAction entries, sorted by name."""
        perspectives = window.perspectives[:]
        perspectives.sort(key=(lambda x: x.name))
        group = Group()
        for perspective in perspectives:
            group.append(SetActivePerspectiveAction(perspective=perspective, window=window))
        return group
    def _create_user_perspective_group(self, window):
        """Group with new/save-as/rename/delete user-perspective actions."""
        group = Group(NewUserPerspectiveAction(window=window), SaveAsUserPerspectiveAction(window=window), RenameUserPerspectiveAction(window=window), DeleteUserPerspectiveAction(window=window))
        return group
    def _create_reset_perspective_group(self, window):
        """Group with reset-active and reset-all perspective actions."""
        group = Group(ResetActivePerspectiveAction(window=window), ResetAllPerspectivesAction(window=window))
        return group
class Su(Module):
    """Weevely module: execute a shell command as another user via ``su``.

    Two vectors feed the password to ``su`` non-interactively: a shell
    ``expect`` script and a Python ``pexpect`` snippet.
    """
    aliases = ['ifconfig']
    # NOTE(review): alias 'ifconfig' looks unrelated to su — confirm upstream.
    def init(self):
        """Register module metadata, the two su vectors, and CLI arguments."""
        self.register_info({'author': ['Emilio Pinna'], 'license': 'GPLv3'})
        self.register_vectors([ShellCmd('expect -c \'spawn su -c "${command}" "${user}"; expect -re "assword"; send "${ passwd }\r\n"; expect eof;\'', name='sh_expect', postprocess=(lambda x: (re.findall('Password: (?:\r\n)?([\\s\\S]+)', x)[0] if ('Password: ' in x) else ''))), PythonCode('\n import pexpect as p,sys\n c = p.spawn("su ${user} -c ${command}")\n c.expect(".*assword:");c.sendline("${ passwd }")\n i = c.expect([p.EOF,p.TIMEOUT])\n if i!=p.TIMEOUT:\n sys.stdout.write(c.before[3:].decode("utf-8","replace"))\n ', name='pyexpect')])
        self.register_arguments([{'name': 'passwd', 'help': "User's password"}, {'name': 'command', 'help': 'Shell command', 'nargs': '+'}, {'name': '-user', 'help': 'User to run the command with', 'default': 'root'}, {'name': '-stderr_redirection', 'default': ' 2>&1'}, {'name': '-vector-sh', 'choices': ('system', 'passthru', 'shell_exec', 'exec', 'popen', 'proc_open', 'perl_system', 'pcntl')}, {'name': '-vector', 'choices': self.vectors.get_names()}])
    def setup(self):
        """Probe vectors with ``whoami`` to find one that really switches user."""
        args_check = {'user': self.args['user'], 'passwd': self.args['passwd'], 'command': 'whoami'}
        # A vector works when whoami (stripped) equals the requested user.
        (vector_name, result) = self.vectors.find_first_result(names=[self.args.get('vector', '')], format_args=args_check, condition=(lambda result: ((self.session['shell_sh']['status'] == Status.FAIL) or ((self.session['shell_sh']['status'] == Status.RUN) and result and (result.rstrip() == self.args['user'])))))
        if ((self.session['shell_sh']['status'] == Status.RUN) and result and (result.rstrip() == self.args['user'])):
            # Remember the working vector for subsequent run() calls.
            self.session['shell_su']['stored_args']['vector'] = vector_name
            return Status.RUN
        else:
            log.warn(messages.module_shell_su.error_su_executing)
            return Status.IDLE
    def run(self, **kwargs):
        """Escape/join the command and execute it through the stored vector."""
        self.args['command'] = ' '.join(self.args['command']).replace("'", "\\'")
        format_args = {'user': self.args['user'], 'passwd': self.args['passwd'], 'command': self.args['command']}
        if self.args.get('vector_sh'):
            format_args['vector'] = self.args['vector_sh']
        if self.args.get('stderr_redirection'):
            format_args['stderr_redirection'] = self.args['stderr_redirection']
        return self.vectors.get_result(name=self.args['vector'], format_args=format_args)
class OAuthProviderCreate(BaseModel):
    """Request payload for registering an OAuth provider configuration."""
    provider: AvailableOAuthProvider
    client_id: str
    client_secret: str
    scopes: list[str]
    # Optional display name for this provider configuration.
    name: (str | None) = None
    # Only relevant for OpenID providers; consistency is enforced below.
    openid_configuration_endpoint: (HttpUrl | None) = None
    # Post-validation hook (mode='after') applying validate_openid_provider.
    _validate_openid_provider = model_validator(mode='after')(validate_openid_provider)
(scope='module', params=['rank_one', 'rank_two'])
# NOTE(review): the line above looks like residue of a stripped
# @pytest.fixture decorator parametrizing this fixture.
def tensor(V, int_type, request):
    """Return Tensor(inner(u, v) * measure) for the requested rank and integral type.

    rank_one: u is a Coefficient (linear form); rank_two: u is a
    TrialFunction (bilinear form). *int_type* selects dx, dS or ds.
    """
    if (request.param == 'rank_one'):
        u = Coefficient(V)
        v = TestFunction(V)
    elif (request.param == 'rank_two'):
        u = TrialFunction(V)
        v = TestFunction(V)
    else:
        raise ValueError(('Not recognized parameter: %s' % request.param))
    # Map the integral-type name onto the corresponding UFL measure.
    measure = {'cell': dx, 'interior_facet': dS, 'exterior_facet': ds}
    return Tensor((inner(u, v) * measure[int_type]))
class ToggleViewVisibilityAction(WorkbenchAction):
    """Menu action that shows/hides a workbench view and mirrors its state."""
    id = Delegate('view', modify=True)
    name = Delegate('view', modify=True)
    style = 'toggle'
    view = Instance(IView)
    def destroy(self):
        # Drop trait observers so a destroyed action no longer reacts to the view.
        if (self.view is not None):
            self._remove_view_listeners(self.view)
    def perform(self, event):
        """Toggle the associated view's visibility."""
        self._toggle_view_visibility(self.view)
        return
    def _view_changed(self, old, new):
        # Static trait-change handler: rewire observers when ``view`` is replaced.
        if (old is not None):
            self._remove_view_listeners(old)
        if (new is not None):
            self._add_view_listeners(new)
        self._refresh_checked()
        return
    def _add_view_listeners(self, view):
        # Track both visibility and window attachment changes.
        view.observe(self._refresh_checked, 'visible')
        view.observe(self._refresh_checked, 'window')
    def _remove_view_listeners(self, view):
        view.observe(self._refresh_checked, 'visible', remove=True)
        view.observe(self._refresh_checked, 'window', remove=True)
    def _refresh_checked(self, event=None):
        # Checked only when the view exists, is attached to a window, and visible.
        self.checked = ((self.view is not None) and (self.view.window is not None) and self.view.visible)
    def _toggle_view_visibility(self, view):
        if view.visible:
            view.hide()
        else:
            view.show()
        return
class AdAssetFeedSpecTitle(AbstractObject):
    """SDK wrapper for the AdAssetFeedSpecTitle node (field names and types)."""
    def __init__(self, api=None):
        super(AdAssetFeedSpecTitle, self).__init__()
        self._isAdAssetFeedSpecTitle = True
        self._api = api
    class Field(AbstractObject.Field):
        # Field-name constants for this node.
        adlabels = 'adlabels'
        text = 'text'
        url_tags = 'url_tags'
    # Maps field names to their wire types for (de)serialization.
    _field_types = {'adlabels': 'list<AdAssetFeedSpecAssetLabel>', 'text': 'string', 'url_tags': 'string'}
    def _get_field_enum_info(cls):
        # NOTE(review): takes ``cls`` — looks like a @classmethod whose
        # decorator was stripped in this extract. No enum fields on this node.
        field_enum_info = {}
        return field_enum_info
def test_clone_repository(isolated_client, mock_fal_persistent_dirs):
from fal.toolkit.utils.download_utils import FAL_REPOSITORY_DIR
EXAMPLE_REPO_URL = '
EXAMPLE_REPO_FIRST_COMMIT = '64b0a89c8391bd2cb3ca23cdeae01779e11aee05'
EXAMPLE_REPO_SECOND_COMMIT = '34ecbca8cc7b64719d2a5c40dd3272f8d13bc1d2'
expected_path = (FAL_REPOSITORY_DIR / 'isolate')
_client()
def clone_without_commit_hash():
repo_path = clone_repository(EXAMPLE_REPO_URL)
return repo_path
repo_path = clone_without_commit_hash()
assert (str(repo_path) == str(expected_path)), 'Path should be the target location'
_client()
def clone_with_commit_hash():
first_path = clone_repository(EXAMPLE_REPO_URL, commit_hash=EXAMPLE_REPO_FIRST_COMMIT)
first_repo_hash = _get_git_revision_hash(first_path)
second_path = clone_repository(EXAMPLE_REPO_URL, commit_hash=EXAMPLE_REPO_SECOND_COMMIT)
second_repo_hash = _get_git_revision_hash(repo_path)
return (first_path, first_repo_hash, second_path, second_repo_hash)
(first_path, first_repo_hash, second_path, second_repo_hash) = clone_with_commit_hash()
assert (str(expected_path) == str(first_path)), 'Path should be the target location'
assert (str(expected_path) == str(second_path)), 'Path should be the target location'
assert (first_repo_hash == EXAMPLE_REPO_FIRST_COMMIT), 'The commit hash should be the same'
assert (second_repo_hash == EXAMPLE_REPO_SECOND_COMMIT), 'The commit hash should be the same'
_client()
def clone_with_force():
first_path = clone_repository(EXAMPLE_REPO_URL, commit_hash=EXAMPLE_REPO_FIRST_COMMIT, force=False)
first_repo_stat = first_path.stat()
second_path = clone_repository(EXAMPLE_REPO_URL, commit_hash=EXAMPLE_REPO_FIRST_COMMIT, force=False)
second_repo_stat = second_path.stat()
third_path = clone_repository(EXAMPLE_REPO_URL, commit_hash=EXAMPLE_REPO_FIRST_COMMIT, force=True)
third_repo_stat = third_path.stat()
return (first_path, first_repo_stat, second_path, second_repo_stat, third_path, third_repo_stat)
(first_path, first_repo_stat, second_path, second_repo_stat, third_path, third_repo_stat) = clone_with_force()
assert (str(expected_path) == str(first_path)), 'Path should be the target location'
assert (str(expected_path) == str(second_path)), 'Path should be the target location'
assert (str(expected_path) == str(third_path)), 'Path should be the target location'
assert (first_repo_stat.st_mtime == second_repo_stat.st_mtime), 'The repository should not be cloned again'
assert (first_repo_stat.st_mtime < third_repo_stat.st_mtime), 'The repository should be cloned again with force=True' |
class CmdWield(_BaseTwitchCombatCommand):
    """Combat command: queue wielding a carried weapon/shield."""
    key = 'wield'
    help_category = 'combat'
    def parse(self):
        # Require an argument before the normal combat-command parsing runs.
        if (not self.args):
            self.msg('What do you want to wield?')
            raise InterruptCommand()
        super().parse()
    def func(self):
        # Only items in the backpack are valid wield candidates.
        item = self.caller.search(self.args, candidates=self.caller.equipment.get_wieldable_objects_from_backpack())
        if (not item):
            self.msg('(You must carry the item to wield it.)')
            return
        combathandler = self.get_or_create_combathandler()
        # dt=3: the wield action resolves after 3 time units.
        # NOTE(review): unit of dt assumed from combathandler — confirm.
        combathandler.queue_action({'key': 'wield', 'item': item, 'dt': 3})
        combathandler.msg(f'$You() reach for {item.get_display_name(self.caller)}!', self.caller)
_required
_required
_required('VMS_VM_BACKUP_ENABLED')
# NOTE(review): the three lines above look like residue of stripped view
# decorators (access guards plus a backup-feature flag) — confirm upstream.
def backup_list(request, hostname):
    """Render the backup list page for the VM identified by *hostname*."""
    vm = get_vm(request, hostname)
    context = {'vm': vm, 'can_edit': request.user.is_admin(request), 'lastbkp': [], 'last_bkpid': request.GET.get('last_snapid', None), 'bkpdefs': True}
    # Merge in the VM's backups (and related data) for the template.
    context.update(get_vm_backups(request, vm))
    return render(request, 'gui/vm/backup_list.html', context)
def test_multi_categorical_entropy():
    """entropy() keeps the batch shape of the logits for MultiDiscrete((3, 5))."""
    # (logits shape, expected entropy ndim, expected entropy shape)
    cases = [
        ((8,), 0, ()),
        ((100, 8), 1, (100,)),
        ((100, 8, 8), 2, (100, 8)),
    ]
    for logit_shape, want_ndim, want_shape in cases:
        logits = torch.from_numpy(np.random.randn(*logit_shape))
        dist = MultiCategoricalProbabilityDistribution(logits=logits, action_space=spaces.MultiDiscrete((3, 5)), temperature=1.0)
        entropy = dist.entropy().numpy()
        assert entropy.ndim == want_ndim
        assert entropy.shape == want_shape
class Bug(Base):
    """A Bugzilla bug tracked by Bodhi and associated with updates."""
    __tablename__ = 'bugs'
    __exclude_columns__ = ('id', 'updates')
    __get_by__ = ('bug_id',)
    # Upstream bug tracker id; unique within Bodhi.
    bug_id = Column(Integer, unique=True)
    title = Column(Unicode(255))
    # security: bug is security-related; parent: it is a parent/tracker security bug.
    security = Column(Boolean, default=False)
    parent = Column(Boolean, default=False)
    updates = relationship('Update', secondary=update_bug_table, back_populates='bugs')
    feedback = relationship('BugKarma', back_populates='bug')
    def url(self) -> str:
        # NOTE(review): reads like a @property — decorator appears stripped.
        return (config['buglink'] % self.bug_id)
    def update_details(self, bug: typing.Optional['bugzilla.bug.Bug']=None) -> None:
        """Refresh this row's details from the bug tracker."""
        bugs.bugtracker.update_details(bug, self)
    def default_message(self, update: Update) -> str:
        """Build the standard bug comment for *update*, chosen by its status.

        Raises ValueError when the update is neither stable nor testing.
        """
        install_msg = (f'''Soon you'll be able to install the update with the following command:
`{update.install_command}`''' if update.install_command else '')
        msg_data = {'update_title': update.get_title(delim=', ', nvr=True), 'update_beauty_title': update.get_title(beautify=True, nvr=True), 'update_alias': update.alias, 'repo': f'{update.release.long_name} {update.status.description}', 'install_instructions': install_msg, 'update_url': update.abs_url()}
        if (update.status is UpdateStatus.stable):
            message = config['stable_bug_msg'].format(**msg_data)
        elif (update.status is UpdateStatus.testing):
            if (update.release.id_prefix == 'FEDORA-EPEL'):
                if ('testing_bug_epel_msg' in config):
                    template = config['testing_bug_epel_msg']
                else:
                    # Fall back to the generic template but flag the missing key.
                    template = config['testing_bug_msg']
                    log.warning("No 'testing_bug_epel_msg' found in the config.")
            else:
                template = config['testing_bug_msg']
            message = template.format(**msg_data)
        else:
            raise ValueError(f'Trying to post a default comment to a bug, but {update.alias} is not in Stable or Testing status.')
        return message
    def add_comment(self, update: Update, comment: typing.Optional[str]=None) -> None:
        """Post *comment* (default message when None), except on a parent
        security bug while the security update is not yet stable."""
        if ((update.type is UpdateType.security) and self.parent and (update.status is not UpdateStatus.stable)):
            log.debug('Not commenting on parent security bug %s', self.bug_id)
        else:
            if (not comment):
                comment = self.default_message(update)
            log.debug(('Adding comment to Bug #%d: %s' % (self.bug_id, comment)))
            bugs.bugtracker.comment(self.bug_id, comment)
    def testing(self, update: Update) -> None:
        """Move the bug to ON_QA when its update reaches testing (skips parent security bugs)."""
        if ((update.type is UpdateType.security) and self.parent):
            log.debug('Not modifying parent security bug %s', self.bug_id)
        else:
            comment = self.default_message(update)
            bugs.bugtracker.on_qa(self.bug_id, comment)
    def close_bug(self, update: Update) -> None:
        """Close the bug, recording the fixed-in build versions."""
        versions = dict([(b.nvr_name, b.nvr) for b in update.builds])
        bugs.bugtracker.close(self.bug_id, versions=versions, comment=self.default_message(update))
    def modified(self, update: Update, comment: str) -> None:
        """Mark the bug MODIFIED (skips parent security bugs)."""
        if ((update.type is UpdateType.security) and self.parent):
            log.debug('Not modifying parent security bug %s', self.bug_id)
        else:
            bugs.bugtracker.modified(self.bug_id, comment)
def test_get_polygons_skipname(string_to_well):
    """skipname=True drops the NAME column but keeps the polygons' name attribute."""
    well_definition = '1.01\n Unknown\n custom_name 0 0 0\n 1\n Zonelog DISC 1 zone1 2 zone2 3 zone3\n 1 1 1 1'
    well = string_to_well(well_definition)
    result = well.get_polygons(skipname=True)
    assert 'NAME' not in result.dataframe.columns
    assert result.name == 'custom_name'
def test_overriding_resources():
    """Resources set under ``daemonset:``/``deployment:`` apply only to that
    workload; the other keeps the chart defaults (100m/100Mi requests,
    1000m/200Mi limits)."""
    # Override only the daemonset's resources.
    config = '\ndeployment:\n  enabled: true\ndaemonset:\n  resources:\n    limits:\n      cpu: "25m"\n      memory: "128Mi"\n    requests:\n      cpu: "25m"\n      memory: "128Mi"\n'
    r = helm_template(config)
    assert (r['daemonset'][name]['spec']['template']['spec']['containers'][0]['resources'] == {'requests': {'cpu': '25m', 'memory': '128Mi'}, 'limits': {'cpu': '25m', 'memory': '128Mi'}})
    assert (r['deployment'][name]['spec']['template']['spec']['containers'][0]['resources'] == {'requests': {'cpu': '100m', 'memory': '100Mi'}, 'limits': {'cpu': '1000m', 'memory': '200Mi'}})
    # Override only the deployment's resources.
    config = '\ndeployment:\n  enabled: true\n  resources:\n    limits:\n      cpu: "25m"\n      memory: "128Mi"\n    requests:\n      cpu: "25m"\n      memory: "128Mi"\n'
    r = helm_template(config)
    assert (r['daemonset'][name]['spec']['template']['spec']['containers'][0]['resources'] == {'requests': {'cpu': '100m', 'memory': '100Mi'}, 'limits': {'cpu': '1000m', 'memory': '200Mi'}})
    assert (r['deployment'][name]['spec']['template']['spec']['containers'][0]['resources'] == {'requests': {'cpu': '25m', 'memory': '128Mi'}, 'limits': {'cpu': '25m', 'memory': '128Mi'}})
class OptionSeriesColumnLabelStyle(Options):
    """Label style options for column series (Highcharts option wrapper).

    NOTE(review): each option appears as a getter/setter pair sharing a name —
    the @property/@<name>.setter decorators look stripped in this extract.
    """
    def fontSize(self):
        # Default '0.8em'.
        return self._config_get('0.8em')
    def fontSize(self, num: float):
        self._config(num, js_type=False)
    def fontWeight(self):
        # Default 'bold'.
        return self._config_get('bold')
    def fontWeight(self, text: str):
        self._config(text, js_type=False)
.register_type(TableDump2MrtRecord.SUBTYPE_PEER_INDEX_TABLE)
# NOTE(review): the leading ``.register_type(...)`` line is residue of a
# stripped class decorator registering this MRT subtype.
class TableDump2PeerIndexTableMrtMessage(TableDump2MrtMessage):
    """MRT TABLE_DUMP_V2 PEER_INDEX_TABLE message: collector id, view name, peers."""
    # Fixed header: 4-byte collector BGP id + 2-byte view name length.
    _HEADER_FMT = '!4sH'
    HEADER_SIZE = struct.calcsize(_HEADER_FMT)
    _PEER_COUNT_FMT = '!H'
    PEER_COUNT_SIZE = struct.calcsize(_PEER_COUNT_FMT)
    _TYPE = {'ascii': ['bgp_id']}
    def __init__(self, bgp_id, peer_entries, view_name='', view_name_len=None, peer_count=None):
        self.bgp_id = bgp_id
        assert isinstance(peer_entries, (list, tuple))
        for p in peer_entries:
            assert isinstance(p, MrtPeer)
        self.peer_entries = peer_entries
        assert isinstance(view_name, str)
        self.view_name = view_name
        # May be None; serialize() recomputes both from the actual data.
        self.view_name_len = view_name_len
        self.peer_count = peer_count
    def parse(cls, buf):
        # NOTE(review): used as an alternate constructor taking ``cls`` —
        # the @classmethod decorator appears stripped in this extract.
        (bgp_id, view_name_len) = struct.unpack_from(cls._HEADER_FMT, buf)
        bgp_id = addrconv.ipv4.bin_to_text(bgp_id)
        offset = cls.HEADER_SIZE
        # View name is a variable-length UTF-8 string right after the header.
        (view_name,) = struct.unpack_from(('!%ds' % view_name_len), buf, offset)
        view_name = str(view_name.decode('utf-8'))
        offset += view_name_len
        (peer_count,) = struct.unpack_from(cls._PEER_COUNT_FMT, buf, offset)
        offset += cls.PEER_COUNT_SIZE
        rest = buf[offset:]
        peer_entries = []
        # Each MrtPeer consumes a variable amount of the remaining buffer.
        for i in range(peer_count):
            (p, rest) = MrtPeer.parse(rest)
            peer_entries.insert(i, p)
        return cls(bgp_id, peer_entries, view_name, view_name_len, peer_count)
    def serialize(self):
        """Encode header, view name, peer count, then each peer entry."""
        view_name = self.view_name.encode('utf-8')
        self.view_name_len = len(view_name)
        self.peer_count = len(self.peer_entries)
        buf = (struct.pack(self._HEADER_FMT, addrconv.ipv4.text_to_bin(self.bgp_id), self.view_name_len) + view_name)
        buf += struct.pack(self._PEER_COUNT_FMT, self.peer_count)
        for p in self.peer_entries:
            buf += p.serialize()
        return buf
class TestELFSHA256(BaseTestCase):
    """elf2image ``--elf-sha256-offset``: digest embedding and overwrite guard."""
    ELF = 'esp32-app-cust-ver-info.elf'
    # Offset of the SHA-256 placeholder inside the generated image.
    SHA_OFFS = 176
    BIN = 'esp32-app-cust-ver-info.bin'
    def test_binary_patched(self):
        """The ELF's SHA-256 must appear in the .bin, both via the image
        loader and via a raw file read at SHA_OFFS."""
        try:
            self.run_elf2image('esp32', self.ELF, extra_args=['--elf-sha256-offset', f'{self.SHA_OFFS:#x}'])
            image = esptool.bin_image.LoadFirmwareImage('esp32', self.BIN)
            rodata_segment = image.segments[0]
            # NOTE(review): the -32 presumably skips a header preceding the
            # segment data, and the digest is assumed to lie in segment 0 —
            # confirm against the image layout.
            bin_sha256 = rodata_segment.data[(self.SHA_OFFS - 32):((self.SHA_OFFS - 32) + 32)]
            with open(self.ELF, 'rb') as f:
                elf_computed_sha256 = hashlib.sha256(f.read()).digest()
            with open(self.BIN, 'rb') as f:
                f.seek(self.SHA_OFFS)
                bin_sha256_raw = f.read(len(elf_computed_sha256))
            assert (elf_computed_sha256 == bin_sha256)
            assert (elf_computed_sha256 == bin_sha256_raw)
        finally:
            # Always remove the generated image, even on assertion failure.
            try_delete(self.BIN)
    def test_no_overwrite_data(self, capsys):
        """elf2image must refuse to patch when the offset holds non-zero data."""
        with pytest.raises(subprocess.CalledProcessError):
            self.run_elf2image('esp32', 'esp32-bootloader.elf', extra_args=['--elf-sha256-offset', '0xb0'])
        output = capsys.readouterr().out
        assert ('SHA256' in output)
        assert ('zero' in output)
class OptionSeriesVennSonificationContexttracksMappingPan(Options):
    """Pan mapping options for venn-series sonification context tracks.

    NOTE(review): each option appears as a getter/setter pair sharing a name —
    the @property/@<name>.setter decorators look stripped in this extract.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
_defaults()
# NOTE(review): the bare call above looks like stripped decorator residue.
class SpeakerImageSizeSchema(Schema):
    """JSON:API schema for speaker image sizing (widths >= 0, quality 0-100)."""
    class Meta():
        type_ = 'speaker-image-size'
        self_view = 'v1.speaker_image_size_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize
    id = fields.Str(dump_only=True)
    type = fields.Str(allow_none=True)
    small_size_width_height = fields.Integer(validate=(lambda n: (n >= 0)), allow_none=True)
    small_size_quality = fields.Integer(validate=(lambda n: (0 <= n <= 100)), allow_none=True)
    thumbnail_size_width_height = fields.Integer(validate=(lambda n: (n >= 0)), allow_none=True)
    thumbnail_size_quality = fields.Integer(validate=(lambda n: (0 <= n <= 100)), allow_none=True)
    icon_size_width_height = fields.Integer(validate=(lambda n: (n >= 0)), allow_none=True)
    icon_size_quality = fields.Integer(validate=(lambda n: (0 <= n <= 100)), allow_none=True)
class Command(BaseCommand):
    """Django management command seeding the dev database with fixture data."""
    help = 'Generate test data for a development environment.'
    def handle(self, *args, **options):
        # Fixed seed keeps the generated data deterministic across runs.
        Faker.seed(1)
        SuperUserFactory()
        UserFactory(username='pixel', first_name='Pixel', last_name='McPixelston')
        EmailtemplateFactory()
        # Embassy SF with a house admin and two bookable resources.
        user = UserFactory(username='embassysfadmin', first_name='SF', last_name='Admin')
        location = LocationFactory(slug='embassysf', name='Embassy SF', house_admins=[user])
        ResourceFactory(location=location, name='Batcave')
        ResourceFactory(location=location, name='Ada Lovelace')
        location = LocationFactory(slug='amsterdam', name='Embassy Amsterdam')
        ResourceFactory(location=location, name='Some room')
        location = LocationFactory(slug='redvic', name='The Red Victorian')
        ResourceFactory(location=location, name='Another room')
        self.stdout.write(self.style.SUCCESS('Successfully generated testdata'))
class OptionPlotoptionsHistogramTooltip(Options):
    """Tooltip options for histogram plot series (Highcharts option wrapper).

    NOTE(review): each option appears as a getter/setter pair sharing a name —
    the @property/@<name>.setter decorators look stripped in this extract.
    Getters return the documented default when unset; setters store the value.
    """
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)
    def dateTimeLabelFormats(self) -> 'OptionPlotoptionsHistogramTooltipDatetimelabelformats':
        # Nested sub-options object, created lazily.
        return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsHistogramTooltipDatetimelabelformats)
    def distance(self):
        return self._config_get(6)
    def distance(self, num: float):
        self._config(num, js_type=False)
    def followPointer(self):
        return self._config_get(False)
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)
    def followTouchMove(self):
        return self._config_get(True)
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)
    def footerFormat(self):
        return self._config_get('')
    def footerFormat(self, text: str):
        self._config(text, js_type=False)
    def format(self):
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def headerFormat(self):
        return self._config_get('')
    def headerFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def pointFormat(self):
        return self._config_get('<span style="font-size: 0.8em">{point.x} - {point.x2}</span><br/><span style="color:{point.color}"></span> {series.name} <b>{point.y}</b><br/>')
    def pointFormat(self, text: str):
        self._config(text, js_type=False)
    def pointFormatter(self):
        return self._config_get(None)
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)
    def valueDecimals(self):
        return self._config_get(None)
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)
    def valuePrefix(self):
        return self._config_get(None)
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)
    def valueSuffix(self):
        return self._config_get(None)
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
    def xDateFormat(self):
        return self._config_get(None)
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
def async_method(method: Callable[(..., TReturn)]) -> Callable[(..., Coroutine[(Any, Any, TReturn)])]:
    """Wrap a blocking method so it can be awaited.

    The returned coroutine resolves the method on the receiving
    instance/class and runs it in the default executor thread pool.

    BUG FIX: the body contained a bare no-op expression ``(method)`` where
    ``@functools.wraps(method)`` belonged, so the wrapper lost the wrapped
    method's name and docstring.
    """
    @functools.wraps(method)
    async def wrapper(cls_or_self: Any, *args: Any, **kwargs: Any) -> TReturn:
        # Re-resolve on the receiver so subclass overrides are honoured.
        cls_method = getattr(cls_or_self, method.__name__)
        loop = asyncio.get_event_loop()
        # kwargs go through functools.partial because run_in_executor
        # forwards positional arguments only.
        return (await loop.run_in_executor(None, functools.partial(cls_method, **kwargs), *args))
    return wrapper
def check_nyanko_signature_v1(signature: str, data: str, inquiry_code: str) -> bool:
    """Verify a Nyanko v1 request signature.

    The signature is 40 hex chars of random salt followed by an
    HMAC-SHA1 hex digest. The key is the inquiry code concatenated with
    the salt; the message is the payload concatenated with itself.
    """
    doubled_payload = data + data  # protocol hashes the payload twice over
    salt, mac = signature[:40], signature[40:]
    key = (inquiry_code + salt).encode('utf-8')
    expected = hmac.new(key, doubled_payload.encode('utf-8'), digestmod=hashlib.sha1).hexdigest()
    return expected == mac
def change_to_auth_mandatory(fledge_url, wait_time):
    """Switch the Fledge REST API to mandatory authentication and restart.

    Waits *wait_time* seconds first, PUTs the rest_api category change,
    then schedules a Fledge restart, asserting each response.
    """
    import http.client
    time.sleep(wait_time)
    # BUG FIX: the connection was never constructed (the line read bare
    # ``conn =``); restore the HTTP connection to the Fledge instance.
    conn = http.client.HTTPConnection(fledge_url)
    conn.request('PUT', '/fledge/category/rest_api', json.dumps({'authentication': 'mandatory'}))
    r = conn.getresponse()
    assert (200 == r.status)
    r = r.read().decode()
    jdoc = json.loads(r)
    assert ('mandatory' == jdoc['authentication']['value'])
    conn.request('PUT', '/fledge/restart', json.dumps({}))
    r = conn.getresponse()
    assert (200 == r.status)
    r = r.read().decode()
    jdoc = json.loads(r)
    assert ('Fledge restart has been scheduled.' == jdoc['message'])
class OptionSeriesNetworkgraphSonificationTracksMappingPlaydelay(Options):
    """Generated wrapper for series.networkgraph.sonification.tracks.mapping.playDelay.

    NOTE(review): getter/setter pairs below share a name; upstream these are
    presumably @property/@x.setter pairs whose decorators were stripped — as
    written each second def overrides the first. Confirm against the generator.
    """
    def mapFunction(self):
        # Custom mapping function; None means unset (library default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Point property the play delay maps to; None means unset.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Upper bound of the mapped range; None means unset.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Lower bound of the mapped range; None means unset.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Scope the min/max apply within (e.g. series/chart); None means unset.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
.parametrize('monkeypatch_resolver', [True, False])
def test_json_err_no_handler(asgi, monkeypatch_resolver):
    """An HTTP error must still serialize when no JSON media handler exists.

    NOTE(review): the bare ``.parametrize(...)`` line above looks like a
    stripped ``@pytest.mark.parametrize`` decorator.
    """
    app = create_app(asgi)
    # Only a URL-encoded handler is installed — no JSON handler at all.
    handlers = media.Handlers({falcon.MEDIA_URLENCODED: media.URLEncodedFormHandler()})
    if monkeypatch_resolver:
        def _resolve(media_type, default, raise_not_found=True):
            # Exercise the deprecated lookup path; must warn.
            with pytest.warns(DeprecatedWarning, match='This undocumented method'):
                h = handlers.find_by_media_type(media_type, default, raise_not_found=raise_not_found)
            return (h, None, None)
        handlers._resolve = _resolve
    app.req_options.media_handlers = handlers
    app.resp_options.media_handlers = handlers
    class Resource():
        def on_get(self, req, resp):
            raise falcon.HTTPForbidden()
    app.add_route('/', Resource())
    result = testing.simulate_get(app, '/')
    # The error must still come back as its default serialization.
    assert (result.status_code == 403)
    assert (result.json == falcon.HTTPForbidden().to_dict())
.gui()
.skipif((sys.platform != 'linux'), reason='Linux specific test')
def test_synchronized_capture_triggers_request_error(monkeypatch, dbus_portal):
    """A null/empty DBus reply must raise ScreenshotRequestError.

    NOTE(review): the bare ``.gui()`` / ``.skipif(...)`` lines above look like
    stripped pytest marker decorators.
    """
    def _mocked_interface_call(*args):
        # Return an empty message so the portal call looks failed.
        return dbus_portal.QtDBus.QDBusMessage()
    monkeypatch.setattr(dbus_portal.QtDBus.QDBusInterface, 'call', _mocked_interface_call)
    with pytest.raises(ScreenshotRequestError):
        _ = dbus_portal._synchronized_capture(interactive=False)
def getModulesForPid(pid):
    """Return ``{pid: {(module_filename, module_info), ...}}`` for a process.

    Opens the process, enumerates its loaded modules, and records each
    module's file name and information as a tuple in a set.
    """
    process_handle = open_process(pid)
    entries = set()
    for module_handle in enum_process_modules(process_handle):
        entries.add((getModuleFileName(process_handle, module_handle), getModuleInformation(process_handle, module_handle)))
    return {pid: entries}
class OptionPlotoptionsParetoSonificationTracksMappingPlaydelay(Options):
    """Generated wrapper for plotOptions.pareto.sonification.tracks.mapping.playDelay.

    NOTE(review): getter/setter pairs share a name; upstream these are likely
    @property/@x.setter pairs whose decorators were stripped.
    """
    def mapFunction(self):
        # Custom mapping function; None means unset (library default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Point property the play delay maps to; None means unset.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Upper bound of the mapped range; None means unset.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Lower bound of the mapped range; None means unset.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Scope the min/max apply within; None means unset.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_trapping_of_bad_request_key_errors(app, client):
    """BadRequest from a missing form key is normally converted to a 400;
    debug mode and TRAP_BAD_REQUEST_ERRORS re-raise it instead.

    NOTE(review): the bare ``('/key')`` / ``('/abort')`` lines below look like
    stripped ``@app.route(...)`` decorators — without them the views are never
    registered; confirm against upstream.
    """
    ('/key')
    def fail():
        flask.request.form['missing_key']
    ('/abort')
    def allow_abort():
        flask.abort(400)
    # Default mode: both paths produce a plain 400, without leaking the key.
    rv = client.get('/key')
    assert (rv.status_code == 400)
    assert (b'missing_key' not in rv.data)
    rv = client.get('/abort')
    assert (rv.status_code == 400)
    # Debug mode: the KeyError propagates (still a BadRequest subclass).
    app.debug = True
    with pytest.raises(KeyError) as e:
        client.get('/key')
    assert e.errisinstance(BadRequest)
    assert ('missing_key' in e.value.get_description())
    rv = client.get('/abort')
    assert (rv.status_code == 400)
    # Trap flag: both the KeyError and the explicit abort are re-raised.
    app.debug = False
    app.config['TRAP_BAD_REQUEST_ERRORS'] = True
    with pytest.raises(KeyError):
        client.get('/key')
    with pytest.raises(BadRequest):
        client.get('/abort')
def test_search_cache_insert(frontend_editing_db, frontend_db):
    """A query inserted via the editing DB is readable back as a CachedQuery."""
    # Cache starts empty for this rule UID.
    result = frontend_db.get_query_from_cache(RULE_UID)
    assert (result is None)
    # Insert returns the UID under which the query was cached.
    result = frontend_editing_db.add_to_search_query_cache('{"foo": "bar"}', 'rule foo{}')
    assert (result == RULE_UID)
    result = frontend_db.get_query_from_cache(RULE_UID)
    assert isinstance(result, CachedQuery)
    assert (result.query == '{"foo": "bar"}')
    assert (result.yara_rule == 'rule foo{}')
class TestScaffoldSkillFailsWhenSkillAlreadyExists():
    """Scaffolding a skill twice must fail with exit code 1 and a clear message.

    setup_class builds a throw-away agent in a temp dir, scaffolds the skill
    once successfully, then captures the result of scaffolding it again.
    NOTE(review): setup_class/teardown_class take ``cls`` without
    @classmethod — pytest supplies the class either way, but the stripped
    decorator is likely an extraction artifact.
    """
    def setup_class(cls):
        cls.runner = CliRunner()
        cls.agent_name = 'myagent'
        cls.resource_name = 'myresource'
        cls.cwd = os.getcwd()
        cls.t = tempfile.mkdtemp()
        # Copy the local packages registry into the temp workspace.
        dir_path = Path('packages')
        tmp_dir = (cls.t / dir_path)
        src_dir = (cls.cwd / Path(ROOT_DIR, dir_path))
        shutil.copytree(str(src_dir), str(tmp_dir))
        os.chdir(cls.t)
        result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'init', '--author', AUTHOR])
        assert (result.exit_code == 0)
        result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'create', '--local', cls.agent_name], standalone_mode=False)
        assert (result.exit_code == 0)
        os.chdir(cls.agent_name)
        # First scaffold succeeds; the second (under test) must not.
        result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'scaffold', 'skill', cls.resource_name], standalone_mode=False)
        assert (result.exit_code == 0)
        cls.result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'scaffold', 'skill', cls.resource_name], standalone_mode=False)
    def test_exit_code_equal_to_1(self):
        assert (self.result.exit_code == 1)
    def test_error_message_skill_already_existing(self):
        s = "A skill with name '{}' already exists. Aborting...".format(self.resource_name)
        assert (self.result.exception.message == s)
    def test_resource_directory_exists(self):
        # The first scaffold's directory must survive the failed second attempt.
        assert Path(self.t, self.agent_name, 'skills', self.resource_name).exists()
    def teardown_class(cls):
        os.chdir(cls.cwd)
        try:
            shutil.rmtree(cls.t)
        except (OSError, IOError):
            pass
def triplets_in_range(start, end):
    """Yield Pythagorean triplets ``[a, b, c]`` with a >= start and c <= end.

    Iterates primitive triplets for every multiple-of-4 limit up to *end*
    and scales each primitive up by integer multiples until it enters the
    requested window.
    """
    for limit in range(4, end + 1, 4):
        for base_a, base_b, base_c in primitive_triplets(limit):
            a, b, c = base_a, base_b, base_c
            # Scale the primitive up until the smallest leg reaches `start`.
            while a < start:
                a += base_a
                b += base_b
                c += base_c
            # Emit multiples while the hypotenuse stays within `end`.
            while c <= end:
                yield [a, b, c]
                a += base_a
                b += base_b
                c += base_c
def process_range(setting_info, value):
    """Translate *value* from the device's input range to the output range.

    Both ranges are (start, stop, step) triples; the value is first snapped
    onto the input range, then the element at the same index in the output
    range is returned.

    Raises ValueError when the two ranges have different lengths.
    """
    in_lo = setting_info['input_range'][0]
    in_hi = setting_info['input_range'][1]
    in_step = setting_info['input_range'][2]
    inputs = list(range(in_lo, in_hi + 1, in_step))
    outputs = list(custom_range(setting_info['output_range'][0], setting_info['output_range'][1] + 1, setting_info['output_range'][2]))
    if len(inputs) != len(outputs):
        raise ValueError('Input range and output range must have the same length')
    snapped = matches_value_in_range(in_lo, in_hi, in_step, int(value))
    return outputs[inputs.index(snapped)]
(name=CHANGES)
def validate_changes(changes):
    """Validate a neighbor-change dict; only MED, enabled and connect-mode
    may be changed. Returns the dict unchanged on success.

    NOTE(review): the bare ``(name=CHANGES)`` line above looks like a
    stripped registration decorator (e.g. ``@validate(name=CHANGES)``).
    """
    for (k, v) in changes.items():
        if (k not in (MULTI_EXIT_DISC, ENABLED, CONNECT_MODE)):
            raise ConfigValueError(desc=('Unknown field to change: %s' % k))
        # Delegate per-field validation.
        if (k == MULTI_EXIT_DISC):
            validate_med(v)
        elif (k == ENABLED):
            validate_enabled(v)
        elif (k == CONNECT_MODE):
            validate_connect_mode(v)
    return changes
class QGradientControl(QtGui.QWidget):
    """Widget that paints a gradient table as vertical one-pixel color strips.

    When the gradient table provides a scaling function, the lower half
    shows the raw gradient and the upper half the scaled gradient.
    """
    def __init__(self, parent=None, gradient_table=None, width=100, height=100):
        super(QGradientControl, self).__init__(parent=parent)
        self.resize(width, height)
        self.setAttribute(QtCore.Qt.WA_OpaquePaintEvent, True)
        self.width = width
        self.height = height
        self.gradient_table = gradient_table
        # The table must provide one color sample per horizontal pixel.
        assert (gradient_table.size == width)
        self.setMinimumSize(100, 50)
    def paintEvent(self, event):
        """Draw one vertical line per x pixel in the gradient color."""
        super(QGradientControl, self).paintEvent(event)
        painter = QtGui.QPainter(self)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        painter.setBrush(brush)
        painter.setBackgroundMode(QtCore.Qt.OpaqueMode)
        sz = self.size()
        (width, height) = (sz.width(), sz.height())
        xform = self.gradient_table.scaling_function
        start_y = 0
        end_y = height
        if xform:
            # Raw gradient occupies the lower half; scaled one painted after.
            start_y = (height / 2)
        color = QtGui.QColor()
        for x in range(width):
            (r, g, b, a) = self.gradient_table.get_pos_rgba_color_lerped((float(x) / (width - 1)))
            color.setRgb(int((255 * r)), int((255 * g)), int((255 * b)))
            painter.setPen(color)
            brush.setColor(color)
            painter.drawLine(x, start_y, x, end_y)
        if xform:
            end_y = start_y
            start_y = 0
            for x in range(width):
                f = (float(x) / (width - 1))
                (r, g, b, a) = self.gradient_table.get_pos_rgba_color_lerped(xform(f))
                # BUG FIX: QColor has no ``set`` method — use setRgb as in the
                # first loop above (``color.set(...)`` raised AttributeError).
                color.setRgb(int((255 * r)), int((255 * g)), int((255 * b)))
                # BUG FIX: drawLine uses the pen, so the pen must be updated
                # too; otherwise every line used the previous loop's last color.
                painter.setPen(color)
                brush.setColor(color)
                painter.drawLine(x, start_y, x, end_y)
class OptionSeriesSolidgaugeSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
    """Generated wrapper for series.solidgauge.sonification
    defaultInstrumentOptions.mapping.highpass.resonance.

    NOTE(review): getter/setter pairs share a name; likely stripped
    @property/@x.setter decorators from the generator.
    """
    def mapFunction(self):
        # Custom mapping function; None means unset (library default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Point property the resonance maps to; None means unset.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Upper bound of the mapped range; None means unset.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Lower bound of the mapped range; None means unset.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Scope the min/max apply within; None means unset.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
('ecs_deploy.cli.get_client')
def test_run_task_without_diff(get_client, runner):
    """--no-diff must suppress the task-definition/environment diff output.

    NOTE(review): the bare ``('ecs_deploy.cli.get_client')`` line above looks
    like a stripped ``@mock.patch(...)`` decorator.
    """
    get_client.return_value = EcsTestClient('acces_key', 'secret_key')
    result = runner.invoke(cli.run, (CLUSTER_NAME, 'test-task', '2', '-e', 'application', 'foo', 'bar', '--no-diff'))
    assert (not result.exception)
    assert (result.exit_code == 0)
    # Diff lines are suppressed…
    assert (u'Using task definition: test-task' not in result.output)
    assert (u'Changed environment' not in result.output)
    # …but the start confirmation and task ARNs are still printed.
    assert (u'Successfully started 2 instances of task: test-task:2' in result.output)
    assert (u'- arn:foo:bar' in result.output)
    assert (u'- arn:lorem:ipsum' in result.output)
_toolkit([ToolkitName.qt])
class TestInteractions(unittest.TestCase):
    """Qt-backend tests for the low-level _interaction_helpers.

    Covers mouse clicks, key sequences/clicks (including disabled-widget and
    unsupported-key behavior), model-index validation and slider errors.
    NOTE(review): the bare ``_toolkit([ToolkitName.qt])`` line above looks
    like a stripped class decorator selecting the Qt toolkit.
    """
    def test_mouse_click(self):
        button = QtGui.QPushButton()
        click_slot = mock.Mock()
        button.clicked.connect((lambda checked: click_slot(checked)))
        _interaction_helpers.mouse_click_qwidget(button, 0)
        self.assertEqual(click_slot.call_count, 1)
    def test_mouse_click_disabled(self):
        # Clicking a disabled widget must be a no-op, not an error.
        button = QtGui.QPushButton()
        button.setEnabled(False)
        click_slot = mock.Mock()
        button.clicked.connect((lambda checked: click_slot(checked)))
        _interaction_helpers.mouse_click_qwidget(button, 0)
        self.assertEqual(click_slot.call_count, 0)
    def test_mouse_click_combobox_warns(self):
        combo = None
        with self.assertWarns(UserWarning):
            _interaction_helpers.mouse_click_combobox(combo, 0, 0)
    def test_key_sequence(self):
        # Typing must work on both single- and multi-line text widgets.
        textboxes = [QtGui.QLineEdit(), QtGui.QTextEdit()]
        for (i, textbox) in enumerate(textboxes):
            with self.subTest(widget=textbox.__class__.__name__):
                change_slot = mock.Mock()
                textbox.textChanged.connect((lambda *args: change_slot(*args)))
                _interaction_helpers.key_sequence_qwidget(textbox, command.KeySequence('abc'), 0)
                if (i == 0):
                    self.assertEqual(textbox.text(), 'abc')
                else:
                    self.assertEqual(textbox.toPlainText(), 'abc')
                # One change notification per character typed.
                self.assertEqual(change_slot.call_count, 3)
        # A QLabel silently ignores key sequences.
        textbox = QtGui.QLabel()
        _interaction_helpers.key_sequence_qwidget(textbox, command.KeySequence('abc'), 0)
        self.assertEqual(textbox.text(), '')
    def test_key_sequence_textbox_with_unicode(self):
        # Every printable ASCII character must be typeable.
        for code in range(32, 127):
            with self.subTest(code=code, word=chr(code)):
                textbox = QtGui.QLineEdit()
                change_slot = mock.Mock()
                textbox.textChanged.connect((lambda text: change_slot(text)))
                _interaction_helpers.key_sequence_textbox(textbox, command.KeySequence((chr(code) * 3)), delay=0)
                self.assertEqual(textbox.text(), (chr(code) * 3))
                self.assertEqual(change_slot.call_count, 3)
    def test_key_sequence_unsupported_key(self):
        textbox = QtGui.QLineEdit()
        with self.assertRaises(ValueError) as exception_context:
            _interaction_helpers.key_sequence_textbox(textbox, command.KeySequence(chr(31)), delay=0)
        self.assertIn('is currently not supported.', str(exception_context.exception))
    def test_key_sequence_backspace_character(self):
        textbox = QtGui.QLineEdit()
        with self.assertRaises(ValueError) as exception_context:
            _interaction_helpers.key_sequence_textbox(textbox, command.KeySequence('\x08'), delay=0)
        self.assertIn('is currently not supported.', str(exception_context.exception))
    def test_key_sequence_insert_point_qlineedit(self):
        # Typing appends at the end of existing content.
        textbox = QtGui.QLineEdit()
        textbox.setText('123')
        _interaction_helpers.key_sequence_textbox(textbox, command.KeySequence('abc'), delay=0)
        self.assertEqual(textbox.text(), '123abc')
    def test_key_sequence_insert_point_qtextedit(self):
        textbox = QtGui.QTextEdit()
        textbox.setText('123')
        _interaction_helpers.key_sequence_textbox(textbox, command.KeySequence('abc'), delay=0)
        self.assertEqual(textbox.toPlainText(), '123abc')
    def test_key_sequence_disabled(self):
        textbox = QtGui.QLineEdit()
        textbox.setEnabled(False)
        with self.assertRaises(Disabled):
            _interaction_helpers.key_sequence_qwidget(textbox, command.KeySequence('abc'), 0)
    def test_key_click(self):
        # editingFinished fires only on Enter, not while typing.
        textbox = QtGui.QLineEdit()
        change_slot = mock.Mock()
        textbox.editingFinished.connect((lambda : change_slot()))
        _interaction_helpers.key_sequence_qwidget(textbox, command.KeySequence('abc'), 0)
        self.assertEqual(change_slot.call_count, 0)
        _interaction_helpers.key_click_qwidget(textbox, command.KeyClick('Enter'), 0)
        self.assertEqual(change_slot.call_count, 1)
        # In a QTextEdit, Enter inserts a newline instead.
        textbox = QtGui.QTextEdit()
        change_slot = mock.Mock()
        textbox.textChanged.connect((lambda : change_slot()))
        _interaction_helpers.key_click_qwidget(textbox, command.KeyClick('Enter'), 0)
        change_slot.assert_called()
        self.assertEqual(textbox.toPlainText(), '\n')
        # A QLabel ignores key clicks.
        textbox = QtGui.QLabel()
        _interaction_helpers.key_click_qwidget(textbox, command.KeyClick('A'), 0)
        self.assertEqual(textbox.text(), '')
    def test_key_click_disabled(self):
        textbox = QtGui.QLineEdit()
        textbox.setEnabled(False)
        change_slot = mock.Mock()
        textbox.editingFinished.connect((lambda text: change_slot(text)))
        with self.assertRaises(Disabled):
            _interaction_helpers.key_click_qwidget(textbox, command.KeyClick('Enter'), 0)
        self.assertEqual(change_slot.call_count, 0)
    def test_check_q_model_index_valid(self):
        # Index inside the model passes; out-of-range index raises LookupError.
        self.widget = QtGui.QListWidget()
        self.items = ['a', 'b', 'c']
        self.widget.addItems(self.items)
        self.good_q_index = self.widget.model().index(1, 0)
        self.bad_q_index = self.widget.model().index(10, 0)
        self.model = self.widget.model()
        _interaction_helpers.check_q_model_index_valid(self.good_q_index)
        with self.assertRaises(LookupError):
            _interaction_helpers.check_q_model_index_valid(self.bad_q_index)
    def test_key_click_q_slider_helpful_err(self):
        # The error must list the keys a slider does support.
        slider = QtGui.QSlider()
        with self.assertRaises(ValueError) as exc:
            _interaction_helpers.key_click_qslider(slider, command.KeyClick('Enter'), 0)
        self.assertIn("['Down', 'Left', 'Page Down', 'Page Up', 'Right', 'Up']", str(exc.exception))
def edit_rare_gacha_seed(save_stats: dict[(str, Any)]) -> dict[(str, Any)]:
    """Interactively edit the save's rare gacha seed and return the save.

    Wraps the current value in an unsigned IntItem, runs its edit prompt,
    and writes the edited value back into ``save_stats``.
    """
    rare_gacha_seed = item.IntItem(name='Rare Gacha Seed', value=item.Int(save_stats['rare_gacha_seed']['Value'], signed=False), max_value=None)
    rare_gacha_seed.edit()
    save_stats['rare_gacha_seed']['Value'] = rare_gacha_seed.get_value()
    return save_stats
class OptionSeriesBulletSonificationTracksMappingLowpassFrequency(Options):
    """Generated wrapper for series.bullet.sonification.tracks.mapping.lowpass.frequency.

    NOTE(review): getter/setter pairs share a name; likely stripped
    @property/@x.setter decorators from the generator.
    """
    def mapFunction(self):
        # Custom mapping function; None means unset (library default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Point property the frequency maps to; None means unset.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Upper bound of the mapped range; None means unset.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Lower bound of the mapped range; None means unset.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Scope the min/max apply within; None means unset.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def upgrade():
    """Recreate the Postgres ``connectiontype`` enum with the current values.

    Uses the rename → recreate → cast-column → drop-old dance, since enum
    values cannot simply be edited in place.
    """
    op.execute('alter type connectiontype rename to connectiontype_old')
    # BUG FIX: the original statement contained a stray "' '" between
    # 'mysql' and 'snowflake' (a line-wrap artifact), which is invalid
    # enum-literal syntax and made the migration fail.
    op.execute("create type connectiontype as enum('postgres', 'mongodb', 'mysql', 'snowflake', 'redshift', 'mssql', 'mariadb')")
    op.execute('alter table connectionconfig alter column connection_type type connectiontype using connection_type::text::connectiontype')
    op.execute('drop type connectiontype_old')
class UnhidePost(MethodView):
    """Moderator-only view that makes a hidden post visible again."""
    decorators = [login_required]
    def post(self, post_id):
        """Unhide *post_id* and redirect back to its topic.

        Guards: requires the 'makehidden' permission and that the post is
        actually hidden.
        """
        post = Post.query.filter((Post.id == post_id)).first_or_404()
        if (not Permission(Has('makehidden'), IsAtleastModeratorInForum(forum=post.topic.forum))):
            flash(_('You do not have permission to unhide this post'), 'danger')
            return redirect(post.topic.url)
        if (not post.hidden):
            flash(_('Post is already unhidden'), 'warning')
            # BUG FIX: redirect() was called without ``return``, so execution
            # fell through and "unhid" an already-visible post anyway.
            return redirect(post.topic.url)
        post.unhide()
        post.save()
        flash(_('Post unhidden'), 'success')
        return redirect(post.topic.url)
def test_balance_serialization():
    """A BALANCE LedgerApiMessage must round-trip through envelope encoding."""
    msg = LedgerApiMessage(message_id=2, target=1, performative=LedgerApiMessage.Performative.BALANCE, ledger_id='some_ledger_id', balance=125)
    msg.to = 'receiver'
    envelope = Envelope(to=msg.to, sender='sender', message=msg)
    envelope_bytes = envelope.encode()
    actual_envelope = Envelope.decode(envelope_bytes)
    expected_envelope = envelope
    assert (expected_envelope.to == actual_envelope.to)
    assert (expected_envelope.sender == actual_envelope.sender)
    assert (expected_envelope.protocol_specification_id == actual_envelope.protocol_specification_id)
    # The decoded envelope still carries serialized bytes, so it must NOT
    # equal the original message object until explicitly deserialized.
    assert (expected_envelope.message != actual_envelope.message)
    actual_msg = LedgerApiMessage.serializer.decode(actual_envelope.message)
    # to/sender are transport metadata, restored from the envelope.
    actual_msg.to = actual_envelope.to
    actual_msg.sender = actual_envelope.sender
    expected_msg = msg
    assert (expected_msg == actual_msg)
def test_pin_assets_to_dummy_backend(dummy_ipfs_backend, ethpm_spec_dir, owned_manifest_path):
    """Pinning a single file and a directory returns the expected IPFS entries."""
    backend = get_ipfs_backend()
    # Single-file pin: one entry with the known name/hash/size.
    hashes = backend.pin_assets(owned_manifest_path)
    asset_data = hashes[0]
    assert (asset_data['Name'] == 'v3.json')
    assert (asset_data['Hash'] == 'QmcxvhkJJVpbxEAa6cgW3B6XwPJb79w9GpNUv2P2THUzZR')
    assert (asset_data['Size'] == '478')
    # Directory pin: both contract files are pinned.
    dir_data = backend.pin_assets((((ethpm_spec_dir / 'examples') / 'standard-token') / 'contracts'))
    dir_names = [result['Name'] for result in dir_data]
    dir_hashes = [result['Hash'] for result in dir_data]
    dir_sizes = [result['Size'] for result in dir_data]
    assert (len(dir_data) == 2)
    assert ('StandardToken.sol' in dir_names)
    assert ('QmUofKBtNJVaqoSAtnHfrarJyyLm1oMUTAK4yCtnmYMJVy' in dir_hashes)
    assert ('2949' in dir_sizes)
class OptionPlotoptionsDumbbellDragdropGuideboxDefault(Options):
    """Generated wrapper for plotOptions.dumbbell.dragDrop.guideBox.default
    (style of the guide box shown while dragging points).

    NOTE(review): getter/setter pairs share a name; likely stripped
    @property/@x.setter decorators from the generator.
    """
    def className(self):
        # CSS class applied to the guide box.
        return self._config_get('highcharts-drag-box-default')
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        # Fill color of the guide box.
        return self._config_get('rgba(0, 0, 0, 0.1)')
    def color(self, text: str):
        self._config(text, js_type=False)
    def cursor(self):
        # Mouse cursor shown while dragging.
        return self._config_get('move')
    def cursor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        # Border color of the guide box.
        return self._config_get('#888')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        # Border width in pixels.
        return self._config_get(1)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        # Stacking order of the guide box.
        return self._config_get(900)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def setup_logging(logger_name: str, logging_level=None, logger_filename: str = None):
    """Create and configure a named logger, optionally with colored output.

    Falls back to the project default level when *logging_level* is falsy,
    and silently skips colorization when ``coloredlogs`` is not installed.

    IMPROVEMENT: returns the configured logger (previously it was built and
    discarded, forcing callers to re-fetch it via logging.getLogger).
    Backward compatible — existing callers ignored the None return.
    """
    if (not logging_level):
        logging_level = _get_logging_level()
    logger = _build_logger(logger_name, logging_level, logger_filename)
    try:
        # coloredlogs is an optional dependency.
        import coloredlogs
        color_level = (logging_level if logging_level else 'INFO')
        coloredlogs.install(level=color_level, logger=logger)
    except ImportError:
        pass
    return logger
class SnippetViewSet(viewsets.ModelViewSet):
    """CRUD API for snippets; read-only for anonymous users, write for owners.

    NOTE(review): the bare ``(detail=True, renderer_classes=[...])`` line
    below looks like a stripped DRF ``@action`` decorator on ``highlight``.
    """
    queryset = Snippet.objects.all()
    serializer_class = SnippetSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly)
    (detail=True, renderer_classes=[renderers.StaticHTMLRenderer])
    def highlight(self, request, *args, **kwargs):
        # Serve the pre-rendered, highlighted HTML of a single snippet.
        snippet = self.get_object()
        return Response(snippet.highlighted)
    def perform_create(self, serializer):
        # Attach the authenticated user as the snippet's owner.
        serializer.save(owner=self.request.user)
def _test_dummy_env_for_split_config(split_config: Dict[(str, Dict)]) -> None:
    """Assert SplitActionsWrapper splits/reverses actions per *split_config*.

    Checks that every configured sub-action appears in the wrapped action
    space (and the original keys do not), and that action/reverse_action
    round-trip a sampled base-env action.
    """
    base_env = build_dummy_maze_env()
    env = SplitActionsWrapper.wrap(base_env, split_config=split_config)
    _ = env.action_space
    # Wrapped space exposes sub-keys, not the original composite keys.
    for (key, sub_actions) in split_config.items():
        for sub_key in sub_actions.keys():
            assert (sub_key in env.action_space.spaces.keys())
        assert (key not in env.action_space.spaces.keys())
    assert (env.action_spaces_dict[0] == env.action_space)
    # reverse_action maps a base action into split form…
    base_env_action = base_env.action_space.sample()
    split_action = env.reverse_action(base_env_action)
    for (key, sub_actions) in split_config.items():
        for sub_key in sub_actions.keys():
            assert (sub_key in split_action)
        assert (key not in split_action)
    # …and action() maps it back to the original values.
    for key in base_env.action_space.spaces.keys():
        assert np.all((base_env_action[key] == env.action(split_action)[key]))
def list_workflow(args):
    """Print all workflows in the requested namespace as a sorted table.

    Pages through ``ops.list_workflows`` ten entries at a time until the
    backend returns None, then renders the rows via AIFlowConsole.
    """
    namespace = 'default' if args.namespace is None else args.namespace
    workflows: List[WorkflowMeta] = []
    offset = 0
    while True:
        page = ops.list_workflows(namespace=namespace, limit=10, offset=offset)
        if page is None:
            break
        offset += len(page)
        workflows.extend(page)

    def _as_row(x):
        # Flatten a WorkflowMeta into the printable column dict.
        return {'id': x.id, 'workflow_name': x.name, 'namespace': x.namespace, 'create_time': x.create_time, 'update_time': x.update_time, 'is_enabled': x.is_enabled, 'event_offset': x.event_offset}

    AIFlowConsole().print_as(data=sorted(workflows, key=(lambda w: w.id)), output=args.output, mapper=_as_row)
def dump_manifest(contents: t.Dict, manifest_path: t.Union[(Path, str)]):
    """Serialize *contents* to *manifest_path*, honouring editorconfig style.

    Indentation (spaces/tabs/size) and final-newline policy come from
    editorconfig; when insert_final_newline is unspecified, the existing
    file's trailing-newline style is preserved. YAML files go through the
    module-level ``_yaml`` writer, everything else through json.dump.
    """
    manifest_path = Path(manifest_path)
    assert manifest_path.is_absolute()
    conf = editorconfig.get_properties(manifest_path)
    indent: t.Union[(str, int)]
    if (conf.get('indent_style') == 'space'):
        indent = int(conf.get('indent_size', 4))
    elif (conf.get('indent_style') == 'tab'):
        indent = '\t'
    else:
        indent = 4
    # NOTE: max_line_length only affects the YAML writer's wrap width;
    # json.dump has no equivalent knob.
    if (max_line_length := conf.get('max_line_length')):
        try:
            _yaml.width = int(max_line_length)
        except ValueError:
            log.warning('Ignoring invalid max_line_length %r', max_line_length)
    newline: t.Optional[bool]
    if ('insert_final_newline' in conf):
        newline = {'true': True, 'false': False}.get(conf['insert_final_newline'])
    else:
        # Fall back to whatever the file on disk currently does.
        with manifest_path.open('r') as fp:
            newline = _check_newline(fp)
    with manifest_path.open('w', encoding='utf-8') as fp:
        if (manifest_path.suffix in ('.yaml', '.yml')):
            _yaml.dump(contents, fp)
        else:
            json.dump(obj=contents, fp=fp, indent=indent)
        if newline:
            fp.write('\n')
def make_heat_mnts():
    """Build four TemperatureMonitor fixtures covering the supported variants:
    structured, unstructured, unstructured+conformal, and an empty conformal
    case (zero-extent along the second axis)."""
    temp_mnt1 = TemperatureMonitor(size=(1, 2, 3), name='test')
    temp_mnt2 = TemperatureMonitor(size=(1, 2, 3), name='tet', unstructured=True)
    temp_mnt3 = TemperatureMonitor(size=(1, 0, 3), name='tri', unstructured=True, conformal=True)
    temp_mnt4 = TemperatureMonitor(size=(1, 0, 3), name='empty', unstructured=True, conformal=False)
    return (temp_mnt1, temp_mnt2, temp_mnt3, temp_mnt4)
def test_receipts_request_with_fully_invalid_response():
    """Receipts built from the wrong headers must fail validation."""
    headers_bundle = mk_headers(1, 3, 2, 5, 4)
    (headers, _, _) = zip(*headers_bundle)
    # Receipts/trie data from a completely different header set.
    wrong_headers = mk_headers(4, 3, 8)
    (_, wrong_receipts, wrong_trie_roots_and_data) = zip(*wrong_headers)
    receipts_bundle = tuple(zip(wrong_receipts, wrong_trie_roots_and_data))
    validator = ReceiptsValidator(headers)
    with pytest.raises(ValidationError):
        validator.validate_result(receipts_bundle)
def test_create_privacy_request_sets_due_date(db: Session, policy: Policy) -> None:
    """due_date = requested_at + policy.execution_timeframe days; requested_at
    may be omitted, a datetime, or an ISO string."""
    # No requested_at: a due date is still derived.
    pr = PrivacyRequest.create(db=db, data={'policy_id': policy.id, 'status': 'pending'})
    assert (pr.due_date is not None)
    pr.delete(db)
    # Explicit aware datetime.
    requested_at = datetime.now(timezone.utc)
    due_date = (timedelta(days=policy.execution_timeframe) + requested_at)
    pr = PrivacyRequest.create(db=db, data={'requested_at': requested_at, 'policy_id': policy.id, 'status': 'pending'})
    assert (pr.due_date == due_date)
    pr.delete(db)
    # String timestamp in the API date format.
    requested_at_str = '2021-08-30T16:09:37.359Z'
    requested_at = datetime.strptime(requested_at_str, API_DATE_FORMAT).replace(tzinfo=timezone.utc)
    due_date = (timedelta(days=policy.execution_timeframe) + requested_at)
    pr = PrivacyRequest.create(db=db, data={'requested_at': requested_at_str, 'policy_id': policy.id, 'status': 'pending'})
    assert (pr.due_date == due_date)
    pr.delete(db)
class Tricky(ui.Widget):
    """Demo widget: a reset button above wrong/right synced-slider examples.

    NOTE(review): the bare ``('reset.pointer_click')`` line below looks like
    a stripped ``@event.reaction(...)`` decorator wiring the handler.
    """
    def init(self):
        with ui.VBox():
            self.reset = ui.Button(text='Reset event system')
            with ui.HFix(flex=1):
                SyncedSlidersWrong(flex=1)
                SyncedSlidersRight(flex=1)
            ui.Widget(flex=1)
    ('reset.pointer_click')
    def _reset(self):
        # `window` is the browser global (PScript); defer the reset a tick.
        global window
        window.setTimeout(event.loop.reset, 0)
class InterfaceContainer():
    """Namespace of interface constructors loaded from bundled ABI files.

    Each ``data/interfaces/*.json`` ABI becomes an attribute named after
    the file's stem, holding an InterfaceConstructor for that ABI.
    """
    def __init__(self, project: Any) -> None:
        self._project = project
        for abi_path in BROWNIE_FOLDER.glob('data/interfaces/*.json'):
            self._add(abi_path.stem, json.loads(abi_path.read_text()))
    def _add(self, name: str, abi: List) -> None:
        # Expose the constructor as an attribute, e.g. container.ERC20.
        setattr(self, name, InterfaceConstructor(name, abi))
class Datetime(TraitType):
    """Trait whose value is a ``datetime.datetime`` (optionally None)."""
    default_value_type = DefaultValue.constant

    def __init__(self, default_value=None, *, allow_none=False, **metadata):
        super().__init__(default_value, **metadata)
        self.allow_none = allow_none

    def validate(self, object, name, value):
        """Accept datetime instances (and None when allow_none); else error."""
        if isinstance(value, datetime.datetime):
            return value
        if value is None and self.allow_none:
            return value
        self.error(object, name, value)

    def info(self):
        # Human-readable description used in trait error messages.
        suffix = ' or None' if self.allow_none else ''
        return f'a datetime{suffix}'

    def create_editor(self):
        return datetime_editor()
def get_tags_and_nids_from_search(notes):
    """Split search results into tags and ids by note type.

    Returns ``(tags, nids_anki, nids_siac)`` where *tags* is the ordered,
    de-duplicated list of every tag seen, *nids_siac* holds ids of notes
    with ``note_type == 'user'`` and *nids_anki* all other ids.
    """
    tags = []
    anki_ids = []
    siac_ids = []
    for note in notes:
        if note.note_type == 'user':
            siac_ids.append(note.id)
        else:
            anki_ids.append(note.id)
        # Space-separated tag string; skip empties, keep first-seen order.
        for tag in note.tags.split(' '):
            if tag and tag not in tags:
                tags.append(tag)
    return (tags, anki_ids, siac_ids)
class Flow_Add_5_1(base_tests.SimpleProtocol):
    """OpenFlow conformance test: add a single non-canonical flow and verify
    the switch canonicalizes it (no errors, flow table matches)."""
    def runTest(self):
        logging.info('Flow_Add_5_1 TEST BEGIN')
        num_flows = test_param_get('num_flows', 100)
        logging.info('Deleting all flows from switch')
        delete_all_flows(self.controller)
        sw = Switch()
        self.assertTrue(sw.connect(self.controller), 'Failed to connect to switch')
        fi = Flow_Info()
        fi.rand(10)
        # Keep generating random flow configs until one is NOT already in
        # canonical form (that is the case this test exercises).
        while True:
            fc = Flow_Cfg()
            fc.rand(fi, required_wildcards(self), sw.tbl_stats.entries[0].wildcards, sw.sw_features.actions, sw.valid_ports, sw.valid_queues)
            fcc = fc.canonical()
            if (fcc.match != fc.match):
                break
        # Expected table holds the canonicalized flow.
        ft = Flow_Tbl()
        ft.insert(fcc)
        logging.info('Sending flow add to switch:')
        logging.info(str(fc))
        logging.info('should be canonicalized as:')
        logging.info(str(fcc))
        fc.send_rem = False
        self.assertTrue(sw.flow_add(fc), 'Failed to add flow')
        self.assertTrue(sw.barrier(), 'Barrier failed')
        result = True
        sw.settle()
        # No error messages expected; switch table must equal the expected one.
        if (not sw.errors_verify(0)):
            result = False
        sw.flow_tbl = ft
        if (not sw.flow_tbl_verify()):
            result = False
        self.assertTrue(result, 'Flow_Add_5_1 TEST FAILED')
        logging.info('Flow_Add_5_1 TEST PASSED')
def test_validator_numeric_numpy():
    """NumPy scalars must validate against int/float Avro-style schemas:
    ints fit int and float schemas, floats fit float schemas only.

    NOTE(review): zip() truncates to the shorter list, so only the first
    two entries of np_ints/np_floats are actually paired with schemas —
    possibly itertools.product was intended; confirm against upstream.
    NOTE(review): np.int_/np.float_ aliases were removed in NumPy 2.0.
    """
    np_ints = [np.int_, np.intc, np.intp, np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16, np.uint32, np.uint64]
    np_floats = [np.float_, np.float16, np.float32, np.float64]
    schema_ints = ['int', 'long']
    schema_floats = ['float', 'double']
    for (nptype, schema) in zip(np_ints, schema_ints):
        validate(nptype(1), schema)
    for (nptype, schema) in zip(np_ints, schema_floats):
        validate(nptype(1), schema)
    for (nptype, schema) in zip(np_floats, schema_floats):
        validate(nptype(1), schema)
    # Floats must NOT validate as ints.
    for (nptype, schema) in zip(np_floats, schema_ints):
        with pytest.raises(ValidationError):
            validate(nptype(1), schema)
class FlicketConfig(Base):
    """Singleton table holding Flicket's runtime configuration (mail server,
    upload folders, pagination, etc.).

    NOTE(review): extension_allowed/valid_extensions take no ``self``/``cls``
    — they look like @staticmethod definitions whose decorators were stripped.
    """
    __tablename__ = 'flicket_config'
    def __init__(self, **kwargs):
        # Accept any column as a keyword argument.
        for (key, value) in kwargs.items():
            setattr(self, key, value)
    id = db.Column(db.Integer, primary_key=True)
    # Outgoing mail configuration.
    mail_server = db.Column(db.String(128))
    mail_port = db.Column(db.Integer)
    mail_use_tls = db.Column(db.BOOLEAN)
    mail_use_ssl = db.Column(db.BOOLEAN)
    mail_debug = db.Column(db.BOOLEAN)
    mail_username = db.Column(db.String(128))
    mail_password = db.Column(db.String(256))
    mail_default_sender = db.Column(db.String(128))
    mail_max_emails = db.Column(db.Integer)
    mail_suppress_send = db.Column(db.BOOLEAN)
    mail_ascii_attachments = db.Column(db.BOOLEAN)
    # Application/UI settings.
    posts_per_page = db.Column(db.Integer)
    allowed_extensions = db.Column(db.String(256))
    ticket_upload_folder = db.Column(db.String(256))
    avatar_upload_folder = db.Column(db.String(256))
    application_title = db.Column(db.String(32))
    base_url = db.Column(db.String(128))
    auth_domain = db.Column(db.String(64))
    use_auth_domain = db.Column(db.BOOLEAN, default=False)
    csv_dump_limit = db.Column(db.Integer, default=1000)
    change_category = db.Column(db.BOOLEAN, default=False)
    change_category_only_admin_or_super_user = db.Column(db.BOOLEAN, default=False)
    def extension_allowed(filename):
        # True when the filename's extension is in the configured whitelist.
        # NOTE(review): assumes the filename contains a dot — rsplit(...)[1]
        # raises IndexError otherwise; confirm callers guarantee this.
        extension = filename.rsplit('.', 1)[1]
        if (extension.lower() in FlicketConfig.valid_extensions()):
            return True
        return False
    def valid_extensions():
        # Comma-separated whitelist stored on the (single) config row.
        config = FlicketConfig.query.one()
        extensions = config.allowed_extensions.split(',')
        extensions = [i.strip() for i in extensions]
        return extensions
    def __repr__(self):
        return '<FlicketConfig model class>'
def lazy_import():
    """Import the referenced model classes on first use and publish them in
    this module's globals, breaking circular-import cycles at import time."""
    from fastly.model.logging_common_response import LoggingCommonResponse
    from fastly.model.logging_datadog_additional import LoggingDatadogAdditional
    from fastly.model.service_id_and_version_string import ServiceIdAndVersionString
    from fastly.model.timestamps import Timestamps
    # globals() here is the defining module's namespace, not the caller's.
    globals().update({
        'LoggingCommonResponse': LoggingCommonResponse,
        'LoggingDatadogAdditional': LoggingDatadogAdditional,
        'ServiceIdAndVersionString': ServiceIdAndVersionString,
        'Timestamps': Timestamps,
    })
()
def erc1155_contract(ledger_api, ganache, ganache_addr, ganache_port):
    """Deploy the ERC-1155 contract to the local ganache chain and yield
    ``(contract, contract_address)``.

    NOTE(review): the bare ``()`` line above looks like a stripped
    ``@pytest.fixture()`` decorator.
    """
    contract = get_register_erc1155()
    crypto = make_crypto(EthereumCrypto.identifier, private_key_path=ETHEREUM_PRIVATE_KEY_PATH)
    tx = contract.get_deploy_transaction(ledger_api=ledger_api, deployer_address=crypto.address, gas=5000000)
    # Replace the fixed gas with an on-chain estimate before signing.
    gas = ledger_api.api.eth.estimateGas(transaction=tx)
    tx['gas'] = gas
    tx_signed = crypto.sign_transaction(tx)
    tx_receipt = ledger_api.send_signed_transaction(tx_signed)
    receipt = ledger_api.get_transaction_receipt(tx_receipt)
    contract_address = cast(Dict, receipt)['contractAddress']
    (yield (contract, contract_address))
def main(span_key: str, model_path: Path, test_path: Path):
    """Evaluate a spaCy spancat suggester's candidate coverage on a test set.

    For every test doc, regenerate candidates and count how many gold spans
    (under *span_key*) are exactly reproduced, then print coverage and the
    candidate/gold ratio as a table.
    """
    nlp = spacy.load(model_path)
    spancat = nlp.get_pipe('spancat')
    test_doc_bin = DocBin().from_disk(test_path)
    test_docs = list(test_doc_bin.get_docs(nlp.vocab))
    total_candidates = 0
    total_real_candidates = 0
    matching_candidates = 0
    msg.info('Starting evaluation')
    for test_doc in tqdm(test_docs, total=len(test_docs), desc=f'Evaluation test dataset'):
        # Re-run the pipeline on raw text so candidates come from the model.
        text = test_doc.text
        doc = nlp(text)
        spancat.set_candidates([doc])
        total_candidates += len(doc.spans['candidates'])
        total_real_candidates += len(test_doc.spans[span_key])
        # A candidate "matches" only on exact start/end boundaries.
        for test_span in test_doc.spans[span_key]:
            for span in doc.spans['candidates']:
                if ((span.start == test_span.start) and (span.end == test_span.end)):
                    matching_candidates += 1
    msg.good('Evaluation successful')
    coverage = round(((matching_candidates / total_real_candidates) * 100), 2)
    candidates_relation = round(((total_candidates / total_real_candidates) * 100), 2)
    msg.divider('Suggester KPI')
    suggester_header = ['KPI', 'Value']
    suggester_data = [('Suggester candidates', total_candidates), ('Real candidates', total_real_candidates), ('% Ratio', f'{candidates_relation}%'), ('% Coverage', f'{coverage}%')]
    msg.table(suggester_data, header=suggester_header, divider=True)
class ESEvents():
    """Event interface for evolution-strategies training statistics.

    NOTE(review): the bare ``_epoch_stats(...)`` / ``_stats_grouping(...)``
    lines look like stripped decorators from the original event-interface
    definition (epoch aggregation / grouping specs); as written they are
    plain call statements. Confirm against upstream before relying on them.
    """
    _epoch_stats(np.nanmean)
    _stats_grouping('policy_id')
    def policy_grad_norm(self, policy_id: int, value: float):
        """Gradient norm recorded per policy."""
        _epoch_stats(np.nanmean)
        _stats_grouping('policy_id')
    def policy_norm(self, policy_id: int, value: float):
        """Parameter norm recorded per policy."""
        _epoch_stats(np.sum, output_name='real_time')
        _epoch_stats(np.sum, output_name='total_real_time', cumulative=True)
    def real_time(self, value: float):
        """Wall-clock time spent in the epoch."""
        _epoch_stats(np.mean)
    def update_ratio(self, value: float):
        """Ratio of update magnitude to parameter norm."""
class StandardResponseCode():
    """Numeric constants for HTTP status codes (HTTP_*) and WebSocket close
    codes (WS_*), named after their RFC values."""
    # 1xx — informational
    HTTP_100 = 100
    HTTP_101 = 101
    HTTP_102 = 102
    HTTP_103 = 103
    # 2xx — success
    HTTP_200 = 200
    HTTP_201 = 201
    HTTP_202 = 202
    HTTP_203 = 203
    HTTP_204 = 204
    HTTP_205 = 205
    HTTP_206 = 206
    HTTP_207 = 207
    HTTP_208 = 208
    HTTP_226 = 226
    # 3xx — redirection
    HTTP_300 = 300
    HTTP_301 = 301
    HTTP_302 = 302
    HTTP_303 = 303
    HTTP_304 = 304
    HTTP_305 = 305
    HTTP_307 = 307
    HTTP_308 = 308
    # 4xx — client errors
    HTTP_400 = 400
    HTTP_401 = 401
    HTTP_402 = 402
    HTTP_403 = 403
    HTTP_404 = 404
    HTTP_405 = 405
    HTTP_406 = 406
    HTTP_407 = 407
    HTTP_408 = 408
    HTTP_409 = 409
    HTTP_410 = 410
    HTTP_411 = 411
    HTTP_412 = 412
    HTTP_413 = 413
    HTTP_414 = 414
    HTTP_415 = 415
    HTTP_416 = 416
    HTTP_417 = 417
    HTTP_418 = 418
    HTTP_421 = 421
    HTTP_422 = 422
    HTTP_423 = 423
    HTTP_424 = 424
    HTTP_425 = 425
    HTTP_426 = 426
    HTTP_427 = 427
    HTTP_428 = 428
    HTTP_429 = 429
    HTTP_430 = 430
    HTTP_431 = 431
    HTTP_451 = 451
    # 5xx — server errors
    HTTP_500 = 500
    HTTP_501 = 501
    HTTP_502 = 502
    HTTP_503 = 503
    HTTP_504 = 504
    HTTP_505 = 505
    HTTP_506 = 506
    HTTP_507 = 507
    HTTP_508 = 508
    HTTP_509 = 509
    HTTP_510 = 510
    HTTP_511 = 511
    # WebSocket close codes (RFC 6455 §7.4).
    WS_1000 = 1000
    WS_1001 = 1001
    WS_1002 = 1002
    WS_1003 = 1003
    WS_1005 = 1005
    WS_1006 = 1006
    WS_1007 = 1007
    WS_1008 = 1008
    WS_1009 = 1009
    WS_1010 = 1010
    WS_1011 = 1011
    WS_1012 = 1012
    WS_1013 = 1013
    WS_1014 = 1014
    WS_1015 = 1015
    WS_3000 = 3000
    WS_3003 = 3003
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.