code stringlengths 281 23.7M |
|---|
# NOTE(review): the line below lost its decorator prefix ('@' + registration
# function) during extraction; as written it is a bare string expression.
('unique_count')
class UniqueCountPipe(ByPipe):
    """Pipe that augments the first event schema with unique-count result fields."""

    minimum_args = 1

    def output_schemas(cls, arguments, event_schemas):
        """Return the schemas, extending the first with count/host/percent fields."""
        if (len(event_schemas) < 1):
            return event_schemas
        schemas = list(event_schemas)
        # Exactly one event type is expected in the first schema.
        (event_type,) = schemas[0].schema.keys()
        if any(schemas[0].schema.values()):
            count_fields = {
                'count': 'number',
                'total_hosts': 'number',
                'hosts': ['string'],
                'percent': 'number',
            }
            addition = Schema({event_type: count_fields}, allow_any=False, allow_generic=True)
            schemas[0] = schemas[0].merge(addition)
        return schemas
class FieldProjectionAngleMonitor(AbstractFieldProjectionMonitor):
    """Monitor that projects near fields to far-field observation angles (theta, phi)."""

    # Radial distance of the far-field observation sphere from local_origin.
    proj_distance: float = pydantic.Field(1000000.0, title='Projection Distance', description='Radial distance of the projection points from ``local_origin``.', units=MICROMETER)
    theta: ObsGridArray = pydantic.Field(..., title='Polar Angles', description='Polar angles with respect to the global z axis, relative to the location of ``local_origin``, at which to project fields.', units=RADIAN)
    phi: ObsGridArray = pydantic.Field(..., title='Azimuth Angles', description='Azimuth angles with respect to the global z axis, relative to the location of ``local_origin``, at which to project fields.', units=RADIAN)

    def storage_size(self, num_cells: int, tmesh: ArrayFloat1D) -> int:
        """Estimated bytes stored: one complex value per (theta, phi, freq) for 6 field components."""
        return ((((BYTES_COMPLEX * len(self.theta)) * len(self.phi)) * len(self.freqs)) * 6)
class ManifestStaticS3Storage(ManifestFilesMixin, StaticS3Storage):
    """Manifest-hashed static S3 storage that uploads with a one-year cache max-age."""

    default_s3_settings = StaticS3Storage.default_s3_settings.copy()
    default_s3_settings.update({'AWS_S3_MAX_AGE_SECONDS_CACHED': (((60 * 60) * 24) * 365)})

    def post_process(self, *args, **kwargs):
        """Run parent post-processing under the long cached max-age, restoring it afterwards."""
        saved_max_age = self.settings.AWS_S3_MAX_AGE_SECONDS
        # Hashed filenames are immutable, so they can be cached aggressively.
        self.settings.AWS_S3_MAX_AGE_SECONDS = self.settings.AWS_S3_MAX_AGE_SECONDS_CACHED
        try:
            yield from super().post_process(*args, **kwargs)
        finally:
            self.settings.AWS_S3_MAX_AGE_SECONDS = saved_max_age
class CrossAttnUpBlock2D(nn.Module):
    """UNet decoder block: per-layer skip-concat, ResnetBlock2D, then cross-attention.

    Each layer pops one skip tensor from ``res_hidden_states_tuple``,
    concatenates it onto the hidden states, runs a resnet block, then a
    SpatialTransformer conditioned on ``encoder_hidden_states``.  Optionally
    upsamples at the end.
    """

    def __init__(self, in_channels: int, out_channels: int, prev_output_channel: int, temb_channels: int, dropout: float=0.0, num_layers: int=1, resnet_eps: float=1e-06, resnet_time_scale_shift: str='default', resnet_act_fn: str='swish', resnet_groups: int=32, resnet_pre_norm: bool=True, attn_num_head_channels=1, cross_attention_dim=1280, attention_type='default', output_scale_factor=1.0, downsample_padding=1, add_upsample=True, use_linear_projection=False):
        super().__init__()
        resnets = []
        attentions = []
        self.attention_type = attention_type
        self.attn_num_head_channels = attn_num_head_channels
        for i in range(num_layers):
            # Last layer's skip tensor has the encoder's input channel count;
            # the first layer's input comes from the previous decoder block.
            res_skip_channels = (in_channels if (i == (num_layers - 1)) else out_channels)
            resnet_in_channels = (prev_output_channel if (i == 0) else out_channels)
            resnets.append(ResnetBlock2D(in_channels=(resnet_in_channels + res_skip_channels), out_channels=out_channels, temb_channels=temb_channels, eps=resnet_eps, groups=resnet_groups, dropout=dropout, time_embedding_norm=resnet_time_scale_shift, non_linearity=resnet_act_fn, output_scale_factor=output_scale_factor, pre_norm=resnet_pre_norm))
            attentions.append(SpatialTransformer(out_channels, attn_num_head_channels, (out_channels // attn_num_head_channels), depth=1, context_dim=cross_attention_dim, use_linear_projection=use_linear_projection))
        self.attentions = nn.ModuleList(attentions)
        self.resnets = nn.ModuleList(resnets)
        if add_upsample:
            self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)])
        else:
            self.upsamplers = None

    def forward(self, hidden_states, res_hidden_states_tuple, temb=None, encoder_hidden_states=None):
        """Consume skip tensors (last-first) through resnet+attention layers, then upsample."""
        for (resnet, attn) in zip(self.resnets, self.attentions):
            res_hidden_states = res_hidden_states_tuple[(- 1)]
            res_hidden_states_tuple = res_hidden_states_tuple[:(- 1)]
            # NOTE(review): concatenating on the last axis assumes a
            # channels-last layout; torch-layout UNets concat on dim=1 —
            # confirm against the ops.concatenate implementation in use.
            hidden_states = ops.concatenate()([hidden_states, res_hidden_states], dim=(- 1))
            hidden_states = resnet(hidden_states, temb=temb)
            hidden_states = attn(hidden_states, context=encoder_hidden_states)
        if (self.upsamplers is not None):
            for upsampler in self.upsamplers:
                hidden_states = upsampler(hidden_states)
        return hidden_states
class Foo(HasTraits):
    """Exercise class covering the main trait kinds (test fixture)."""

    a = Any  # unconstrained value
    b = Bool
    s = Str
    i = Instance(HasTraits)
    e = Event
    d = Delegate('i')  # delegates to the trait named 'i'

    # Read/write Property: backed by the _get_p/_set_p naming convention.
    p = Property

    def _get_p(self):
        return self._p

    def _set_p(self, p):
        self._p = p

    # Read-only Property: only a getter is defined.
    p_ro = Property

    def _get_p_ro(self):
        return id(self)

    # Write-only Property: only a setter is defined.
    p_wo = Property

    def _set_p_wo(self, p_wo):
        self._p_wo = p_wo
class RLAB(Lab):
    """RLAB color space: a Lab-like space with viewing-condition adaptation, based on XYZ D65."""

    BASE = 'xyz-d65'
    NAME = 'rlab'
    SERIALIZE = ('--rlab',)
    WHITE = WHITES['2deg']['D65']
    # Lightness 0-100; a/b span [-125, 125] and mirror as percentages.
    CHANNELS = (Channel('l', 0.0, 100.0), Channel('a', (- 125.0), 125.0, flags=FLG_MIRROR_PERCENT), Channel('b', (- 125.0), 125.0, flags=FLG_MIRROR_PERCENT))
    # Fixed viewing environment: average surround, hard-copy discounting.
    ENV = Environment(WHITE, YN, SURROUND['average'], D['hard-copy'])

    def to_base(self, coords: Vector) -> Vector:
        """Convert RLAB coordinates to the XYZ D65 base space."""
        return rlab_to_xyz(coords, self.ENV)

    def from_base(self, coords: Vector) -> Vector:
        """Convert XYZ D65 base coordinates to RLAB."""
        return xyz_to_rlab(coords, self.ENV)
class PopupCircularProgress(PopupSlider):
    """Popup control drawing its value as a circular progress arc."""

    # bar_border_* options from PopupSlider don't apply to a circular bar.
    defaults = [d for d in PopupSlider.defaults if (not d[0].startswith('bar_border'))]
    defaults += [('start_angle', 0, "Starting angle (in degrees) for progress marker. 0 is 12 o'clock and angle increases in a clockwise direction."), ('clockwise', True, 'Progress increases in a clockwise direction.'), ('clip', True, 'When ``True`` the drawing area is limited to the circular progress bar. This allows the progress bar to be placed on top of other controls and still show their content.')]

    def __init__(self, value=None, **config):
        PopupSlider.__init__(self, value, **config)
        self.add_defaults(PopupCircularProgress.defaults)

    def _configure(self, qtile, container):
        """Compute geometry (radius, origin, start angle) once the control size is known."""
        PopupSlider._configure(self, qtile, container)
        # Largest circle that fits, leaving room for the margin and half the bar width.
        self.radius = (((min(self.width, self.height) // 2) - self.end_margin) - (self.bar_size // 2))
        self.origin = ((self.width // 2), (self.height // 2))
        # Convert degrees (0 = 12 o'clock) to cairo radians (0 = 3 o'clock).
        self._start_angle = ((((self.start_angle - 90) * math.pi) / 180.0) % (2 * math.pi))

    def paint(self):
        """Draw the background ring (when not full) and the progress arc."""
        self.drawer.ctx.save()
        if self.clip:
            self._clip_area()
        self.clear(self._background)
        if (self.percentage < 1):
            self._paint_arc(self.colour_above, 0, 1)
        self._paint_arc(self.colour_below, self._start_angle, self.percentage)
        self.drawer.ctx.restore()

    def _clip_area(self):
        """Restrict drawing to the annulus occupied by the progress bar."""
        # Outer circle clockwise, inner circle counter-clockwise: the winding
        # rule leaves only the ring between the two paths in the clip region.
        self.drawer.ctx.new_sub_path()
        self.drawer.ctx.arc(*self.origin, (self.radius + (self.bar_size / 2)), 0, (2 * math.pi))
        self.drawer.ctx.new_sub_path()
        self.drawer.ctx.arc_negative(*self.origin, (self.radius - (self.bar_size / 2)), (2 * math.pi), 0)
        self.drawer.ctx.clip()

    def _paint_arc(self, colour, start, end):
        """Stroke an arc starting at ``start`` covering fraction ``end`` of a full turn."""
        arc_size = ((end * math.pi) * 2)
        if self.clockwise:
            step = arc_size
        else:
            # Counter-clockwise arcs sweep the complement of the fraction.
            step = ((math.pi * 2) - arc_size)
        if (step == 0):
            # Force a full 2*pi sweep so cairo draws a complete ring
            # (counter-clockwise with end == 1 lands here).
            # NOTE(review): clockwise with end == 0 also hits this branch and
            # would stroke a full ring — confirm percentage can never be 0.
            step -= (math.pi * 2)
        ctx = self.drawer.ctx
        arc_func = (ctx.arc if self.clockwise else ctx.arc_negative)
        ctx.save()
        self.drawer.set_source_rgb(colour)
        ctx.set_line_width(self.bar_size)
        arc_func(*self.origin, self.radius, start, (start + step))
        ctx.stroke()
        ctx.restore()
def get_frame_info(frame, lineno, with_locals=True, library_frame_context_lines=None, in_app_frame_context_lines=None, include_paths_re=None, exclude_paths_re=None, locals_processor_func=None):
    """Build a stack-frame dict (path, module, function, lineno, vars) for ``frame``.

    Returns None when the frame opts out via a truthy ``__traceback_hide__``
    local.  Source context lines are not read here; only the metadata needed
    to fetch them later is recorded under 'context_metadata'.
    """
    # getattr defaults guard against partial/mock frame objects.
    f_locals = getattr(frame, 'f_locals', {})
    if _getitem_from_frame(f_locals, '__traceback_hide__'):
        return None
    f_globals = getattr(frame, 'f_globals', {})
    loader = f_globals.get('__loader__')
    module_name = f_globals.get('__name__')
    f_code = getattr(frame, 'f_code', None)
    if f_code:
        abs_path = frame.f_code.co_filename
        function = frame.f_code.co_name
    else:
        abs_path = None
        function = None
    try:
        # Derive a path relative to the top-level package directory; any
        # failure (no module name, unusual layout) falls back to abs_path.
        base_filename = sys.modules[module_name.split('.', 1)[0]].__file__
        filename = abs_path.split(base_filename.rsplit(os.path.sep, 2)[0], 1)[(- 1)].lstrip(os.path.sep)
    except Exception:
        filename = abs_path
    if (not filename):
        filename = abs_path
    frame_result = {'abs_path': abs_path, 'filename': filename, 'module': module_name, 'function': function, 'lineno': lineno, 'library_frame': is_library_frame(abs_path, include_paths_re, exclude_paths_re)}
    # Library frames and in-app frames may be configured with different
    # amounts of surrounding source context.
    context_lines = (library_frame_context_lines if frame_result['library_frame'] else in_app_frame_context_lines)
    if (context_lines and (lineno is not None) and abs_path):
        frame_result['context_metadata'] = (abs_path, lineno, int((context_lines / 2)), loader, module_name)
    if with_locals:
        if ((f_locals is not None) and (not isinstance(f_locals, dict))):
            # Non-dict locals (e.g. lazy proxies) are converted best-effort.
            try:
                f_locals = to_dict(f_locals)
            except Exception:
                f_locals = '<invalid local scope>'
        if locals_processor_func:
            f_locals = {varname: locals_processor_func(var) for (varname, var) in f_locals.items()}
        frame_result['vars'] = transform(f_locals)
    return frame_result
class OptionPlotoptionsScatter3dSonificationContexttracksPointgrouping(Options):
    """Accessors for plotOptions.scatter3d.sonification.contextTracks.pointGrouping.

    NOTE(review): each name below is defined twice (getter then setter).  In
    the upstream generated code such pairs are @property / @<name>.setter;
    the decorators look stripped in this extract, so as written each setter
    shadows its getter — confirm against the original source.
    """

    def algorithm(self):
        # Grouping algorithm; default 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Whether point grouping is enabled; default True.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Group timespan; default 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property to group on; default 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the two lines below lost their '@' decorator prefixes during
# extraction; as written they are bare expression statements.
_routes.route('/<string:event_identifier>/sessions/dates')
_event_id
def get_dates(event_id):
    """Return the distinct dates (ascending, as strings) that have accepted or confirmed sessions for the event."""
    # zip(*rows) transposes the single-column result set; NULL start times are
    # excluded and only 'accepted'/'confirmed' sessions are considered.
    date_list = list(zip(*db.session.query(func.date(Session.starts_at)).distinct().filter((Session.event_id == event_id), (Session.starts_at != None), or_((Session.state == 'accepted'), (Session.state == 'confirmed'))).order_by(asc(func.date(Session.starts_at))).all()))
    dates = list(map(str, (date_list[0] if date_list else [])))
    return jsonify(dates)
class ExprCursorA(CursorArgumentProcessor):
    """Argument processor accepting an ExprCursor (or list of them) or a pattern string.

    With ``many=True`` a list of ExprCursors, or a pattern matching several
    expressions, is accepted; otherwise a single ExprCursor/match is required.
    """

    def __init__(self, many=False):
        # many=True: accept and return lists of cursors rather than a single one.
        self.match_many = many

    def _cursor_call(self, expr_pattern, all_args):
        """Normalize ``expr_pattern`` to ExprCursor(s), resolving pattern strings via proc.find."""
        if self.match_many:
            if isinstance(expr_pattern, list):
                if all((isinstance(ec, PC.ExprCursor) for ec in expr_pattern)):
                    return expr_pattern
                else:
                    # Report the first offending element (err presumably
                    # raises — verify in CursorArgumentProcessor).
                    for ec in expr_pattern:
                        if (not isinstance(ec, PC.ExprCursor)):
                            self.err(f'expected a list of ExprCursor, not {type(expr_pattern)}')
            elif (not isinstance(expr_pattern, str)):
                self.err('expected an ExprCursor or pattern string')
        elif isinstance(expr_pattern, PC.ExprCursor):
            return expr_pattern
        elif isinstance(expr_pattern, PC.Cursor):
            # A cursor of the wrong kind gets a more specific message.
            self.err(f'expected an ExprCursor, not {type(expr_pattern)}')
        elif (not isinstance(expr_pattern, str)):
            self.err('expected an ExprCursor or pattern string')
        # Reaching here, expr_pattern is a pattern string: search the proc.
        proc = all_args['proc']
        matches = proc.find(expr_pattern, many=self.match_many)
        if self.match_many:
            for m in matches:
                if (not isinstance(m, PC.ExprCursor)):
                    self.err(f'expected pattern to match only ExprCursors, not {type(m)}')
            return matches
        else:
            match = matches
            if (not isinstance(match, PC.ExprCursor)):
                self.err(f'expected pattern to match an ExprCursor, not {type(match)}')
            return match
def validate_observation_statistics(statistics: dict, validation_callback: Callable):
    """Recursively collapse raw 'values' arrays into min/max/interval summaries, validating each leaf.

    Mutates ``statistics`` in place: any leaf dict whose 'min' entry is a numpy
    array gets 'min'/'max'/'interval' recomputed from its 'values' list, the
    raw 'values' removed, and ``validation_callback`` asserted over the result.
    Nested dicts (and tuples) are recursed into.
    """
    def _summarize(entry):
        # Recompute summary statistics from the raw observations.
        observations = np.asarray(entry['values'])
        entry['min'] = np.min(observations)
        entry['max'] = np.max(observations)
        entry['interval'] = (entry['max'] - entry['min'])
        entry.pop('values')
        assert validation_callback(entry), 'Validation callback failed.'

    def _visit(key):
        entry = statistics[key]
        if (('min' in entry) and isinstance(entry['min'], np.ndarray)):
            _summarize(entry)
        else:
            validate_observation_statistics(entry, validation_callback)

    if isinstance(statistics, dict):
        for key in statistics:
            _visit(key)
    elif isinstance(statistics, tuple):
        for index in range(len(statistics)):
            _visit(index)
class Object(Field):
    """Field holding an inner document with its own mapping.

    Configured either with an explicit ``doc_class`` or with ad-hoc
    ``properties``/``dynamic`` settings — the two styles are mutually
    exclusive.
    """

    name = 'object'
    _coerce = True

    def __init__(self, doc_class=None, dynamic=None, properties=None, **kwargs):
        """
        :arg doc_class: inner document class used to wrap deserialized values
        :arg dynamic: dynamic-mapping setting for an ad-hoc inner doc
        :arg properties: field definitions for an ad-hoc inner doc
        :raises ValidationException: if doc_class is combined with properties/dynamic
        """
        if (doc_class and (properties or (dynamic is not None))):
            raise ValidationException('doc_class and properties/dynamic should not be provided together')
        if doc_class:
            self._doc_class = doc_class
        else:
            # Build an ad-hoc InnerDoc subclass and populate its mapping.
            from .document import InnerDoc
            self._doc_class = type('InnerDoc', (InnerDoc,), {})
            for (name, field) in (properties or {}).items():
                self._doc_class._doc_type.mapping.field(name, field)
            if (dynamic is not None):
                self._doc_class._doc_type.mapping.meta('dynamic', dynamic)
        # Deep copy so later update()s don't mutate the doc class's mapping.
        self._mapping = copy.deepcopy(self._doc_class._doc_type.mapping)
        super().__init__(**kwargs)

    def __getitem__(self, name):
        return self._mapping[name]

    def __contains__(self, name):
        return (name in self._mapping)

    def _empty(self):
        # An "empty" value is a wrapped empty document, not None.
        return self._wrap({})

    def _wrap(self, data):
        """Wrap a raw dict in the inner document class."""
        return self._doc_class.from_es(data, data_only=True)

    def empty(self):
        if self._multi:
            # Multi-valued fields default to an (auto-wrapping) empty list.
            return AttrList([], self._wrap)
        return self._empty()

    def to_dict(self):
        """Serialize the mapping, overlaying this field's own options."""
        d = self._mapping.to_dict()
        d.update(super().to_dict())
        return d

    def _collect_fields(self):
        return self._mapping.properties._collect_fields()

    def _deserialize(self, data):
        # Already-wrapped values pass through unchanged.
        if isinstance(data, self._doc_class):
            return data
        if isinstance(data, AttrDict):
            data = data._d_
        return self._wrap(data)

    def _serialize(self, data):
        if (data is None):
            return None
        # Plain mappings are assumed to already be in serialized form.
        if isinstance(data, collections.abc.Mapping):
            return data
        return data.to_dict()

    def clean(self, data):
        """Run the parent cleaning, then full_clean() every inner document."""
        data = super().clean(data)
        if (data is None):
            return None
        if isinstance(data, (list, AttrList)):
            for d in data:
                d.full_clean()
        else:
            data.full_clean()
        return data

    def update(self, other, update_only=False):
        """Merge another Object field's mapping into this one (no-op for non-Object)."""
        if (not isinstance(other, Object)):
            return
        self._mapping.update(other._mapping, update_only)
def load(profile_path, limit=200, quiet=False, extract_features_vector=True):
    """Load a cached Twitter profile plus up to ``limit`` of its newest statuses.

    Statuses are split into tweets / replies / retweets; a feature vector is
    optionally extracted.  Returns ((profile, tweets, replies, retweets),
    vector), or None when files are missing, nothing loads, or parsing fails.
    """
    profile_file = os.path.join(profile_path, 'profile.json')
    tweets_path = os.path.join(profile_path, 'tweets')
    if (not os.path.exists(profile_file)):
        return None
    if (not os.path.exists(tweets_path)):
        return None
    try:
        profile = None
        with open(profile_file, 'rt') as fp:
            profile = json.load(fp)
        tweets = []
        replies = []
        retweets = []
        tweet_files = list(glob.glob(os.path.join(tweets_path, '*_*.json')))
        # Newest first: sorts on the filename part after the last '_' —
        # assumed to be a sortable id/timestamp; TODO confirm the convention.
        tweet_files.sort(key=(lambda x: x.split('_').pop()), reverse=True)
        for filename in tweet_files:
            num_tweets = len(tweets)
            num_replies = len(replies)
            num_retweets = len(retweets)
            num_total = ((num_tweets + num_replies) + num_retweets)
            if (num_total == limit):
                break
            with open(filename, 'rt') as fp:
                tweet = json.load(fp)
            # Classification priority: retweet, then reply, then plain tweet.
            if (('retweeted_status' in tweet) and (tweet['retweeted_status'] is not None)):
                retweets.append(tweet)
            elif (('in_reply_to_status_id' in tweet) and (tweet['in_reply_to_status_id'] is not None)):
                replies.append(tweet)
            else:
                tweets.append(tweet)
        # Final counts for the reporting below.
        num_tweets = len(tweets)
        num_replies = len(replies)
        num_retweets = len(retweets)
        num_total = ((num_tweets + num_replies) + num_retweets)
        if (num_total == 0):
            return None
        data = (profile, tweets, replies, retweets)
        vector = None
        if extract_features_vector:
            if (not quiet):
                print(('vectorializing %s : %d tweets, %d replies, %d retweets' % (profile_path, num_tweets, num_replies, num_retweets)))
            vector = features.extract(profile, tweets, replies, retweets)
        elif (not quiet):
            print(('loaded %s : %d tweets, %d replies, %d retweets' % (profile_path, num_tweets, num_replies, num_retweets)))
        return (data, vector)
    except Exception as e:
        # Best-effort loader: report the problem and skip this profile.
        print(traceback.format_exc())
        print(('problem loading %s: %s' % (profile_path, e)))
        return None
def get_histogram_for_distribution(*, current_distribution: Distribution, reference_distribution: Optional[Distribution]=None, title: str='', xaxis_title: Optional[str]=None, yaxis_title: Optional[str]=None, color_options: ColorOptions):
    """Build a histogram figure for the current distribution, optionally overlaying a reference one."""
    def _to_histogram(name, distribution):
        # Wrap a Distribution's x/y arrays into a named HistogramData.
        return HistogramData(name=name, x=pd.Series(distribution.x), count=pd.Series(distribution.y))

    current_histogram = _to_histogram('current', current_distribution)
    reference_histogram: Optional[HistogramData] = None
    if reference_distribution is not None:
        reference_histogram = _to_histogram('reference', reference_distribution)
    return histogram(title=title, primary_hist=current_histogram, secondary_hist=reference_histogram, xaxis_title=xaxis_title, yaxis_title=yaxis_title, color_options=color_options)
class GraphQLBackendInstrumentation(AbstractInstrumentedModule):
    """Instruments graphql-core backends so APM transactions are named after the GraphQL operation."""

    name = 'graphql'
    instrument_list = [('graphql.backend.core', 'GraphQLCoreBackend.document_from_string'), ('graphql.backend.cache', 'GraphQLCachedBackend.document_from_string')]

    def get_graphql_tx_name(self, graphql_doc):
        """Derive a transaction name like 'GraphQL QUERY <name>' from a parsed document.

        Falls back to a generic name when the document contains no operation
        definition.
        """
        try:
            # Indexing an empty list raises IndexError; the previous code
            # caught KeyError, so the fallback below was unreachable.
            op_def = [i for i in graphql_doc.definitions if (type(i).__name__ == 'OperationDefinition')][0]
        except IndexError:
            return 'GraphQL unknown operation'
        op = op_def.operation
        name = op_def.name
        fields = op_def.selection_set.selections
        # Anonymous operations are named by joining their top-level field names.
        return ('GraphQL %s %s' % (op.upper(), (name if name else '+'.join([f.name.value for f in fields]))))

    def call(self, module, method, wrapped, instance, args, kwargs):
        """Wrap document_from_string: parse first, then name the transaction from the AST."""
        graphql_document = wrapped(*args, **kwargs)
        transaction_name = self.get_graphql_tx_name(graphql_document.document_ast)
        set_transaction_name(transaction_name)
        return graphql_document
def test_get_version_raise():
    """_get_version(None) must raise ValueError mentioning the config file location."""
    config = ecl_config.Ecl100Config()
    config_source = inspect.getfile(ecl_config.Ecl100Config)
    expected_msg = os.path.join(os.path.dirname(os.path.abspath(config_source)), 'ecl100_config.yml')
    with pytest.raises(ValueError, match=expected_msg):
        config._get_version(None)
def test_fill_value():
    """Points outside the interpolation grid must evaluate to the NaN fill value."""
    def func(x, y, z):
        return 2 * x ** 3 + 3 * y ** 2 - z

    grid_x = np.linspace(1, 4, 11)
    grid_y = np.linspace(4, 7, 22)
    grid_z = np.linspace(7, 9, 33)
    values = func(*np.meshgrid(grid_x, grid_y, grid_z, indexing='ij', sparse=True))
    # Each point is out of bounds on at least one axis.
    points = np.array([[0.1, 6.2, 8.3], [3.3, 5.2, 10.1]])
    interpolator = RegularGridInterpolator((grid_x, grid_y, grid_z), values, fill_value=np.nan)
    result = interpolator.evaluate(points).eval()
    assert np.all(np.isnan(result))
# NOTE(review): the line below lost its '@pytest.fixture' prefix during
# extraction; as written it is invalid as a standalone statement.
(scope='function')
def policy(db: Session, oauth_client: ClientDetail, storage_config: StorageConfig) -> Generator:
    """Fixture yielding an access-request Policy (with one rule and one target), cleaning up afterwards."""
    access_request_policy = Policy.create(db=db, data={'name': 'example access request policy', 'key': 'example_access_request_policy', 'client_id': oauth_client.id, 'execution_timeframe': 7})
    access_request_rule = Rule.create(db=db, data={'action_type': ActionType.access.value, 'client_id': oauth_client.id, 'name': 'Access Request Rule', 'policy_id': access_request_policy.id, 'storage_destination_id': storage_config.id})
    rule_target = RuleTarget.create(db=db, data={'client_id': oauth_client.id, 'data_category': DataCategory('user').value, 'rule_id': access_request_rule.id})
    (yield access_request_policy)
    # Teardown: delete child-to-parent; each delete tolerates the object
    # having already been removed by the test.
    try:
        rule_target.delete(db)
    except ObjectDeletedError:
        pass
    try:
        access_request_rule.delete(db)
    except ObjectDeletedError:
        pass
    try:
        access_request_policy.delete(db)
    except ObjectDeletedError:
        pass
class CovidStateMenu(menus.ListPageSource):
    """Menu page source paginating per-US-state Covid-19 statistics, one state per page."""

    def __init__(self, entries: Iterable[str]):
        super().__init__(entries, per_page=1)

    # Return annotation corrected: the method builds and returns an Embed.
    async def format_page(self, menu: GenericMenu, state) -> discord.Embed:
        """Render one state's statistics as a Discord embed.

        ``state`` is a mapping with fields such as 'state', 'cases',
        'deaths', 'todayCases', 'todayDeaths', 'active' and 'tests'.
        """
        embed = discord.Embed(color=(await menu.ctx.embed_colour()), title='Covid-19 | USA | {} Statistics'.format(state['state']))
        embed.add_field(name='Cases', value=humanize_number(state['cases']))
        embed.add_field(name='Deaths', value=humanize_number(state['deaths']))
        embed.add_field(name=f'Cases {menu.type}', value=humanize_number(state['todayCases']))
        embed.add_field(name=f'Deaths {menu.type}', value=humanize_number(state['todayDeaths']))
        embed.add_field(name=f'Active {menu.type}', value=humanize_number(state['active']))
        embed.add_field(name='Total Tests', value=humanize_number(state['tests']))
        embed.set_footer(text=f'Page {(menu.current_page + 1)}/{menu._source.get_max_pages()}')
        return embed
class TabRowContextMenu(JsPackage):
    """JS helper building Tabulator row context-menu entries that POST the row data to a service URL."""

    lib_set_var = False

    def add(self, name: str, url: str, icon: str=None):
        """Append one context-menu entry.

        :param name: menu label (also sent as the 'label' field of the payload)
        :param url: service endpoint called with the row data on click
        :param icon: optional icon CSS class rendered before the label
        """
        js_service = self.page.js.fncs.service()
        if (icon is not None):
            return JsObjects.JsVoid(('%s.options.rowContextMenu.push({label: \'<i class="%s" style="margin-right:5px"></i>%s\', action: function(e, row){var data = {row: row.getData(), label: \'%s\'}; %s(\'%s\', data)} })' % (self.toStr(), icon, name, name, js_service, url)))
        return JsObjects.JsVoid(("%s.options.rowContextMenu.push({label: '%s', action: function(e, row){var data = {row: row.getData(), label: '%s'}; %s('%s', data)} })" % (self.toStr(), name, name, js_service, url)))

    def fromConfig(self, services: list):
        """Append entries from a list of {label, url, icon?} config objects (evaluated client-side)."""
        js_service = self.page.js.fncs.service()
        services = JsUtils.jsConvertData(services, None)
        return JsObjects.JsVoid(('\n%(menu)s.forEach(function(item){\n  var label = item.label;\n  if(typeof item.icon !== "undefined"){label = \'<i class="\'+ item.icon +\'" style="margin-right:5px"></i>\' + label}\n  %(tableId)s.options.rowContextMenu.push({label: label, \n    action: function(e, row){var data = {row: row.getData(), label: item.label}; %(serviceName)s(item.url, data)} })\n})' % {'menu': services, 'tableId': self.toStr(), 'serviceName': js_service}))
def expand_pos(wordmap):
    """Expand the short POS tag in ``wordmap['pos']`` to its long form, in place.

    'Prop' and 'Pre' both map to NOUN and additionally set the 'is_proper' /
    'is_prefix' flag.  Unrecognised tags are reported on stderr and left
    unchanged.  Returns the (mutated) wordmap.
    """
    simple_tags = {
        'A': 'ADJECTIVE',
        'N': 'NOUN',
        'V': 'VERB',
        'P': 'PARTICLE',
        'Adv': 'ADVERB',
        'Adp': 'ADPOSITION',
        'Intj': 'INTERJECTION',
        'Conj': 'CONJUNCTION',
        'Abbr': 'ABBREVIATION',
        'Acro': 'ACRONYM',
        'Num': 'NUMERAL',
        'Pron': 'PRONOUN',
    }
    tag = wordmap['pos']
    if tag in simple_tags:
        wordmap['pos'] = simple_tags[tag]
    elif tag == 'Prop':
        # Proper nouns keep the NOUN category plus a flag.
        wordmap['pos'] = 'NOUN'
        wordmap['is_proper'] = True
    elif tag == 'Pre':
        # Prefixes are treated as nouns plus a flag.
        wordmap['pos'] = 'NOUN'
        wordmap['is_prefix'] = True
    else:
        print('Unrecognised POS', wordmap['pos'], file=stderr)
    return wordmap
class TintPreference(widgets.RGBAButtonPreference, widgets.CheckConditional):
    """Preference widget for the moodbar tint colour, active only when tinting is enabled."""

    name = 'plugin/moodbar/tint'
    # Greyed out / inactive unless the 'use_tint' checkbox preference is set.
    condition_preference_name = 'plugin/moodbar/use_tint'
    default = 'rgba(255, 255, 255, 0.2)'

    def __init__(self, preferences, widget):
        # Initialise both parent mixins explicitly.
        widgets.RGBAButtonPreference.__init__(self, preferences, widget)
        widgets.CheckConditional.__init__(self)
class OptionSeriesTreegraphSonificationDefaultspeechoptionsMappingTime(Options):
    """Accessors for series.treegraph.sonification.defaultSpeechOptions.mapping.time.

    NOTE(review): each name below is defined twice (getter then setter).  In
    the upstream generated code such pairs are @property / @<name>.setter;
    the decorators look stripped in this extract, so as written each setter
    shadows its getter — confirm against the original source.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the four '_required' lines and '_POST' below lost their '@'
# decorator prefixes (and decorator names) during extraction; as written they
# are bare expression statements.
_required
_required
_required(NetworkAdminPermission)
_required
_POST
def admin_network_form(request):
    """Handle the admin network create/update form (POST), redirecting to the network list on success."""
    qs = request.GET.copy()
    if (request.POST['action'] == 'update'):
        try:
            net = Subnet.objects.select_related('owner', 'dc_bound').get(name=request.POST['adm-name'])
        except Subnet.DoesNotExist:
            raise Http404
    else:
        # 'create' (and any other action): no existing network to load.
        net = None
    form = AdminNetworkForm(request, net, request.POST, prefix='adm')
    if form.is_valid():
        args = (form.cleaned_data['name'],)
        status = form.save(args=args)
        if (status == 204):
            # No change was made.
            return HttpResponse(None, status=status)
        elif (status in (200, 201)):
            # Newly created, non-DC-bound networks are shown in the 'all' view.
            if ((form.action == 'create') and (not form.cleaned_data.get('dc_bound'))):
                qs['all'] = 1
            if request.GET.get('ips', False):
                redir_view = 'dc_network_ip_list'
                # NOTE(review): on a successful 'create' with ?ips set,
                # ``net`` is None and ``net.name`` would raise — confirm this
                # path cannot occur.
                redir_args = (net.name,)
            else:
                redir_view = 'dc_network_list'
                redir_args = ()
            return redirect(redir_view, *redir_args, query_string=qs)
    # Invalid form (or unexpected status): re-render with errors.
    return render(request, 'gui/dc/network_admin_form.html', {'form': form, 'nodc': request.GET.get('ips', '')})
class OptionSeriesSunburstSonificationContexttracksMappingHighpassFrequency(Options):
    """Accessors for series.sunburst.sonification.contextTracks.mapping.highpass.frequency.

    NOTE(review): each name below is defined twice (getter then setter).  In
    the upstream generated code such pairs are @property / @<name>.setter;
    the decorators look stripped in this extract, so as written each setter
    shadows its getter — confirm against the original source.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def test_load_schema_output_is_correct_6():
    """Loading A.avsc without write hints must inline record B once, then reference it by name.

    The original body had a lone "'" where the docstring was (truncated in
    extraction), which is an unterminated string literal — restored here.
    """
    load_schema_dir = join(abspath(dirname(__file__)), 'load_schema_test_6')
    schema_path = join(load_schema_dir, 'A.avsc')
    loaded_schema = fastavro.schema.load_schema(schema_path, _write_hint=False)
    # First use of B is the full record definition; the second field refers
    # to it by name only.
    expected_schema = {'name': 'A', 'type': 'record', 'fields': [{'name': 'array_field', 'type': {'type': 'array', 'items': {'name': 'B', 'type': 'record', 'fields': [{'name': 'foo', 'type': 'string'}]}}}, {'name': 'b', 'type': 'B'}]}
    assert (loaded_schema == expected_schema)
class TestCompress(unittest.TestCase):
def test_compress(self, func):
self.assertEqual(func(None), None)
self.assertEqual(func(''), '')
self.assertEqual(func('AABBCC'), 'AABBCC')
self.assertEqual(func('AAABCCDDDDE'), 'A3BC2D4E')
self.assertEqual(func('BAAACCDDDD'), 'BA3C2D4')
self.assertEqual(func('AAABAACCDDDD'), 'A3BA2C2D4')
print('Success: test_compress') |
def test_make_subprocess(tmp_path: Path) -> None:
    """The controller child must see its own PID through SUBMITIT_LOCAL_JOB_ID."""
    command = 'python -c \'import os;print(os.environ["SUBMITIT_LOCAL_JOB_ID"])\''
    process = local.start_controller(tmp_path, command, timeout_min=1)
    expected_pid = process.pid
    job_paths = utils.JobPaths(tmp_path, str(process.pid), 0)
    process.wait()
    stdout = job_paths.stdout.read_text()
    stderr = job_paths.stderr.read_text()
    assert (stdout and (int(stdout.strip()) == expected_pid)), f'PID link is broken (stderr: {stderr})'
class _Keywords(_Filter):
    """Filter translating keyword search values into a dis_max Elasticsearch query."""

    underscore_name = 'keywords'

    def generate_elasticsearch_query(cls, filter_values: List[str], query_type: _QueryType, **options) -> ES_Q:
        """Build one wildcard query_string clause per keyword and combine them with dis_max."""
        fields = ['recipient_name', 'naics_description', 'product_or_service_description', 'transaction_description', 'piid', 'fain', 'uri', 'recipient_unique_id', 'parent_recipient_unique_id', 'description', 'recipient_uei', 'parent_uei']
        keyword_queries = []
        for filter_value in filter_values:
            sanitized = es_sanitize(filter_value)
            # Values containing a backslash get an escaped trailing wildcard.
            suffix = '\\*' if ('\\' in sanitized) else '*'
            keyword_queries.append(ES_Q('query_string', query=(sanitized + suffix), default_operator='AND', fields=fields))
        return ES_Q('dis_max', queries=keyword_queries)
class OptionPlotoptionsAreasplinerangeSonificationPointgrouping(Options):
    """Accessors for plotOptions.areasplinerange.sonification.pointGrouping.

    NOTE(review): each name below is defined twice (getter then setter).  In
    the upstream generated code such pairs are @property / @<name>.setter;
    the decorators look stripped in this extract, so as written each setter
    shadows its getter — confirm against the original source.
    """

    def algorithm(self):
        # Grouping algorithm; default 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Whether point grouping is enabled; default True.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Group timespan; default 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property to group on; default 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
class TestSuperFencesCustomExceptionAttrList(util.MdCase):
    """A custom fence validator raising SuperFencesException must propagate even with attr_list enabled."""

    extension = ['pymdownx.superfences', 'attr_list']
    extension_configs = {'pymdownx.superfences': {'custom_fences': [{'name': 'test', 'class': 'test', 'format': custom_format, 'validator': custom_validator_except}]}}

    def test_custom_fail_exception(self):
        """The validator's exception is not swallowed by the markdown conversion."""
        with self.assertRaises(SuperFencesException):
            self.check_markdown('\n    ```{.test}\n    test\n    ```\n    ', '', True)
class TestRansomNote(unittest.TestCase):
    """Checks Solution.match_note_to_magazine against known note/magazine pairs."""

    def test_ransom_note(self):
        solution = Solution()
        # None inputs are rejected outright.
        self.assertRaises(TypeError, solution.match_note_to_magazine, None, None)
        cases = [
            (('', ''), True),
            (('a', 'b'), False),
            (('aa', 'ab'), False),
            (('aa', 'aab'), True),
        ]
        for (note, magazine), expected in cases:
            self.assertEqual(solution.match_note_to_magazine(note, magazine), expected)
        print('Success: test_ransom_note')
def build_model():
    """Build a VGG16-style SegNet encoder/decoder for 320x320 RGB input.

    The encoder mirrors VGG16 (five conv stages with 2x2 max pooling); the
    decoder upsamples and, at each stage, merges the matching pre-pooling
    encoder activations through an Unpooling layer before running its conv
    stack.  Layer names exactly match the original hand-unrolled
    implementation so pretrained weights still load.

    Returns the compiled-free keras Model named 'SegNet' producing a
    per-pixel softmax over ``num_labels`` classes.
    """
    num_labels = 20
    kernel = 3

    def _conv(x, filters, name):
        # One named 3x3 conv (relu, he_normal init) followed by batch norm.
        x = Conv2D(filters, (kernel, kernel), activation='relu', padding='same', name=name, kernel_initializer='he_normal', bias_initializer='zeros')(x)
        return BatchNormalization()(x)

    def _conv_stage(x, filters, prefix, count):
        # `count` conv+BN layers named `<prefix>_1` .. `<prefix>_<count>`.
        for i in range(1, count + 1):
            x = _conv(x, filters, f'{prefix}_{i}')
        return x

    def _unpool_merge(x, skip):
        # Stack the encoder skip activations and the upsampled decoder tensor
        # along a new leading axis, then let Unpooling recombine them.
        the_shape = K.int_shape(skip)
        shape = (1, the_shape[1], the_shape[2], the_shape[3])
        together = Concatenate(axis=1)([Reshape(shape)(skip), Reshape(shape)(x)])
        return Unpooling()(together)

    input_tensor = Input(shape=(320, 320, 3))

    # Encoder: five VGG16 stages, keeping each pre-pooling output as a skip.
    skips = []
    x = input_tensor
    for prefix, filters, count in [('conv1', 64, 2), ('conv2', 128, 2), ('conv3', 256, 3), ('conv4', 512, 3), ('conv5', 512, 3)]:
        x = _conv_stage(x, filters, prefix, count)
        skips.append(x)
        x = MaxPooling2D((2, 2), strides=(2, 2))(x)
    orig_1, orig_2, orig_3, orig_4, orig_5 = skips

    # Decoder: upsample, merge the matching skip, then run the deconv stack.
    decoder_stages = [
        (orig_5, 'deconv5', [512, 512, 512]),
        (orig_4, 'deconv4', [512, 512, 256]),
        (orig_3, 'deconv3', [256, 256, 128]),
        (orig_2, 'deconv2', [128, 64]),
        (orig_1, 'deconv1', [64, 64]),
    ]
    for skip, prefix, filter_sizes in decoder_stages:
        x = UpSampling2D(size=(2, 2))(x)
        x = _unpool_merge(x, skip)
        for i, filters in enumerate(filter_sizes, start=1):
            x = _conv(x, filters, f'{prefix}_{i}')

    # Per-pixel classification head.
    outputs = Conv2D(num_labels, (1, 1), activation='softmax', padding='valid', name='pred', kernel_initializer='he_normal', bias_initializer='zeros')(x)
    return Model(inputs=input_tensor, outputs=outputs, name='SegNet')
def load_profiles_info_1_5() -> Tuple[(Profile, Dict[(str, Any)])]:
    """Load the 'fal' profile (dbt 1.5) and resolve the db profile it wraps.

    Returns:
        A tuple of the wrapped database ``Profile`` and a dict of properties
        (currently just ``threads``) that should override its settings.

    Raises:
        AssertionError: if the fal profile has no ``db_profile`` property.
        AttributeError: if the wrapped profile is itself a 'fal' profile
            (detected via the resulting ``RecursionError``).
    """
    flags: Namespace = get_flags()
    renderer = ProfileRenderer(getattr(flags, 'VARS', {}))
    profile_name = find_profile_name(flags.PROFILE, flags.PROJECT_DIR, renderer)
    # Read all profiles and pick out the raw 'fal' profile entry.
    raw_profile = read_profile(flags.PROFILES_DIR)[profile_name]
    target_name = find_target_name(flags.TARGET, raw_profile, renderer)
    fal_dict = Profile._get_profile_data(profile=raw_profile, profile_name=profile_name, target_name=target_name)
    db_profile_target_name = fal_dict.get('db_profile')
    assert db_profile_target_name, 'fal credentials must have a `db_profile` property set'
    try:
        db_profile = Profile.from_raw_profile_info(raw_profile=raw_profile, profile_name=profile_name, renderer=renderer, user_config={}, target_override=db_profile_target_name)
    except RecursionError as error:
        raise AttributeError("Did you wrap a type 'fal' profile with another type 'fal' profile?") from error
    # Thread-count precedence: CLI flag, then fal profile, then db profile.
    threads = (getattr(flags, 'THREADS', None) or fal_dict.get('threads') or db_profile.threads)
    return (db_profile, {'threads': threads})
def test_debug_after_response_sent(test_client_factory):
    """An exception raised *after* the response has been sent cannot be turned
    into a debug page; the middleware must re-raise it instead."""
    async def failing_app(scope, receive, send):
        # Complete a 204 response first, then blow up.
        await Response(b'', status_code=204)(scope, receive, send)
        raise RuntimeError('Something went wrong')

    client = test_client_factory(ServerErrorMiddleware(failing_app, debug=True))
    with pytest.raises(RuntimeError):
        client.get('/')
def get_token_font_size_feature(previous_token: Optional[LayoutToken], current_token: LayoutToken):
    """Return a categorical feature comparing the font size of *current_token*
    against *previous_token*.

    Returns one of ``'HIGHERFONT'``, ``'LOWERFONT'`` or ``'SAMEFONTSIZE'``.
    With no previous token, or with a missing font size on either side,
    ``'HIGHERFONT'`` is used as the default.
    """
    if not previous_token:
        return 'HIGHERFONT'
    prev_size = previous_token.font.font_size
    curr_size = current_token.font.font_size
    # Either size missing (falsy) -> fall back to the default feature value.
    if not (prev_size and curr_size):
        return 'HIGHERFONT'
    if prev_size < curr_size:
        return 'HIGHERFONT'
    if prev_size > curr_size:
        return 'LOWERFONT'
    return 'SAMEFONTSIZE'
class OptionSeriesWordcloudSonificationContexttracksMappingFrequency(Options):
    """Accessors for the ``mapping.frequency`` options of a wordcloud series'
    sonification context track.

    Each option is a read/write property backed by the generic ``Options``
    config store (``_config_get`` / ``_config``).

    NOTE(review): the original text defined each getter and setter as two
    plain methods with the same name, so the setter silently shadowed the
    getter. Restored as the standard ``@property`` / ``@<name>.setter``
    pattern used by these generated option classes — confirm against the
    code generator.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class MessageBox(QtWidgets.QMessageBox):
    """Thin ``QMessageBox`` wrapper with classmethod shortcuts.

    Use :meth:`info`, :meth:`question`, :meth:`warn` or :meth:`error` to show
    a modal dialog; each returns the ``exec_()`` result code.
    """

    def __init__(self, text: Union[(str, List[str])], title: str, default_button=QtWidgets.QMessageBox.Ok, icon=QtWidgets.QMessageBox.Icon.Information, parent=None) -> None:
        # BUG FIX: __init__ was annotated "-> int" and ended with a stray
        # bare ``return``; an initializer always returns None.
        super().__init__(parent)
        # A list of lines is joined into a single multi-line message body.
        if isinstance(text, list):
            self.setText('\n'.join(text))
        else:
            self.setText(text)
        self.setWindowTitle(title)
        self.setStandardButtons(default_button)
        self.setIcon(icon)

    # BUG FIX: the four helpers below take ``cls`` but had lost their
    # @classmethod decorators, so ``MessageBox.info(text, title)`` would have
    # bound ``text`` to ``cls``.
    @classmethod
    def info(cls, text: Union[(str, List[str])], title: str, default_button=QtWidgets.QMessageBox.Ok, parent=None):
        """Show an information dialog; return the dialog result code."""
        return MessageBox(text, ('[INFO] ' + title), default_button, icon=MessageBox.Icon.Information, parent=parent).exec_()

    @classmethod
    def question(cls, text: Union[(str, List[str])], title: str, default_button=(QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No), parent=None):
        """Show a yes/no question dialog; return the dialog result code."""
        return MessageBox(text, ('[QUESTION] ' + title), default_button, icon=MessageBox.Icon.Question, parent=parent).exec_()

    @classmethod
    def warn(cls, text: Union[(str, List[str])], title: str, default_button=QtWidgets.QMessageBox.Ok, parent=None):
        """Show a warning dialog; return the dialog result code."""
        return MessageBox(text, ('[WARN] ' + title), default_button, icon=MessageBox.Icon.Warning, parent=parent).exec_()

    @classmethod
    def error(cls, text: Union[(str, List[str])], title: str, default_button=QtWidgets.QMessageBox.Ok, parent=None):
        """Show an error dialog; return the dialog result code."""
        return MessageBox(text, ('[ERROR] ' + title), default_button, icon=MessageBox.Icon.Critical, parent=parent).exec_()
class Github(IntervalModule):
    """i3pystatus module: GitHub status page + unread-notification monitor.

    A daemon thread polls the GitHub Status API (and, when credentials are
    configured, the notifications API) every ``interval`` seconds and
    publishes the results through the ``{status}``, ``{unread}`` and
    ``{update_error}`` formatters.

    NOTE(review): the original text contained bare ``(internet)`` expression
    statements (stripped ``@require(internet)`` decorators), plain methods
    where ``@property`` was clearly intended (they are read as attributes,
    e.g. ``self.current_status`` at several call sites), and a ``notify``
    method missing ``self``.  All three have been restored — confirm against
    upstream i3pystatus, and that ``require`` is imported at module level.
    """
    settings = (('format', 'format string'), ('status', 'Dictionary mapping statuses to the text which represents that status type. This defaults to ``GitHub`` for all status types.'), ('colors', 'Dictionary mapping statuses to the color used to display the status text'), ('refresh_icon', 'Text to display (in addition to any text currently shown by the module) when refreshing the GitHub status. **NOTE:** Depending on how quickly the update is performed, the icon may not be displayed.'), ('update_error', 'Value for the ``{update_error}`` formatter when an error is encountered while checking GitHub status'), ('keyring_backend', 'alternative keyring backend for retrieving credentials'), ('username', ''), ('password', ''), ('access_token', ''), ('unread_marker', 'Defines the string that the ``{unread}`` formatter shows when there are pending notifications'), ('notify_status', 'Set to ``True`` to display a desktop notification on status changes'), ('notify_unread', 'Set to ``True`` to display a desktop notification when new notifications are detected'), ('unread_notification_template', 'String with no more than one ``%d``, which will be replaced by the number of new unread notifications. Useful for those with non-English locales who would like the notification to be in their native language. The ``%d`` can be omitted if desired.'), ('api_methods_url', 'URL from which to retrieve the API endpoint URL which this module will use to check the GitHub Status'), ('status_url', 'The URL to the status page (opened when the module is double-clicked with the right mouse button'), ('notifications_url', 'The URL to the GitHub notifications page (opened when the module is double-clicked with the left mouse button'))
    # Default text colors per status indicator (GitHub's own palette).
    _default_colors = {'none': '#28a745', 'maintenance': '#4f8cc9', 'minor': '#dbab09', 'major': '#e36209', 'critical': '#dc3545'}
    format = '{status}[ {unread}][ {update_error}]'
    status = {}
    colors = _default_colors
    refresh_icon = ''
    update_error = '!'
    username = ''
    password = ''
    access_token = ''
    unread_marker = ''
    notify_status = False
    notify_unread = False
    unread_notification_template = 'You have %d new notification(s)'
    api_methods_url = API_METHODS_URL
    status_url = STATUS_URL
    notifications_url = NOTIFICATIONS_URL
    interval = 600
    max_error_len = 50
    keyring_backend = None
    unread = ''
    unknown_color = None
    unknown_status = '?'
    failed_update = False
    # Raw JSON payloads from the previous / most recent status poll.
    __previous_json = None
    __current_json = None
    # Notification-ID bookkeeping between polls.
    new_unread = None
    previous_unread = None
    current_unread = None
    config_error = None
    data = {'status': '', 'unread': 0, 'unread_count': '', 'update_error': ''}
    output = {'full_text': '', 'color': None}
    on_leftclick = ['perform_update']
    on_rightclick = ['show_status_notification']
    on_doubleleftclick = ['launch_notifications_url']
    on_doublerightclick = ['launch_status_url']

    @require(internet)
    def launch_status_url(self):
        """Open the GitHub status page in the user's browser."""
        self.logger.debug('Launching %s in browser', self.status_url)
        user_open(self.status_url)

    @require(internet)
    def launch_notifications_url(self):
        """Open the GitHub notifications page in the user's browser."""
        self.logger.debug('Launching %s in browser', self.notifications_url)
        user_open(self.notifications_url)

    def init(self):
        """Merge user colors over the defaults and start the polling thread."""
        if (self.colors != self._default_colors):
            # User-supplied colors override defaults without discarding them.
            new_colors = copy.copy(self._default_colors)
            new_colors.update(self.colors)
            self.colors = new_colors
        self.logger.debug('colors = %s', self.colors)
        self.condition = threading.Condition()
        self.thread = threading.Thread(target=self.update_loop, daemon=True)
        self.thread.start()

    def update_loop(self):
        """Background loop: poll once, then every ``interval`` seconds."""
        try:
            self.perform_update()
            while True:
                with self.condition:
                    # wait() doubles as an interruptible sleep; notify() on
                    # the condition forces an immediate refresh.
                    self.condition.wait(self.interval)
                self.perform_update()
        except Exception:
            msg = 'Exception in {thread} at {time}, module {name}'.format(thread=threading.current_thread().name, time=time.strftime('%c'), name=self.__class__.__name__)
            self.logger.error(msg, exc_info=True)

    @require(internet)
    def status_api_request(self, url):
        """GET *url* and return the decoded JSON dict ({} on any failure)."""
        self.logger.debug('Making GitHub Status API request to %s', url)
        try:
            with urlopen(url) as content:
                try:
                    content_type = dict(content.getheaders())['Content-Type']
                    charset = re.search('charset=(.*)', content_type).group(1)
                except AttributeError:
                    # No charset in the Content-Type header; assume UTF-8.
                    charset = 'utf-8'
                response_json = content.read().decode(charset).strip()
                if (not response_json):
                    self.logger.error('JSON response from %s was blank', url)
                    return {}
                try:
                    response = json.loads(response_json)
                except json.decoder.JSONDecodeError as exc:
                    self.logger.error('Error loading JSON: %s', exc)
                    self.logger.debug('JSON text that failed to load: %s', response_json)
                    return {}
                self.logger.log(5, 'API response: %s', response)
                return response
        except Exception as exc:
            self.logger.error('Failed to make API request to %s. Exception follows:', url, exc_info=True)
            return {}

    def detect_status_change(self, response=None):
        """Show a status notification when the status description changed."""
        if (response is not None):
            if (self.__previous_json is None):
                # First poll: remember it, nothing to compare against yet.
                self.__previous_json = response
                return
            if (response.get('status', {}).get('description') == self.__previous_json.get('status', {}).get('description')):
                return
            self.__previous_json = response
        if (self.__previous_json is None):
            return
        self.show_status_notification()

    def notify(self, message):
        """Display *message* as a desktop notification.

        BUG FIX: the original signature omitted ``self`` although this is
        invoked as ``self.notify(message)``.
        """
        return DesktopNotification(title='GitHub', body=message).display()

    def skip_notify(self, message):
        """Log (and suppress) a notification that is disabled by config."""
        self.logger.debug('Desktop notifications turned off. Skipped notification: %s', message)
        return False

    def show_status_notification(self):
        """Notify with the current status description, if enabled."""
        message = self.current_status_description
        (self.skip_notify(message) if ((not self.notify_status) or ((self.previous_status is None) and (self.current_status == 'none'))) else self.notify(message))

    def show_unread_notification(self):
        """Notify about newly-seen unread notifications, if enabled."""
        if ('%d' not in self.unread_notification_template):
            formatted = self.unread_notification_template
        else:
            try:
                new_unread = len(self.new_unread)
            except TypeError:
                new_unread = 0
            try:
                formatted = (self.unread_notification_template % new_unread)
            except TypeError as exc:
                self.logger.error('Failed to format {0!r}: {1}'.format(self.unread_notification_template, exc))
                return False
        return (self.skip_notify(formatted) if (not self.notify_unread) else self.notify(formatted))

    @require(internet)
    def perform_update(self):
        """Refresh status and unread counts, then re-render the bar output."""
        self.output['full_text'] = (self.refresh_icon + self.output.get('full_text', ''))
        self.failed_update = False
        self.update_status()
        try:
            self.config_error = None
            self.update_unread()
        except ConfigError as exc:
            # Surfaced later by run() so the error reaches the status bar.
            self.config_error = exc
        self.data['update_error'] = (self.update_error if self.failed_update else '')
        self.refresh_display()

    @property
    def current_incidents(self):
        """Incident list from the latest poll ([] when unavailable)."""
        try:
            return self.__current_json['incidents']
        except (KeyError, TypeError):
            return []

    @property
    def previous_incidents(self):
        """Incident list from the previous poll ([] when unavailable)."""
        try:
            return self.__previous_json['incidents']
        except (KeyError, TypeError):
            return []

    @property
    def current_status(self):
        """Status indicator from the latest poll (None when unavailable)."""
        try:
            return self.__current_json['status']['indicator']
        except (KeyError, TypeError):
            return None

    @property
    def previous_status(self):
        """Status indicator from the previous poll (None when unavailable)."""
        try:
            return self.__previous_json['status']['indicator']
        except (KeyError, TypeError):
            return None

    @property
    def current_status_description(self):
        """Human-readable status text from the latest poll (or None)."""
        try:
            return self.__current_json['status']['description']
        except (KeyError, TypeError):
            return None

    @require(internet)
    def update_status(self):
        """Poll the status API and notify when the incident list changed."""
        try:
            self.__current_json = self.status_api_request(self.api_methods_url)
            if (not self.__current_json):
                self.failed_update = True
                return
            self.logger.debug('Current GitHub Status: %s', self.current_status)
            self.data['status'] = self.status.get(self.current_status, 'GitHub')
            if (self.current_incidents != self.previous_incidents):
                self.show_status_notification()
            self.__previous_json = self.__current_json
        except Exception:
            self.logger.error('Uncaught error occurred while checking GitHub status. Exception follows:', exc_info=True)
            self.failed_update = True

    @require(internet)
    def update_unread(self):
        """Walk the (paginated) notifications API and update unread counts.

        Returns True on success, False on failure; raises ConfigError when
        the API reports a credential/config problem.
        """
        self.new_unread = None
        try:
            if ((not self.username) and (not self.password) and (not self.access_token)):
                self.logger.debug('No auth configured, notifications will not be checked')
                return True
            if (not HAS_REQUESTS):
                self.logger.error('The requests module is required to check GitHub notifications')
                self.failed_update = True
                return False
            self.logger.debug('Checking unread notifications using %s', ('access token' if self.access_token else 'username/password'))
            if self.access_token:
                request_kwargs = {'headers': {'Authorization': 'token {}'.format(self.access_token)}}
            else:
                request_kwargs = {'auth': (self.username, self.password)}
            self.current_unread = set()
            page_num = 0
            old_unread_url = None
            unread_url = AUTH_URL
            # Follow rel="next" links until no new page URL is produced.
            while (old_unread_url != unread_url):
                old_unread_url = unread_url
                page_num += 1
                self.logger.debug('Reading page %d of notifications (%s)', page_num, unread_url)
                try:
                    response = requests.get(unread_url, **request_kwargs)
                    self.logger.log(5, 'Raw return from GitHub notification check: %s', response.text)
                    unread_data = json.loads(response.text)
                except (requests.ConnectionError, requests.Timeout) as exc:
                    self.logger.error('Failed to check unread notifications: %s', exc)
                    self.failed_update = True
                    return False
                except json.decoder.JSONDecodeError as exc:
                    self.logger.error('Error loading JSON: %s', exc)
                    self.logger.debug('JSON text that failed to load: %s', response.text)
                    self.failed_update = True
                    return False
                if isinstance(unread_data, dict):
                    # A dict (rather than a list) is an API error payload.
                    raise ConfigError(unread_data.get('message', 'Unknown error encountered retrieving unread notifications'))
                self.current_unread.update([x['id'] for x in unread_data if ('id' in x)])
                self.logger.debug('Checking for next page of notifications')
                try:
                    link_header = response.headers['Link']
                except AttributeError:
                    self.logger.error('No headers present in response. This might be due to an API change in the requests module.')
                    self.failed_update = True
                    continue
                except KeyError:
                    self.logger.debug('Only one page of notifications present')
                    continue
                else:
                    try:
                        links = requests.utils.parse_header_links(link_header)
                    except Exception as exc:
                        self.logger.error("Failed to parse 'Link' header: %s", exc)
                        self.failed_update = True
                        continue
                    for link in links:
                        try:
                            link_rel = link['rel']
                            if (link_rel != 'next'):
                                continue
                            unread_url = link['url']
                            break
                        except TypeError:
                            self.logger.warning("Malformed hypermedia link (%s) in 'Link' header (%s)", link, links)
                            continue
                    else:
                        # for/else: no rel="next" link -> last page reached.
                        self.logger.debug('No more pages of notifications remain')
            if self.failed_update:
                return False
            self.data['unread_count'] = len(self.current_unread)
            self.data['unread'] = (self.unread_marker if (self.data['unread_count'] > 0) else '')
            if (self.previous_unread is not None):
                if (not self.current_unread.issubset(self.previous_unread)):
                    self.new_unread = (self.current_unread - self.previous_unread)
            if self.new_unread:
                self.show_unread_notification()
            self.previous_unread = self.current_unread
            return True
        except ConfigError as exc:
            raise exc
        except Exception as exc:
            self.logger.error('Uncaught error occurred while checking GitHub notifications. Exception follows:', exc_info=True)
            self.failed_update = True
            return False

    def refresh_display(self):
        """Re-render the output dict from the collected data and colors."""
        previous_color = self.output.get('color')
        try:
            color = self.colors.get(self.current_status, self.unknown_color)
        except TypeError:
            # self.colors may have been misconfigured; keep the last color.
            color = previous_color
        self.output = {'full_text': formatp(self.format, **self.data).strip(), 'color': color}

    def run(self):
        """Raise any config error captured by the background thread."""
        if (self.config_error is not None):
            raise self.config_error
def get_casava_sample_sheet(samplesheet=None, fp=None, FCID_default='FC1'):
    """Load a sample sheet and return it in CASAVA form.

    Arguments:
        samplesheet: path to a sample sheet file (used when *fp* is None).
        fp: an already-open file-like object to read instead of *samplesheet*
            (the caller retains ownership and must close it).
        FCID_default: default flow-cell ID (kept for interface compatibility;
            not used in this code path).

    The content is first tried as an IEM sample sheet and converted; if that
    fails it is parsed directly as a CASAVA sample sheet.
    """
    if (fp is not None):
        # Caller-supplied handle: read it but do not close it.
        sample_sheet_content = ''.join(fp.readlines())
    else:
        # BUG FIX: the original opened the file here and never closed it;
        # use a context manager so the handle is always released.
        with io.open(samplesheet, 'rt') as sample_sheet_fp:
            sample_sheet_content = sample_sheet_fp.read()
    try:
        iem = IEMSampleSheet(fp=io.StringIO(sample_sheet_content))
        return iem.casava_sample_sheet()
    except IlluminaDataError:
        # Not IEM format; fall back to parsing as CASAVA directly.
        return CasavaSampleSheet(fp=io.StringIO(sample_sheet_content))
def send_mail(to, template, context):
    """Render the given account email template (text + HTML) and send it.

    ``context['subject']`` supplies the subject line; the sender address is
    ``settings.DEFAULT_FROM_EMAIL``. The plain-text rendering is the primary
    body, with the HTML rendering attached as an alternative.
    """
    subject = context['subject']
    text_body = render_to_string(f'accounts/emails/{template}.txt', context)
    html_body = render_to_string(f'accounts/emails/{template}.html', context)
    email = EmailMultiAlternatives(subject, text_body, settings.DEFAULT_FROM_EMAIL, [to])
    email.attach_alternative(html_body, 'text/html')
    email.send()
@pytest.mark.usefixtures('use_tmpdir')
def test_that_empty_job_directory_gives_warning():
    """An INSTALL_JOB_DIRECTORY that exists but contains no files should
    raise a ConfigWarning rather than fail.

    BUG FIX: the decorator line had lost its ``@pytest.mark`` prefix and was
    a bare (syntactically invalid) ``.usefixtures(...)`` expression.
    """
    test_config_file_base = 'test'
    test_config_file_name = f'{test_config_file_base}.ert'
    test_config_contents = dedent('\n        NUM_REALIZATIONS 1\n        DEFINE <STORAGE> storage/<CONFIG_FILE_BASE>-<DATE>\n        RUNPATH <STORAGE>/runpath/realization-<IENS>/iter-<ITER>\n        ENSPATH <STORAGE>/ensemble\n        INSTALL_JOB_DIRECTORY empty\n        ')
    # Create the (empty) job directory referenced by the config.
    os.mkdir('empty')
    with open(test_config_file_name, 'w', encoding='utf-8') as fh:
        fh.write(test_config_contents)
    with pytest.warns(ConfigWarning, match='No files found in job directory'):
        _ = ErtConfig.from_file(test_config_file_name)
class ConcatenateTanhTestCase(unittest.TestCase):
    """Tests for the fused ``concatenate_tanh`` op against a PyTorch
    ``torch.cat`` + ``torch.tanh`` reference.

    BUG FIX: three ``@unittest.skipIf(...)`` decorators on the fp32 tests had
    been stripped to bare tuple expressions, so the fp32 tests were no longer
    skipped on ROCm; the decorators are restored below.
    """

    def __init__(self, *args, **kwargs):
        super(ConcatenateTanhTestCase, self).__init__(*args, **kwargs)
        # Monotonic counter so each compiled module gets a unique workdir.
        self._test_id = 0

    def _run_concatenate(self, *, concatenate_op, input_shapes, dim=None, test_name='concatenate_tanh_cat', input_type='float16'):
        """Compile and run a static-shape concat+tanh, comparing to PyTorch."""
        logging.info(f'Test input shapes {input_shapes}, dim={dim}')
        input_tensors_pt = [get_random_torch_tensor(shape, input_type) for (i, shape) in enumerate(input_shapes)]
        Y_pt = (torch.cat(input_tensors_pt) if (dim is None) else torch.cat(input_tensors_pt, dim))
        Y_pt = torch.tanh(Y_pt)
        target = detect_target()
        inputs = [Tensor(shape=shape, dtype=input_type, name=f'input_{i}', is_input=True) for (i, shape) in enumerate(input_shapes)]
        Y = (concatenate_op(inputs) if (dim is None) else concatenate_op(inputs, dim))
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        y_shape = [d._attrs['values'][0] for d in Y._attrs['shape']]
        logging.info(f'AITemplate output_shape: {y_shape}')
        module = compile_model(Y, target, './tmp', f'{test_name}_{self._test_id}')
        self._test_id += 1
        input_tensors_ait = {f'input_{idx}': input_tensors_pt[idx] for idx in range(len(inputs))}
        y = torch.empty_like(Y_pt)
        module.run_with_tensors(input_tensors_ait, [y])
        self.assertTrue(torch.allclose(Y_pt, y, atol=0.01, rtol=0.01))

    def _run_batch_concatenate(self, *, batch_sizes, concatenate_op, input_shapes, dim=0, test_name='concatenate_tanh_batch_cat', input_type='float16'):
        """Compile once with a dynamic batch dim, then check every batch size."""
        logging.info(f'Batch test input shapes {input_shapes}, dim={dim}')
        batch_dim = shape_utils.gen_int_var_min_max(batch_sizes, 'batch_size')
        target = detect_target()
        inputs = [Tensor(shape=[batch_dim, *shape], dtype=input_type, name=f'input_{i}', is_input=True) for (i, shape) in enumerate(input_shapes)]
        Y = (concatenate_op(inputs) if (dim is None) else concatenate_op(inputs, dim))
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, target, './tmp', f'{test_name}_{self._test_id}')
        self._test_id += 1
        for batch in batch_sizes:
            logging.info(f'checking batch: {batch}')
            input_tensors_pt = [get_random_torch_tensor([batch, *shape], input_type) for (i, shape) in enumerate(input_shapes)]
            Y_pt = (torch.cat(input_tensors_pt) if (dim is None) else torch.cat(input_tensors_pt, dim))
            Y_pt = torch.tanh(Y_pt)
            input_tensors_ait = {f'input_{idx}': input_tensors_pt[idx] for idx in range(len(inputs))}
            y = torch.empty_like(Y_pt)
            module.run_with_tensors(input_tensors_ait, [y])
            self.assertTrue(torch.allclose(Y_pt, y, atol=0.01, rtol=0.01))

    def _run_masked_concatenate(self, *, concatenate_op, input_shapes, input_masks, dim=None, test_name='concatenate_tanh_masked_cat', input_type='float16'):
        """Run concat+tanh with some inputs masked out, checking only the
        sections that correspond to unmasked inputs."""
        logging.info(f'Test input shapes {input_shapes}, input_masks={input_masks}, dim={dim}')
        input_tensors_pt = [get_random_torch_tensor(shape, input_type) for (i, shape) in enumerate(input_shapes)]
        Y_pt = (torch.tanh(torch.cat(input_tensors_pt)) if (dim is None) else torch.tanh(torch.cat(input_tensors_pt, dim)))
        y_pt = Y_pt.cpu().numpy()
        target = detect_target()
        inputs = [Tensor(shape=shape, dtype=input_type, name=f'input_{i}', is_input=True) for (i, shape) in enumerate(input_shapes)]
        Y = (concatenate_op(inputs) if (dim is None) else concatenate_op(inputs, dim))
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        y_shape = [d._attrs['values'][0] for d in Y._attrs['shape']]
        # Keep only the unmasked inputs/accessors and record the mask on the op.
        inputs = [i for (mask, i) in zip(input_masks, inputs) if (mask is True)]
        input_accessors = [i for (mask, i) in zip(input_masks, concatenate_op._attrs['input_accessors']) if (mask is True)]
        concatenate_op._attrs['input_masks'] = input_masks
        concatenate_op._attrs['inputs'] = inputs
        concatenate_op._attrs['input_accessors'] = input_accessors
        logging.info(f'AITemplate output_shape: {y_shape}')
        module = compile_model(Y, target, './tmp', f'{test_name}_{self._test_id}')
        self._test_id += 1
        inputs = []
        for (i, x_tensor_pt) in enumerate(input_tensors_pt):
            if input_masks[i]:
                inputs.append(x_tensor_pt)
        y = get_torch_empty_tensor(y_shape, dtype=input_type)
        module.run_with_tensors(inputs, [y])
        # Split both outputs along `dim` and compare only unmasked sections.
        split_sections = []
        split_offset = 0
        for shape in input_shapes[:(- 1)]:
            split_offset = (split_offset + shape[dim])
            split_sections.append(split_offset)
        ys_pt = np.split(y_pt, split_sections, axis=dim)
        ys = np.split(y.cpu().numpy(), split_sections, axis=dim)
        for (mask, pt, actual) in zip(input_masks, ys_pt, ys):
            if (mask is True):
                np.testing.assert_allclose(actual, pt, atol=0.01, rtol=0.01)

    def test_batch_cat_fp16(self):
        self._run_batch_concatenate(batch_sizes=[1, 1], concatenate_op=ops.concatenate_tanh(), input_shapes=([1], [1]), dim=0, test_name='concatenate_tanh_batch_cat_fp16', input_type='float16')
        self._run_batch_concatenate(batch_sizes=[1, 1], concatenate_op=ops.concatenate_tanh(), input_shapes=([1], [1]), dim=1, test_name='concatenate_tanh_batch_cat_fp16', input_type='float16')
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=0, test_name='concatenate_tanh_batch_cat_fp16', input_type='float16')
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=1, test_name='concatenate_tanh_batch_cat_fp16', input_type='float16')
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=2, test_name='concatenate_tanh_batch_cat_fp16', input_type='float16')
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=3, test_name='concatenate_tanh_batch_cat_fp16', input_type='float16')
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 1, 4], [2, 3, 4]), dim=2, test_name='concatenate_tanh_batch_cat_fp16', input_type='float16')
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 2]), dim=3, test_name='concatenate_tanh_batch_cat_fp16', input_type='float16')

    @unittest.skipIf((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
    def test_batch_cat_fp32(self):
        self._run_batch_concatenate(batch_sizes=[1, 1], concatenate_op=ops.concatenate_tanh(), input_shapes=([1], [1]), dim=0, test_name='concatenate_tanh_batch_cat_fp32', input_type='float32')
        self._run_batch_concatenate(batch_sizes=[1, 1], concatenate_op=ops.concatenate_tanh(), input_shapes=([1], [1]), dim=1, test_name='concatenate_tanh_batch_cat_fp32', input_type='float32')
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=0, test_name='concatenate_tanh_batch_cat_fp32', input_type='float32')
        self._run_batch_concatenate(batch_sizes=[3, 5, 9], concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=1, test_name='concatenate_tanh_batch_cat_fp32', input_type='float32')

    def test_cat_fp16(self):
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([1], [1]), dim=0, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([1, 1], [1, 1]), dim=0, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([1, 1], [1, 1]), dim=1, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 1], [2, 1]), dim=1, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=[[2, 3, 4]], dim=1, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=0, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=1, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 4]), dim=2, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [3, 3, 4], [4, 3, 4]), dim=0, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 4, 4], [2, 5, 4]), dim=1, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 6], [2, 3, 5], [2, 3, 4]), dim=2, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([1024, 32, 32], [1024, 16, 32], [1024, 8, 32]), dim=1, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([12, 3, 4, 5], [3, 3, 4, 5], [7, 3, 4, 5]), dim=0, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4, 5], [2, 3, 4, 5], [2, 3, 4, 5]), dim=1, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 9, 5], [2, 3, 4, 5], [2, 3, 1, 5]), dim=2, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4, 5], [2, 3, 4, 3], [2, 3, 4, 5]), dim=3, test_name='concatenate_tanh_cat_fp16', input_type='float16')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([1, 3, 1], [2, 3, 1], [3, 3, 1]), test_name='concatenate_tanh_cat_fp16', input_type='float16')

    @unittest.skipIf((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
    def test_cat_fp32(self):
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([1], [1]), dim=0, test_name='concatenate_tanh_cat_fp32', input_type='float32')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([1, 1], [1, 1]), dim=0, test_name='concatenate_tanh_cat_fp32', input_type='float32')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([1, 1], [1, 1]), dim=1, test_name='concatenate_tanh_cat_fp32', input_type='float32')
        self._run_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 1], [2, 1]), dim=1, test_name='concatenate_tanh_cat_fp32', input_type='float32')

    def test_masked_cat_fp16(self):
        self._run_masked_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2], [2]), input_masks=[True, False], dim=0, test_name='concatenate_tanh_masked_cat_fp16', input_type='float16')
        self._run_masked_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3], [5, 3], [3, 3]), input_masks=[False, True, True], dim=0, test_name='concatenate_tanh_masked_cat_fp16', input_type='float16')
        self._run_masked_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 11, 4], [2, 5, 4], [2, 2, 4]), input_masks=[True, False, True], dim=1, test_name='concatenate_tanh_masked_cat_fp16', input_type='float16')
        self._run_masked_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([1, 1, 1], [1, 1, 2], [1, 1, 4]), input_masks=[False, True, False], dim=2, test_name='concatenate_tanh_masked_cat_fp16', input_type='float16')
        self._run_masked_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 8], [2, 3, 16]), input_masks=[False, True, False], dim=2, test_name='concatenate_tanh_masked_cat_fp16', input_type='float16')

    @unittest.skipIf((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
    def test_masked_cat_fp32(self):
        self._run_masked_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2], [2]), input_masks=[True, False], dim=0, test_name='concatenate_tanh_masked_cat_fp32', input_type='float32')
        self._run_masked_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3], [5, 3], [3, 3]), input_masks=[False, True, True], dim=0, test_name='concatenate_tanh_masked_cat_fp32', input_type='float32')
        self._run_masked_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 11, 4], [2, 5, 4], [2, 2, 4]), input_masks=[True, False, True], dim=1, test_name='concatenate_tanh_masked_cat_fp32', input_type='float32')
        self._run_masked_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([1, 1, 1], [1, 1, 2], [1, 1, 4]), input_masks=[False, True, False], dim=2, test_name='concatenate_tanh_masked_cat_fp32', input_type='float32')
        self._run_masked_concatenate(concatenate_op=ops.concatenate_tanh(), input_shapes=([2, 3, 4], [2, 3, 8], [2, 3, 32]), input_masks=[False, True, False], dim=2, test_name='concatenate_tanh_masked_cat_fp32', input_type='float32')
class DeleteFileCommand():
    """Command-pattern 'delete file' action with LIFO undo support."""

    def __init__(self) -> None:
        # History of deleted file names; undo() pops the most recent one.
        self._deleted_files: List[str] = []

    def execute(self, filename: str) -> None:
        """'Delete' *filename* and remember it so the action can be undone.

        BUG FIX: the f-strings below printed a literal placeholder instead of
        interpolating the file name.
        """
        print(f'deleting {filename}')
        self._deleted_files.append(filename)

    def undo(self) -> None:
        """Undo the most recent delete.

        Raises:
            IndexError: if there is nothing to undo.
        """
        filename = self._deleted_files.pop()
        print(f'restoring {filename}')
class Cycle():
    """One named display cycle with optional text lines and icon.

    Optional fields stay ``None`` unless the corresponding key is present in
    the supplied config mapping.
    """
    name: str
    # Optional display fields; None means "not configured".
    top_line: str = None
    bottom_line: str = None
    small_icon: str = None

    def __init__(self, name: str, config: Dict[(str, str)], computer: Computer):
        self.name = name
        # BUG FIX: the original tested e.g.  'top_line' in config['top_line'] ,
        # which checks the key name against the *value* string (and raises
        # KeyError when the key is absent).  Test membership in the mapping.
        if ('top_line' in config):
            self.top_line = config['top_line']
        if ('bottom_line' in config):
            self.bottom_line = config['bottom_line']
        if ('small_icon' in config):
            self.small_icon = config['small_icon']
class FileDialogDemo(HasTraits):
    """Demo: press *Open...* to pick a file via the Traits UI file dialog;
    the chosen path is shown in the read-only ``file_name`` field."""
    # Path of the file selected by the user (read-only in the view).
    file_name = File()
    # Button trait; pressing it fires _open_changed() below.
    open = Button('Open...')
    # Horizontal layout: the button, a separator, then the selected path.
    traits_view = View(HGroup(Item('open', show_label=False), '_', Item('file_name', style='readonly', springy=True)), width=0.5)

    def _open_changed(self):
        """Traits change handler invoked when the *open* button is pressed."""
        # NOTE(review): extensions=TextInfo() presumably adds a text-preview
        # pane, and `demo_id` (defined elsewhere in this file) persists the
        # dialog state between invocations -- confirm against the rest of
        # the file.  An empty string means the dialog was cancelled.
        file_name = open_file(extensions=TextInfo(), filter='Python file (*.py)|*.py', id=demo_id)
        if (file_name != ''):
            self.file_name = file_name
# NOTE(review): the two lines below are remnants of stripped decorators —
# presumably @cq.task(...) / @callback() registering this as a management
# callback task; confirm against the original source.
(name='api.vm.base.tasks.vm_create_cb', base=MgmtCallbackTask, bind=True)
()
def vm_create_cb(result, task_id, vm_uuid=None):
    """Callback for the VM create task.

    Parses the create result, persists the active JSON config, waits for the
    deploy process to finish (force-restarting the VM if it takes too long),
    sends notification emails, and creates snapshot/backup definitions from
    the VM's template. Raises TaskException on any failure path.
    """
    vm = Vm.objects.select_related('dc').get(uuid=vm_uuid)
    dc_settings = vm.dc.settings
    msg = result.get('message', '')
    # Success is signalled by returncode 0 AND the expected message text.
    if ((result['returncode'] == 0) and (msg.find('Successfully created') >= 0)):
        json = result.pop('json', None)
        try:
            # NOTE(review): vm.json.load(json) — parses the hypervisor's JSON
            # output into the active config; exact parser lives on the Vm
            # json attribute (not visible here).
            json_active = vm.json.load(json)
            vm.json_active = json_active
            vm.json = json_active
            if result['meta']['apiview']['recreate']:
                # Recreate wipes all snapshot/backup state of the old VM.
                Snapshot.objects.filter(vm=vm).delete()
                SnapshotDefine.objects.filter(vm=vm).delete()
                BackupDefine.objects.filter(vm=vm).delete()
                vm.save_metadata('installed', False, save=False)
        except Exception as e:
            logger.error('Could not parse json output from POST vm_manage(%s). Error: %s', vm_uuid, e)
            _vm_error(task_id, vm)
            logger.exception(e)
            raise TaskException(result, 'Could not parse json output')
        else:
            # Persist config + resource accounting atomically.
            with transaction.atomic():
                vm.save(update_node_resources=True, update_storage_resources=True)
                vm_update_ipaddress_usage(vm)
                vm_created.send(task_id, vm=vm)
        # Created but not started is still a failure.
        if (msg.find('Successfully started') < 0):
            logger.error('VM %s was created, but could not be started! Error: %s', vm_uuid, msg)
            _vm_error(task_id, vm)
            raise TaskException(result, ('Initial start failed (%s)' % msg))
        if dc_settings.VMS_VM_CREATE_EMAIL_SEND:
            sendmail(vm.owner, 'vm/base/vm_create_subject.txt', 'vm/base/vm_create_email.txt', extra_context={'vm': vm}, user_i18n=True, dc=vm.dc, fail_silently=True)
    else:
        # Create failed; a "Successfully deleted" message during recreate
        # means the old VM is already gone, which is a hard error state.
        logger.error('Found nonzero returncode in result from POST vm_manage(%s). Error: %s', vm_uuid, msg)
        _vm_create_cb_failed(result, task_id, vm)
        if (result['meta']['apiview']['recreate'] and (msg.find('Successfully deleted') >= 0)):
            _vm_error(task_id, vm)
        raise TaskException(result, ('Got bad return code (%s). Error: %s' % (result['returncode'], msg)))
    logger.info('VM %s is waiting for deploy_over...', vm_uuid)
    # Poll for deploy completion; after VMS_VM_DEPLOY_TOOLONG seconds force a
    # stop/start, giving up entirely after VMS_VM_DEPLOY_TOOLONG_MAX_CYCLES.
    timer = 0
    repeat = 0
    while (not vm.has_deploy_finished()):
        if (timer > VMS_VM_DEPLOY_TOOLONG):
            if (repeat == VMS_VM_DEPLOY_TOOLONG_MAX_CYCLES):
                logger.error('VM %s deploy process has timed out!', vm_uuid)
                _vm_error(task_id, vm)
                result['message'] = ('VM %s deploy has timed out' % vm.hostname)
                task_log_cb_error(result, task_id, vm=vm, **result['meta'])
                return result
            repeat += 1
            timer = 0
            logger.error('VM %s takes too long to deploy. Sending force stop/start', vm_uuid)
            (tid, err) = vm_reset(vm)
        sleep(3.0)
        timer += 3
    logger.info('VM %s is completely deployed!', vm_uuid)
    # Snapshot internal_metadata before re-fetching the VM row.
    internal_metadata = vm.json.get('internal_metadata', {}).copy()
    # Re-fetch to pick up changes made by the deploy process.
    vm = Vm.objects.select_related('dc', 'template').get(pk=vm.pk)
    vm_deployed.send(task_id, vm=vm)
    if dc_settings.VMS_VM_DEPLOY_EMAIL_SEND:
        sendmail(vm.owner, 'vm/base/vm_deploy_subject.txt', 'vm/base/vm_deploy_email.txt', fail_silently=True, extra_context={'vm': vm, 'internal_metadata': internal_metadata}, user_i18n=True, dc=vm.dc)
    try:
        # Drop the last line of the message (best-effort cosmetic cleanup).
        result['message'] = '\n'.join(result['message'].strip().split('\n')[:(- 1)])
    except Exception as e:
        logger.exception(e)
    task_log_cb_success(result, task_id, vm=vm, **result['meta'])
    # Best-effort: create snapshot/backup definitions from the VM template.
    try:
        if vm.template:
            (vm_define_snapshot, vm_define_backup) = (vm.template.vm_define_snapshot, vm.template.vm_define_backup)
            if (vm_define_snapshot or vm_define_backup):
                user = User.objects.get(id=user_id_from_task_id(task_id))
                request = get_dummy_request(vm.dc, method='POST', user=user)
                SnapshotDefineView.create_from_template(request, vm, vm_define_snapshot, log=logger)
                BackupDefineView.create_from_template(request, vm, vm_define_backup, log=logger)
    except Exception as e:
        logger.exception(e)
    return result
class OFPTableFeaturePropUnknown(OFPTableFeatureProp):
    """Catch-all table-feature property whose payload stays as raw bytes.

    Used for property types not modelled explicitly; the payload
    round-trips through parse/serialize unchanged.
    """

    def __init__(self, type_, length=None, data=None):
        super().__init__(type_, length)
        # Raw, unparsed property payload.
        self.data = data

    def _parse_prop(cls, buf):
        # NOTE(review): first parameter is `cls` — presumably a @classmethod
        # whose decorator was stripped from this file; confirm upstream.
        return {'data': buf}

    def _serialize_prop(self):
        return self.data
class CategoricalCrossentropy(Loss):
    """Categorical cross-entropy loss over probability distributions.

    Accepts truths as Floats2d one-hot/probability rows, Ints1d class
    indices, or lists of ints/label strings (requires ``names``). Supports
    a sentinel ``missing_value``, negative labels via ``neg_prefix``, and
    label smoothing for integer/string truths.
    """

    # Ordered label names used to encode string truths (index = class id).
    names: Optional[Sequence[str]]
    # Sentinel truth value to be masked out of the loss.
    missing_value: Optional[Union[(str, int)]]
    # Reverse lookup built from `names`: label -> class index.
    _name_to_i: Dict[(str, int)]

    def __init__(self, *, normalize: bool=True, names: Optional[Sequence[str]]=None, missing_value: Optional[Union[(str, int)]]=None, neg_prefix: Optional[str]=None, label_smoothing: float=0.0):
        self.normalize = normalize
        self.names = names
        self.missing_value = missing_value
        self.neg_prefix = neg_prefix
        self.label_smoothing = label_smoothing
        if (names is not None):
            self._name_to_i = {name: i for (i, name) in enumerate(names)}
        else:
            self._name_to_i = {}

    def convert_truths(self, truths, guesses: Floats2d) -> Tuple[(Floats2d, Floats2d)]:
        """Normalize *truths* to a (targets, mask) pair of Floats2d.

        The mask zeroes out rows whose truth equals ``missing_value``; for
        negative labels (``neg_prefix``) only the negated class contributes.
        """
        xp = get_array_module(guesses)
        missing = []
        negatives_mask = None
        if self.names:
            negatives_mask = xp.ones((len(truths), len(self.names)), dtype='f')
        missing_value = self.missing_value
        if isinstance(truths, list):
            # Copy so the caller's list is never mutated in place.
            truths = list(truths)
            if len(truths):
                if isinstance(truths[0], int):
                    # Integer class ids: just record missing rows.
                    for (i, value) in enumerate(truths):
                        if (value == missing_value):
                            missing.append(i)
                else:
                    # String labels: require `names` to map to class ids.
                    if (self.names is None):
                        msg = "Cannot calculate loss from list of strings without names. You can pass the names as a keyword argument when you create the loss object, e.g. CategoricalCrossentropy(names=['dog', 'cat'])"
                        raise ValueError(msg)
                    for (i, value) in enumerate(truths):
                        if (value == missing_value):
                            # Placeholder class; the row is masked out anyway.
                            truths[i] = self.names[0]
                            missing.append(i)
                        elif (value and self.neg_prefix and value.startswith(self.neg_prefix)):
                            # Negative label: strip prefix, mark class with -1
                            # (resolved to a positive mask entry below).
                            truths[i] = value[len(self.neg_prefix):]
                            neg_index = self._name_to_i[truths[i]]
                            negatives_mask[i] = 0
                            negatives_mask[i][neg_index] = (- 1)
                    truths = [self._name_to_i[name] for name in truths]
            truths = xp.asarray(truths, dtype='i')
            mask = _make_mask(guesses, missing)
        else:
            mask = _make_mask_by_value(truths, guesses, missing_value)
        if (truths.ndim != guesses.ndim):
            # Class-index truths -> one-hot (with optional label smoothing).
            truths = to_categorical(cast(Ints1d, truths), n_classes=guesses.shape[(- 1)], label_smoothing=self.label_smoothing)
        elif self.label_smoothing:
            raise ValueError('Label smoothing is only applied, when truths have type List[str], List[int] or Ints1d, but it seems like Floats2d was provided.')
        if (negatives_mask is not None):
            # Fold negative-label info into targets and mask in place.
            truths *= negatives_mask
            truths[(truths == (- 1))] = 0
            negatives_mask[(negatives_mask == (- 1))] = 1
            mask *= negatives_mask
        return (truths, mask)

    def __call__(self, guesses: Floats2d, truths: IntsOrFloatsOrStrs) -> Tuple[(Floats2d, float)]:
        """Return (gradient, loss) in one pass."""
        d_truth = self.get_grad(guesses, truths)
        return (d_truth, self._get_loss_from_grad(d_truth))

    def get_grad(self, guesses: Floats2d, truths: IntsOrFloatsOrStrs) -> Floats2d:
        """Gradient of the loss w.r.t. *guesses*: masked (guesses - targets)."""
        (target, mask) = self.convert_truths(truths, guesses)
        xp = get_array_module(target)
        if (guesses.shape != target.shape):
            err = f'Cannot calculate CategoricalCrossentropy loss: mismatched shapes: {guesses.shape} vs {target.shape}.'
            raise ValueError(err)
        # Both guesses and targets must be valid probabilities in [0, 1].
        if (xp.any((guesses > 1)) or xp.any((guesses < 0))):
            err = f'Cannot calculate CategoricalCrossentropy loss with guesses outside the [0,1] interval.'
            raise ValueError(err)
        if (xp.any((target > 1)) or xp.any((target < 0))):
            err = f'Cannot calculate CategoricalCrossentropy loss with truth values outside the [0,1] interval.'
            raise ValueError(err)
        difference = (guesses - target)
        difference *= mask
        if self.normalize:
            # Normalize by batch size.
            difference = (difference / guesses.shape[0])
        return difference

    def get_loss(self, guesses: Floats2d, truths: IntsOrFloatsOrStrs) -> float:
        """Scalar loss (squared-gradient surrogate; see _get_loss_from_grad)."""
        d_truth = self.get_grad(guesses, truths)
        return self._get_loss_from_grad(d_truth)

    def _get_loss_from_grad(self, d_truth: Floats2d) -> float:
        # Sum of squared gradients — a monotone proxy, not the true CE value.
        return (d_truth ** 2).sum()
class EmbeddedModelRegistry(ModelRegistry):
    """In-process (non-persistent) model-instance registry.

    Instances are kept in a dict keyed by model name. A daemon thread
    periodically marks instances unhealthy when their last heartbeat is
    older than ``heartbeat_timeout_secs``.
    """

    def __init__(self, system_app: (SystemApp | None)=None, heartbeat_interval_secs: int=60, heartbeat_timeout_secs: int=120):
        super().__init__(system_app)
        # model_name -> registered instances. Shared with the checker thread.
        self.registry: Dict[str, List[ModelInstance]] = defaultdict(list)
        self.heartbeat_interval_secs = heartbeat_interval_secs
        self.heartbeat_timeout_secs = heartbeat_timeout_secs
        self.heartbeat_thread = threading.Thread(target=self._heartbeat_checker)
        # Daemon thread so the checker never blocks interpreter shutdown.
        self.heartbeat_thread.daemon = True
        self.heartbeat_thread.start()

    def _get_instances(self, model_name: str, host: str, port: int, healthy_only: bool=False) -> Tuple[List[ModelInstance], List[ModelInstance]]:
        """Return (instances for *model_name*, the subset matching host:port).

        The first element is the live registry list (mutating it mutates the
        registry) unless ``healthy_only`` filters it into a new list.
        """
        instances = self.registry[model_name]
        if healthy_only:
            instances = [ins for ins in instances if ins.healthy]
        exist_ins = [ins for ins in instances if ((ins.host == host) and (ins.port == port))]
        return (instances, exist_ins)

    def _heartbeat_checker(self):
        """Daemon loop: expire instances whose heartbeat is stale."""
        while True:
            # BUGFIX: snapshot the values — register_instance() can insert a
            # new key via the defaultdict while we iterate, which would raise
            # "dictionary changed size during iteration".
            for instances in list(self.registry.values()):
                for instance in instances:
                    if (instance.check_healthy and ((datetime.now() - instance.last_heartbeat) > timedelta(seconds=self.heartbeat_timeout_secs))):
                        instance.healthy = False
            time.sleep(self.heartbeat_interval_secs)

    async def register_instance(self, instance: ModelInstance) -> bool:
        """Register *instance*, or refresh it if host:port already exists."""
        model_name = instance.model_name.strip()
        host = instance.host.strip()
        port = instance.port
        (instances, exist_ins) = self._get_instances(model_name, host, port, healthy_only=False)
        if exist_ins:
            # Already known: refresh mutable fields and revive it.
            ins = exist_ins[0]
            ins.weight = instance.weight
            ins.healthy = True
            ins.prompt_template = instance.prompt_template
            ins.last_heartbeat = datetime.now()
        else:
            instance.healthy = True
            instance.last_heartbeat = datetime.now()
            instances.append(instance)
        return True

    async def deregister_instance(self, instance: ModelInstance) -> bool:
        """Mark the matching instance unhealthy (soft delete; entry is kept)."""
        model_name = instance.model_name.strip()
        host = instance.host.strip()
        port = instance.port
        (_, exist_ins) = self._get_instances(model_name, host, port, healthy_only=False)
        if exist_ins:
            ins = exist_ins[0]
            ins.healthy = False
        return True

    async def get_all_instances(self, model_name: str, healthy_only: bool=False) -> List[ModelInstance]:
        return self.sync_get_all_instances(model_name, healthy_only)

    def sync_get_all_instances(self, model_name: str, healthy_only: bool=False) -> List[ModelInstance]:
        """Synchronous lookup of instances for *model_name*."""
        instances = self.registry[model_name]
        if healthy_only:
            instances = [ins for ins in instances if ins.healthy]
        return instances

    async def get_all_model_instances(self, healthy_only: bool=False) -> List[ModelInstance]:
        """Flatten every registered instance across all model names."""
        # BUGFIX: the original string lacked the f prefix, so the literal
        # text '{self.registry}' was logged; use lazy %-args instead.
        logger.debug('Current registry metadata:\n%s', self.registry)
        instances = list(itertools.chain(*self.registry.values()))
        if healthy_only:
            instances = [ins for ins in instances if ins.healthy]
        return instances

    async def send_heartbeat(self, instance: ModelInstance) -> bool:
        """Refresh an instance's heartbeat, auto-registering unknown ones."""
        (_, exist_ins) = self._get_instances(instance.model_name, instance.host, instance.port, healthy_only=False)
        if (not exist_ins):
            (await self.register_instance(instance))
            return True
        ins = exist_ins[0]
        ins.last_heartbeat = datetime.now()
        ins.healthy = True
        return True
class Section(BaseObject):
    """Zendesk Help Center Section resource object.

    NOTE(review): decorators have been stripped from this file — the
    duplicated method names below (category/category, created/created,
    updated/updated, user_segment/user_segment) are presumably
    @property / @<name>.setter pairs in the original, and the bare
    `_segment.setter` line is a stripped-decorator remnant (as written it
    would raise NameError at class creation). Kept byte-identical here;
    restore the decorators from the original source.
    """

    def __init__(self, api=None, category_id=None, created_at=None, description=None, html_url=None, id=None, locale=None, manageable_by=None, name=None, outdated=None, position=None, sorting=None, source_locale=None, updated_at=None, url=None, user_segment_id=None, **kwargs):
        self.api = api
        self.category_id = category_id
        self.created_at = created_at
        self.description = description
        self.html_url = html_url
        self.id = id
        self.locale = locale
        self.manageable_by = manageable_by
        self.name = name
        self.outdated = outdated
        self.position = position
        self.sorting = sorting
        self.source_locale = source_locale
        self.updated_at = updated_at
        self.url = url
        self.user_segment_id = user_segment_id
        # Accept arbitrary extra attributes from the API payload.
        for (key, value) in kwargs.items():
            setattr(self, key, value)
        # Attributes still None were not provided — don't treat them as dirty.
        for key in self.to_dict():
            if (getattr(self, key) is None):
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue

    def category(self):
        # Getter: lazily fetch the related Category via the API client.
        if (self.api and self.category_id):
            return self.api._get_category(self.category_id)

    def category(self, category):
        # Setter: keep the id and the cached object in sync.
        if category:
            self.category_id = category.id
            self._category = category

    def created(self):
        # Getter: created_at parsed into a datetime.
        if self.created_at:
            return dateutil.parser.parse(self.created_at)

    def created(self, created):
        if created:
            self.created_at = created

    def updated(self):
        # Getter: updated_at parsed into a datetime.
        if self.updated_at:
            return dateutil.parser.parse(self.updated_at)

    def updated(self, updated):
        if updated:
            self.updated_at = updated

    def user_segment(self):
        # Getter: lazily fetch the related UserSegment via the API client.
        if (self.api and self.user_segment_id):
            return self.api._get_user_segment(self.user_segment_id)
    _segment.setter
    def user_segment(self, user_segment):
        if user_segment:
            self.user_segment_id = user_segment.id
            self._user_segment = user_segment
def test_two_step_unwrap():
    """Chaining two unwrap processors peels 'results', then each item's 'info'."""
    outer = UnwrapPostProcessorStrategy(configuration=UnwrapPostProcessorConfiguration(data_path='results'))
    inner = UnwrapPostProcessorStrategy(configuration=UnwrapPostProcessorConfiguration(data_path='info'))
    payload = {'results': [{'info': {'email': '', 'preferences': {}}}, {'info': {'email': '', 'preferences': {}}}]}
    unwrapped_once = outer.process(payload)
    expected = [{'email': '', 'preferences': {}}, {'email': '', 'preferences': {}}]
    assert (inner.process(unwrapped_once) == expected)
class VideoEditor(Editor):
    """TraitsUI editor that plays a video URL through QMediaPlayer.

    Supports both Qt5 and Qt6 backends (`is_qt5` switches API paths).
    NOTE(review): the bare string lines like ('aspect_ratio') below are
    remnants of stripped @observe decorators — confirm against the original.
    """

    # Qt widget that displays the video frames.
    control = Instance(QVideoWidget)
    # Custom video surface used only in "functional" (image_func) mode (Qt5).
    surface = Any()
    # Current media: QMediaContent (Qt5) or QUrl (Qt6).
    media_content = Any()
    media_player = Instance(QMediaPlayer)
    # User-facing playback traits mirrored to/from the player.
    aspect_ratio = AspectRatio()
    state = PlayerState()
    position = Float()
    duration = Float()
    media_status = MediaStatus()
    buffer = Range(0, 100)
    video_error = Str()
    muted = Bool(False)
    volume = Range(0.0, 100.0)
    playback_rate = Float(1.0)
    # Optional per-frame transform; only supported on Qt5.
    image_func = Callable()
    notify_interval = Float(1.0)
    # Qt6 audio output object (Qt5 routes audio through the player itself).
    _audio = Any()

    def update_to_regular(self):
        """Switch from functional (image_func) mode back to a plain widget."""
        if (self.surface is not None):
            self.surface.frameAvailable.disconnect(self.control.setImage)
            self.surface = None
        self.control = QVideoWidget()
        self.control.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Expanding)
        self.control.setBackgroundRole(QPalette.ColorRole.Window)
        self.media_player.setVideoOutput(self.control)

    def update_to_functional(self):
        """Switch to functional mode: frames pass through image_func (Qt5 only)."""
        self.control = ImageWidget(image_func=self.image_func)
        self.control.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Expanding)
        self.control.setBackgroundRole(QPalette.ColorRole.Window)
        self.surface = VideoSurface(widget=self.control)
        self.surface.frameAvailable.connect(self.control.setImage)
        self.media_player.setVideoOutput(self.surface)

    def init(self, parent):
        """Create the widget and player, sync all traits, connect signals."""
        self.control = QVideoWidget()
        self.control.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Expanding)
        self.control.setBackgroundRole(QPalette.ColorRole.Window)
        if is_qt5:
            self.media_player = QMediaPlayer(None, QMediaPlayer.VideoSurface)
        else:
            self.media_player = QMediaPlayer()
        self._set_video_url()
        self.media_player.setVideoOutput(self.control)
        if is_qt5:
            self.media_player.setMuted(self.muted)
        else:
            # Qt6 requires an explicit QAudioOutput attached to the player.
            from pyface.qt.QtMultimedia import QAudioOutput
            self._audio = QAudioOutput()
            self._audio.setMuted(self.muted)
            self.media_player.setAudioOutput(self._audio)
        self._update_state()
        self._update_aspect_ratio()
        self._update_muted()
        self._update_volume()
        self._update_playback_rate()
        self._update_notify_interval()
        self._connect_signals()
        self.set_tooltip()

    def dispose(self):
        """Disconnect signals and detach outputs before tearing down."""
        self._disconnect_signals()
        if (self.media_player is not None):
            if (not is_qt5):
                self.media_player.setSource('')
            self.media_player.setVideoOutput(None)
            if (not is_qt5):
                self.media_player.setAudioOutput(None)
        super().dispose()

    def update_editor(self):
        """Editor value (the URL) changed: reload the media."""
        self._set_video_url()

    def _connect_signals(self):
        # Signal names differ between Qt5 and Qt6 multimedia APIs.
        if (self.media_player is not None):
            if is_qt5:
                self.media_player.stateChanged.connect(self._state_changed_emitted)
                self.media_player.error.connect(self._error_emitted)
                self.media_player.bufferStatusChanged.connect(self._buffer_status_changed_emitted)
                self.media_player.notifyIntervalChanged.connect(self._notify_interval_changed_emitted)
            else:
                self.media_player.playbackStateChanged.connect(self._state_changed_emitted)
                self.media_player.errorOccurred.connect(self._error_emitted)
                self.media_player.bufferProgressChanged.connect(self._buffer_status_changed_emitted)
            self.media_player.positionChanged.connect(self._position_changed_emitted)
            self.media_player.durationChanged.connect(self._duration_changed_emitted)
            self.media_player.mediaStatusChanged.connect(self._media_status_changed_emitted)

    def _disconnect_signals(self):
        # Mirror image of _connect_signals.
        if (self.media_player is not None):
            if is_qt5:
                self.media_player.stateChanged.disconnect(self._state_changed_emitted)
                self.media_player.error.disconnect(self._error_emitted)
                self.media_player.bufferStatusChanged.disconnect(self._buffer_status_changed_emitted)
                self.media_player.notifyIntervalChanged.disconnect(self._notify_interval_changed_emitted)
            else:
                self.media_player.playbackStateChanged.disconnect(self._state_changed_emitted)
                self.media_player.errorOccurred.disconnect(self._error_emitted)
                self.media_player.bufferProgressChanged.disconnect(self._buffer_status_changed_emitted)
            self.media_player.positionChanged.disconnect(self._position_changed_emitted)
            self.media_player.durationChanged.disconnect(self._duration_changed_emitted)
            self.media_player.mediaStatusChanged.disconnect(self._media_status_changed_emitted)

    def _set_video_url(self):
        """Convert the editor value to Qt media content and load it."""
        qurl = QUrl.fromUserInput(self.value)
        if is_qt5:
            from pyface.qt.QtMultimedia import QMediaContent
            if qurl.isValid():
                self.media_content = QMediaContent(qurl)
            else:
                self.media_content = QMediaContent(None)
        else:
            self.media_content = qurl
        self.control.updateGeometry()

    # --- Qt signal handlers: push player state into the traits ---

    def _state_changed_emitted(self, state):
        self.state = reversed_state_map[state]

    def _position_changed_emitted(self, position):
        # Guard against feedback into the position trait observer.
        with self.updating_value():
            self.position = (position / 1000.0)

    def _duration_changed_emitted(self, duration):
        # Qt reports milliseconds; traits use seconds.
        self.duration = (duration / 1000.0)

    def _error_emitted(self, error):
        if (error != QMediaPlayer.Error.NoError):
            self.video_error = self.media_player.errorString()
        else:
            self.video_error = ''

    def _media_status_changed_emitted(self, error):
        self.media_status = media_status_map[self.media_player.mediaStatus()]

    def _buffer_status_changed_emitted(self, error):
        # Qt5 reports percent directly; Qt6 reports a 0..1 fraction.
        if is_qt5:
            self.buffer = self.media_player.bufferStatus()
        else:
            self.buffer = int((self.media_player.bufferProgress() * 100))

    def _notify_interval_changed_emitted(self, interval):
        self.notify_interval = (interval / 1000.0)

    # --- trait observers: push trait changes into the player ---
    # (the bare ('name') lines are stripped @observe('name') decorators)
    ('aspect_ratio')
    def _aspect_ratio_observer(self, event):
        if (self.control is not None):
            self._update_aspect_ratio()
    ('image_func')
    def _image_func_observer(self, event):
        if (self.image_func is None):
            self.update_to_regular()
        elif (not is_qt5):
            raise ValueError('image_func is not supported on Qt6')
        else:
            self.update_to_functional()
    ('media_content')
    def _media_content_observer(self, event):
        self.video_error = ''
        if (self.media_player is not None):
            if is_qt5:
                self.media_player.setMedia(self.media_content)
            else:
                self.media_player.setSource(self.media_content)
    ('muted')
    def _muted_observer(self, event):
        if (self.media_player is not None):
            self._update_muted()
    ('playback_rate')
    def _playback_rate_observer(self, event):
        if (self.media_player is not None):
            self._update_playback_rate()
    ('position')
    def _position_observer(self, event):
        # Skip when the change originated from the player itself.
        if ((self.media_player is not None) and (not self.updating)):
            self.media_player.setPosition(int((self.position * 1000)))
    ('state')
    def _state_observer(self, event):
        if (self.media_player is not None):
            self._update_state()
    ('volume')
    def _volume_observer(self, event):
        if (self.media_player is not None):
            self._update_volume()
    ('notify_interval')
    def _notify_interval_observer(self, event):
        if (self.media_player is not None):
            self._update_notify_interval()

    # --- helpers that write trait values into the Qt objects ---

    def _update_aspect_ratio(self):
        self.control.setAspectRatioMode(aspect_ratio_map[self.aspect_ratio])

    def _update_muted(self):
        if is_qt5:
            self.media_player.setMuted(self.muted)
        else:
            self._audio.setMuted(self.muted)

    def _update_playback_rate(self):
        self.media_player.setPlaybackRate(self.playback_rate)

    def _update_state(self):
        if (self.state == 'stopped'):
            self.media_player.stop()
            self.control.repaint()
        elif (self.state == 'playing'):
            # Resize nudge: forces the widget to refresh after play starts
            # (works around a Qt rendering quirk — presumably; confirm).
            s = self.control.size()
            w = s.width()
            h = s.height()
            self.media_player.play()
            self.control.resize((w + 1), (h + 1))
            self.control.resize(w, h)
        elif (self.state == 'paused'):
            self.media_player.pause()

    def _update_volume(self):
        # Convert the 0-100 logarithmic UI volume to Qt's linear scale.
        linear_volume = QAudio.convertVolume((self.volume / 100.0), QAudio.VolumeScale.LogarithmicVolumeScale, QAudio.VolumeScale.LinearVolumeScale)
        if is_qt5:
            self.media_player.setVolume(int((linear_volume * 100)))
        else:
            self._audio.setVolume(linear_volume)

    def _update_notify_interval(self):
        # Qt6 removed setNotifyInterval; only applicable on Qt5.
        if is_qt5:
            interval = int((self.notify_interval * 1000))
            self.media_player.setNotifyInterval(interval)
def get_checksum(item):
    """Build a change-detection checksum string for a media item.

    Joins the item's Etag with selected UserData fields; optional fields
    fall back to '-' so the checksum always has the same shape.
    """
    userdata = item['UserData']
    parts = (
        item['Etag'],
        userdata['Played'],
        userdata['IsFavorite'],
        userdata.get('Likes', '-'),
        userdata['PlaybackPositionTicks'],
        userdata.get('UnplayedItemCount', '-'),
        userdata.get('PlayedPercentage', '-'),
    )
    return '_'.join(str(part) for part in parts)
class AllChildren(Rule):
    """Rule that applies *rule* to every child of the value under test.

    Delegates to the first applicable strategy among list members,
    list-edit members, and term children.
    """

    rule: Rule
    combined_rule: Rule

    def __init__(self, rule: Rule, get_children: Callable[[Any], Dict[str, Any]], construct: Callable[[type, Dict[str, Any]], Any], name: str='all_children') -> None:
        super().__init__(name)
        self.rule = rule
        self.combined_rule = FirstMatch([AllListMembers(rule), AllListEditMembers(rule), AllTermChildren(rule, get_children, construct)])

    def apply(self, test: Any) -> RuleResult:
        """Run the combined child-handling rule against *test*."""
        return self.combined_rule(test)

    def __str__(self) -> str:
        return f'all_children( {str(self.rule)} )'

    def always_succeeds(self) -> bool:
        """True iff the wrapped per-child rule can never fail."""
        return self.rule.always_succeeds()
def test_check_feeder_broker_connection(mocker):
    """check_connection reports connected=True when the broker holds a live session."""
    from feeder.util.feeder import check_connection

    class MockDevice(dict):
        # check_connection looks up the broker session by this attribute.
        gatewayHid = 'gateway_hid'

    fake_session = mocker.Mock()
    fake_session.return_value.transitions.is_connected.return_value = True
    fake_broker = mocker.Mock()
    fake_broker._sessions = {'gateway_hid': (fake_session, None)}
    status = check_connection(device=MockDevice(), broker=fake_broker)
    assert status['connected']
def Run(params):
    """Open a browser tab per changed repo, from a user-supplied URL pattern.

    ``params.args[1]`` is a ``str.format`` pattern; remaining args are
    ``--key=value`` pairs. ``--dest=<branch>`` is required; 'source'
    (current branch) and 'repo' keys are filled in per repository.
    """
    # Hoisted from the loop body / mid-function in the original.
    import os.path
    import webbrowser

    from mu_repo.get_repos_and_curr_branch import GetReposAndCurrBranch
    from mu_repo.repos_with_changes import ComputeReposWithChangesFromCurrentBranchToOrigin
    repos_and_curr_branch = GetReposAndCurrBranch(params)
    keywords = {}
    if (len(params.args) < 2):
        Print('Not enough arguments passed.')
        return
    pattern = params.args[1]
    for arg in params.args[2:]:
        # ROBUSTNESS: the original used arg.index('='), which raised an
        # uncaught ValueError for a bare '--flag'; treat it as unexpected.
        if arg.startswith('--') and ('=' in arg):
            (key, _, val) = arg[2:].partition('=')
            keywords[key] = val
        else:
            Print(('Unexpected parameter: %s' % (arg,)))
            return
    if (pattern is None):
        Print('Main pattern not specified.')
        return
    if ('dest' not in keywords):
        Print('--dest= not specified')
        return
    dest = keywords['dest']
    repos_with_changes = set(ComputeReposWithChangesFromCurrentBranchToOrigin(repos_and_curr_branch, params, target_branch=dest))
    for (repo, branch) in repos_and_curr_branch:
        keywords['source'] = branch
        if (repo in repos_with_changes):
            # Derive a printable repo name: directory name for '.', otherwise
            # strip dots and path separators.
            if (repo == '.'):
                repo = os.path.basename(os.path.realpath('.'))
            else:
                repo = repo.replace('.', '').replace('/', '').replace('\\', '')
            keywords['repo'] = repo
            url = pattern.format(**keywords)
            webbrowser.open_new_tab(url)
# NOTE(review): remnant of a stripped decorator — presumably
# @_index_loaded guarding against use before the index exists.
_index_loaded
def default_search_with_decks(editor: aqt.editor.Editor, textRaw: Optional[str], decks: List[int]):
    """Run the default search over *decks* for the editor's raw query text.

    Rejects None, over-long (>3000 chars) and empty-after-cleaning queries
    with a UI message instead of searching.
    """
    if (textRaw is None):
        return
    index = get_index()
    if (len(textRaw) > 3000):
        if ((editor is not None) and (editor.web is not None)):
            UI.empty_result('Query was <b>too long</b>')
        return
    cleaned = index.clean(textRaw)
    if (len(cleaned) == 0):
        UI.empty_result(('Query was empty after cleaning.<br/><br/><b>Query:</b> <i>%s</i>' % utility.text.trim_if_longer_than(textRaw, 100).replace('\x1f', '').replace('`', '`')))
        return
    # Remember the search so it can be re-run later.
    index.lastSearch = (cleaned, decks, 'default')
    # NOTE(review): searchRes is assigned but unused in this chunk — the
    # function may be truncated here; confirm against the full source.
    searchRes = index.search(cleaned, decks)
.feature('unit')
.story('south')
class TestServicesSouthServer():
def south_fixture(self, mocker):
def cat_get():
config = _TEST_CONFIG
config['plugin']['value'] = config['plugin']['default']
return config
mocker.patch.object(FledgeMicroservice, '__init__', return_value=None)
south_server = Server()
south_server._storage = MagicMock(spec=StorageClientAsync)
attrs = {'create_configuration_category.return_value': None, 'get_configuration_category.return_value': cat_get(), 'register_interest.return_value': {'id': 1234, 'message': 'all ok'}}
south_server._core_microservice_management_client = Mock()
south_server._core_microservice_management_client.configure_mock(**attrs)
mocker.patch.object(south_server, '_name', 'test')
ingest_start = mocker.patch.object(Ingest, 'start', return_value=mock_coro())
log_exception = mocker.patch.object(South._LOGGER, 'exception')
log_error = mocker.patch.object(South._LOGGER, 'error')
log_info = mocker.patch.object(South._LOGGER, 'info')
log_warning = mocker.patch.object(South._LOGGER, 'warning')
return (cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning)
.asyncio
async def test__start_async_plugin(self, mocker, loop):
(cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
mock_plugin = MagicMock()
attrs = copy.deepcopy(plugin_attrs)
attrs['plugin_info.return_value']['mode'] = 'async'
mock_plugin.configure_mock(**attrs)
sys.modules['fledge.plugins.south.test.test'] = mock_plugin
(await south_server._start(loop))
(await asyncio.sleep(0.5))
assert (1 == ingest_start.call_count)
ingest_start.assert_called_with(south_server)
assert (1 == log_info.call_count)
assert (0 == log_exception.call_count)
assert (south_server._task_main.done() is True)
.asyncio
async def test__start_async_plugin_bad_plugin_value(self, mocker, loop):
mocker.patch.object(FledgeMicroservice, '__init__', return_value=None)
south_server = Server()
south_server._storage = MagicMock(spec=StorageClientAsync)
mocker.patch.object(south_server, '_name', 'test')
mocker.patch.object(south_server, '_stop', return_value=mock_coro())
log_exception = mocker.patch.object(South._LOGGER, 'exception')
(await south_server._start(loop))
(await asyncio.sleep(0.5))
log_exception.assert_called_with('Failed to initialize plugin {}'.format(south_server._name))
.asyncio
async def test__start_async_plugin_bad_plugin_name(self, mocker, loop):
(cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
mocker.patch.object(south_server, '_stop', return_value=mock_coro())
sys.modules['fledge.plugins.south.test.test'] = None
with patch.object(South._LOGGER, 'error') as log_error:
(await south_server._start(loop))
(await asyncio.sleep(0.5))
assert (1 == log_error.call_count)
log_error.assert_called_once_with('Unable to load module |{}| for South plugin |{}| - error details |{}|'.format(south_server._name, south_server._name, south_server._name))
assert (1 == log_exception.call_count)
log_exception.assert_called_with('Failed to initialize plugin {}'.format(south_server._name))
.asyncio
async def test__start_async_plugin_bad_plugin_type(self, mocker, loop):
(cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
mocker.patch.object(south_server, '_stop', return_value=mock_coro())
mock_plugin = MagicMock()
attrs = copy.deepcopy(plugin_attrs)
attrs['plugin_info.return_value']['mode'] = 'async'
attrs['plugin_info.return_value']['type'] = 'bad'
mock_plugin.configure_mock(**attrs)
sys.modules['fledge.plugins.south.test.test'] = mock_plugin
with patch.object(South._LOGGER, 'error') as log_error:
(await south_server._start(loop))
(await asyncio.sleep(0.5))
assert (1 == log_error.call_count)
log_error.assert_called_once_with('cannot proceed the execution, only the type -south- is allowed - plugin name |{}| plugin type |bad|'.format(south_server._name))
log_exception.assert_called_with('Failed to initialize plugin {}'.format(south_server._name))
.asyncio
async def test__start_poll_plugin(self, loop, mocker):
(cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
mocker.patch.object(south_server, '_stop', return_value=mock_coro())
mock_plugin = MagicMock()
attrs = copy.deepcopy(plugin_attrs)
attrs['plugin_info.return_value']['mode'] = 'poll'
attrs['plugin_info.return_value']['config'].update({'pollInterval': {'description': 'The interval between poll calls expressed in milliseconds.', 'type': 'integer', 'default': '1000', 'value': '1000'}})
attrs['plugin_init.return_value'].update({'pollInterval': {'description': 'The interval between poll calls expressed in milliseconds.', 'type': 'integer', 'default': '1000', 'value': '1000'}})
mock_plugin.configure_mock(**attrs)
sys.modules['fledge.plugins.south.test.test'] = mock_plugin
South._MAX_RETRY_POLL = 1
(await south_server._start(loop))
(await asyncio.sleep(0.5))
assert (1 == ingest_start.call_count)
ingest_start.assert_called_with(south_server)
assert (1 == log_info.call_count)
assert (0 == log_warning.call_count)
assert (south_server._task_main.done() is False)
.asyncio
async def test__exec_plugin_async(self, loop, mocker):
(cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
mock_plugin = MagicMock()
attrs = copy.deepcopy(plugin_attrs)
attrs['plugin_info.return_value']['mode'] = 'async'
mock_plugin.configure_mock(**attrs)
sys.modules['fledge.plugins.south.test.test'] = mock_plugin
(await south_server._start(loop))
(await asyncio.sleep(0.5))
(await south_server._exec_plugin_async())
assert (2 == log_info.call_count)
log_info.assert_called_with('Started South Plugin: {}'.format(south_server._name))
.asyncio
async def test__exec_plugin_poll(self, loop, mocker):
(cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
mocker.patch.object(south_server, '_stop', return_value=mock_coro())
mock_plugin = MagicMock()
attrs = copy.deepcopy(plugin_attrs)
attrs['plugin_info.return_value']['mode'] = 'poll'
attrs['plugin_info.return_value']['config'].update({'pollInterval': {'description': 'The interval between poll calls expressed in milliseconds.', 'type': 'integer', 'default': '1000', 'value': '1000'}})
attrs['plugin_init.return_value'].update({'pollInterval': {'description': 'The interval between poll calls expressed in milliseconds.', 'type': 'integer', 'default': '1000', 'value': '1000'}})
mock_plugin.configure_mock(**attrs)
sys.modules['fledge.plugins.south.test.test'] = mock_plugin
South._MAX_RETRY_POLL = 1
South._TIME_TO_WAIT_BEFORE_RETRY = 0.1
(await south_server._start(loop))
(await asyncio.sleep(0.5))
(await south_server._exec_plugin_poll())
assert (2 == log_info.call_count)
log_info.assert_called_with('Started South Plugin: {}'.format(south_server._name))
.asyncio
async def test__exec_plugin_poll_exceed_retries(self, loop, mocker):
(cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
mocker.patch.object(south_server, '_stop', return_value=mock_coro())
mock_plugin = MagicMock()
attrs = copy.deepcopy(plugin_attrs)
attrs['plugin_info.return_value']['mode'] = 'poll'
attrs['plugin_info.return_value']['config'].update({'pollInterval': {'description': 'The interval between poll calls expressed in milliseconds.', 'type': 'integer', 'default': '1000', 'value': '1000'}})
attrs['plugin_init.return_value'].update({'pollInterval': {'description': 'The interval between poll calls expressed in milliseconds.', 'type': 'integer', 'default': '1000', 'value': '1000'}})
mock_plugin.configure_mock(**attrs)
sys.modules['fledge.plugins.south.test.test'] = mock_plugin
South._MAX_RETRY_POLL = 1
South._TIME_TO_WAIT_BEFORE_RETRY = 0.1
(await south_server._start(loop))
(await asyncio.sleep(0.5))
(await south_server._exec_plugin_poll())
assert (2 == log_info.call_count)
assert (2 == log_warning.call_count)
assert (2 == log_error.call_count)
calls = [call('Stopped all polling tasks for plugin: test'), call('Stopped all polling tasks for plugin: test')]
log_warning.assert_has_calls(calls, any_order=True)
.asyncio
async def test_run(self, mocker):
pass
.asyncio
async def test__stop(self, loop, mocker):
(cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
mocker.patch.object(Ingest, 'stop', return_value=mock_coro())
mock_plugin = MagicMock()
attrs = copy.deepcopy(plugin_attrs)
attrs['plugin_info.return_value']['mode'] = 'async'
mock_plugin.configure_mock(**attrs)
sys.modules['fledge.plugins.south.test.test'] = mock_plugin
(await south_server._start(loop))
(await asyncio.sleep(0.1))
South._CLEAR_PENDING_TASKS_TIMEOUT = 1
(await south_server._stop(loop))
assert (3 == log_info.call_count)
calls = [call('Started South Plugin: {}'.format(south_server._name)), call('Stopped the Ingest server.'), call('Stopping South service event loop, for plugin test.')]
log_info.assert_has_calls(calls, any_order=True)
assert (0 == log_exception.call_count)
.asyncio
async def test__stop_plugin_stop_error(self, loop, mocker):
    """A RuntimeError raised by plugin_shutdown during _stop is logged
    (one exception-level entry) but does not abort the shutdown sequence."""
    (cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
    mocker.patch.object(Ingest, 'stop', return_value=mock_coro())
    mock_plugin = MagicMock()
    attrs = copy.deepcopy(plugin_attrs)
    attrs['plugin_info.return_value']['mode'] = 'async'
    # Make the plugin's shutdown hook blow up.
    attrs['plugin_shutdown.side_effect'] = RuntimeError
    mock_plugin.configure_mock(**attrs)
    sys.modules['fledge.plugins.south.test.test'] = mock_plugin
    (await south_server._start(loop))
    (await asyncio.sleep(0.5))
    South._CLEAR_PENDING_TASKS_TIMEOUT = 1
    (await south_server._stop(loop))
    assert (3 == log_info.call_count)
    calls = [call('Started South Plugin: {}'.format(south_server._name)), call('Stopped the Ingest server.'), call('Stopping South service event loop, for plugin test.')]
    log_info.assert_has_calls(calls, any_order=True)
    # The plugin_shutdown failure must be logged exactly once.
    assert (1 == log_exception.call_count)
.asyncio
async def test_shutdown(self, loop, mocker):
    """shutdown() stops the service, unregisters it, and schedules the
    event-loop stop via call_later exactly once."""
    (cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
    mocker.patch.object(south_server, '_stop', return_value=mock_coro())
    mocker.patch.object(south_server, 'unregister_service_with_core', return_value=True)
    call_patch = mocker.patch.object(asyncio.get_event_loop(), 'call_later')
    (await south_server.shutdown(request=None))
    assert (1 == call_patch.call_count)
@pytest.mark.asyncio  # NOTE(review): restored from mangled ".asyncio" -- confirm
async def test_shutdown_error(self, loop, mocker):
    """A failure during shutdown() must surface as HTTPInternalServerError."""
    (cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
    mocker.patch.object(south_server, '_stop', return_value=mock_coro(), side_effect=RuntimeError)
    mocker.patch.object(south_server, 'unregister_service_with_core', return_value=True)
    call_patch = mocker.patch.object(asyncio.get_event_loop(), 'call_later', side_effect=RuntimeError)
    # FIX: "from aio import ..." is not an importable module; the exception
    # class lives in aiohttp.web, the framework these services run on.
    from aiohttp.web import HTTPInternalServerError
    with pytest.raises(HTTPInternalServerError):
        await south_server.shutdown(request=None)
.asyncio
async def test_change(self, loop, mocker):
    """change() reconfigures and restarts the plugin, logging each step."""
    (cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
    mock_plugin = MagicMock()
    attrs = copy.deepcopy(plugin_attrs)
    attrs['plugin_info.return_value']['mode'] = 'async'
    mock_plugin.configure_mock(**attrs)
    sys.modules['fledge.plugins.south.test.test'] = mock_plugin
    (await south_server._start(loop))
    (await asyncio.sleep(0.5))
    (await south_server.change(request=None))
    # Two starts (initial + restart) plus the two reconfiguration messages.
    assert (4 == log_info.call_count)
    calls = [call('Started South Plugin: test'), call('Configuration has changed for South plugin test'), call('Reconfiguration done for South plugin test'), call('Started South Plugin: test')]
    log_info.assert_has_calls(calls, any_order=True)
.asyncio
async def test_change_filter(self, loop, mocker):
    """A config change carrying a filter pipeline logs a warning, because
    the South service does not support filter pipelines."""
    # Category config served by the mocked management client: plugin + pipeline.
    _FILTER_TEST_CONFIG = {'plugin': {'description': 'Python module name of the plugin to load', 'type': 'string', 'default': 'test', 'value': 'test'}, 'filter': {'type': 'JSON', 'default': '{"pipeline": ["scale"]}', 'value': '{"pipeline": ["scale"]}', 'description': 'Filter pipeline'}}
    mocker.patch.object(FledgeMicroservice, '__init__', return_value=None)
    south_server = Server()
    south_server._storage = MagicMock(spec=StorageClientAsync)
    attrs = {'create_configuration_category.return_value': None, 'get_configuration_category.return_value': _FILTER_TEST_CONFIG, 'register_interest.return_value': {'id': 1234, 'message': 'all ok'}}
    south_server._core_microservice_management_client = Mock()
    south_server._core_microservice_management_client.configure_mock(**attrs)
    mocker.patch.object(south_server, '_name', 'test')
    ingest_start = mocker.patch.object(Ingest, 'start', return_value=mock_coro())
    log_warning = mocker.patch.object(South._LOGGER, 'warning')
    mock_plugin = MagicMock()
    attrs = copy.deepcopy(plugin_attrs)
    attrs['plugin_info.return_value']['mode'] = 'async'
    mock_plugin.configure_mock(**attrs)
    sys.modules['fledge.plugins.south.test.test'] = mock_plugin
    (await south_server._start(loop))
    (await asyncio.sleep(0.5))
    (await south_server.change(request=None))
    assert (1 == log_warning.call_count)
    calls = [call('South Service [%s] does not support the use of a filter pipeline.', 'test')]
    log_warning.assert_has_calls(calls, any_order=True)
.asyncio
async def test_change_error(self, loop, mocker):
    """A DataRetrievalError during plugin_reconfigure is logged at
    exception level; reconfiguration stops after the 'changed' message."""
    from fledge.services.south import exceptions
    (cat_get, south_server, ingest_start, log_exception, log_error, log_info, log_warning) = self.south_fixture(mocker)
    mock_plugin = MagicMock()
    attrs = copy.deepcopy(plugin_attrs)
    attrs['plugin_info.return_value']['mode'] = 'async'
    # Fail the reconfigure hook.
    attrs['plugin_reconfigure.side_effect'] = exceptions.DataRetrievalError
    mock_plugin.configure_mock(**attrs)
    sys.modules['fledge.plugins.south.test.test'] = mock_plugin
    with pytest.raises(TypeError):
        (await south_server._start(loop))
        (await asyncio.sleep(0.5))
        (await south_server.change(request=None))
    assert (2 == log_info.call_count)
    calls = [call('Started South Plugin: test'), call('Configuration has changed for South plugin test')]
    log_info.assert_has_calls(calls, any_order=True)
    assert (1 == log_exception.call_count)
    calls = [call('Data retrieval error in plugin test during reconfigure')]
    log_exception.assert_has_calls(calls, any_order=True)
class TestElaborate(unittest.TestCase):
    """Tests for elaborate(): inserting ornamental notes around a motif note."""
    # Motif pitches (MIDI note numbers); None marks a rest.
    pitch_motif = [80, 77, None]
    # Duration (in beats) of each motif note.
    duration_motif = [2, 1, 1]
    # Pitch classes of the reference scale used for stepping.
    scale = [5, 7, 8, 10, 0, 1, 4]
    # Three descending scale steps as the elaboration figure.
    steps = [(- 1), (- 1), (- 1)]

    def test_right(self):
        """Elaborate to the right of note 0, stealing 1/4 of its duration."""
        out = elaborate(self.pitch_motif, self.duration_motif, 0, self.steps, self.scale, 'right', (1 / 4))
        expected = ([80, 79, 77, 76, 77, None], [1.5, (1 / 6), (1 / 6), (1 / 6), 1, 1])
        self.assertEqual(out, expected)

    def test_right_absolute(self):
        """With relative stepping disabled, every inserted note repeats the
        first step from the base pitch."""
        out = elaborate(self.pitch_motif, self.duration_motif, 0, self.steps, self.scale, 'right', (1 / 4), False)
        expected = ([80, 79, 79, 79, 77, None], [1.5, (1 / 6), (1 / 6), (1 / 6), 1, 1])
        self.assertEqual(out, expected)

    def test_right_chord(self):
        """A chord (list of pitches) is stepped as a unit."""
        pitch_motif = [[80, 84], 77, None]
        out = elaborate(pitch_motif, self.duration_motif, 0, self.steps, self.scale, 'right', (1 / 4))
        expected = ([[80, 84], [79, 82], [77, 80], [76, 79], 77, None], [1.5, (1 / 6), (1 / 6), (1 / 6), 1, 1])
        self.assertEqual(out, expected)

    def test_left(self):
        """Left elaboration inserts notes before the target note."""
        out = elaborate(self.pitch_motif, self.duration_motif, 0, self.steps, self.scale, 'left')
        expected = ([76, 77, 79, 80, 77, None], [0.5, 0.5, 0.5, 0.5, 1, 1])
        self.assertEqual(out, expected)

    def test_previous(self):
        """'previous' mode steps from the note before the target."""
        out = elaborate(self.pitch_motif, self.duration_motif, 1, self.steps, self.scale, 'previous')
        expected = ([80, 72, 73, 76, 77, None], [0.5, 0.5, 0.5, 0.5, 1, 1])
        self.assertEqual(out, expected)

    def test_previous_0(self):
        """'previous' on the first note with an explicit duration; note the
        negative placeholder durations in the expected output."""
        out = elaborate(self.pitch_motif, self.duration_motif, 0, self.steps, self.scale, 'previous', duration=4)
        expected = ([76, 77, 79, 80, 77, None], [(- 1), (- 1), (- 1), 2, 1, 1])
        self.assertEqual(out, expected)

    def test_next(self):
        """'next' mode borrows duration from the following note."""
        out = elaborate(self.pitch_motif, self.duration_motif, 0, self.steps, self.scale, 'next', (1 / 2))
        expected = ([80, 79, 77, 76, 77, None], [2, (1 / 6), (1 / 6), (1 / 6), (1 / 2), 1])
        self.assertEqual(out, expected)

    def test_next_last(self):
        """'next' on the final note appends after the motif."""
        pitch_motif = [80, 77, 80]
        out = elaborate(pitch_motif, self.duration_motif, 2, self.steps, self.scale, 'next', duration=4)
        expected = ([80, 77, 80, 79, 77, 76], [2, 1, 1, (- 1), (- 1), (- 1)])
        self.assertEqual(out, expected)

    def test_none_0(self):
        """A None step repeats/omits the note instead of stepping it."""
        pitch_motif = [[80, 81], 77, None]
        out = elaborate(pitch_motif, self.duration_motif, 0, [0, None], self.scale, 'right')
        expected = ([[80, 81], [80, 81], None, 77, None], [(2 / 3), (2 / 3), (2 / 3), 1, 1])
        self.assertEqual(out, expected)
def test_get_raw_message_serialization():
    """Round-trip a GET_RAW_MESSAGE ContractApiMessage through an Envelope
    and verify it decodes back to an equal message."""
    kwargs_arg = ContractApiMessage.Kwargs({'key_1': 1, 'key_2': 2})
    msg = ContractApiMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=ContractApiMessage.Performative.GET_RAW_MESSAGE, ledger_id='some_ledger_id', contract_id='some_contract_id', contract_address='some_contract_address', callable='some_callable', kwargs=kwargs_arg)
    msg.to = 'receiver'
    envelope = Envelope(to=msg.to, sender='sender', message=msg)
    envelope_bytes = envelope.encode()
    actual_envelope = Envelope.decode(envelope_bytes)
    expected_envelope = envelope
    assert (expected_envelope.to == actual_envelope.to)
    assert (expected_envelope.sender == actual_envelope.sender)
    assert (expected_envelope.protocol_specification_id == actual_envelope.protocol_specification_id)
    # The decoded payload is still raw bytes here, so it differs from the
    # original Message object until it is deserialized below.
    assert (expected_envelope.message != actual_envelope.message)
    actual_msg = ContractApiMessage.serializer.decode(actual_envelope.message)
    # to/sender travel on the envelope, not in the serialized body; restore
    # them before comparing messages.
    actual_msg.to = actual_envelope.to
    actual_msg.sender = actual_envelope.sender
    expected_msg = msg
    assert (expected_msg == actual_msg)
def test_hicCompareMatrices_doubleMinusOneEqual0():
    """diff of the doubled matrix against the original yields a matrix with
    no nonzero entries but identical cut intervals."""
    outfile = NamedTemporaryFile(suffix='.cool', delete=False)
    outfile.close()
    args = '--matrices {} {} --outFileName {} --operation diff'.format((ROOT + 'hicCompareMatrices/small_test_matrix_twice.cool'), (ROOT + 'small_test_matrix.cool'), outfile.name).split()
    # compute() retries the command up to 5 times (flaky-test guard).
    compute(hicCompareMatrices.main, args, 5)
    # NOTE(review): `input` shadows the builtin -- consider renaming.
    input = hm.hiCMatrix((ROOT + 'small_test_matrix.cool'))
    new = hm.hiCMatrix(outfile.name)
    # Sparse data array is empty <=> all entries are zero.
    nt.assert_equal([], new.matrix.data)
    nt.assert_equal(input.cut_intervals, new.cut_intervals)
    os.unlink(outfile.name)
def cflaf(x, d, M=128, P=5, mu_L=0.2, mu_FL=0.5, mu_a=0.5):
    """Adapt a linear filter combined with a trigonometric functional-link
    expansion to track ``d`` from input ``x``.

    Args:
        x: input signal.
        d: desired signal.
        M: number of filter taps.
        P: expansion order (Q = 2*P sin/cos terms per tap).
        mu_L: step size of the linear branch.
        mu_FL: step size of the functional-link branch.
        mu_a: step size for the sigmoid mixing parameter.

    Returns:
        Array of the combined a-priori errors, one per iteration.
    """
    num_iters = min(len(x), len(d)) - M
    Q = 2 * P
    beta = 0.9  # smoothing factor for the power estimate of y_fl
    sin_idx = np.arange(0, Q * M, 2)  # even slots hold sine terms
    cos_idx = np.arange(1, Q * M, 2)  # odd slots hold cosine terms
    # Expansion orders repeated per tap.  NOTE(review): arange(P) starts at 0,
    # so the first sine term is identically zero -- confirm this is intended.
    orders = np.tile(np.arange(P), M)
    taps = np.zeros(M)       # input tap-delay line
    w_lin = np.zeros(M)      # linear branch weights
    w_fl = np.zeros(Q * M)   # functional-link branch weights
    alpha = 0                # mixing parameter (passed through a sigmoid)
    power = 1                # running power estimate of the FL output
    errors = np.zeros(num_iters)
    for it in range(num_iters):
        # Shift the delay line and insert the newest sample.
        taps[1:] = taps[:-1]
        taps[0] = x[it]
        # Trigonometric functional-link expansion of the tap line.
        expanded = np.repeat(taps, Q)
        expanded[sin_idx] = np.sin(orders * np.pi * expanded[sin_idx])
        expanded[cos_idx] = np.cos(orders * np.pi * expanded[cos_idx])
        y_lin = np.dot(w_lin, taps)
        y_fl = np.dot(w_fl, expanded)
        # The FL branch adapts on the full (unmixed) combined error,
        # normalized by the expansion energy.
        fl_err = d[it] - (y_lin + y_fl)
        w_fl = w_fl + (mu_FL * fl_err * expanded) / (np.dot(expanded, expanded) + 0.001)
        mix = 1 / (1 + np.exp(-alpha))
        combined = y_lin + mix * y_fl
        err = d[it] - combined
        power = beta * power + (1 - beta) * (y_fl ** 2)
        alpha = alpha + (mu_a * err * y_fl * mix * (1 - mix)) / power
        alpha = np.clip(alpha, -4, 4)  # keep the sigmoid out of saturation
        w_lin = w_lin + (mu_L * err * taps) / (np.dot(taps, taps) + 0.001)
        errors[it] = err
    return errors
def resample_file(input_file: Path, output_file: Path, overwrite: bool, samping_rate: int, mono: bool):
    """Load an audio file at the requested sample rate and write it back out.

    Skips the write when the output exists and ``overwrite`` is False.
    (The ``samping_rate`` spelling is kept for caller compatibility.)
    """
    import librosa
    import soundfile as sf
    if (overwrite is False) and output_file.exists():
        # Nothing to do: keep the existing file.
        return
    audio, _ = librosa.load(str(input_file), sr=samping_rate, mono=mono)
    if audio.ndim == 2:
        # Transpose multi-channel audio -- presumably to match soundfile's
        # (frames, channels) layout; confirm against the callers.
        audio = audio.T
    sf.write(str(output_file), audio, samping_rate)
class MutationResponse():
    """Attribute-style wrapper around a raw HTTP response dict for a mutation."""

    def __init__(self, mutation, response):
        """Copy the interesting fields of ``response``.

        ``mutation`` is accepted for interface compatibility but not stored.
        """
        raw_body = response['body']
        self.code = response['code']
        self.url = response['url']
        self.headers = response['headers']
        # Keep both the decoded text and the original raw bytes.
        self.body = decode_bytes(raw_body)
        self.body_binary = raw_body
        self.time = response['time']
class EmailTemplateRenderer():
    """Renders e-mail templates from a repository through a lazily built,
    sandboxed (autoescaping) Jinja2 environment."""

    def __init__(self, repository: 'EmailTemplateRepository', *, templates_overrides: (dict[(EmailTemplateType, 'EmailTemplate')] | None)=None):
        self.repository = repository
        # Built on first render; see _get_jinja_environment().
        self._jinja_environment: (jinja2.Environment | None) = None
        self.templates_overrides = templates_overrides

    # NOTE(review): the three `...`-bodied render() definitions below look like
    # typing.overload stubs whose @overload decorators were lost -- confirm.
    async def render(self, type: Literal[EmailTemplateType.WELCOME], context: 'WelcomeContext') -> str:
        ...

    async def render(self, type: Literal[EmailTemplateType.VERIFY_EMAIL], context: 'VerifyEmailContext') -> str:
        ...

    async def render(self, type: Literal[EmailTemplateType.FORGOT_PASSWORD], context: 'ForgotPasswordContext') -> str:
        ...

    async def render(self, type, context: 'EmailContext') -> str:
        """Render the template named by ``type.value`` with the fields of
        ``context`` (a pydantic model, dumped to a dict)."""
        jinja_environment = (await self._get_jinja_environment())
        template_object = jinja_environment.get_template(type.value)
        return template_object.render(context.model_dump())

    async def _get_jinja_environment(self) -> jinja2.Environment:
        """Create once, then reuse, the sandboxed environment whose loader
        serves templates from the repository (plus any overrides)."""
        if (self._jinja_environment is None):
            templates = (await self.repository.all())
            loader = jinja2.FunctionLoader(EmailTemplateLoader(templates, templates_overrides=self.templates_overrides))
            self._jinja_environment = ImmutableSandboxedEnvironment(loader=loader, autoescape=True)
        return self._jinja_environment
def test():
    """Check the learner's two-token spaCy matcher pattern and report success."""
    assert (len(pattern) == 2), 'Le motif doit decrire deux tokens (deux dictionnaires).'
    first = pattern[0]
    second = pattern[1]
    assert isinstance(first, dict) and isinstance(second, dict), "Chaque element d'un motif doit etre un dictionnaire."
    assert (len(first) == 1) and (len(second) == 1), "Chaque element du motif ne doit comporter qu'une seule cle."
    # Accept either attribute spelling (lower- or upper-case).
    assert (first.get('text') == 'iOS') or (first.get('TEXT') == 'iOS'), 'Recherches-tu sur le texte du premier token ?'
    assert (second.get('is_digit') == True) or (second.get('IS_DIGIT') == True), "Recherches-tu l'attribut is_digit sur le deuxieme token ?"
    __msg__.good('Bien joue !')
def do_istft(data):
    """Inverse-STFT a stacked real/imag spectrogram back to the time domain.

    ``data[..., 0]`` holds the real part, ``data[..., 1]`` the imaginary part.
    STFT sizes come from the module-level ``args.stft`` configuration.
    """
    win_size = args.stft['win_size']
    hop_size = args.stft['hop_size']
    # Synthesis window derived from the hamming analysis window.
    synthesis_window = tf.signal.inverse_stft_window_fn(hop_size, forward_window_fn=tf.signal.hamming_window)
    real_part = data[(..., 0)]
    imag_part = data[(..., 1)]
    complex_spec = real_part + (1j * imag_part)
    return tf.signal.inverse_stft(complex_spec, win_size, hop_size, window_fn=synthesis_window)
def get_gauntlet_progress(lengths: dict[(str, Any)], unlock: bool=True) -> dict[(str, Any)]:
    """Parse gauntlet progress data into clear progress, per-star clear
    amounts, and unlock flags.

    The get_length_data calls are kept in the original order (progress,
    clear amounts, then unlock flags) -- presumably they consume a shared
    stream, so the read order matters; confirm before reordering.
    """
    total = lengths['total']
    stars = lengths['stars']
    stages = lengths['stages']
    # One progress entry per (chapter, star), grouped per chapter.
    clear_progress = list(helper.chunks(get_length_data(4, 1, total * stars), stars))
    raw_clears = get_length_data(4, 2, (total * stages) * stars)
    unlock_next: list[Any] = []
    if unlock:
        unlock_next = list(helper.chunks(get_length_data(4, 1, total * stars), stars))
    # Split per chapter, then de-interleave each chapter into per-star
    # stage lists (values are interleaved star-first).
    per_chapter = list(helper.chunks(raw_clears, stages * stars))
    clear_amount: list[list[list[int]]] = [[chapter[j::stars] for j in range(stars)] for chapter in per_chapter]
    return {'Value': {'clear_progress': clear_progress, 'clear_amount': clear_amount, 'unlock_next': unlock_next}, 'Lengths': lengths}
class APIResponse(Response):
    """A Response that renders list/dict (and empty-string) bodies through
    the renderer negotiated for the current request."""
    # Only these Python types are handed to a renderer; anything else is
    # treated as an already-encoded body.
    api_return_types = (list, dict)

    def __init__(self, content=None, *args, **kwargs):
        super().__init__(None, *args, **kwargs)
        media_type = None
        if (isinstance(content, self.api_return_types) or (content == '')):
            # NOTE(review): `request` is not defined in this block -- presumably
            # a framework-provided request proxy (flask-style); confirm.
            renderer = request.accepted_renderer
            if ((content != '') or renderer.handles_empty_responses):
                media_type = request.accepted_media_type
                options = self.get_renderer_options()
                content = renderer.render(content, media_type, **options)
                # A rendered body means this is no longer "No Content".
                if (self.status_code == 204):
                    self.status_code = 200
        if (content is None):
            content = []
        if isinstance(content, (str, bytes, bytearray)):
            self.set_data(content)
        else:
            # Non-text payloads are passed through as the response iterable.
            self.response = content
        if (media_type is not None):
            self.headers['Content-Type'] = str(media_type)

    def get_renderer_options(self):
        """Keyword options forwarded to the renderer's render() call."""
        return {'status': self.status, 'status_code': self.status_code, 'headers': self.headers}
def read_line(f, metrics, iline_idx, xline_idx):
    """Smoke-test f.getline() for one crossline and one inline read.

    The expected sums (800 and 305) are fixture-specific magic values.
    """
    sample_count = metrics['samplecount']
    xl_stride = metrics['xline_stride']
    il_stride = metrics['iline_stride']
    offset_count = metrics['offset_count']
    # Resolve the first-trace indices for crossline 20 and inline 1.
    xl_trace0 = _segyio.fread_trace0(20, len(iline_idx), xl_stride, offset_count, xline_idx, 'crossline')
    il_trace0 = _segyio.fread_trace0(1, len(xline_idx), il_stride, offset_count, iline_idx, 'inline')
    line_buf = numpy.zeros((len(iline_idx), sample_count), dtype=numpy.single)
    f.getline(xl_trace0, len(iline_idx), xl_stride, offset_count, line_buf)
    assert sum(sum(line_buf)) == approx(800.0, abs=1e-06)
    f.getline(il_trace0, len(xline_idx), il_stride, offset_count, line_buf)
    assert sum(sum(line_buf)) == approx(305.0, abs=1e-06)
    f.close()
class EnvoyBuilder(base_builder.BaseBuilder):
    """Builds the Envoy binary, stages it, and produces a docker image from
    a managed Envoy source tree."""

    def __init__(self, manager: source_manager.SourceManager) -> None:
        """Resolve the Envoy source tree/repository from ``manager`` and use
        the source directory as the build directory."""
        super(EnvoyBuilder, self).__init__(manager)
        self._source_tree = self._source_manager.get_source_tree(proto_source.SourceRepository.SRCID_ENVOY)
        self._source_repo = self._source_manager.get_source_repository(proto_source.SourceRepository.SRCID_ENVOY)
        self.set_build_dir(self._source_tree.get_source_directory())

    def _validate(self) -> None:
        """Raise EnvoyBuilderError unless the configured repository is Envoy."""
        if self._source_repo.identity != proto_source.SourceRepository.SRCID_ENVOY:
            raise EnvoyBuilderError('This class builds Envoy only.')

    def _base_bazel_build_command(self) -> str:
        """Return 'bazel build <options>' with a guaranteed trailing space so
        targets/flags can be appended directly."""
        cmd = 'bazel build {bazel_options}'.format(
            bazel_options=self._generate_bazel_options(proto_source.SourceRepository.SRCID_ENVOY))
        if not cmd.endswith(' '):
            cmd += ' '
        return cmd

    def clean_envoy(self) -> None:
        """Run a bazel clean in the Envoy source tree."""
        self._validate()
        self._run_bazel_clean()

    def build_envoy(self) -> None:
        """Build the Envoy binary target with bazel."""
        cmd_params = cmd_exec.CommandParameters(cwd=self._build_dir)
        cmd = self._base_bazel_build_command()
        if os.getenv('SALVO_WORKAROUND_LINK_ERROR'):
            # Opt-in workaround for a bazel linker-input error (env-gated).
            cmd += '--incompatible_require_linker_input_cc_api=false '
        cmd += constants.ENVOY_BINARY_BUILD_TARGET
        cmd_exec.run_check_command(cmd, cmd_params)

    def build_envoy_binary_from_source(self) -> str:
        """Check out, clean, and build Envoy; return the path to the binary."""
        self._validate()
        self._source_tree.copy_source_directory()
        self._source_tree.checkout_commit_hash()
        self.clean_envoy()
        self.build_envoy()
        return os.path.join(self._build_dir, constants.ENVOY_BINARY_TARGET_OUTPUT_PATH)

    def build_su_exec(self) -> None:
        """Build the bundled su-exec helper with bazel."""
        cmd_params = cmd_exec.CommandParameters(cwd=self._build_dir)
        cmd = self._base_bazel_build_command() + 'external:su-exec'
        cmd_exec.run_check_command(cmd, cmd_params)

    def stage_su_exec(self) -> None:
        """Copy the built su-exec binary into build_release/ for the image."""
        dir_mode = 0o755  # rwxr-xr-x (was the opaque decimal 493)
        dest_path = os.path.join(self._build_dir, 'build_release')
        if not os.path.exists(dest_path):
            os.mkdir(dest_path, dir_mode)
        cmd = 'cp -fv '
        cmd += 'bazel-bin/external/com_github_ncopa_suexec/su-exec '
        cmd += 'build_release/su-exec'
        cmd_params = cmd_exec.CommandParameters(cwd=self._build_dir)
        cmd_exec.run_command(cmd, cmd_params)

    def build_envoy_image_from_source(self) -> None:
        """Build the binary and helpers, stage them, and build the image."""
        self.build_envoy_binary_from_source()
        self.stage_envoy(False)
        self.build_su_exec()
        self.stage_su_exec()
        self.create_docker_image()

    def stage_envoy(self, strip_binary: bool) -> None:
        """Copy (or strip) the Envoy binary into build_release_stripped/.

        When ``strip_binary`` is True the debug symbols are stripped via
        objcopy; otherwise the binary is copied verbatim.
        """
        dir_mode = 0o755  # rwxr-xr-x (was the opaque decimal 493)
        dest_path = os.path.join(self._build_dir, 'build_release_stripped')
        if not os.path.exists(dest_path):
            os.mkdir(dest_path, dir_mode)
        cmd = ('objcopy --strip-debug ' if strip_binary else 'cp -fv ')
        cmd += constants.ENVOY_BINARY_TARGET_OUTPUT_PATH
        cmd += ' build_release_stripped/envoy'
        cmd_params = cmd_exec.CommandParameters(cwd=self._build_dir)
        cmd_exec.run_command(cmd, cmd_params)

    def _generate_docker_ignore(self) -> None:
        """Write a .dockerignore in the build dir listing everything except
        the staged outputs and docker context directories."""
        omit_from_dockerignore = ['configs', 'build_release', 'build_release_stripped', 'ci']
        pwd = os.getcwd()
        os.chdir(self._build_dir)
        try:
            discovered_files = glob.glob('*')
            files_to_write = [f for f in discovered_files if f not in omit_from_dockerignore]
            with open('.dockerignore', 'w') as dockerignore:
                for entry in files_to_write:
                    dockerignore.write('{entry}\n'.format(entry=entry))
        finally:
            # FIX: always restore the working directory, even if the glob or
            # the file write fails (previously a failure left cwd changed).
            os.chdir(pwd)

    def create_docker_image(self) -> None:
        """Build the envoy-dev docker image tagged with the commit hash."""
        self._generate_docker_ignore()
        commit_hash = self._source_repo.commit_hash
        cmd = 'docker build '
        cmd += '-f ci/Dockerfile-envoy '
        cmd += '-t envoyproxy/envoy-dev:{hash} '.format(hash=commit_hash)
        cmd += "--build-arg TARGETPLATFORM='.' ."
        cmd_params = cmd_exec.CommandParameters(cwd=self._build_dir)
        cmd_exec.run_command(cmd, cmd_params)
class TestAction(unittest.TestCase, UnittestTools):
    """Tests for GUIApplicationAction: its `object` trait must track the
    `application` trait."""

    def setUp(self):
        self.application = GUIApplication()

    def test_defaults(self):
        """Without an application, the action is enabled/visible and has no object."""
        action = GUIApplicationAction()
        event = ActionEvent()
        action.perform(event)
        self.assertTrue(action.enabled)
        self.assertTrue(action.visible)
        self.assertIsNone(action.object)

    def test_application(self):
        """With an application set at construction, `object` mirrors it."""
        action = GUIApplicationAction(application=self.application)
        event = ActionEvent()
        action.perform(event)
        self.assertTrue(action.enabled)
        self.assertTrue(action.visible)
        self.assertEqual(action.object, self.application)

    def test_application_changed(self):
        """Assigning/clearing `application` fires exactly one `object`
        change notification each time."""
        action = GUIApplicationAction()
        self.assertIsNone(action.object)
        with self.assertTraitChanges(action, 'object', 1):
            action.application = self.application
        self.assertEqual(action.object, self.application)
        with self.assertTraitChanges(action, 'object', 1):
            action.application = None
        self.assertIsNone(action.object)

    def test_destroy(self):
        """After destroy() the action no longer references the application."""
        action = GUIApplicationAction(application=self.application)
        action.destroy()
        self.assertEqual(action.object, None)
def extractMdzstranslationWordpressCom(item):
    """Map a mdzstranslation.wordpress.com feed item to a release message.

    Returns None for non-chapter posts (no vol/chp, or previews), a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or ('preview' in item['title'].lower()):
        return None
    # (tag to look for, series name to report, translation type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag, series_name, tl_type in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class CommandsTestCase(TestCase):
    """Tests for the import_practices management command."""

    def test_import_practices_from_epraccur(self):
        """Importing the epraccur sample creates practices with the expected
        fields and leaves a manually-set CCG untouched."""
        # Pre-existing practice with a manual CCG assignment that the import
        # must not overwrite.
        Practice.objects.create(code='A81044', ccg_id='00M', ccg_change_reason='Manually set')
        args = []
        epraccur = 'frontend/tests/fixtures/commands/'
        epraccur += 'epraccur_sample.csv'
        opts = {'epraccur': epraccur}
        call_command('import_practices', *args, **opts)
        p = Practice.objects.get(code='A81043')
        self.assertEqual(p.ccg.code, '00M')
        self.assertEqual(p.name, 'THE MANOR HOUSE SURGERY')
        addr = 'THE MANOR HOUSE SURGERY, BRAIDWOOD ROAD, NORMANBY, '
        addr += 'MIDDLESBROUGH, CLEVELAND, TS6 0HA'
        self.assertEqual(p.address_pretty(), addr)
        self.assertEqual(p.postcode, 'TS6 0HA')
        self.assertEqual(p.open_date, datetime.date(1974, 4, 1))
        self.assertEqual(p.close_date, None)
        self.assertEqual(p.status_code, 'A')
        self.assertEqual(p.join_provider_date, datetime.date(2013, 4, 1))
        self.assertEqual(p.leave_provider_date, None)
        self.assertEqual(p.get_setting_display(), 'Prison')
        p = Practice.objects.get(code='A81044')
        self.assertEqual(p.ccg.code, '00M')
        self.assertEqual(p.get_setting_display(), 'GP Practice')
        # Practices without a CCG mapping are imported with ccg=None.
        p = Practice.objects.get(code='Y01063')
        self.assertEqual(p.ccg, None)

    def test_import_practices_from_hscic(self):
        """Importing the HSCIC address file fills names/addresses but leaves
        dates and CCG unset."""
        args = []
        hscic = 'frontend/tests/fixtures/commands/hscic_practices.csv'
        opts = {'hscic_address': hscic}
        call_command('import_practices', *args, **opts)
        p = Practice.objects.get(code='A81001')
        self.assertEqual(p.name, 'THE DENSHAM SURGERY')
        addr = 'THE HEALTH CENTRE, LAWSON STREET, '
        addr += 'STOCKTON, CLEVELAND, TS18 1HU'
        self.assertEqual(p.address_pretty(), addr)
        self.assertEqual(p.open_date, None)
        self.assertEqual(p.ccg, None)
def read_grdecl_3d_property(filename, keyword, dimensions, dtype=float):
    """Read a single GRDECL keyword as a C-contiguous 3-D array.

    GRDECL stores values in Fortran order; they are reshaped to
    ``dimensions`` with order='F' and returned C-contiguous.

    Raises xtgeo.KeywordNotFoundError when the keyword is absent.
    """
    raw_values = None
    with open_grdecl(filename, keywords=[], simple_keywords=(keyword,)) as kw_generator:
        try:
            _, raw_values = next(kw_generator)
        except StopIteration as si:
            raise xtgeo.KeywordNotFoundError(f'Cannot import {keyword}, not present in file (unknown)?') from si
    converted = np.array([dtype(v) for v in raw_values])
    return np.ascontiguousarray(converted.reshape(dimensions, order='F'))
# FIX: the route call was missing its "@" -- as a bare statement it never
# registered the view; restored as a decorator.
@_page.route('/table/settings', methods=['POST'])
def settings():
    """POST /table/settings: validate the caller's uuid, then merge the
    posted settings (keys normalized to snake_case) into the shared state."""
    res = check_uuid(all_data['uuid'], request.json['uuid'])
    if res is not None:
        # check_uuid returned an error payload; bounce it back to the client.
        return jsonify(res)
    new_settings = request.json['settings']
    all_data['settings'].update({key.replace(' ', '_'): value for (key, value) in new_settings.items()})
    return jsonify(status='success', msg='')
class dns_analytics(bsn_tlv):
    """Generated BSN TLV of type 190 carrying no value payload."""
    # Wire type identifier for this TLV.
    type = 190

    def __init__(self):
        return

    def pack(self):
        """Serialize as !H type + !H length, patching the length after
        measuring the packed parts."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        # NOTE(review): ''.join over struct.pack output only works on
        # Python 2 (str == bytes) -- this looks like py2-era generated code.
        return ''.join(packed)

    # NOTE(review): loxi-generated unpack methods are normally @staticmethod;
    # the decorator may have been lost -- confirm against the generator.
    def unpack(reader):
        """Parse a dns_analytics TLV from `reader`, consuming its length."""
        obj = dns_analytics()
        _type = reader.read('!H')[0]
        assert (_type == 190)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this TLV's extent (header is 4 bytes).
        reader = orig_reader.slice(_length, 4)
        return obj

    def __eq__(self, other):
        # No payload fields: equality is purely a type check.
        if (type(self) != type(other)):
            return False
        return True

    def pretty_print(self, q):
        q.text('dns_analytics {')
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')
def get_input(req):
    """Best-effort extraction of the parameter ``x`` from an incoming request.

    Checks, in order: query string, form fields, uploaded files, and finally
    the JSON body of ``req``. Returns the first non-empty value found, or
    None when nothing usable is present.
    """
    # NOTE(review): the first three lookups use the global `request` proxy
    # while the JSON lookup uses the `req` argument -- confirm both refer to
    # the same request object.
    try:
        x = request.args.get('x')
        if (x is not None) and (x != ''):
            return x
    # FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # narrowed to Exception while keeping the deliberate best-effort flow.
    except Exception:
        pass
    try:
        x = request.form.get('x')
        if (x is not None) and (x != ''):
            return x
    except Exception:
        pass
    try:
        # files['x'] raises KeyError when no file was uploaded under 'x'.
        x = request.files['x']
        if x is not None:
            return x
    except Exception:
        pass
    try:
        x = req.get_json(silent=True)
        if x is not None:
            return x
    except Exception:
        pass
    return None
def test_write_jsonl_file():
    """write_jsonl() must emit one compact JSON object per line."""
    rows = [{'hello': 'world'}, {'test': 123}]
    with make_tempdir() as tmp_dir:
        out_path = tmp_dir / 'tmp.json'
        write_jsonl(out_path, rows)
        with Path(out_path).open('r', encoding='utf8') as fh:
            contents = fh.read()
        assert contents == '{"hello":"world"}\n{"test":123}\n'
class TProtocolMessage(Message):
protocol_id = PublicId.from_str('fetchai/t_protocol:0.1.0')
protocol_specification_id = PublicId.from_str('some_author/some_protocol_name:1.0.0')
DataModel = CustomDataModel
DataModel1 = CustomDataModel1
DataModel2 = CustomDataModel2
DataModel3 = CustomDataModel3
DataModel4 = CustomDataModel4
class Performative(Message.Performative):
PERFORMATIVE_CT = 'performative_ct'
PERFORMATIVE_EMPTY_CONTENTS = 'performative_empty_contents'
PERFORMATIVE_MT = 'performative_mt'
PERFORMATIVE_O = 'performative_o'
PERFORMATIVE_PCT = 'performative_pct'
PERFORMATIVE_PMT = 'performative_pmt'
PERFORMATIVE_PT = 'performative_pt'
def __str__(self) -> str:
return str(self.value)
_performatives = {'performative_ct', 'performative_empty_contents', 'performative_mt', 'performative_o', 'performative_pct', 'performative_pmt', 'performative_pt'}
__slots__: Tuple[(str, ...)] = tuple()
class _SlotsCls():
__slots__ = ('content_bool', 'content_bytes', 'content_ct', 'content_dict_bool_bool', 'content_dict_bool_bytes', 'content_dict_bool_float', 'content_dict_bool_int', 'content_dict_bool_str', 'content_dict_int_bool', 'content_dict_int_bytes', 'content_dict_int_float', 'content_dict_int_int', 'content_dict_int_str', 'content_dict_str_bool', 'content_dict_str_bytes', 'content_dict_str_float', 'content_dict_str_int', 'content_dict_str_str', 'content_float', 'content_int', 'content_list_bool', 'content_list_bytes', 'content_list_float', 'content_list_int', 'content_list_str', 'content_o_bool', 'content_o_ct', 'content_o_dict_str_int', 'content_o_list_bytes', 'content_o_set_int', 'content_set_bool', 'content_set_bytes', 'content_set_float', 'content_set_int', 'content_set_str', 'content_str', 'content_union_1', 'content_union_2', 'content_union_3', 'dialogue_reference', 'message_id', 'performative', 'target')
def __init__(self, performative: Performative, dialogue_reference: Tuple[(str, str)]=('', ''), message_id: int=1, target: int=0, **kwargs: Any):
super().__init__(dialogue_reference=dialogue_reference, message_id=message_id, target=target, performative=TProtocolMessage.Performative(performative), **kwargs)
def valid_performatives(self) -> Set[str]:
return self._performatives
def dialogue_reference(self) -> Tuple[(str, str)]:
enforce(self.is_set('dialogue_reference'), 'dialogue_reference is not set.')
return cast(Tuple[(str, str)], self.get('dialogue_reference'))
def message_id(self) -> int:
enforce(self.is_set('message_id'), 'message_id is not set.')
return cast(int, self.get('message_id'))
def performative(self) -> Performative:
enforce(self.is_set('performative'), 'performative is not set.')
return cast(TProtocolMessage.Performative, self.get('performative'))
def target(self) -> int:
enforce(self.is_set('target'), 'target is not set.')
return cast(int, self.get('target'))
def content_bool(self) -> bool:
enforce(self.is_set('content_bool'), "'content_bool' content is not set.")
return cast(bool, self.get('content_bool'))
def content_bytes(self) -> bytes:
enforce(self.is_set('content_bytes'), "'content_bytes' content is not set.")
return cast(bytes, self.get('content_bytes'))
def content_ct(self) -> CustomDataModel:
enforce(self.is_set('content_ct'), "'content_ct' content is not set.")
return cast(CustomDataModel, self.get('content_ct'))
def content_dict_bool_bool(self) -> Dict[(bool, bool)]:
enforce(self.is_set('content_dict_bool_bool'), "'content_dict_bool_bool' content is not set.")
return cast(Dict[(bool, bool)], self.get('content_dict_bool_bool'))
def content_dict_bool_bytes(self) -> Dict[(bool, bytes)]:
enforce(self.is_set('content_dict_bool_bytes'), "'content_dict_bool_bytes' content is not set.")
return cast(Dict[(bool, bytes)], self.get('content_dict_bool_bytes'))
def content_dict_bool_float(self) -> Dict[(bool, float)]:
enforce(self.is_set('content_dict_bool_float'), "'content_dict_bool_float' content is not set.")
return cast(Dict[(bool, float)], self.get('content_dict_bool_float'))
def content_dict_bool_int(self) -> Dict[(bool, int)]:
enforce(self.is_set('content_dict_bool_int'), "'content_dict_bool_int' content is not set.")
return cast(Dict[(bool, int)], self.get('content_dict_bool_int'))
def content_dict_bool_str(self) -> Dict[(bool, str)]:
enforce(self.is_set('content_dict_bool_str'), "'content_dict_bool_str' content is not set.")
return cast(Dict[(bool, str)], self.get('content_dict_bool_str'))
def content_dict_int_bool(self) -> Dict[(int, bool)]:
enforce(self.is_set('content_dict_int_bool'), "'content_dict_int_bool' content is not set.")
return cast(Dict[(int, bool)], self.get('content_dict_int_bool'))
def content_dict_int_bytes(self) -> Dict[(int, bytes)]:
enforce(self.is_set('content_dict_int_bytes'), "'content_dict_int_bytes' content is not set.")
return cast(Dict[(int, bytes)], self.get('content_dict_int_bytes'))
def content_dict_int_float(self) -> Dict[(int, float)]:
enforce(self.is_set('content_dict_int_float'), "'content_dict_int_float' content is not set.")
return cast(Dict[(int, float)], self.get('content_dict_int_float'))
def content_dict_int_int(self) -> Dict[(int, int)]:
enforce(self.is_set('content_dict_int_int'), "'content_dict_int_int' content is not set.")
return cast(Dict[(int, int)], self.get('content_dict_int_int'))
def content_dict_int_str(self) -> Dict[(int, str)]:
enforce(self.is_set('content_dict_int_str'), "'content_dict_int_str' content is not set.")
return cast(Dict[(int, str)], self.get('content_dict_int_str'))
def content_dict_str_bool(self) -> Dict[(str, bool)]:
enforce(self.is_set('content_dict_str_bool'), "'content_dict_str_bool' content is not set.")
return cast(Dict[(str, bool)], self.get('content_dict_str_bool'))
# Generated content accessors: each returns one typed content slot of the
# message body, raising via enforce() when the slot was never set.
# NOTE(review): these look like they were `@property`-decorated in the
# generator's output; the decorators are not visible here — confirm upstream.
def content_dict_str_bytes(self) -> Dict[(str, bytes)]:
    """Return the 'content_dict_str_bytes' content; raises if not set."""
    enforce(self.is_set('content_dict_str_bytes'), "'content_dict_str_bytes' content is not set.")
    return cast(Dict[(str, bytes)], self.get('content_dict_str_bytes'))
def content_dict_str_float(self) -> Dict[(str, float)]:
    """Return the 'content_dict_str_float' content; raises if not set."""
    enforce(self.is_set('content_dict_str_float'), "'content_dict_str_float' content is not set.")
    return cast(Dict[(str, float)], self.get('content_dict_str_float'))
def content_dict_str_int(self) -> Dict[(str, int)]:
    """Return the 'content_dict_str_int' content; raises if not set."""
    enforce(self.is_set('content_dict_str_int'), "'content_dict_str_int' content is not set.")
    return cast(Dict[(str, int)], self.get('content_dict_str_int'))
def content_dict_str_str(self) -> Dict[(str, str)]:
    """Return the 'content_dict_str_str' content; raises if not set."""
    enforce(self.is_set('content_dict_str_str'), "'content_dict_str_str' content is not set.")
    return cast(Dict[(str, str)], self.get('content_dict_str_str'))
def content_float(self) -> float:
    """Return the 'content_float' content; raises if not set."""
    enforce(self.is_set('content_float'), "'content_float' content is not set.")
    return cast(float, self.get('content_float'))
def content_int(self) -> int:
    """Return the 'content_int' content; raises if not set."""
    enforce(self.is_set('content_int'), "'content_int' content is not set.")
    return cast(int, self.get('content_int'))
def content_list_bool(self) -> Tuple[(bool, ...)]:
    """Return the 'content_list_bool' content; raises if not set."""
    enforce(self.is_set('content_list_bool'), "'content_list_bool' content is not set.")
    return cast(Tuple[(bool, ...)], self.get('content_list_bool'))
def content_list_bytes(self) -> Tuple[(bytes, ...)]:
    """Return the 'content_list_bytes' content; raises if not set."""
    enforce(self.is_set('content_list_bytes'), "'content_list_bytes' content is not set.")
    return cast(Tuple[(bytes, ...)], self.get('content_list_bytes'))
def content_list_float(self) -> Tuple[(float, ...)]:
    """Return the 'content_list_float' content; raises if not set."""
    enforce(self.is_set('content_list_float'), "'content_list_float' content is not set.")
    return cast(Tuple[(float, ...)], self.get('content_list_float'))
def content_list_int(self) -> Tuple[(int, ...)]:
    """Return the 'content_list_int' content; raises if not set."""
    enforce(self.is_set('content_list_int'), "'content_list_int' content is not set.")
    return cast(Tuple[(int, ...)], self.get('content_list_int'))
def content_list_str(self) -> Tuple[(str, ...)]:
    """Return the 'content_list_str' content; raises if not set."""
    enforce(self.is_set('content_list_str'), "'content_list_str' content is not set.")
    return cast(Tuple[(str, ...)], self.get('content_list_str'))
# Optional ("o_") contents: no enforce() guard — absent slots yield None.
def content_o_bool(self) -> Optional[bool]:
    """Return the optional 'content_o_bool' content, or None if unset."""
    return cast(Optional[bool], self.get('content_o_bool'))
def content_o_ct(self) -> Optional[CustomDataModel4]:
    """Return the optional 'content_o_ct' content, or None if unset."""
    return cast(Optional[CustomDataModel4], self.get('content_o_ct'))
def content_o_dict_str_int(self) -> Optional[Dict[(str, int)]]:
    """Return the optional 'content_o_dict_str_int' content, or None if unset."""
    return cast(Optional[Dict[(str, int)]], self.get('content_o_dict_str_int'))
def content_o_list_bytes(self) -> Optional[Tuple[(bytes, ...)]]:
    """Return the optional 'content_o_list_bytes' content, or None if unset."""
    return cast(Optional[Tuple[(bytes, ...)]], self.get('content_o_list_bytes'))
def content_o_set_int(self) -> Optional[FrozenSet[int]]:
    """Return the optional 'content_o_set_int' content, or None if unset."""
    return cast(Optional[FrozenSet[int]], self.get('content_o_set_int'))
def content_set_bool(self) -> FrozenSet[bool]:
    """Return the 'content_set_bool' content; raises if not set."""
    enforce(self.is_set('content_set_bool'), "'content_set_bool' content is not set.")
    return cast(FrozenSet[bool], self.get('content_set_bool'))
def content_set_bytes(self) -> FrozenSet[bytes]:
    """Return the 'content_set_bytes' content; raises if not set."""
    enforce(self.is_set('content_set_bytes'), "'content_set_bytes' content is not set.")
    return cast(FrozenSet[bytes], self.get('content_set_bytes'))
def content_set_float(self) -> FrozenSet[float]:
    """Return the 'content_set_float' content; raises if not set."""
    enforce(self.is_set('content_set_float'), "'content_set_float' content is not set.")
    return cast(FrozenSet[float], self.get('content_set_float'))
def content_set_int(self) -> FrozenSet[int]:
    """Return the 'content_set_int' content; raises if not set."""
    enforce(self.is_set('content_set_int'), "'content_set_int' content is not set.")
    return cast(FrozenSet[int], self.get('content_set_int'))
def content_set_str(self) -> FrozenSet[str]:
    """Return the 'content_set_str' content; raises if not set."""
    enforce(self.is_set('content_set_str'), "'content_set_str' content is not set.")
    return cast(FrozenSet[str], self.get('content_set_str'))
def content_str(self) -> str:
    """Return the 'content_str' content; raises if not set."""
    enforce(self.is_set('content_str'), "'content_str' content is not set.")
    return cast(str, self.get('content_str'))
# Union contents: the stored value may be any one of the alternatives in
# the return annotation; callers must narrow the type themselves.
def content_union_1(self) -> Union[(CustomDataModel1, bytes, int, float, bool, str, FrozenSet[int], Tuple[(bool, ...)], Dict[(str, int)])]:
    """Return the 'content_union_1' content; raises if not set."""
    enforce(self.is_set('content_union_1'), "'content_union_1' content is not set.")
    return cast(Union[(CustomDataModel1, bytes, int, float, bool, str, FrozenSet[int], Tuple[(bool, ...)], Dict[(str, int)])], self.get('content_union_1'))
def content_union_2(self) -> Union[(FrozenSet[bytes], FrozenSet[int], FrozenSet[str], Tuple[(float, ...)], Tuple[(bool, ...)], Tuple[(bytes, ...)], Dict[(str, int)], Dict[(int, float)], Dict[(bool, bytes)], int)]:
    """Return the 'content_union_2' content; raises if not set."""
    enforce(self.is_set('content_union_2'), "'content_union_2' content is not set.")
    return cast(Union[(FrozenSet[bytes], FrozenSet[int], FrozenSet[str], Tuple[(float, ...)], Tuple[(bool, ...)], Tuple[(bytes, ...)], Dict[(str, int)], Dict[(int, float)], Dict[(bool, bytes)], int)], self.get('content_union_2'))
def content_union_3(self) -> Union[(CustomDataModel2, CustomDataModel3)]:
    """Return the 'content_union_3' content; raises if not set."""
    enforce(self.is_set('content_union_3'), "'content_union_3' content is not set.")
    return cast(Union[(CustomDataModel2, CustomDataModel3)], self.get('content_union_3'))
def _is_consistent(self) -> bool:
    """Check that the message is internally consistent.

    Validates the dialogue reference, message id, target, performative, and
    that exactly the contents required by the performative are present with
    the expected types. Returns True on success; logs the failure and
    returns False on any enforcement/type/key error.
    """
    try:
        # Envelope-level fields common to every performative.
        enforce(isinstance(self.dialogue_reference, tuple), "Invalid type for 'dialogue_reference'. Expected 'tuple'. Found '{}'.".format(type(self.dialogue_reference)))
        enforce(isinstance(self.dialogue_reference[0], str), "Invalid type for 'dialogue_reference[0]'. Expected 'str'. Found '{}'.".format(type(self.dialogue_reference[0])))
        enforce(isinstance(self.dialogue_reference[1], str), "Invalid type for 'dialogue_reference[1]'. Expected 'str'. Found '{}'.".format(type(self.dialogue_reference[1])))
        enforce((type(self.message_id) is int), "Invalid type for 'message_id'. Expected 'int'. Found '{}'.".format(type(self.message_id)))
        enforce((type(self.target) is int), "Invalid type for 'target'. Expected 'int'. Found '{}'.".format(type(self.target)))
        enforce(isinstance(self.performative, TProtocolMessage.Performative), "Invalid 'performative'. Expected either of '{}'. Found '{}'.".format(self.valid_performatives, self.performative))
        # Count only user contents; DEFAULT_BODY_SIZE covers the framework keys.
        actual_nb_of_contents = (len(self._body) - DEFAULT_BODY_SIZE)
        expected_nb_of_contents = 0
        # Per-performative content checks.
        if (self.performative == TProtocolMessage.Performative.PERFORMATIVE_CT):
            expected_nb_of_contents = 1
            enforce(isinstance(self.content_ct, CustomDataModel), "Invalid type for content 'content_ct'. Expected 'DataModel'. Found '{}'.".format(type(self.content_ct)))
        elif (self.performative == TProtocolMessage.Performative.PERFORMATIVE_PT):
            expected_nb_of_contents = 5
            enforce(isinstance(self.content_bytes, bytes), "Invalid type for content 'content_bytes'. Expected 'bytes'. Found '{}'.".format(type(self.content_bytes)))
            enforce((type(self.content_int) is int), "Invalid type for content 'content_int'. Expected 'int'. Found '{}'.".format(type(self.content_int)))
            enforce(isinstance(self.content_float, float), "Invalid type for content 'content_float'. Expected 'float'. Found '{}'.".format(type(self.content_float)))
            enforce(isinstance(self.content_bool, bool), "Invalid type for content 'content_bool'. Expected 'bool'. Found '{}'.".format(type(self.content_bool)))
            enforce(isinstance(self.content_str, str), "Invalid type for content 'content_str'. Expected 'str'. Found '{}'.".format(type(self.content_str)))
        elif (self.performative == TProtocolMessage.Performative.PERFORMATIVE_PCT):
            expected_nb_of_contents = 10
            enforce(isinstance(self.content_set_bytes, frozenset), "Invalid type for content 'content_set_bytes'. Expected 'frozenset'. Found '{}'.".format(type(self.content_set_bytes)))
            enforce(all((isinstance(element, bytes) for element in self.content_set_bytes)), "Invalid type for frozenset elements in content 'content_set_bytes'. Expected 'bytes'.")
            enforce(isinstance(self.content_set_int, frozenset), "Invalid type for content 'content_set_int'. Expected 'frozenset'. Found '{}'.".format(type(self.content_set_int)))
            enforce(all(((type(element) is int) for element in self.content_set_int)), "Invalid type for frozenset elements in content 'content_set_int'. Expected 'int'.")
            enforce(isinstance(self.content_set_float, frozenset), "Invalid type for content 'content_set_float'. Expected 'frozenset'. Found '{}'.".format(type(self.content_set_float)))
            enforce(all((isinstance(element, float) for element in self.content_set_float)), "Invalid type for frozenset elements in content 'content_set_float'. Expected 'float'.")
            enforce(isinstance(self.content_set_bool, frozenset), "Invalid type for content 'content_set_bool'. Expected 'frozenset'. Found '{}'.".format(type(self.content_set_bool)))
            enforce(all((isinstance(element, bool) for element in self.content_set_bool)), "Invalid type for frozenset elements in content 'content_set_bool'. Expected 'bool'.")
            enforce(isinstance(self.content_set_str, frozenset), "Invalid type for content 'content_set_str'. Expected 'frozenset'. Found '{}'.".format(type(self.content_set_str)))
            enforce(all((isinstance(element, str) for element in self.content_set_str)), "Invalid type for frozenset elements in content 'content_set_str'. Expected 'str'.")
            enforce(isinstance(self.content_list_bytes, tuple), "Invalid type for content 'content_list_bytes'. Expected 'tuple'. Found '{}'.".format(type(self.content_list_bytes)))
            enforce(all((isinstance(element, bytes) for element in self.content_list_bytes)), "Invalid type for tuple elements in content 'content_list_bytes'. Expected 'bytes'.")
            enforce(isinstance(self.content_list_int, tuple), "Invalid type for content 'content_list_int'. Expected 'tuple'. Found '{}'.".format(type(self.content_list_int)))
            enforce(all(((type(element) is int) for element in self.content_list_int)), "Invalid type for tuple elements in content 'content_list_int'. Expected 'int'.")
            enforce(isinstance(self.content_list_float, tuple), "Invalid type for content 'content_list_float'. Expected 'tuple'. Found '{}'.".format(type(self.content_list_float)))
            enforce(all((isinstance(element, float) for element in self.content_list_float)), "Invalid type for tuple elements in content 'content_list_float'. Expected 'float'.")
            enforce(isinstance(self.content_list_bool, tuple), "Invalid type for content 'content_list_bool'. Expected 'tuple'. Found '{}'.".format(type(self.content_list_bool)))
            enforce(all((isinstance(element, bool) for element in self.content_list_bool)), "Invalid type for tuple elements in content 'content_list_bool'. Expected 'bool'.")
            enforce(isinstance(self.content_list_str, tuple), "Invalid type for content 'content_list_str'. Expected 'tuple'. Found '{}'.".format(type(self.content_list_str)))
            enforce(all((isinstance(element, str) for element in self.content_list_str)), "Invalid type for tuple elements in content 'content_list_str'. Expected 'str'.")
        elif (self.performative == TProtocolMessage.Performative.PERFORMATIVE_PMT):
            # Dictionary contents: both key and value types are checked per entry.
            expected_nb_of_contents = 15
            enforce(isinstance(self.content_dict_int_bytes, dict), "Invalid type for content 'content_dict_int_bytes'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_int_bytes)))
            for (key_of_content_dict_int_bytes, value_of_content_dict_int_bytes) in self.content_dict_int_bytes.items():
                enforce((type(key_of_content_dict_int_bytes) is int), "Invalid type for dictionary keys in content 'content_dict_int_bytes'. Expected 'int'. Found '{}'.".format(type(key_of_content_dict_int_bytes)))
                enforce(isinstance(value_of_content_dict_int_bytes, bytes), "Invalid type for dictionary values in content 'content_dict_int_bytes'. Expected 'bytes'. Found '{}'.".format(type(value_of_content_dict_int_bytes)))
            enforce(isinstance(self.content_dict_int_int, dict), "Invalid type for content 'content_dict_int_int'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_int_int)))
            for (key_of_content_dict_int_int, value_of_content_dict_int_int) in self.content_dict_int_int.items():
                enforce((type(key_of_content_dict_int_int) is int), "Invalid type for dictionary keys in content 'content_dict_int_int'. Expected 'int'. Found '{}'.".format(type(key_of_content_dict_int_int)))
                enforce((type(value_of_content_dict_int_int) is int), "Invalid type for dictionary values in content 'content_dict_int_int'. Expected 'int'. Found '{}'.".format(type(value_of_content_dict_int_int)))
            enforce(isinstance(self.content_dict_int_float, dict), "Invalid type for content 'content_dict_int_float'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_int_float)))
            for (key_of_content_dict_int_float, value_of_content_dict_int_float) in self.content_dict_int_float.items():
                enforce((type(key_of_content_dict_int_float) is int), "Invalid type for dictionary keys in content 'content_dict_int_float'. Expected 'int'. Found '{}'.".format(type(key_of_content_dict_int_float)))
                enforce(isinstance(value_of_content_dict_int_float, float), "Invalid type for dictionary values in content 'content_dict_int_float'. Expected 'float'. Found '{}'.".format(type(value_of_content_dict_int_float)))
            enforce(isinstance(self.content_dict_int_bool, dict), "Invalid type for content 'content_dict_int_bool'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_int_bool)))
            for (key_of_content_dict_int_bool, value_of_content_dict_int_bool) in self.content_dict_int_bool.items():
                enforce((type(key_of_content_dict_int_bool) is int), "Invalid type for dictionary keys in content 'content_dict_int_bool'. Expected 'int'. Found '{}'.".format(type(key_of_content_dict_int_bool)))
                enforce(isinstance(value_of_content_dict_int_bool, bool), "Invalid type for dictionary values in content 'content_dict_int_bool'. Expected 'bool'. Found '{}'.".format(type(value_of_content_dict_int_bool)))
            enforce(isinstance(self.content_dict_int_str, dict), "Invalid type for content 'content_dict_int_str'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_int_str)))
            for (key_of_content_dict_int_str, value_of_content_dict_int_str) in self.content_dict_int_str.items():
                enforce((type(key_of_content_dict_int_str) is int), "Invalid type for dictionary keys in content 'content_dict_int_str'. Expected 'int'. Found '{}'.".format(type(key_of_content_dict_int_str)))
                enforce(isinstance(value_of_content_dict_int_str, str), "Invalid type for dictionary values in content 'content_dict_int_str'. Expected 'str'. Found '{}'.".format(type(value_of_content_dict_int_str)))
            enforce(isinstance(self.content_dict_bool_bytes, dict), "Invalid type for content 'content_dict_bool_bytes'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_bool_bytes)))
            for (key_of_content_dict_bool_bytes, value_of_content_dict_bool_bytes) in self.content_dict_bool_bytes.items():
                enforce(isinstance(key_of_content_dict_bool_bytes, bool), "Invalid type for dictionary keys in content 'content_dict_bool_bytes'. Expected 'bool'. Found '{}'.".format(type(key_of_content_dict_bool_bytes)))
                enforce(isinstance(value_of_content_dict_bool_bytes, bytes), "Invalid type for dictionary values in content 'content_dict_bool_bytes'. Expected 'bytes'. Found '{}'.".format(type(value_of_content_dict_bool_bytes)))
            enforce(isinstance(self.content_dict_bool_int, dict), "Invalid type for content 'content_dict_bool_int'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_bool_int)))
            for (key_of_content_dict_bool_int, value_of_content_dict_bool_int) in self.content_dict_bool_int.items():
                enforce(isinstance(key_of_content_dict_bool_int, bool), "Invalid type for dictionary keys in content 'content_dict_bool_int'. Expected 'bool'. Found '{}'.".format(type(key_of_content_dict_bool_int)))
                enforce((type(value_of_content_dict_bool_int) is int), "Invalid type for dictionary values in content 'content_dict_bool_int'. Expected 'int'. Found '{}'.".format(type(value_of_content_dict_bool_int)))
            enforce(isinstance(self.content_dict_bool_float, dict), "Invalid type for content 'content_dict_bool_float'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_bool_float)))
            for (key_of_content_dict_bool_float, value_of_content_dict_bool_float) in self.content_dict_bool_float.items():
                enforce(isinstance(key_of_content_dict_bool_float, bool), "Invalid type for dictionary keys in content 'content_dict_bool_float'. Expected 'bool'. Found '{}'.".format(type(key_of_content_dict_bool_float)))
                enforce(isinstance(value_of_content_dict_bool_float, float), "Invalid type for dictionary values in content 'content_dict_bool_float'. Expected 'float'. Found '{}'.".format(type(value_of_content_dict_bool_float)))
            enforce(isinstance(self.content_dict_bool_bool, dict), "Invalid type for content 'content_dict_bool_bool'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_bool_bool)))
            for (key_of_content_dict_bool_bool, value_of_content_dict_bool_bool) in self.content_dict_bool_bool.items():
                enforce(isinstance(key_of_content_dict_bool_bool, bool), "Invalid type for dictionary keys in content 'content_dict_bool_bool'. Expected 'bool'. Found '{}'.".format(type(key_of_content_dict_bool_bool)))
                enforce(isinstance(value_of_content_dict_bool_bool, bool), "Invalid type for dictionary values in content 'content_dict_bool_bool'. Expected 'bool'. Found '{}'.".format(type(value_of_content_dict_bool_bool)))
            enforce(isinstance(self.content_dict_bool_str, dict), "Invalid type for content 'content_dict_bool_str'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_bool_str)))
            for (key_of_content_dict_bool_str, value_of_content_dict_bool_str) in self.content_dict_bool_str.items():
                enforce(isinstance(key_of_content_dict_bool_str, bool), "Invalid type for dictionary keys in content 'content_dict_bool_str'. Expected 'bool'. Found '{}'.".format(type(key_of_content_dict_bool_str)))
                enforce(isinstance(value_of_content_dict_bool_str, str), "Invalid type for dictionary values in content 'content_dict_bool_str'. Expected 'str'. Found '{}'.".format(type(value_of_content_dict_bool_str)))
            enforce(isinstance(self.content_dict_str_bytes, dict), "Invalid type for content 'content_dict_str_bytes'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_str_bytes)))
            for (key_of_content_dict_str_bytes, value_of_content_dict_str_bytes) in self.content_dict_str_bytes.items():
                enforce(isinstance(key_of_content_dict_str_bytes, str), "Invalid type for dictionary keys in content 'content_dict_str_bytes'. Expected 'str'. Found '{}'.".format(type(key_of_content_dict_str_bytes)))
                enforce(isinstance(value_of_content_dict_str_bytes, bytes), "Invalid type for dictionary values in content 'content_dict_str_bytes'. Expected 'bytes'. Found '{}'.".format(type(value_of_content_dict_str_bytes)))
            enforce(isinstance(self.content_dict_str_int, dict), "Invalid type for content 'content_dict_str_int'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_str_int)))
            for (key_of_content_dict_str_int, value_of_content_dict_str_int) in self.content_dict_str_int.items():
                enforce(isinstance(key_of_content_dict_str_int, str), "Invalid type for dictionary keys in content 'content_dict_str_int'. Expected 'str'. Found '{}'.".format(type(key_of_content_dict_str_int)))
                enforce((type(value_of_content_dict_str_int) is int), "Invalid type for dictionary values in content 'content_dict_str_int'. Expected 'int'. Found '{}'.".format(type(value_of_content_dict_str_int)))
            enforce(isinstance(self.content_dict_str_float, dict), "Invalid type for content 'content_dict_str_float'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_str_float)))
            for (key_of_content_dict_str_float, value_of_content_dict_str_float) in self.content_dict_str_float.items():
                enforce(isinstance(key_of_content_dict_str_float, str), "Invalid type for dictionary keys in content 'content_dict_str_float'. Expected 'str'. Found '{}'.".format(type(key_of_content_dict_str_float)))
                enforce(isinstance(value_of_content_dict_str_float, float), "Invalid type for dictionary values in content 'content_dict_str_float'. Expected 'float'. Found '{}'.".format(type(value_of_content_dict_str_float)))
            enforce(isinstance(self.content_dict_str_bool, dict), "Invalid type for content 'content_dict_str_bool'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_str_bool)))
            for (key_of_content_dict_str_bool, value_of_content_dict_str_bool) in self.content_dict_str_bool.items():
                enforce(isinstance(key_of_content_dict_str_bool, str), "Invalid type for dictionary keys in content 'content_dict_str_bool'. Expected 'str'. Found '{}'.".format(type(key_of_content_dict_str_bool)))
                enforce(isinstance(value_of_content_dict_str_bool, bool), "Invalid type for dictionary values in content 'content_dict_str_bool'. Expected 'bool'. Found '{}'.".format(type(value_of_content_dict_str_bool)))
            enforce(isinstance(self.content_dict_str_str, dict), "Invalid type for content 'content_dict_str_str'. Expected 'dict'. Found '{}'.".format(type(self.content_dict_str_str)))
            for (key_of_content_dict_str_str, value_of_content_dict_str_str) in self.content_dict_str_str.items():
                enforce(isinstance(key_of_content_dict_str_str, str), "Invalid type for dictionary keys in content 'content_dict_str_str'. Expected 'str'. Found '{}'.".format(type(key_of_content_dict_str_str)))
                enforce(isinstance(value_of_content_dict_str_str, str), "Invalid type for dictionary values in content 'content_dict_str_str'. Expected 'str'. Found '{}'.".format(type(value_of_content_dict_str_str)))
        elif (self.performative == TProtocolMessage.Performative.PERFORMATIVE_MT):
            # Union contents: accept any allowed alternative, then narrow
            # element/key/value types for the container alternatives.
            expected_nb_of_contents = 3
            enforce((isinstance(self.content_union_1, CustomDataModel1) or isinstance(self.content_union_1, bool) or isinstance(self.content_union_1, bytes) or isinstance(self.content_union_1, dict) or isinstance(self.content_union_1, float) or isinstance(self.content_union_1, frozenset) or (type(self.content_union_1) is int) or isinstance(self.content_union_1, str) or isinstance(self.content_union_1, tuple)), "Invalid type for content 'content_union_1'. Expected either of '['DataModel1', 'bool', 'bytes', 'dict', 'float', 'frozenset', 'int', 'str', 'tuple']'. Found '{}'.".format(type(self.content_union_1)))
            if isinstance(self.content_union_1, frozenset):
                enforce(all(((type(element) is int) for element in self.content_union_1)), "Invalid type for elements of content 'content_union_1'. Expected 'int'.")
            if isinstance(self.content_union_1, tuple):
                enforce(all((isinstance(element, bool) for element in self.content_union_1)), "Invalid type for tuple elements in content 'content_union_1'. Expected 'bool'.")
            if isinstance(self.content_union_1, dict):
                for (key_of_content_union_1, value_of_content_union_1) in self.content_union_1.items():
                    enforce((isinstance(key_of_content_union_1, str) and (type(value_of_content_union_1) is int)), "Invalid type for dictionary key, value in content 'content_union_1'. Expected 'str', 'int'.")
            enforce((isinstance(self.content_union_2, dict) or isinstance(self.content_union_2, frozenset) or (type(self.content_union_2) is int) or isinstance(self.content_union_2, tuple)), "Invalid type for content 'content_union_2'. Expected either of '['dict', 'frozenset', 'int', 'tuple']'. Found '{}'.".format(type(self.content_union_2)))
            if isinstance(self.content_union_2, frozenset):
                enforce((all((isinstance(element, bytes) for element in self.content_union_2)) or all(((type(element) is int) for element in self.content_union_2)) or all((isinstance(element, str) for element in self.content_union_2))), "Invalid type for frozenset elements in content 'content_union_2'. Expected either 'bytes' or 'int' or 'str'.")
            if isinstance(self.content_union_2, tuple):
                enforce((all((isinstance(element, bool) for element in self.content_union_2)) or all((isinstance(element, bytes) for element in self.content_union_2)) or all((isinstance(element, float) for element in self.content_union_2))), "Invalid type for tuple elements in content 'content_union_2'. Expected either 'bool' or 'bytes' or 'float'.")
            if isinstance(self.content_union_2, dict):
                for (key_of_content_union_2, value_of_content_union_2) in self.content_union_2.items():
                    enforce(((isinstance(key_of_content_union_2, bool) and isinstance(value_of_content_union_2, bytes)) or ((type(key_of_content_union_2) is int) and isinstance(value_of_content_union_2, float)) or (isinstance(key_of_content_union_2, str) and (type(value_of_content_union_2) is int))), "Invalid type for dictionary key, value in content 'content_union_2'. Expected 'bool','bytes' or 'int','float' or 'str','int'.")
            enforce((isinstance(self.content_union_3, CustomDataModel2) or isinstance(self.content_union_3, CustomDataModel3)), "Invalid type for content 'content_union_3'. Expected either of '['DataModel2', 'DataModel3']'. Found '{}'.".format(type(self.content_union_3)))
        elif (self.performative == TProtocolMessage.Performative.PERFORMATIVE_O):
            # Optional contents: each present slot increments the expected count.
            expected_nb_of_contents = 0
            if self.is_set('content_o_ct'):
                expected_nb_of_contents += 1
                content_o_ct = cast(CustomDataModel4, self.content_o_ct)
                enforce(isinstance(content_o_ct, CustomDataModel4), "Invalid type for content 'content_o_ct'. Expected 'DataModel4'. Found '{}'.".format(type(content_o_ct)))
            if self.is_set('content_o_bool'):
                expected_nb_of_contents += 1
                content_o_bool = cast(bool, self.content_o_bool)
                enforce(isinstance(content_o_bool, bool), "Invalid type for content 'content_o_bool'. Expected 'bool'. Found '{}'.".format(type(content_o_bool)))
            if self.is_set('content_o_set_int'):
                expected_nb_of_contents += 1
                content_o_set_int = cast(FrozenSet[int], self.content_o_set_int)
                enforce(isinstance(content_o_set_int, frozenset), "Invalid type for content 'content_o_set_int'. Expected 'frozenset'. Found '{}'.".format(type(content_o_set_int)))
                enforce(all(((type(element) is int) for element in content_o_set_int)), "Invalid type for frozenset elements in content 'content_o_set_int'. Expected 'int'.")
            if self.is_set('content_o_list_bytes'):
                expected_nb_of_contents += 1
                content_o_list_bytes = cast(Tuple[(bytes, ...)], self.content_o_list_bytes)
                enforce(isinstance(content_o_list_bytes, tuple), "Invalid type for content 'content_o_list_bytes'. Expected 'tuple'. Found '{}'.".format(type(content_o_list_bytes)))
                enforce(all((isinstance(element, bytes) for element in content_o_list_bytes)), "Invalid type for tuple elements in content 'content_o_list_bytes'. Expected 'bytes'.")
            if self.is_set('content_o_dict_str_int'):
                expected_nb_of_contents += 1
                content_o_dict_str_int = cast(Dict[(str, int)], self.content_o_dict_str_int)
                enforce(isinstance(content_o_dict_str_int, dict), "Invalid type for content 'content_o_dict_str_int'. Expected 'dict'. Found '{}'.".format(type(content_o_dict_str_int)))
                for (key_of_content_o_dict_str_int, value_of_content_o_dict_str_int) in content_o_dict_str_int.items():
                    enforce(isinstance(key_of_content_o_dict_str_int, str), "Invalid type for dictionary keys in content 'content_o_dict_str_int'. Expected 'str'. Found '{}'.".format(type(key_of_content_o_dict_str_int)))
                    enforce((type(value_of_content_o_dict_str_int) is int), "Invalid type for dictionary values in content 'content_o_dict_str_int'. Expected 'int'. Found '{}'.".format(type(value_of_content_o_dict_str_int)))
        elif (self.performative == TProtocolMessage.Performative.PERFORMATIVE_EMPTY_CONTENTS):
            expected_nb_of_contents = 0
        # Finally: no extra or missing contents, and message 1 must target 0.
        enforce((expected_nb_of_contents == actual_nb_of_contents), 'Incorrect number of contents. Expected {}. Found {}'.format(expected_nb_of_contents, actual_nb_of_contents))
        if (self.message_id == 1):
            enforce((self.target == 0), "Invalid 'target'. Expected 0 (because 'message_id' is 1). Found {}.".format(self.target))
    except (AEAEnforceError, ValueError, KeyError) as e:
        _default_logger.error(str(e))
        return False
    return True
def create_generic_coordinates(net, mg=None, library='igraph', respect_switches=False, geodata_table='bus_geodata', buses=None, overwrite=False):
    """Create artificial bus coordinates from the network topology.

    Builds a graph of the network with the chosen backend ('igraph' or
    'networkx'), lays it out, and writes the resulting x/y coordinates into
    ``net[geodata_table]``. Returns the (mutated) net.

    :param net: pandapower network.
    :param mg: optional pre-built networkx graph to lay out (networkx only).
    :param library: graph backend, 'igraph' or 'networkx'.
    :param respect_switches: honour open switches when building the graph.
    :param geodata_table: name of the geodata table to fill.
    :param buses: optional subset of buses to place (igraph path / index).
    :param overwrite: allow overwriting an existing geodata table.
    :raises ValueError: if *library* names an unknown backend.
    """
    _prepare_geodata_table(net, geodata_table, overwrite)
    if library == 'igraph':
        if not IGRAPH_INSTALLED:
            soft_dependency_error('build_igraph_from_pp()', 'igraph')
        graph, meshed, roots = build_igraph_from_pp(net, respect_switches, buses=buses)
        coords = coords_from_igraph(graph, roots, meshed)
    elif library == 'networkx':
        # Reuse the caller's graph when given; never mutate it.
        graph_nx = top.create_nxgraph(net, respect_switches=respect_switches, include_out_of_service=True) if mg is None else copy.deepcopy(mg)
        coords = coords_from_nxgraph(graph_nx)
    else:
        raise ValueError("Unknown library %s - chose 'igraph' or 'networkx'" % library)
    if len(coords):
        # Layout returns (y, x) — note the swapped assignment.
        net[geodata_table].x = coords[1]
        net[geodata_table].y = coords[0]
        net[geodata_table].index = net.bus.index if buses is None else buses
    return net
def get_predictor(args, mode):
    """Return the gene predictor instance for *mode*.

    'search' mode needs no predictor (None); 'prodigal' mode builds a
    ProdigalPredictor from *args*. Any other mode is an error.

    :raises EmapperException: when *mode* is not a known prediction mode.
    """
    if mode == GENEPRED_MODE_SEARCH:
        return None
    if mode == GENEPRED_MODE_PRODIGAL:
        return ProdigalPredictor(args)
    raise EmapperException('Unknown gene prediction mode %s' % mode)
class OptionPlotoptionsLineDragdropGuideboxDefault(Options):
    """Generated wrapper for the drag-and-drop guide-box default style options.

    NOTE(review): every option appears as a getter/setter pair with the SAME
    name; in plain Python the second ``def`` shadows the first, so as written
    the getters are unreachable. This pattern matches generated
    ``@property`` / ``@<name>.setter`` pairs whose decorators are not visible
    here — confirm against the generator's output.
    """
    # CSS class name applied to the guide box.
    def className(self):
        return self._config_get('highcharts-drag-box-default')
    def className(self, text: str):
        self._config(text, js_type=False)
    # Fill color of the guide box.
    def color(self):
        return self._config_get('rgba(0, 0, 0, 0.1)')
    def color(self, text: str):
        self._config(text, js_type=False)
    # Mouse cursor shown while dragging.
    def cursor(self):
        return self._config_get('move')
    def cursor(self, text: str):
        self._config(text, js_type=False)
    # Border color of the guide box.
    def lineColor(self):
        return self._config_get('#888')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    # Border width in pixels.
    def lineWidth(self):
        return self._config_get(1)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    # Stacking order of the guide box.
    def zIndex(self):
        return self._config_get(900)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def make_commkey(key, session_id, ticks=50):
    """Build the 4-byte communication key for ZK device authentication.

    The device expects: the low 32 bits of *key* bit-reversed, offset by
    *session_id*, XOR-masked with the ASCII bytes 'ZKSO', 16-bit halves
    swapped, then each byte XORed with the low byte of *ticks* (which also
    replaces the third byte verbatim).

    :param key: device communication key (coerced to int).
    :param session_id: current session id (coerced to int).
    :param ticks: time-based salt; only its low byte is used.
    :return: 4-byte authentication token.
    """
    key = int(key)
    session_id = int(session_id)
    # Reverse the low 32 bits of the key.
    k = 0
    for i in range(32):
        if key & (1 << i):
            k = (k << 1) | 1
        else:
            k = k << 1
    # Mask to 32 bits: the reversed key plus session_id can exceed 2**32-1,
    # which would make pack('<I', ...) raise struct.error.
    k = (k + session_id) & 0xFFFFFFFF
    # Use explicit little-endian formats ('<'); the device protocol is
    # little-endian and native-order packing would break on big-endian hosts.
    k = pack(b'<I', k)
    k = unpack(b'<BBBB', k)
    k = pack(b'<BBBB', k[0] ^ ord('Z'), k[1] ^ ord('K'), k[2] ^ ord('S'), k[3] ^ ord('O'))
    # Swap the two 16-bit halves.
    k = unpack(b'<HH', k)
    k = pack(b'<HH', k[1], k[0])
    B = 255 & ticks
    k = unpack(b'<BBBB', k)
    # Third byte carries the ticks byte itself (not XORed).
    return pack(b'<BBBB', k[0] ^ B, k[1] ^ B, B, k[3] ^ B)
# NOTE(review): the tuple below reads like the argument list of a stripped
# `@mark.parametrize(...)` decorator for the test function that follows —
# as a bare expression statement it has no effect; confirm against the
# original test file.
('config_name, overrides, expected, warning_file', [param('include_nested_group_name_', [], [ResultDefault(config_path='group1/group2/file1', package='group1.file1', parent='group1/group_item1_name_'), ResultDefault(config_path='group1/group_item1_name_', parent='include_nested_group_name_', package='group1', is_self=True), ResultDefault(config_path='include_nested_group_name_', package='', is_self=True)], 'group1/group_item1_name_', id='include_nested_group_name_'), param('include_nested_group_name_', ['group1/.file1=file2'], [ResultDefault(config_path='group1/group2/file2', package='group1.file2', parent='group1/group_item1_name_'), ResultDefault(config_path='group1/group_item1_name_', parent='include_nested_group_name_', package='group1', is_self=True), ResultDefault(config_path='include_nested_group_name_', package='', is_self=True)], 'group1/group_item1_name_', id='include_nested_group_name_'), param('include_nested_config_item_name_', [], [ResultDefault(config_path='group1/group2/file1', package='group1.file1', parent='group1/config_item_name_'), ResultDefault(config_path='group1/config_item_name_', package='group1', parent='include_nested_config_item_name_', is_self=True), ResultDefault(config_path='include_nested_config_item_name_', package='', is_self=True, primary=True)], 'group1/config_item_name_', id='include_nested_config_item_name_')])
def test_include_nested_group_name_(config_name: str, overrides: List[str], expected: List[ResultDefault], warning_file: str) -> None:
    # Checks that resolving the defaults list emits the deprecation warning
    # for the `_name_` keyword and produces the expected ResultDefault list.
    # NOTE(review): the URL string literal below is truncated/unterminated in
    # this copy of the file — the original presumably held the deprecation
    # docs URL; restore it from upstream.
    url = '
    msg = f'''In {warning_file}: Defaults List contains deprecated keyword _name_, see {url}
'''
    with warns(UserWarning, match=re.escape(msg)):
        _test_defaults_list_impl(config_name=config_name, overrides=overrides, expected=expected)
# NOTE(review): the line below looks like the tail of a stripped decorator
# (e.g. `@pytest.mark.isa('neon')`); as written it is a syntax fragment —
# confirm against the original test file.
.isa('neon')
def test_neon_memcpy(compiler):
    """Compile an exo NEON memcpy kernel and check it copies exactly.

    The inner function is an exo DSL procedure (its `@proc`-style decorator
    and DSL types like `R[n] DRAM` / `f32[4] Neon` are not plain Python):
    it copies 4-float vectors with NEON loads/stores and falls back to a
    scalar tail loop for the remainder.
    """
    def memcpy_neon(n: size, dst: (R[n] DRAM), src: (R[n] DRAM)):
        for i in seq(0, ((n + 3) / 4)):
            if ((n - (4 * i)) >= 4):
                tmp: (f32[4] Neon)
                neon_vld_4xf32(tmp, src[(4 * i):((4 * i) + 4)])
                neon_vst_4xf32(dst[(4 * i):((4 * i) + 4)], tmp)
            else:
                # Scalar tail for the last n % 4 elements.
                for j in seq(0, (n - (4 * i))):
                    dst[((4 * i) + j)] = src[((4 * i) + j)]
    fn = compiler.compile(memcpy_neon, skip_on_fail=True, CMAKE_C_FLAGS='-mcpu=apple-a14')
    # Sizes straddle the 4-lane boundaries to exercise both branches.
    for n in (7, 8, 9, 31, 32, 33, 127, 128, 129):
        inp = np.array([float(i) for i in range(n)], dtype=np.float32)
        out = np.array([float(0) for _ in range(n)], dtype=np.float32)
        fn(None, n, out, inp)
        assert np.array_equal(inp, out)
class OptionSeriesGaugeSonificationContexttracksMappingPlaydelay(Options):
    """Generated wrapper for the sonification context-track playDelay mapping.

    NOTE(review): each option appears as a getter/setter pair with the SAME
    name, so in plain Python the second ``def`` shadows the first and the
    getters are unreachable. This matches generated ``@property`` /
    ``@<name>.setter`` pairs whose decorators are not visible here — confirm
    against the generator's output.
    """
    # Custom mapping function for the play delay.
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    # Data property the delay is mapped to.
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    # Upper bound of the mapped range.
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    # Lower bound of the mapped range.
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    # Scope the mapping is computed within.
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def select_random_faucet():
    """Pick one of the 500 pre-funded test faucet accounts at random.

    :return: tuple ``(index, private_key)`` — the chosen faucet index in
        ``[0, 499]`` and its private key, formed by suffixing the shared
        60-hex-char base with the index as 4 zero-padded hex digits.
    """
    base_key = 'deaddeaddeaddead5fb92d83ed54c0ea1eb74e72a84ef980d42953caaa6d'
    index = random.randrange(500)
    return (index, base_key + format(index, '04x'))
class RelationshipsForWafRule(ModelComposed):
    """Generated OpenAPI composed model for a WAF rule's relationships.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    lines below look like decorators (``@...``) whose '@' was stripped in
    this copy — as written they are no-op expression statements. Confirm
    against the generator's output. The ``__init__`` definition continues
    past this excerpt.
    """
    # No enum restrictions or extra validations for this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not declared in the spec."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    # The model itself cannot be null in payloads.
    _nullable = False
    _property
    def openapi_types():
        """Map each declared attribute to its allowed (tuple of) types."""
        lazy_import()
        return {'waf_tags': (RelationshipWafTagsWafTags,), 'waf_rule_revisions': (RelationshipWafRuleRevisionWafRuleRevisions,)}
    _property
    def discriminator():
        """No polymorphic discriminator for this model."""
        return None
    # JSON attribute name for each Python attribute (identity mapping here).
    attribute_map = {'waf_tags': 'waf_tags', 'waf_rule_revisions': 'waf_rule_revisions'}
    read_only_vars = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Construct an instance from deserialized API response data."""
        # Framework-internal kwargs are popped before attribute assignment.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track visited classes to break cycles in composed-schema validation.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            # Silently drop unknown keys when the configuration asks for it
            # and an additional-properties model exists to absorb them.
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal attributes that must always exist on an instance.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
_js_args_to_python_args
def __init__(self, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
composed_info = validate_get_composed_info(constant_args, kwargs, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
discarded_args = composed_info[3]
for (var_name, var_value) in kwargs.items():
if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
continue
setattr(self, var_name, var_value)
if (var_name in self.read_only_vars):
raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
_property
def _composed_schemas():
lazy_import()
return {'anyOf': [RelationshipWafRuleRevisions, RelationshipWafTags], 'allOf': [], 'oneOf': []} |
# NOTE(review): `.xfail(...)` looks like the tail of a stripped
# `@pytest.mark.xfail(...)` decorator — confirm against the original file.
.xfail(reason='Infura rate limiting - the test suite needs a refactor', strict=False)
def test_existing_different_chains(network):
    """A contract fetched via the mainnet explorer must not resolve by bare
    address after switching to a different chain (ropsten)."""
    network.connect('mainnet')
    with pytest.warns(BrownieCompilerWarning):
        Contract.from_explorer('0x9f8f72aa9304c8b593d555f12ef6589cc3a579a2')
    network.disconnect()
    network.connect('ropsten')
    # The mainnet address is unknown on ropsten, so lookup must fail.
    with pytest.raises(ValueError):
        Contract('0x9f8f72aa9304c8b593d555f12ef6589cc3a579a2')
def test_align_extract_taxon_fasta_from_alignments(o_dir, e_dir, request):
    """Run ``phyluce_align_extract_taxon_fasta_from_alignments`` and compare
    the extracted ``gallus_gallus`` sequences against the expected FASTA.

    Args:
        o_dir: temporary output directory fixture.
        e_dir: directory with expected test data.
        request: pytest request fixture (used for the repo root dir).
    """
    program = 'bin/align/phyluce_align_extract_taxon_fasta_from_alignments'
    output = os.path.join(o_dir, 'mafft-gblocks-clean-gallus.fasta')
    cmd = [os.path.join(request.config.rootdir, program), '--alignments', os.path.join(e_dir, 'mafft-gblocks-clean'), '--output', output, '--input-format', 'nexus', '--taxon', 'gallus_gallus']
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, stderr) = proc.communicate()
    assert (proc.returncode == 0), print('{}'.format(stderr.decode('utf-8')))
    # BUG FIX: `assert output` only tested a non-empty string literal and
    # could never fail; verify the program actually wrote the file.
    assert os.path.isfile(output), 'There is no output'
    expected_file = os.path.join(e_dir, 'mafft-gblocks-clean-gallus.fasta')
    observed = SeqIO.to_dict(SeqIO.parse(output, 'fasta'))
    expected = SeqIO.to_dict(SeqIO.parse(expected_file, 'fasta'))
    # Renamed loop variable: the original shadowed the `observed` dict.
    for (name, record) in observed.items():
        assert (expected[name].seq == record.seq)
def test_headerdb_persist_header_returns_new_canonical_chain(headerdb, genesis_header):
    """Each persist_header call must report exactly the headers that just
    became canonical (an empty tuple when the header lands on a
    non-canonical fork)."""
    genesis_result, _ = headerdb.persist_header(genesis_header)
    assert genesis_result == (genesis_header,)
    chain_a = mk_header_chain(genesis_header, 3)
    chain_b = mk_header_chain(genesis_header, 2)
    chain_c = mk_header_chain(genesis_header, 5)
    # Chain A: every header extends the canonical head, so each one is
    # reported as newly canonical on its own.
    for header in chain_a:
        newly_canonical, _ = headerdb.persist_header(header)
        assert newly_canonical == (header,)
    # Chain B: a shorter fork — nothing ever becomes canonical.
    for header in chain_b:
        newly_canonical, _ = headerdb.persist_header(header)
        assert newly_canonical == ()
    # Chain C: a longer fork. It stays non-canonical until its 4th header
    # out-lengths chain A, at which point the whole prefix is reported at
    # once; afterwards each header extends the head individually.
    for idx, header in enumerate(chain_c, 1):
        newly_canonical, _ = headerdb.persist_header(header)
        if idx <= 3:
            assert newly_canonical == ()
        elif idx == 4:
            assert newly_canonical == chain_c[:idx]
            assert_headers_eq(newly_canonical[-1], header)
        else:
            assert newly_canonical == (header,)
def extractYasuitlBlogspotCom(item):
    """Map a 'yasuitl.blogspot.com' feed item to a release message.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items that carry neither chapter nor volume info.
    if 'preview' in item['title'].lower():
        return None
    if not (chp or vol):
        return None
    mappings = (
        ('Dream Life', 'Dream Life -Dreams in a Different World-', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in mappings:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extractThesolmannBlogspotCom(item):
    """Map a 'thesolmann.blogspot.com' feed item to a release message.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items that carry neither chapter nor volume info.
    if 'preview' in item['title'].lower():
        return None
    if not (chp or vol):
        return None
    mappings = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in mappings:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class SplitLargeConcatTestCase(unittest.TestCase):
    """Verify that a `concatenate` op with many inputs is split into several
    concatenate kernels while the compiled module still reproduces
    `torch.cat`'s result (checked with allclose at 1e-2 tolerance)."""
    def __init__(self, *args, **kwargs):
        super(SplitLargeConcatTestCase, self).__init__(*args, **kwargs)
        # Counter used to give every compiled test module a unique .so name.
        self.test_count = 0
    def _make_tensors(self, num_inputs, input_shape, dtype, input_names=None):
        """Create `num_inputs` graph input tensors of `input_shape`/`dtype`.

        If `input_names` is given it must contain one name per tensor;
        otherwise names default to `input_{i}`.
        """
        if (input_names is not None):
            assert (num_inputs == len(input_names))
        input_tensors = []
        for i in range(num_inputs):
            name = (input_names[i] if (input_names is not None) else f'input_{i}')
            t = Tensor(shape=input_shape, dtype=dtype, name=name, is_input=True)
            input_tensors.append(t)
        return input_tensors
    def _test_split_large_concat_simple(self, cat_dim, num_inputs, input_shape, split_count, test_name, dtype='float16'):
        """Concat `num_inputs` inputs and check the compiled graph contains
        exactly `split_count` ops, then compare output against torch.cat."""
        _LOGGER.info(f'test_split_large_concat with num_inputs={num_inputs!r}, input_shape={input_shape!r}')
        input_tensors = self._make_tensors(num_inputs, input_shape, dtype)
        concat_op = ops.concatenate()
        Y = concat_op(input_tensors, cat_dim)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        target = detect_target()
        dll_name = f'test_{self.test_count}.so'
        module = compile_model(Y, target, './tmp', test_name, dll_name=dll_name)
        sorted_graph = module.debug_sorted_graph
        # Expect one graph tensor per input plus the single output tensor.
        self.assertEqual(len(sorted_graph), (num_inputs + 1))
        sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
        # The single concatenate must have been split into `split_count` ops.
        self.assertEqual(len(sorted_ops), split_count)
        inputs_pt = [get_random_torch_tensor(input_shape, dtype) for _ in range(num_inputs)]
        y_pt = torch.cat(inputs_pt, cat_dim)
        input_name_to_index = module.get_input_name_to_index_map()
        inputs = [0 for i in range(len(inputs_pt))]
        input_names = [x._attrs['name'] for x in input_tensors]
        # Place each torch input at the index the compiled module expects.
        for (x_name, x_pt) in zip(input_names, inputs_pt):
            inputs[input_name_to_index[x_name]] = x_pt
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(y_pt, y, atol=0.01, rtol=0.01))
        self.test_count += 1
    def test_split_large_concat_simple(self):
        # 136 inputs -> 4 kernels; 34 -> 1 (no split); 35 -> 2.
        self._test_split_large_concat_simple(cat_dim=1, num_inputs=136, input_shape=(2, 3), split_count=4, test_name='split_large_concat_simple')
        self._test_split_large_concat_simple(cat_dim=1, num_inputs=34, input_shape=(2, 3), split_count=1, test_name='split_large_concat_simple')
        self._test_split_large_concat_simple(cat_dim=1, num_inputs=35, input_shape=(2, 3), split_count=2, test_name='split_large_concat_simple')
    def _test_split_large_concat_with_add(self, cat_dim, num_inputs, input_shape, test_name, dtype='float16'):
        """Split-concat whose output feeds an elementwise broadcast ADD."""
        _LOGGER.info(f'test_split_large_concat with num_inputs={num_inputs!r}, input_shape={input_shape!r}')
        input_tensors = self._make_tensors(num_inputs, input_shape, dtype)
        concat_op = ops.concatenate()
        Y1 = concat_op(input_tensors, cat_dim)
        x_n_shape = [1]
        X_ns = self._make_tensors(1, x_n_shape, dtype, ['input_x_n'])
        X_n = X_ns[0]
        Y = ops.elementwise(FuncEnum.ADD)(Y1, X_n)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        target = detect_target()
        dll_name = f'test_{self.test_count}.so'
        module = compile_model(Y, target, './tmp', test_name, dll_name=dll_name)
        inputs_pt = [get_random_torch_tensor(input_shape, dtype) for _ in range(num_inputs)]
        x_n_pt = get_random_torch_tensor(x_n_shape, dtype)
        y1_pt = torch.cat(inputs_pt, cat_dim)
        inputs_pt.append(x_n_pt)
        y_pt = (y1_pt + x_n_pt)
        input_name_to_index = module.get_input_name_to_index_map()
        inputs = [0 for i in range(len(inputs_pt))]
        input_names = [x._attrs['name'] for x in (input_tensors + [X_n])]
        for (x_name, x_pt) in zip(input_names, inputs_pt):
            inputs[input_name_to_index[x_name]] = x_pt
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(y_pt, y, atol=0.01, rtol=0.01))
        self.test_count += 1
    def test_split_large_concat_with_add(self):
        self._test_split_large_concat_with_add(cat_dim=1, num_inputs=136, input_shape=(2, 3, 4), test_name='split_large_concat_with_add')
    def _test_split_large_concat_with_strided_add(self, cat_dim, num_inputs, input_shape, test_name, dtype='float16'):
        """Split-concat where one concat input is itself an ADD output
        (exercises strided writes into the concat buffer)."""
        _LOGGER.info(f'test_split_large_concat with num_inputs={num_inputs!r}, input_shape={input_shape!r}')
        add_input_tensors = self._make_tensors(2, input_shape, dtype, ['add_input_0', 'add_input_1'])
        Y1 = ops.elementwise(FuncEnum.ADD)(add_input_tensors[0], add_input_tensors[1])
        concat_input_tensors = self._make_tensors(num_inputs, input_shape, dtype)
        concat_op = ops.concatenate()
        Y2 = concat_op(([Y1] + concat_input_tensors), cat_dim)
        x_n_shape = [1]
        X_ns = self._make_tensors(1, x_n_shape, dtype, ['input_x_n'])
        X_n = X_ns[0]
        Y = ops.elementwise(FuncEnum.ADD)(Y2, X_n)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        target = detect_target()
        dll_name = f'test_{self.test_count}.so'
        module = compile_model(Y, target, './tmp', test_name, dll_name=dll_name)
        add_inputs_pt = [get_random_torch_tensor(input_shape, dtype) for _ in range(2)]
        y1_pt = (add_inputs_pt[0] + add_inputs_pt[1])
        concat_inputs_pt = [get_random_torch_tensor(input_shape, dtype) for _ in range(num_inputs)]
        x_n_pt = get_random_torch_tensor(x_n_shape, dtype)
        y2_pt = torch.cat(([y1_pt] + concat_inputs_pt), cat_dim)
        y_pt = (y2_pt + x_n_pt)
        input_tensors = ((add_input_tensors + concat_input_tensors) + [X_n])
        inputs_pt = ((add_inputs_pt + concat_inputs_pt) + [x_n_pt])
        input_name_to_index = module.get_input_name_to_index_map()
        inputs = [0 for i in range(len(inputs_pt))]
        input_names = [x._attrs['name'] for x in input_tensors]
        for (x_name, x_pt) in zip(input_names, inputs_pt):
            inputs[input_name_to_index[x_name]] = x_pt
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(y_pt, y, atol=0.01, rtol=0.01))
        self.test_count += 1
    def test_split_large_concat_with_strided_add(self):
        self._test_split_large_concat_with_strided_add(cat_dim=1, num_inputs=136, input_shape=(2, 3), test_name='split_large_concat_with_strided_add')
    def _test_split_large_concat_with_strided_add_complex(self, cat_dim, num_inputs, input_shape, test_name, dtype='float16'):
        """Split-concat where ADD outputs and plain inputs interleave in the
        concat input list."""
        _LOGGER.info(f'test_split_large_concat with num_inputs={num_inputs!r}, input_shape={input_shape!r}')
        add_input_tensor_names = [f'add_input_{i}' for i in range((num_inputs * 2))]
        add_input_tensors = self._make_tensors((num_inputs * 2), input_shape, dtype, add_input_tensor_names)
        add_output_tensors = []
        for i in range(num_inputs):
            a = ops.elementwise(FuncEnum.ADD)(add_input_tensors[(i * 2)], add_input_tensors[((i * 2) + 1)])
            add_output_tensors.append(a)
        other_input_tensors = self._make_tensors(num_inputs, input_shape, dtype)
        concat_op = ops.concatenate()
        concat_input_tensors = []
        # Interleave: add-result, plain-input, add-result, plain-input, ...
        for i in range(num_inputs):
            concat_input_tensors.append(add_output_tensors[i])
            concat_input_tensors.append(other_input_tensors[i])
        Y = concat_op(concat_input_tensors, cat_dim)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        target = detect_target()
        dll_name = f'test_{self.test_count}.so'
        module = compile_model(Y, target, './tmp', test_name, dll_name=dll_name)
        add_inputs_pt = [get_random_torch_tensor(input_shape, dtype) for _ in range((num_inputs * 2))]
        add_outputs_pt = []
        for i in range(num_inputs):
            add_outputs_pt.append((add_inputs_pt[(i * 2)] + add_inputs_pt[((i * 2) + 1)]))
        other_inputs_pt = [get_random_torch_tensor(input_shape, dtype) for _ in range(num_inputs)]
        concat_inputs_pt = []
        for i in range(num_inputs):
            concat_inputs_pt.append(add_outputs_pt[i])
            concat_inputs_pt.append(other_inputs_pt[i])
        y_pt = torch.cat(concat_inputs_pt, cat_dim)
        input_tensors = (add_input_tensors + other_input_tensors)
        inputs_pt = (add_inputs_pt + other_inputs_pt)
        input_name_to_index = module.get_input_name_to_index_map()
        inputs = [0 for i in range(len(inputs_pt))]
        input_names = [x._attrs['name'] for x in input_tensors]
        for (x_name, x_pt) in zip(input_names, inputs_pt):
            inputs[input_name_to_index[x_name]] = x_pt
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(y_pt, y, atol=0.01, rtol=0.01))
        self.test_count += 1
    def test_split_large_concat_with_strided_add_complex(self):
        self._test_split_large_concat_with_strided_add_complex(cat_dim=1, num_inputs=136, input_shape=(2, 3), test_name='split_large_concat_with_strided_add_complex')
    def _test_split_large_concat_with_reuse(self, cat_dim, num_inputs, input_shape, test_name, dtype='float16'):
        """Split-concat where some tensors appear both as ADD inputs and
        (shuffled) directly as concat inputs — exercises input reuse."""
        _LOGGER.info(f'test_split_large_concat with num_inputs={num_inputs!r}, input_shape={input_shape!r}')
        add_input_tensor_names = [f'add_input_{i}' for i in range((num_inputs * 2))]
        add_input_tensors = self._make_tensors((num_inputs * 2), input_shape, dtype, add_input_tensor_names)
        add_output_tensors = []
        for i in range(num_inputs):
            a = ops.elementwise(FuncEnum.ADD)(add_input_tensors[(i * 2)], add_input_tensors[((i * 2) + 1)])
            add_output_tensors.append(a)
        other_input_tensors = self._make_tensors(num_inputs, input_shape, dtype)
        add_inputs_shuffle = list(range(len(add_input_tensors)))
        random.shuffle(add_inputs_shuffle)
        # Reuse 10 of the raw ADD inputs directly as concat inputs.
        add_inputs_for_concat = [add_input_tensors[i] for i in add_inputs_shuffle[0:10]]
        concat_input_tensors = ((add_output_tensors + other_input_tensors) + add_inputs_for_concat)
        concat_inputs_shuffle = list(range(len(concat_input_tensors)))
        random.shuffle(concat_inputs_shuffle)
        real_concat_input_tensors = [concat_input_tensors[i] for i in concat_inputs_shuffle]
        concat_op = ops.concatenate()
        Y = concat_op(real_concat_input_tensors, cat_dim)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        target = detect_target()
        dll_name = f'test_{self.test_count}.so'
        module = compile_model(Y, target, './tmp', test_name, dll_name=dll_name)
        add_inputs_pt = [get_random_torch_tensor(input_shape, dtype) for _ in range((num_inputs * 2))]
        add_outputs_pt = []
        for i in range(num_inputs):
            add_outputs_pt.append((add_inputs_pt[(i * 2)] + add_inputs_pt[((i * 2) + 1)]))
        # Mirror the same shuffles on the torch side.
        add_inputs_for_concat_pt = [add_inputs_pt[i] for i in add_inputs_shuffle[0:10]]
        other_inputs_pt = [get_random_torch_tensor(input_shape, dtype) for _ in range(num_inputs)]
        concat_inputs_pt = ((add_outputs_pt + other_inputs_pt) + add_inputs_for_concat_pt)
        real_concat_inputs_pt = [concat_inputs_pt[i] for i in concat_inputs_shuffle]
        y_pt = torch.cat(real_concat_inputs_pt, cat_dim)
        input_tensors = (add_input_tensors + other_input_tensors)
        inputs_pt = (add_inputs_pt + other_inputs_pt)
        input_name_to_index = module.get_input_name_to_index_map()
        inputs = [0 for i in range(len(inputs_pt))]
        input_names = [x._attrs['name'] for x in input_tensors]
        for (x_name, x_pt) in zip(input_names, inputs_pt):
            inputs[input_name_to_index[x_name]] = x_pt
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(y_pt, y, atol=0.01, rtol=0.01))
        self.test_count += 1
    def test_split_large_concat_with_reuse(self):
        self._test_split_large_concat_with_reuse(cat_dim=1, num_inputs=136, input_shape=(2, 3), test_name='split_large_concat_with_reuse')
    def _test_split_large_concat_with_slice(self, cat_dim, num_slice_inputs, slice_input_shape, start_indices, end_indices, num_add_inputs, add_input_shape, test_name, dtype='float16'):
        """Split-concat fed by dynamic_slice outputs plus ADD outputs."""
        slice_input_tensor_names = [f'slice_input_{i}' for i in range(num_slice_inputs)]
        slice_input_tensors = self._make_tensors(num_slice_inputs, slice_input_shape, dtype, slice_input_tensor_names)
        slice_output_tensors = []
        for slice_input_tensor in slice_input_tensors:
            t = ops.dynamic_slice()(slice_input_tensor, start_indices=start_indices, end_indices=end_indices)
            slice_output_tensors.append(t)
        add_input_tensor_names = [f'add_input_{i}' for i in range((num_add_inputs * 2))]
        add_input_tensors = self._make_tensors((num_add_inputs * 2), add_input_shape, dtype, add_input_tensor_names)
        add_output_tensors = []
        for i in range(num_add_inputs):
            a = ops.elementwise(FuncEnum.ADD)(add_input_tensors[(i * 2)], add_input_tensors[((i * 2) + 1)])
            add_output_tensors.append(a)
        concat_input_tensors = (slice_output_tensors + add_output_tensors)
        concat_op = ops.concatenate()
        Y = concat_op(concat_input_tensors, cat_dim)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        target = detect_target()
        dll_name = f'test_{self.test_count}.so'
        module = compile_model(Y, target, './tmp', test_name, dll_name=dll_name)
        slice_inputs_pt = [get_random_torch_tensor(slice_input_shape, dtype) for _ in range(num_slice_inputs)]
        # Replicate dynamic_slice with Python slice objects per dimension.
        slice_indices = [slice(i, j) for (i, j) in zip(start_indices, end_indices)]
        slice_outputs_pt = [inp_pt[slice_indices] for inp_pt in slice_inputs_pt]
        add_inputs_pt = [get_random_torch_tensor(add_input_shape, dtype) for _ in range((num_add_inputs * 2))]
        add_outputs_pt = []
        for i in range(num_add_inputs):
            add_outputs_pt.append((add_inputs_pt[(i * 2)] + add_inputs_pt[((i * 2) + 1)]))
        concat_inputs_pt = (slice_outputs_pt + add_outputs_pt)
        y_pt = torch.cat(concat_inputs_pt, cat_dim)
        input_tensors = (slice_input_tensors + add_input_tensors)
        inputs_pt = (slice_inputs_pt + add_inputs_pt)
        input_name_to_index = module.get_input_name_to_index_map()
        inputs = [0 for i in range(len(inputs_pt))]
        input_names = [x._attrs['name'] for x in input_tensors]
        for (x_name, x_pt) in zip(input_names, inputs_pt):
            inputs[input_name_to_index[x_name]] = x_pt
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(y_pt, y, atol=0.01, rtol=0.01))
        self.test_count += 1
    def test_split_large_concat_with_slice(self):
        self._test_split_large_concat_with_slice(cat_dim=1, num_slice_inputs=161, slice_input_shape=(20, 20), start_indices=[0, 0], end_indices=[None, 10], num_add_inputs=5, add_input_shape=(20, (161 * 10)), test_name='split_large_concat_with_dynamic_slice')
    def _test_split_large_concat_with_reshape(self, num_inputs, input_shape, reshape_shape, cat_dim, test_name, dtype='float16'):
        """Split-concat where the first concat input is a reshape output."""
        X = Tensor(shape=reshape_shape, dtype=dtype, name='x', is_input=True)
        reshape_output = ops.reshape()(X, input_shape)
        normal_input_tensors = self._make_tensors(num_inputs, input_shape, dtype)
        concat_input_tensors = ([reshape_output] + normal_input_tensors)
        concat_op = ops.concatenate()
        Y = concat_op(concat_input_tensors, cat_dim)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        target = detect_target()
        dll_name = f'test_{self.test_count}.so'
        module = compile_model(Y, target, './tmp', test_name, dll_name=dll_name)
        self.test_count += 1
        x_pt = get_random_torch_tensor(reshape_shape, dtype)
        reshape_output_pt = torch.reshape(x_pt, input_shape)
        normal_inputs_pt = [get_random_torch_tensor(input_shape, dtype) for _ in range(num_inputs)]
        concat_inputs_pt = ([reshape_output_pt] + normal_inputs_pt)
        y_pt = torch.cat(concat_inputs_pt, cat_dim)
        input_name_to_index = module.get_input_name_to_index_map()
        inputs = [0 for i in range(len(concat_inputs_pt))]
        input_names = ([X._attrs['name']] + [i._attrs['name'] for i in normal_input_tensors])
        for (i_name, i_pt) in zip(input_names, ([x_pt] + normal_inputs_pt)):
            inputs[input_name_to_index[i_name]] = i_pt
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(y_pt, y, atol=0.01, rtol=0.01))
    def test_split_large_concat_with_reshape(self):
        self._test_split_large_concat_with_reshape(num_inputs=180, input_shape=(10, 40), reshape_shape=(10, 2, 20), cat_dim=1, test_name='split_large_concat_with_reshape')
    # NOTE(review): this bare tuple looks like the argument list of a
    # stripped `@unittest.skipIf(...)` decorator — confirm against the
    # original file.
    ((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
    def test_split_large_concat_float(self):
        """Rerun the scenarios above with float32 instead of float16."""
        self._test_split_large_concat_simple(cat_dim=1, num_inputs=35, input_shape=(2, 3), split_count=2, test_name='split_large_concat_simple_float', dtype='float')
        self._test_split_large_concat_with_add(cat_dim=1, num_inputs=136, input_shape=(2, 3, 4), test_name='split_large_concat_with_add_float', dtype='float')
        self._test_split_large_concat_with_strided_add(cat_dim=1, num_inputs=136, input_shape=(2, 3), test_name='split_large_concat_with_strided_add_float', dtype='float')
        self._test_split_large_concat_with_strided_add_complex(cat_dim=1, num_inputs=136, input_shape=(2, 3), test_name='split_large_concat_with_strided_add_complex_float', dtype='float')
        self._test_split_large_concat_with_reuse(cat_dim=1, num_inputs=136, input_shape=(2, 3), test_name='split_large_concat_with_reuse_float', dtype='float')
        self._test_split_large_concat_with_slice(cat_dim=1, num_slice_inputs=161, slice_input_shape=(20, 20), start_indices=[0, 0], end_indices=[None, 10], num_add_inputs=5, add_input_shape=(20, (161 * 10)), test_name='split_large_concat_with_dynamic_slice_float', dtype='float')
def test_parse_server_js_define_new():
    """parse_server_js_define must collect every ServerJS `define` payload
    embedded in the page, both the inline and the <script>-wrapped form."""
    html = '\n some data;require("TimeSliceImpl").guard(function(){new (require("ServerJS"))().handle({"define":[["DTSGInitialData",[],{"token":""},100]],"require":[...]});}, "ServerJS define", {"root":true})();\n more data\n <script><script>require("TimeSliceImpl").guard(function(){var s=new (require("ServerJS"))();s.handle({"define":[["DTSGInitData",[],{"token":"","async_get_token":""},3333]],"require":[...]});require("Run").onAfterLoad(function(){s.cleanup(require("TimeSliceImpl"))});}, "ServerJS define", {"root":true})();</script>\n other irrelevant data\n '
    expected = {'DTSGInitialData': {'token': ''}, 'DTSGInitData': {'async_get_token': '', 'token': ''}}
    assert parse_server_js_define(html) == expected
def environment_python_interpreter():
    """Return a `/usr/bin/env` command for the running interpreter.

    For example '/usr/bin/env python3.11' on CPython, '/usr/bin/env pypy'
    on PyPy. Raises KeyError for an unrecognized implementation.
    """
    templates = {
        'CPython': 'python%(major)d.%(minor)d',
        'Jython': 'jython',
        'PyPy': 'pypy',
        'IronPython': 'ipy',
    }
    version = {'major': sys.version_info[0], 'minor': sys.version_info[1], 'patch': sys.version_info[2]}
    interpreter = templates[platform.python_implementation()] % version
    return '/usr/bin/env ' + interpreter
def test_wrap_method():
    """`wrap` must instantiate the wrapper around an environment and reject
    calls that omit the wrapper's required arguments with a TypeError."""
    config: dict = load_env_config(dummy_wrappers_module, 'dummy_env_config_with_dummy_wrappers.yml')
    env_config: dict = config['env']
    env_config['core_env'] = {'_target_': DummyCoreEnvironment, 'observation_space': ObservationConversion().space()}
    env = DummyEnvironment(**env_config)
    wrapped_a: DummyWrapperA = DummyWrapperA.wrap(env, arg_a=1)
    assert isinstance(wrapped_a, DummyWrapperA)
    # Wrapping without DummyWrapperB's required arguments must fail.
    try:
        DummyWrapperB.wrap(env)
    except TypeError:
        pass
    else:
        raise Exception("Wrapping shouldn't work without specifying the needed arguments.")
    wrapped_b: DummyWrapperB = DummyWrapperB.wrap(env, arg_b=2, arg_c=3)
    assert isinstance(wrapped_b, DummyWrapperB)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.