code stringlengths 281 23.7M |
|---|
class OptionSeriesHistogramDragdropGuideboxDefault(Options):
    """Options wrapper for Highcharts `dragDrop.guideBox.default` settings.

    NOTE(review): each getter/setter pair below shares one name, so the
    @property / @<name>.setter decorators appear to have been stripped
    during extraction -- TODO confirm against the original generated file.
    """
    def className(self):
        # Default CSS class name applied to the drag guide box.
        return self._config_get('highcharts-drag-box-default')

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        # Default guide-box fill color.
        return self._config_get('rgba(0, 0, 0, 0.1)')

    def color(self, text: str):
        self._config(text, js_type=False)

    def cursor(self):
        # Mouse cursor shown while dragging.
        return self._config_get('move')

    def cursor(self, text: str):
        self._config(text, js_type=False)

    def lineColor(self):
        # Default guide-box border color.
        return self._config_get('#888')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        # Border width in pixels.
        return self._config_get(1)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        # Stacking order of the guide box.
        return self._config_get(900)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
def extractTranslatingboredomWordpressCom(item):
    """Parse a feed item from translatingboredom.wordpress.com.

    Returns a release message for items tagged with a known series,
    None for previews or items without chapter/volume info, and
    False when no series tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (chp or vol):
        return None
    series_tags = (
        ('rcfn chapters', 'Rebirth of a Cannon Fodder in a Novel', 'translated'),
        ('rcfn', 'Rebirth of a Cannon Fodder in a Novel', 'translated'),
        ('mhsc chapters', 'Me and my Husband Sleep in a Coffin', 'translated'),
        ('mhsc', 'Me and my Husband Sleep in a Coffin', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, tl_type in series_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the leading `_toolkit([...])` call looks like a class decorator
# whose `@requires_` prefix was lost in extraction -- TODO confirm.
_toolkit([ToolkitName.qt, ToolkitName.wx])
class TestCustomImageEnumEditor(BaseTestMixin, unittest.TestCase):
    """GUI tests for the custom-style ImageEnumEditor."""

    def setUp(self):
        BaseTestMixin.setUp(self)

    def tearDown(self):
        BaseTestMixin.tearDown(self)

    def setup_gui(self, model, view):
        # NOTE(review): this generator is used as a context manager below, so
        # a @contextmanager decorator was presumably stripped -- TODO confirm.
        with create_ui(model, dict(view=view)) as ui:
            process_cascade_events()
            (yield ui.get_editors('value')[0])

    def test_custom_editor_more_cols(self):
        # Same four images, laid out on a single row via cols=4; just checks
        # that the UI can be created and torn down without error.
        enum_edit = EnumModel()
        view = View(UItem('value', editor=ImageEnumEditor(values=['top left', 'top right', 'bottom left', 'bottom right'], prefix=':', suffix='_origin', path='dummy_path', cols=4), style='custom'), resizable=True)
        with reraise_exceptions(), self.setup_gui(enum_edit, view):
            pass

    def test_custom_editor_selection(self):
        # Clicking an image button updates the model value.
        enum_edit = EnumModel()
        with reraise_exceptions(), self.setup_gui(enum_edit, get_view('custom')) as editor:
            self.assertEqual(get_button_strings(editor.control), ['top left', 'top right', 'bottom left', 'bottom right'])
            self.assertEqual(enum_edit.value, 'top left')
            self.assertEqual(get_all_button_selected_status(editor.control), [True, False, False, False])
            click_on_image(get_button_control(editor.control, 1))
            process_cascade_events()
            self.assertEqual(enum_edit.value, 'top right')

    def test_custom_editor_value_changed(self):
        # Changing the model value updates which image button is selected.
        enum_edit = EnumModel()
        with reraise_exceptions(), self.setup_gui(enum_edit, get_view('custom')) as editor:
            self.assertEqual(get_button_strings(editor.control), ['top left', 'top right', 'bottom left', 'bottom right'])
            self.assertEqual(enum_edit.value, 'top left')
            self.assertEqual(get_all_button_selected_status(editor.control), [True, False, False, False])
            enum_edit.value = 'top right'
            process_cascade_events()
            self.assertEqual(get_all_button_selected_status(editor.control), [False, True, False, False])
def generate_test_paths(n_samples, dq_H_E, dq_B_W, paths_start_at_origin=True, include_outliers_B_H=False, outlier_probability_B_H=0.1, include_noise_B_H=False, noise_sigma_trans_B_H=0.01, noise_sigma_rot_B_H=0.1, include_outliers_W_E=False, outlier_probability_W_E=0.1, include_noise_W_E=False, noise_sigma_trans_W_E=0.1, noise_sigma_rot_W_E=0.01):
    """Generate a corresponding pair of hand (B_H) and eye (W_E) test paths.

    The W_E path is derived from an independently perturbed B_H path via the
    dq_H_E / dq_B_W calibration offsets.  When *paths_start_at_origin* both
    paths are aligned so their first pose is the identity.
    """
    hand_path = generate_test_path(n_samples, include_outliers_B_H, outlier_probability_B_H, include_noise_B_H, noise_sigma_trans_B_H, noise_sigma_rot_B_H)
    if paths_start_at_origin:
        hand_path = align_paths_at_index(hand_path)
    # Separate perturbation settings for the path that the eye poses derive from.
    perturbed_base = generate_test_path(n_samples, include_outliers_W_E, outlier_probability_W_E, include_noise_W_E, noise_sigma_trans_W_E, noise_sigma_rot_W_E)
    eye_path = compute_dual_quaternions_with_offset(perturbed_base, dq_H_E, dq_B_W)
    if paths_start_at_origin:
        eye_path = align_paths_at_index(eye_path)
    return (hand_path, eye_path)
def test_warn_if_transform_df_contains_categories_not_seen_in_fit(df_enc, df_enc_rare):
    """Unseen categories warn when unseen='ignore' and raise when unseen='raise'."""
    msg = 'During the encoding, NaN values were introduced in the feature(s) var_A.'

    # unseen='ignore': transforming unseen categories emits a single UserWarning.
    with pytest.warns(UserWarning) as warning_record:
        transformer = WoEEncoder(unseen='ignore')
        transformer.fit(df_enc[['var_A', 'var_B']], df_enc['target'])
        transformer.transform(df_enc_rare[['var_A', 'var_B']])
    assert len(warning_record) == 1
    assert warning_record[0].message.args[0] == msg

    # unseen='raise': the same situation raises a ValueError with the same text.
    with pytest.raises(ValueError) as error_record:
        transformer = WoEEncoder(unseen='raise')
        transformer.fit(df_enc[['var_A', 'var_B']], df_enc['target'])
        transformer.transform(df_enc_rare[['var_A', 'var_B']])
    assert str(error_record.value) == msg
def _validate_rule(action_type: Optional[str], storage_destination_id: Optional[str], masking_strategy: Optional[Dict[(str, Union[(str, Dict[(str, str)])])]]) -> None:
    """Validate a Rule's fields, raising RuleValidationError on any violation.

    Checks: an action_type is present; erasure rules have a masking strategy
    and no storage destination; update rules are rejected outright.
    """
    if not action_type:
        raise common_exceptions.RuleValidationError('action_type is required.')
    is_erasure = action_type == ActionType.erasure.value
    if is_erasure and storage_destination_id is not None:
        raise common_exceptions.RuleValidationError('Erasure Rules cannot have storage destinations.')
    if is_erasure and masking_strategy is None:
        raise common_exceptions.RuleValidationError('Erasure Rules must have masking strategies.')
    if action_type in [ActionType.update.value]:
        raise common_exceptions.RuleValidationError(f'{action_type} Rules are not supported at this time.')
def test_request_stream(test_client_factory):
    """A streamed request body is echoed back for empty, JSON and raw posts."""
    async def app(scope, receive, send):
        request = Request(scope, receive)
        # Collect the chunked body, then echo it back as JSON.
        chunks = []
        async for chunk in request.stream():
            chunks.append(chunk)
        payload = b''.join(chunks)
        response = JSONResponse({'body': payload.decode()})
        await response(scope, receive, send)

    client = test_client_factory(app)
    assert client.get('/').json() == {'body': ''}
    assert client.post('/', json={'a': '123'}).json() == {'body': '{"a": "123"}'}
    assert client.post('/', data='abc').json() == {'body': 'abc'}
class LoggingFormatVersionInteger(ModelNormal):
    """Generated OpenAPI model for the logging `format_version` integer enum.

    NOTE(review): the bare `_property` / `_js_args_to_python_args` lines look
    like decorators whose `@cached_` / `@convert_` prefixes were lost during
    extraction -- TODO confirm against the original generated file.
    """
    # Closed set of accepted values for `format_version`.
    allowed_values = {('format_version',): {'v1': 1, 'v2': 2}}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not declared in the schema.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # attribute name -> tuple of accepted types.
        return {'format_version': (int,)}
    _property
    def discriminator():
        return None
    attribute_map = {'format_version': 'format_version'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate the model from raw (spec-named) server data."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop keys not declared in the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Instantiate the model from user-supplied keyword arguments."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only come from server data, not users.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def eval_function_completions(responses: List[str], definition: str, test: Optional[str]=None, entry_point: Optional[str]=None, assertions: Optional[Union[(str, Callable[([str], Tuple[(str, float)])])]]=None, timeout: Optional[float]=3, use_docker: Optional[bool]=True) -> Dict:
    """Evaluate generated function completions.

    responses: candidate completions; each is either a full `def ...` or a
        body fragment appended to *definition*.
    With no *assertions*, every response is executed against *test* and an
    aggregate success dict is returned.  Otherwise assertions screen the
    responses first and the selected one is (optionally) run against *test*.
    """
    n = len(responses)
    if (assertions is None):
        # No assertion screening: run every response against the test and
        # report expected/any success over the n samples.
        success_list = []
        for i in range(n):
            response = _remove_check(responses[i])
            code = (f'''{response}
{test}
check({entry_point})''' if response.startswith('def') else f'''{definition}{response}
{test}
check({entry_point})''')
            success = (execute_code(code, timeout=timeout, use_docker=use_docker)[0] == 0)
            success_list.append(success)
        return {'expected_success': (1 - pow((1 - (sum(success_list) / n)), n)), 'success': any((s for s in success_list))}
    if (callable(assertions) and (n > 1)):
        # Generate assertions on demand (only worthwhile with several candidates).
        (assertions, gen_cost) = assertions(definition)
    else:
        # NOTE(review): a string `assertions` value appears to be discarded
        # here (reset to None) -- verify this matches the intended behavior.
        (assertions, gen_cost) = (None, 0)
    if ((n > 1) or (test is None)):
        # Select the first response that passes the assertions.
        for i in range(n):
            response = responses[i] = _remove_check(responses[i])
            code = (f'''{response}
{assertions}''' if response.startswith('def') else f'''{definition}{response}
{assertions}''')
            succeed_assertions = (execute_code(code, timeout=timeout, use_docker=use_docker)[0] == 0)
            if succeed_assertions:
                break
    else:
        # Single response with a test available: skip assertion screening.
        succeed_assertions = False
        (i, response) = (0, responses[0])
    if (test is None):
        # No test to run; report the assertion-selection outcome only.
        return {'index_selected': i, 'succeed_assertions': succeed_assertions, 'gen_cost': gen_cost, 'assertions': assertions}
    code_test = (f'''{response}
{test}
check({entry_point})''' if response.startswith('def') else f'''{definition}{response}
{test}
check({entry_point})''')
    success = (execute_code(code_test, timeout=timeout, use_docker=use_docker)[0] == 0)
    return {'index_selected': i, 'succeed_assertions': succeed_assertions, 'success': success, 'gen_cost': gen_cost, 'assertions': assertions}
class BeaconConfig():
    """Parsed Cobalt Strike Beacon configuration block.

    NOTE(review): `from_file`/`from_path`/`from_bytes` take `cls`, and the
    accessors below are referenced without call parentheses elsewhere in this
    class (e.g. `self.raw_settings.get`, `self.domain_uri_pairs`), so
    @classmethod / @property decorators appear to have been stripped during
    extraction -- TODO confirm against the original source.
    """

    def __init__(self, config_block: bytes) -> None:
        self.config_block: bytes = config_block
        self.settings_tuple = tuple(iter_settings(config_block))
        # Populated by the from_* constructors when known.
        self.xorkey: Optional[bytes] = None
        self.xorencoded: bool = False
        self.pe_export_stamp: Optional[int] = None
        self.pe_compile_stamp: Optional[int] = None
        self.architecture: Optional[str] = None
        # Lazily-built caches for the settings_map() variants.
        self._settings: Optional[Mapping[(str, Any)]] = None
        self._settings_by_index: Optional[Mapping[(int, Any)]] = None
        self._raw_settings: Optional[Mapping[(str, Any)]] = None
        self._raw_settings_by_index: Optional[Mapping[(int, Any)]] = None

    def from_file(cls, fobj: BinaryIO, xor_keys: List[bytes]=None, all_xor_keys: bool=False) -> 'BeaconConfig':
        """Build a BeaconConfig from the first valid config block found in *fobj*."""
        for (config_block, extra_info) in iter_beacon_config_blocks(fobj, xor_keys=xor_keys, all_xor_keys=all_xor_keys):
            bconfig = cls(config_block)
            bconfig.xorkey = extra_info['xorkey']
            bconfig.xorencoded = extra_info['xorencoded']
            try:
                fh = (XorEncodedFile.from_file(fobj) if bconfig.xorencoded else fobj)
            except ValueError:
                # Not actually XOR-encoded; fall back to the raw file.
                fh = fobj
            (bconfig.pe_compile_stamp, bconfig.pe_export_stamp) = pe.find_compile_stamps(fh)
            bconfig.architecture = pe.find_architecture(fh)
            return bconfig
        raise ValueError('No valid Beacon configuration found')

    def from_path(cls, path: Union[(str, os.PathLike)], xor_keys: List[bytes]=None, all_xor_keys: bool=False) -> 'BeaconConfig':
        """Build a BeaconConfig from a file path."""
        with open(path, 'rb') as fobj:
            return cls.from_file(fobj, xor_keys=xor_keys, all_xor_keys=all_xor_keys)

    def from_bytes(cls, data: bytes, xor_keys: List[bytes]=None, all_xor_keys: bool=False) -> 'BeaconConfig':
        """Build a BeaconConfig from an in-memory buffer."""
        return cls.from_file(io.BytesIO(data), xor_keys=xor_keys, all_xor_keys=all_xor_keys)

    def __repr__(self) -> str:
        return f'<BeaconConfig {self.domains}>'

    def setting_enums(self) -> list:
        # Numeric enum values of every setting present in the block.
        return [s.index.value for s in self.settings_tuple]

    def max_setting_enum(self) -> int:
        return max(self.setting_enums)

    def settings_map(self, index_type='enum', pretty=False, parse=True) -> MappingProxyType:
        """Build a read-only mapping of settings.

        index_type: 'name' keys by enum name, 'const' by enum value, anything
        else by the enum member itself.  *parse* decodes big-endian short/int
        values; *pretty* additionally applies per-setting formatters.
        """
        settings = OrderedDict()
        for setting in self.settings_tuple:
            val = setting.value
            if (index_type == 'name'):
                key = setting.index.name
            elif (index_type == 'const'):
                key = setting.index.value
            else:
                key = setting.index
            if (parse or pretty):
                if (setting.type == SettingsType.TYPE_SHORT):
                    val = u16be(val)
                elif (setting.type == SettingsType.TYPE_INT):
                    val = u32be(val)
                if pretty:
                    pretty_func = SETTING_TO_PRETTYFUNC.get(setting.index)
                    if pretty_func:
                        val = pretty_func(val)
            settings[key] = val
        return MappingProxyType(settings)

    def raw_settings(self) -> Mapping[(str, Any)]:
        # Name-keyed settings without pretty-printing (cached).
        if (self._raw_settings is None):
            self._raw_settings = self.settings_map(index_type='name')
        return self._raw_settings

    def raw_settings_by_index(self) -> Mapping[(int, Any)]:
        if (self._raw_settings_by_index is None):
            self._raw_settings_by_index = self.settings_map(index_type='const')
        return self._raw_settings_by_index

    def settings(self) -> Mapping[(str, Any)]:
        # Name-keyed settings with pretty-printing applied (cached).
        if (self._settings is None):
            self._settings = self.settings_map(index_type='name', pretty=True)
        return self._settings

    def settings_by_index(self) -> Mapping[(int, Any)]:
        if (self._settings_by_index is None):
            self._settings_by_index = self.settings_map(index_type='const', pretty=True)
        return self._settings_by_index

    def domain_uri_pairs(self) -> List[Tuple[(str, str)]]:
        # SETTING_DOMAINS is a comma-separated alternation: dom,uri,dom,uri,...
        domains = self.raw_settings.get('SETTING_DOMAINS')
        if (not isinstance(domains, bytes)):
            return []
        return list(grouper(null_terminated_str(domains).split(','), 2))

    def uris(self) -> List[str]:
        # dict.fromkeys deduplicates while keeping first-seen order.
        return list(dict.fromkeys((uri for (_domain, uri) in self.domain_uri_pairs)))

    def domains(self) -> List[str]:
        return list(dict.fromkeys((domain for (domain, _uri) in self.domain_uri_pairs)))

    def submit_uri(self) -> Optional[str]:
        return self.settings.get('SETTING_SUBMITURI', None)

    def killdate(self) -> Optional[str]:
        """Kill date as 'YYYY-MM-DD', from the packed YYYYMMDD setting or the
        separate year/month/day settings (the latter win when all three set)."""
        s = self.settings
        killdate = s.get('SETTING_KILLDATE', 0)
        if killdate:
            date_str = str(killdate)
            year = int(date_str[:4])
            month = int(date_str[4:6])
            day = int(date_str[6:8])
            killdate = f'{year:02d}-{month:02d}-{day:02d}'
        else:
            killdate = None
        year = s.get('SETTING_KILLDATE_YEAR', 0)
        month = s.get('SETTING_KILLDATE_MONTH', 0)
        day = s.get('SETTING_KILLDATE_DAY', 0)
        if (year and month and day):
            killdate = f'{year:02d}-{month:02d}-{day:02d}'
        return killdate

    def protocol(self) -> Optional[str]:
        protocol = self.raw_settings.get('SETTING_PROTOCOL', None)
        if (protocol is None):
            return None
        return BeaconProtocol(protocol).name

    def port(self) -> Optional[int]:
        return self.raw_settings.get('SETTING_PORT', None)

    def watermark(self) -> Optional[int]:
        return self.raw_settings.get('SETTING_WATERMARK', None)

    def is_trial(self) -> bool:
        return (self.raw_settings.get('SETTING_CRYPTO_SCHEME') == CryptoScheme.CRYPTO_TRIAL_PRODUCT)

    def version(self) -> BeaconVersion:
        # Prefer the PE export stamp; fall back to inferring from settings.
        if self.pe_export_stamp:
            return BeaconVersion.from_pe_export_stamp(self.pe_export_stamp)
        return BeaconVersion.from_max_setting_enum(self.max_setting_enum)

    def public_key(self) -> bytes:
        return self.raw_settings.get('SETTING_PUBKEY', b'').rstrip(b'\x00')

    def sleeptime(self) -> Optional[int]:
        return self.raw_settings.get('SETTING_SLEEPTIME', None)

    def jitter(self) -> Optional[int]:
        return self.raw_settings.get('SETTING_JITTER', None)
class SubclassDefaultsSuper(HasTraits):
    """Base class with one of each trait flavour, used to check that
    subclasses inherit or override trait default values correctly."""
    a_str = Str()
    an_expr = Expression('[]')
    a_list = List()
    an_instance = Instance(Wrapper)
    # Wrapper traits: one with an explicit default, one without.
    a_wrapper_1 = WrapperTrait('bar')
    a_wrapper_2 = WrapperTrait()
    clone_wrapper_1 = CloneWrapperTrait('bar')
    clone_wrapper_2 = CloneWrapperTrait()
    disallow_default = DisallowDefaultValue()
    # NOTE(review): `self()` presumably refers to a module-level trait factory
    # named `self` (not the instance) -- confirm against the file's imports.
    self_trait = self()
def fill_axis(xs: List[int], values: List[Bytes32], length: int) -> List[Bytes32]:
    """Extend each field-element column of *values* to *length* entries.

    Each 32-byte value is split into fixed-size field elements; every element
    position is filled independently via fill(), then the rows are
    reassembled into 32-byte chunks.
    """
    element_size = FIELD_ELEMENT_BITS // 8
    # One integer column per field-element offset within the 32-byte values.
    columns = []
    for offset in range(0, 32, element_size):
        columns.append([bytes_to_int(value[offset:(offset + element_size)]) for value in values])
    filled_columns = [fill(xs, column, length) for column in columns]
    # Re-join the filled columns row by row.
    result = []
    for row in range(length):
        result.append(b''.join(int_to_bytes(column[row], element_size) for column in filled_columns))
    return result
# NOTE(review): these three bare calls look like class decorators whose `@`
# prefixes (e.g. `@_register_make`) were lost in extraction -- TODO confirm.
_register_make
_set_nxm_headers([ofproto_v1_0.NXM_OF_IP_DST, ofproto_v1_0.NXM_OF_IP_DST_W])
_field_header([ofproto_v1_0.NXM_OF_IP_DST, ofproto_v1_0.NXM_OF_IP_DST_W])
class MFIPDst(MFField):
    """Nicira extended match field for the IPv4 destination address."""
    pack_str = MF_PACK_STRING_BE32

    def __init__(self, header, value, mask=None):
        super(MFIPDst, self).__init__(header, MFIPDst.pack_str)
        self.value = value
        self.mask = mask

    def make(cls, header):
        # NOTE(review): passes the pack string where __init__ expects `value`;
        # looks suspicious -- verify against the original source.
        return cls(header, MFIPDst.pack_str)

    def put(self, buf, offset, rule):
        # Serialize address + mask from the rule's nw_dst flow/wildcard fields.
        return self.putm(buf, offset, rule.flow.nw_dst, rule.wc.nw_dst_mask)
class sdict(Dict[(KT, VT)]):
    """dict subclass with attribute-style access; missing keys yield None."""
    __slots__ = ()
    # Route attribute mutation straight to item mutation.
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
    # Item access never raises KeyError (dict.get returns None instead).
    __getitem__ = dict.get

    def __getattr__(self, key: str) -> Optional[VT]:
        # Dunder lookups must fail normally so copy/pickle protocols work.
        if key.startswith('__'):
            raise AttributeError
        return self.get(key, None)

    def __repr__(self):
        return '<sdict %s>' % dict.__repr__(self)

    def __getstate__(self):
        return None

    def __copy__(self):
        return sdict(self)

    def __deepcopy__(self, memo):
        return sdict(copy.deepcopy(dict(self)))
def tickets_view(page, is_my_view=False, subscribed=False):
    """Render the (filtered, sorted, paginated) ticket list.

    page: 1-based page number.  is_my_view limits results to the current
    user's tickets; subscribed limits to tickets the user is subscribed to.
    """
    form = SearchTicketForm()
    # Filter parameters arrive via the query string.
    status = request.args.get('status')
    department = request.args.get('department')
    category = request.args.get('category')
    content = request.args.get('content')
    user_id = request.args.get('user_id')
    assigned_id = request.args.get('assigned_id')
    created_id = request.args.get('created_id')
    if form.validate_on_submit():
        redirect_url = FlicketTicket.form_redirect(form, url='flicket_bp.tickets')
        return redirect(redirect_url)
    arg_sort = request.args.get('sort')
    if arg_sort:
        # Persist the chosen sort in a cookie (28 days), then redirect to the
        # same URL with the sort parameter removed.
        args = request.args.copy()
        del args['sort']
        response = make_response(redirect(url_for('flicket_bp.tickets', **args)))
        response.set_cookie('tickets_sort', arg_sort, max_age=2419200, path=url_for('flicket_bp.tickets', **args))
        return response
    sort = request.cookies.get('tickets_sort')
    if sort:
        set_cookie = True
    else:
        # No stored preference: fall back to the default sort.
        sort = 'priority_desc'
        set_cookie = False
    (ticket_query, form) = FlicketTicket.query_tickets(form, department=department, category=category, status=status, user_id=user_id, content=content, assigned_id=assigned_id, created_id=created_id)
    if is_my_view:
        ticket_query = FlicketTicket.my_tickets(ticket_query)
    ticket_query = FlicketTicket.sorted_tickets(ticket_query, sort)
    if subscribed:
        ticket_query = FlicketTicket.my_subscribed_tickets(ticket_query)
    # Count before pagination so the template can show the total.
    number_results = ticket_query.count()
    ticket_query = ticket_query.paginate(page=page, per_page=app.config['posts_per_page'])
    title = gettext('Tickets')
    if is_my_view:
        title = gettext('My Tickets')
    if content:
        form.content.data = content
    response = make_response(render_template('flicket_tickets.html', title=title, form=form, tickets=ticket_query, page=page, number_results=number_results, status=status, department=department, category=category, user_id=user_id, created_id=created_id, assigned_id=assigned_id, sort=sort, base_url='flicket_bp.tickets'))
    if set_cookie:
        # Refresh the cookie so the current sort preference sticks.
        response.set_cookie('tickets_sort', sort, max_age=2419200, path=url_for('flicket_bp.tickets'))
    return response
def validate_bls_withdrawal_credentials(bls_withdrawal_credentials: str) -> bytes:
    """Validate and normalize BLS withdrawal credentials.

    Returns the 32-byte credentials.

    Raises ValidationError when the credentials are already in eth1 form, or
    are not valid BLS-form credentials (wrong length or wrong prefix byte).
    """
    bls_withdrawal_credentials_bytes = normalize_bls_withdrawal_credentials_to_bytes(bls_withdrawal_credentials)
    if is_eth1_address_withdrawal_credentials(bls_withdrawal_credentials_bytes):
        raise ValidationError((load_text(['err_is_already_eth1_form']) + '\n'))
    # Explicit checks instead of `assert`: assertions are stripped when Python
    # runs with -O, which would silently disable this validation.  (The old
    # `except ValueError` branch was dead -- neither check can raise it.)
    if (len(bls_withdrawal_credentials_bytes) != 32
            or bls_withdrawal_credentials_bytes[:1] != BLS_WITHDRAWAL_PREFIX):
        raise ValidationError((load_text(['err_not_bls_form']) + '\n'))
    return bls_withdrawal_credentials_bytes
# NOTE(review): the bare `(np.multiply)` line looks like a decorator (e.g.
# `@implements(np.multiply)`) whose `@...` prefix was lost -- TODO confirm.
(np.multiply)
def mul(x, y, out=None, out_like=None, sizing='optimal', method='raw', **kwargs):
    """Fixed-point multiply of two Fxp (or Fxp-coercible) values.

    The lossless ("optimal") result size of a product is: signed if either
    operand is, word and fractional widths are the sums of the operands'.
    Actual sizing/output handling is delegated to `_function_over_two_vars`.
    """
    def _mul_raw(x, y, n_frac):
        # Fall back to Python-object arrays when the widths could overflow
        # the machine word size (_n_word_max).
        precision_cast = ((lambda m: np.array(m, dtype=object)) if (n_frac >= _n_word_max) else (lambda m: m))
        raw_cast = ((lambda m: np.array(m, dtype=object)) if ((x.n_word + y.n_word) >= _n_word_max) else (lambda m: m))
        return ((raw_cast(x.val) * raw_cast(y.val)) * precision_cast((2 ** ((n_frac - x.n_frac) - y.n_frac))))
    if (not isinstance(x, Fxp)):
        x = Fxp(x)
    if (not isinstance(y, Fxp)):
        y = Fxp(y)
    # Optimal (lossless) output size for a fixed-point product.
    signed = (x.signed or y.signed)
    n_frac = (x.n_frac + y.n_frac)
    n_word = (x.n_word + y.n_word)
    n_int = ((n_word - int(signed)) - n_frac)
    optimal_size = (signed, n_word, n_int, n_frac)
    return _function_over_two_vars(repr_func=np.multiply, raw_func=_mul_raw, x=x, y=y, out=out, out_like=out_like, sizing=sizing, method=method, optimal_size=optimal_size, **kwargs)
def _check_surfer_integrity(field, shape, data_range):
if (field.shape != shape):
raise IOError("Grid shape {} doesn't match shape read from header {}.".format(field.shape, shape))
field_range = [field.min(), field.max()]
if (not np.allclose(field_range, data_range)):
raise IOError("Grid data range {} doesn't match range read from header {}.".format(field_range, data_range)) |
def ModelFactory(name, RangeFactory):
    """Build a HasTraits class (renamed to *name*) whose attributes exercise
    *RangeFactory* with various open/closed bound combinations."""
    class ModelWithRanges(HasTraits):
        percentage = RangeFactory(0.0, 100.0)
        # Endpoint-exclusion variants of the same [0, 100] interval.
        open_closed = RangeFactory(0.0, 100.0, exclude_low=True)
        closed_open = RangeFactory(0.0, 100.0, exclude_high=True)
        open = RangeFactory(0.0, 100.0, exclude_low=True, exclude_high=True)
        closed = RangeFactory(0.0, 100.0)
        # One-sided ranges: only a lower / only an upper bound.
        steam_temperature = RangeFactory(low=100.0)
        ice_temperature = Range(high=0.0)
    ModelWithRanges.__name__ = name
    return ModelWithRanges
def _leaf_detail_kind(kind: str) -> SharedTextKind:
    """Map a user-supplied kind string to its *_detail SharedTextKind.

    Raises UserError for unrecognized kind strings; non source/sink kinds
    are returned unchanged.
    """
    parsed = SharedTextKind.from_string(kind)
    if parsed is None:
        raise UserError(f'Invalid kind {kind}')
    if parsed == SharedTextKind.source:
        return SharedTextKind.source_detail
    if parsed == SharedTextKind.sink:
        return SharedTextKind.sink_detail
    return parsed
class IPv6TransportEndpoints(TransportEndpointsBase):
    """Transport endpoints bound over UDP/IPv6."""

    def _addEndpoint(self, addr):
        """Parse *addr* ('[host]:port', '[host]' or bare 'host'; default port
        161) and open a UDP/IPv6 server-mode transport for it."""
        if not udp6:
            raise SnmpsimError('This system does not support UDP/IP6')
        if ']:' in addr and addr[0] == '[':
            # '[host]:port' form -- bracketed host with an explicit port.
            host, port = addr.split(']:')
            try:
                host, port = host[1:], int(port)
            except Exception:
                raise SnmpsimError('improper IPv6/UDP endpoint %s' % addr)
        elif addr[0] == '[' and addr[(-1)] == ']':
            # '[host]' form -- bracketed host, default SNMP port.
            host, port = addr[1:(-1)], 161
        else:
            # Bare host, default SNMP port.
            host, port = addr, 161
        return (udp6.Udp6Transport().openServerMode((host, port)), addr)
class TestWorkflowUtils(unittest.TestCase):
    """Tests for workflow_utils extraction and snapshot upload."""

    def setUp(self) -> None:
        # Tests write into BLOB_DIR; make sure it exists.
        if (not os.path.exists(BLOB_DIR)):
            os.mkdir(BLOB_DIR)

    def tearDown(self) -> None:
        if os.path.exists(BLOB_DIR):
            shutil.rmtree(BLOB_DIR)

    def test_extract_workflows_from_file(self):
        workflows = workflow_utils.extract_workflows_from_file(os.path.join(os.path.dirname(__file__), 'for_test_workflow_utils/workflow.py'))
        self.assertEqual(2, len(workflows))
        self.assertEqual('workflow1', workflows[0].name)
        self.assertEqual('workflow2', workflows[1].name)

    def test_extract_workflows_from_zip(self):
        # Zip the sample workflow file as <tmp>/<name>.zip, then extract from it.
        file_path = os.path.join(os.path.dirname(__file__), 'for_test_workflow_utils/workflow.py')
        with tempfile.TemporaryDirectory() as temp_dir:
            (filename, _) = os.path.splitext(os.path.split(file_path)[(- 1)])
            dest_dir = (Path(temp_dir) / filename)
            dest_dir.mkdir(parents=True, exist_ok=True)
            shutil.copy2(file_path, dest_dir)
            zip_file_name = '{}.zip'.format(filename)
            zip_file_path = (Path(temp_dir) / zip_file_name)
            make_dir_zipfile(dest_dir, zip_file_path)
            workflows = workflow_utils.extract_workflows_from_zip(zip_file_path, temp_dir)
            self.assertEqual(2, len(workflows))
    # NOTE(review): the bare `.object(...)` line looks like a truncated
    # `@mock.patch.object(...)` decorator -- TODO confirm.
    .object(config_constants, 'BLOB_MANAGER', BLOB_MANAGER_DEFAULT_VALUE)
    def test_upload_workflow_snapshot(self):
        file_path = os.path.join(os.path.dirname(__file__), 'for_test_workflow_utils/workflow.py')
        artifact = os.path.join(os.path.dirname(__file__), 'for_test_workflow_utils/artifact')
        workflow_utils.upload_workflow_snapshot(file_path=file_path, artifacts=[artifact])
        self.assertTrue(os.path.exists(os.path.join(BLOB_DIR, 'workflow.zip')))
def start_client():
    """Start the GATT heart-rate client: read the body-sensor location once
    and subscribe to heart-rate measurement notifications over D-Bus."""
    # One-shot asynchronous read of the Body Sensor Location characteristic.
    body_snsr_loc_chrc[0].ReadValue({}, reply_handler=body_sensor_val_cb, error_handler=generic_error_cb, dbus_interface=GATT_CHRC_IFACE)
    # Watch for value updates via the standard D-Bus Properties interface.
    hr_msrmt_prop_iface = dbus.Interface(hr_msrmt_chrc[0], DBUS_PROP_IFACE)
    hr_msrmt_prop_iface.connect_to_signal('PropertiesChanged', hr_msrmt_changed_cb)
    hr_msrmt_chrc[0].StartNotify(reply_handler=hr_msrmt_start_notify_cb, error_handler=generic_error_cb, dbus_interface=GATT_CHRC_IFACE)
class OptionPlotoptionsPyramidSonificationPointgrouping(Options):
    """Options wrapper for pyramid-series sonification point grouping.

    NOTE(review): each getter/setter pair below shares one name, so the
    @property / @<name>.setter decorators appear to have been stripped
    during extraction -- TODO confirm against the original generated file.
    """
    def algorithm(self):
        # Grouping algorithm; default 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Point grouping is enabled by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Time span per group; default 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property used for grouping; default 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
def generate_graph(workflow_meta: WorkflowMeta):
    """Build a displayable graph for the given workflow metadata."""
    graph: AIGraph = extract_graph(workflow_meta)
    # Node lookup tables keyed by id and by name feed the edge builder.
    nodes, nodes_by_id, nodes_by_name = build_nodes(graph)
    parent_edges, children_edges = build_edges(graph, nodes, nodes_by_id, nodes_by_name)
    return build_graph(nodes_by_name, parent_edges, children_edges)
class EfuseDefineRegisters(EfuseRegistersBase):
    """eFuse controller register map and timing parameters for the target SoC."""
    EFUSE_MEM_SIZE = (508 + 4)
    # FIXME(review): the right-hand value of DR_REG_EFUSE_BASE was lost during
    # extraction -- restore the SoC's efuse-controller base address before use.
    DR_REG_EFUSE_BASE =
    # Register addresses, expressed as offsets from the controller base.
    EFUSE_PGM_DATA0_REG = DR_REG_EFUSE_BASE
    EFUSE_CHECK_VALUE0_REG = (DR_REG_EFUSE_BASE + 32)
    EFUSE_CLK_REG = (DR_REG_EFUSE_BASE + 456)
    EFUSE_CONF_REG = (DR_REG_EFUSE_BASE + 460)
    EFUSE_STATUS_REG = (DR_REG_EFUSE_BASE + 464)
    EFUSE_CMD_REG = (DR_REG_EFUSE_BASE + 468)
    EFUSE_RD_RS_ERR0_REG = (DR_REG_EFUSE_BASE + 404)
    EFUSE_RD_RS_ERR1_REG = (DR_REG_EFUSE_BASE + 408)
    EFUSE_RD_REPEAT_ERR0_REG = (DR_REG_EFUSE_BASE + 380)
    EFUSE_RD_REPEAT_ERR1_REG = (DR_REG_EFUSE_BASE + 384)
    EFUSE_RD_REPEAT_ERR2_REG = (DR_REG_EFUSE_BASE + 388)
    EFUSE_RD_REPEAT_ERR3_REG = (DR_REG_EFUSE_BASE + 392)
    EFUSE_RD_REPEAT_ERR4_REG = (DR_REG_EFUSE_BASE + 396)
    EFUSE_DAC_CONF_REG = (DR_REG_EFUSE_BASE + 488)
    EFUSE_RD_TIM_CONF_REG = (DR_REG_EFUSE_BASE + 492)
    EFUSE_WR_TIM_CONF1_REG = (DR_REG_EFUSE_BASE + 500)
    EFUSE_WR_TIM_CONF2_REG = (DR_REG_EFUSE_BASE + 504)
    EFUSE_DATE_REG = (DR_REG_EFUSE_BASE + 508)
    # Magic opcodes written to EFUSE_CONF_REG before program/read commands.
    EFUSE_WRITE_OP_CODE = 23130
    EFUSE_READ_OP_CODE = 23205
    EFUSE_PGM_CMD_MASK = 3
    EFUSE_PGM_CMD = 2
    EFUSE_READ_CMD = 1
    # Per-block error sources: (register, error mask, low bit, high bit);
    # None entries mean the whole register is used as-is.
    BLOCK_ERRORS = [(EFUSE_RD_REPEAT_ERR0_REG, None, None, None), (EFUSE_RD_RS_ERR0_REG, 7, 0, 3), (EFUSE_RD_RS_ERR0_REG, 7, 4, 7), (EFUSE_RD_RS_ERR0_REG, 7, 8, 11), (EFUSE_RD_RS_ERR0_REG, 7, 12, 15), (EFUSE_RD_RS_ERR0_REG, 7, 16, 19), (EFUSE_RD_RS_ERR0_REG, 7, 20, 23), (EFUSE_RD_RS_ERR0_REG, 7, 24, 27), (EFUSE_RD_RS_ERR0_REG, 7, 28, 31), (EFUSE_RD_RS_ERR1_REG, 7, 0, 3), (EFUSE_RD_RS_ERR1_REG, 7, 4, 7)]
    # NOTE: duplicate of the assignment above (harmless, same value).
    EFUSE_DAC_CONF_REG = (DR_REG_EFUSE_BASE + 488)
    # _S suffixes are bit shifts, _M suffixes the corresponding field masks.
    EFUSE_DAC_CLK_DIV_S = 0
    EFUSE_DAC_CLK_DIV_M = (255 << EFUSE_DAC_CLK_DIV_S)
    # NOTE: duplicate of the assignment above (harmless, same value).
    EFUSE_RD_TIM_CONF_REG = (DR_REG_EFUSE_BASE + 492)
    EFUSE_TSUR_A_S = 16
    EFUSE_TSUR_A_M = (255 << EFUSE_TSUR_A_S)
    EFUSE_TRD_S = 8
    EFUSE_TRD_M = (255 << EFUSE_TRD_S)
    EFUSE_THR_A_S = 0
    EFUSE_THR_A_M = (255 << EFUSE_THR_A_S)
    EFUSE_WR_TIM_CONF0_REG = (DR_REG_EFUSE_BASE + 496)
    EFUSE_TPGM_S = 16
    EFUSE_TPGM_M = (65535 << EFUSE_TPGM_S)
    EFUSE_TPGM_INACTIVE_S = 8
    EFUSE_TPGM_INACTIVE_M = (255 << EFUSE_TPGM_INACTIVE_S)
    EFUSE_THP_A_S = 0
    EFUSE_THP_A_M = (255 << EFUSE_THP_A_S)
    EFUSE_PWR_ON_NUM_S = 8
    EFUSE_PWR_ON_NUM_M = (65535 << EFUSE_PWR_ON_NUM_S)
    EFUSE_TSUP_A_S = 0
    EFUSE_TSUP_A_M = (255 << EFUSE_TSUP_A_S)
    EFUSE_PWR_OFF_NUM_S = 0
    EFUSE_PWR_OFF_NUM_M = (65535 << EFUSE_PWR_OFF_NUM_S)
    # Timing tables keyed by APB clock frequency in MHz.
    EFUSE_PROGRAMMING_TIMING_PARAMETERS = {80: (2, 800, 2, 4), 40: (1, 400, 1, 2), 20: (1, 200, 1, 1)}
    VDDQ_TIMING_PARAMETERS = {80: (160, 41472, 256), 40: (80, 20736, 128), 20: (40, 10368, 64)}
    EFUSE_READING_PARAMETERS = {80: (2, 4, 2), 40: (1, 2, 1), 20: (1, 1, 1)}
def test_default_keyword_in_message_and_keyword_only_in_message() -> None:
    """The two flags independently pull defaulted / keyword-only params into
    the message annotations; the return annotation is unaffected."""
    def f(a: int, *, b: bool, c: float=0.0) -> str:
        pass

    # (default_keyword_in_message, keyword_only_in_message) -> expected params.
    expected_by_flags = {
        (False, False): {'a': int},
        (False, True): {'a': int, 'b': bool},
        (True, False): {'a': int, 'c': float},
        (True, True): {'a': int, 'b': bool, 'c': float},
    }
    for (default_kw, kw_only), expected_params in expected_by_flags.items():
        result = parse_function_annotations(f, default_keyword_in_message=default_kw, keyword_only_in_message=kw_only)
        assert result == (expected_params, {'sample': str}, False)
class InstallWithCompile(install):
    """setuptools `install` command that compiles gettext catalogs first."""

    def run(self):
        # Deferred import so setup.py can be parsed without babel installed.
        from babel.messages.frontend import compile_catalog
        compiler = compile_catalog(self.distribution)
        option_dict = self.distribution.get_option_dict('compile_catalog')
        # Option values are (source, value) pairs; index 1 is the value.
        compiler.domain = [option_dict['domain'][1]]
        compiler.directory = option_dict['directory'][1]
        compiler.run()
        super().run()
class MySink(Node):
    """Sink node writing received ZMQ message payloads (delimiter-separated)
    to a file, terminating after NUM_MESSAGES messages."""
    TOPIC = Topic(ZMQMessage)
    config: MySinkConfig

    def setup(self) -> None:
        self.output_file = open(self.config.output_filename, 'wb')
        self.num_received = 0
    # NOTE(review): the bare `(TOPIC)` line looks like a subscriber decorator
    # (e.g. `@subscriber(TOPIC)`) whose prefix was lost -- TODO confirm.
    (TOPIC)
    async def sink(self, message: ZMQMessage) -> None:
        self.output_file.write(message.data)
        self.output_file.write(DATA_DELIMITER)
        self.num_received += 1
        if (self.num_received == NUM_MESSAGES):
            # Signal the framework to shut this node down cleanly.
            raise NormalTermination()

    def cleanup(self) -> None:
        self.output_file.close()
class Wildcard(FunctionSignature):
    """EQL `wildcard(source, pattern, ...)` function: shell-style `*` matching.

    NOTE(review): `to_regex`, `get_callback`, `alternate_render` and `run`
    take `cls` but show no decorator -- @classmethod decorators appear to
    have been stripped during extraction; TODO confirm.
    """
    name = 'wildcard'
    argument_types = [TypeHint.String, TypeHint.String.require_literal()]
    return_value = TypeHint.Boolean
    additional_types = TypeHint.String.require_literal()

    def to_regex(cls, *wildcards):
        """Combine wildcard patterns into one anchored alternation regex."""
        expressions = []
        head = '^'
        tail = '$'
        for wildcard in wildcards:
            pieces = [re.escape(p) for p in fold_case(wildcard).split('*')]
            regex = ((head + '.*?'.join(pieces)) + tail)
            tail_skip = '.*?$'
            # A trailing '*' means "anything to end of string"; dropping the
            # redundant suffix lets the regex engine stop early.
            if regex.endswith(tail_skip):
                regex = regex[:(- len(tail_skip))]
            expressions.append(regex)
        return '|'.join(expressions)

    def get_callback(cls, source_ast, *wildcard_literals):
        """Build a matcher callback; the pattern is compiled once up front."""
        wc_values = [wc.value for wc in wildcard_literals]
        pattern = cls.to_regex(*wc_values)
        compiled = re.compile(pattern, regex_flags())
        def callback(source, *_):
            # Non-string input falls through to an implicit None (falsy).
            if is_string(source):
                return (compiled.match(source) is not None)
        return callback

    def alternate_render(cls, arguments, precedence=None, **kwargs):
        # A single plain-string pattern renders as a simple equality check;
        # other argument shapes fall through to None (no alternate form).
        if ((len(arguments) == 2) and isinstance(arguments[1], String)):
            (lhs, rhs) = arguments
            return Comparison(lhs, Comparison.EQ, rhs).render(precedence, **kwargs)

    def run(cls, source, *wildcards):
        """Evaluate the match directly (implicitly None for non-strings)."""
        if is_string(source):
            pattern = cls.to_regex(*wildcards)
            compiled = re.compile(pattern, regex_flags())
            return (compiled.match(source) is not None)
class OnEmailVerificationRequestedTask(TaskBase):
    """Task that sends the verification-code email when a user requests one."""
    __name__ = 'on_email_verification_requested'

    async def run(self, email_verification_id: str, workspace_id: str, code: str):
        workspace = (await self._get_workspace(uuid.UUID(workspace_id)))
        async with self.get_workspace_session(workspace) as session:
            email_verification_repository = EmailVerificationRepository(session)
            email_verification = (await email_verification_repository.get_by_id(uuid.UUID(email_verification_id)))
            if (email_verification is None):
                raise ObjectDoesNotExistTaskError(EmailVerification, email_verification_id)
            user = email_verification.user
            tenant = (await self._get_tenant(user.tenant_id, workspace))
            # Render the subject and body from the tenant's VERIFY_EMAIL templates.
            context = VerifyEmailContext(tenant=schemas.tenant.Tenant.model_validate(tenant), user=schemas.user.UserEmailContext.model_validate(user), code=code)
            async with self._get_email_subject_renderer(workspace) as email_subject_renderer:
                subject = (await email_subject_renderer.render(EmailTemplateType.VERIFY_EMAIL, context))
            async with self._get_email_template_renderer(workspace) as email_template_renderer:
                html = (await email_template_renderer.render(EmailTemplateType.VERIFY_EMAIL, context))
            self.email_provider.send_email(sender=tenant.get_email_sender(), recipient=(email_verification.email, None), subject=subject, html=html)
            # Surface the code in logs when no real provider is configured,
            # so local/dev flows can still complete verification.
            if isinstance(self.email_provider, Null):
                logger.warning('Email verification requested with NULL email provider', email_verification_id=email_verification_id, code=code)
# NOTE(review): the two lines below look like decorators whose leading '@' was
# stripped during extraction (route registration + login requirement) -- confirm
# against the upstream flicket source.
_bp.route((app.config['FLICKET'] + 'ticket_create/'), methods=['GET', 'POST'])
_required
def ticket_create():
    """Render and process the "create ticket" form.

    GET shows the form (pre-selecting the category used last time, kept in
    the session); a valid POST creates the ticket with any uploaded files
    and redirects to the new ticket's view page.
    """
    last_category = session.get('ticket_create_last_category')
    form = CreateTicketForm(category=last_category)
    if form.validate_on_submit():
        new_ticket = FlicketTicketExt.create_ticket(title=form.title.data, user=g.user, content=form.content.data, category=form.category.data, priority=form.priority.data, hours=form.hours.data, files=request.files.getlist('file'))
        flash(gettext('New Ticket created.'), category='success')
        # Remember the chosen category for the next ticket this user creates.
        session['ticket_create_last_category'] = form.category.data
        return redirect(url_for('flicket_bp.ticket_view', ticket_id=new_ticket.id))
    title = gettext('Create Ticket')
    return render_template('flicket_create.html', title=title, form=form)
def psc_test_data(db):
    """Seed award, transaction, and PSC reference rows for PSC filter tests.

    Creates four award/transaction pairs — two under PSC code 1234 and two
    under 9876 — plus the matching PSC description records.
    """
    for award_id in (1, 2, 3, 4):
        baker.make('search.AwardSearch', award_id=award_id, latest_transaction_id=award_id)
    # (transaction_id, obligation, action_date, fiscal_action_date, psc_code)
    transaction_rows = [
        (1, 1, '2020-01-01', '2020-04-01', '1234'),
        (2, 1, '2020-01-02', '2020-04-02', '1234'),
        (3, 2, '2020-01-03', '2020-04-03', '9876'),
        (4, 2, '2020-01-04', '2020-04-04', '9876'),
    ]
    for txn_id, obligation, action_date, fiscal_date, psc_code in transaction_rows:
        baker.make(
            'search.TransactionSearch',
            transaction_id=txn_id,
            award_id=txn_id,
            is_fpds=True,
            federal_action_obligation=obligation,
            generated_pragmatic_obligation=obligation,
            action_date=action_date,
            fiscal_action_date=fiscal_date,
            product_or_service_code=psc_code,
        )
    baker.make('references.PSC', code='1234', description='PSC DESCRIPTION UP')
    baker.make('references.PSC', code='9876', description='PSC DESCRIPTION DOWN')
class RoundedTriangleEdgeSettings(Settings):
    """Settings shared by the rounded-triangle edge variants.

    ``height``, ``radius`` and ``r_hole`` are absolute values; ``outset``
    is a relative parameter.
    """

    absolute_params = {'height': 50.0, 'radius': 30.0, 'r_hole': 2.0}
    relative_params = {'outset': 0.0}

    def edgeObjects(self, boxes, chars: str = 't', add: bool = True):
        # One plain edge plus the finger-holes variant, registered under `chars`.
        plain_edge = RoundedTriangleEdge(boxes, self)
        holes_edge = RoundedTriangleFingerHolesEdge(boxes, self)
        return self._edgeObjects([plain_edge, holes_edge], boxes, chars, add)
def create_pixbuf_from_file_at_size(filename, width, height):
    """Load ``filename`` as a pixbuf of exactly ``width`` x ``height``.

    ``new_from_file_at_size`` preserves the image's aspect ratio, so the
    result may be smaller along one dimension; when that happens, force an
    exact fit with a bilinear rescale.
    """
    pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(filename, width, height)
    exact_fit = (pixbuf.get_width(), pixbuf.get_height()) == (width, height)
    if not exact_fit:
        pixbuf = pixbuf.scale_simple(width, height, GdkPixbuf.InterpType.BILINEAR)
    return pixbuf
class Listener(object):
    """Batching subscriber for the EDDN relay.

    Connects a ZeroMQ SUB socket to the relay, receives zlib-compressed JSON
    messages, filters them against ``supportedSchema``, and collects
    ``MarketPrice`` entries into batches keyed by (system, station, item),
    keeping only the newest entry per key.

    ``self.stats`` accumulates throughput statistics and ``self.errors``
    per-category error counts; both are ``defaultdict(int)``.
    """

    uri = 'tcp://eddn-relay.elite-markets.net:9500'
    # NOTE(review): this literal was truncated in the source under review.
    # The value below is the EDDN commodity schema ref this listener appears
    # intended to accept -- confirm against the upstream project.
    supportedSchema = 'http://schemas.elite-markets.net/eddn/commodity/1'

    def __init__(self, zmqContext=None, minBatchTime=5.0, maxBatchTime=10.0, reconnectTimeout=180.0, burstLimit=200):
        """
        :param zmqContext: optional shared zmq.Context; created if omitted.
        :param minBatchTime: soft batch window, seconds.
        :param maxBatchTime: hard batch window, seconds.
        :param reconnectTimeout: reconnect if silent for this long, seconds.
        :param burstLimit: max messages drained per poll wake-up (> 0).
        """
        assert burstLimit > 0
        if not zmqContext:
            zmqContext = zmq.Context()
        self.zmqContext = zmqContext
        self.subscriber = None
        self.minBatchTime = minBatchTime
        self.maxBatchTime = maxBatchTime
        self.reconnectTimeout = reconnectTimeout
        self.burstLimit = burstLimit
        self.reset_counters()
        self.connect()

    def connect(self):
        """(Re)create the SUB socket and subscribe to all messages."""
        if self.subscriber:
            self.subscriber.close()
            del self.subscriber
        self.subscriber = newsub = self.zmqContext.socket(zmq.SUB)
        newsub.setsockopt(zmq.SUBSCRIBE, b'')
        newsub.connect(self.uri)
        self.lastRecv = time.time()
        self.lastJsData = None

    def disconnect(self):
        """Close and drop the subscriber socket.

        BUGFIX: the original merely ``del``-eted the attribute, leaking the
        underlying socket and leaving the attribute missing for later calls.
        """
        if self.subscriber:
            self.subscriber.close()
        self.subscriber = None

    def clear_errors(self):
        """Reset the per-category error counters."""
        self.errors = defaultdict(int)

    def reset_counters(self):
        """Reset both error and throughput counters."""
        self.clear_errors()
        self.stats = defaultdict(int)

    def wait_for_data(self, softCutoff, hardCutoff):
        """Poll for incoming data until the earlier of the two cutoffs.

        Reconnects first if the link has been silent longer than
        ``reconnectTimeout``.  Returns True when data is waiting, False when
        the window expired without any.
        """
        now = time.time()
        cutoff = min(softCutoff, hardCutoff)
        if self.lastRecv < (now - self.reconnectTimeout):
            if self.lastRecv:
                self.errors['reconnects'] += 1
            self.connect()
            now = time.time()
        nextCutoff = min(now + self.minBatchTime, cutoff)
        if now > nextCutoff:
            return False
        timeout = (nextCutoff - now) * 1000  # zmq poll() takes milliseconds
        events = self.subscriber.poll(timeout=timeout)
        if events == 0:
            return False
        return True

    def get_batch(self, onerror=None):
        """Collect one batch of MarketPrice entries.

        Drains messages in bursts of up to ``burstLimit`` between polls until
        the soft/hard time windows close.  Malformed or unsupported messages
        are tallied in ``self.errors`` and reported through ``onerror`` when
        given.  Returns a list holding the newest MarketPrice per
        (system, station, item) key.
        """
        now = time.time()
        hardCutoff = now + self.maxBatchTime
        softCutoff = now + self.minBatchTime
        # Hoist hot-loop lookups into locals.
        supportedSchema = self.supportedSchema
        sub = self.subscriber
        stats, errors = self.stats, self.errors
        batch = defaultdict(list)
        while self.wait_for_data(softCutoff, hardCutoff):
            bursts = 0
            for _ in range(self.burstLimit):
                self.lastJsData = None
                try:
                    zdata = sub.recv(flags=zmq.NOBLOCK, copy=False)
                    stats['recvs'] += 1
                    # BUGFIX: record the receive time so an actively-flowing
                    # stream is not torn down by the reconnect timeout (the
                    # original only set lastRecv inside connect()).
                    self.lastRecv = time.time()
                except zmq.error.Again:
                    break
                bursts += 1
                try:
                    jsdata = zlib.decompress(zdata)
                except Exception as e:
                    errors['deflate'] += 1
                    if onerror:
                        onerror('zlib.decompress: %s: %s' % (type(e), e))
                    continue
                bdata = jsdata.decode()
                try:
                    data = json.loads(bdata)
                except ValueError as e:
                    errors['loads'] += 1
                    if onerror:
                        onerror('json.loads: %s: %s' % (type(e), e))
                    continue
                self.lastJsData = jsdata
                try:
                    schema = data['$schemaRef']
                except KeyError:
                    errors['schemaref'] += 1
                    if onerror:
                        onerror('missing schema ref')
                    continue
                if schema != supportedSchema:
                    errors['schema'] += 1
                    if onerror:
                        onerror('unsupported schema: ' + schema)
                    continue
                try:
                    header = data['header']
                    message = data['message']
                    system = message['systemName'].upper()
                    station = message['stationName'].upper()
                    item = message['itemName'].upper()
                    buy = int(message['buyPrice'])
                    sell = int(message['sellPrice'])
                    demand = message['demand']
                    supply = message['stationStock']
                    timestamp = message['timestamp']
                    uploader = header['uploaderID']
                    software = header['softwareName']
                    swVersion = header['softwareVersion']
                except (KeyError, ValueError) as e:
                    errors['json'] += 1
                    if onerror:
                        onerror('invalid json: %s: %s' % (type(e), e))
                    continue
                stats['prices'] += 1
                # Normalize ISO timestamps to "YYYY-MM-DD HH:MM:SS".
                timestamp = timestamp.replace('T', ' ').replace('+00:00', '')
                oldEntryList = batch[(system, station, item)]
                if oldEntryList:
                    if oldEntryList[0].timestamp > timestamp:
                        # Out-of-order (older) update: ignore it.
                        stats['timeseq'] += 1
                        continue
                    # BUGFIX: removed a stray no-op ``stats['timeseq']``
                    # expression that followed this branch in the original.
                else:
                    oldEntryList.append(None)
                oldEntryList[0] = MarketPrice(system, station, item, buy, sell, demand, supply, timestamp, uploader, software, swVersion)
            if bursts >= self.burstLimit:
                stats['numburst'] += 1
                stats['maxburst'] = max(stats['maxburst'], bursts)
                # Under sustained bursts, shrink the soft window so we flush soon.
                softCutoff = min(softCutoff, time.time() + 0.5)
        stats['batches'] += 1
        if not batch:
            stats['emptybatches'] += 1
        else:
            stats['batchlen'] += len(batch)
        return [entry[0] for entry in batch.values()]
_converter(torch.ops.aten.avg_pool2d.default)
def aten_ops_avg_pool2d(target: Target, args: Tuple[Argument, ...], kwargs: Dict[str, Argument], name: str) -> ConverterOutput:
    """Lower ``aten.avg_pool2d`` onto the AIT ``avg_pool2d`` operator.

    Positional args: input tensor, kernel_size, stride, optional padding
    (defaults to 0).  Square kernel/stride/padding tuples are collapsed to
    plain ints before constructing the AIT op.
    """
    input_val = args[0]
    if not isinstance(input_val, AITTensor):
        raise RuntimeError(f'Non-tensor inputs for {name}: {input_val}')
    raw_padding = args[3] if len(args) > 3 else 0
    kernel = identical_elem_tuple_to_int(args[1])
    strides = identical_elem_tuple_to_int(args[2])
    pad = identical_elem_tuple_to_int(raw_padding)
    return avg_pool2d(kernel_size=kernel, stride=strides, pad=pad)(input_val)
def test_data_quality_test_conflict_prediction() -> None:
    """TestConflictPrediction: fails on conflicting rows, passes otherwise."""
    # Rows 1 and 2 share identical features but disagree on the prediction.
    conflicting = pd.DataFrame({'category_feature': ['n', 'n', 'p', 'n'], 'numerical_feature': [0, 0, 2, 5], 'prediction': [0, 1, 0, 1]})
    mapping = ColumnMapping(categorical_features=['category_feature'], numerical_features=['numerical_feature'])
    suite = TestSuite(tests=[TestConflictPrediction()])
    suite.run(current_data=conflicting, reference_data=None, column_mapping=mapping)
    suite._inner_suite.raise_for_error()
    assert not suite

    # All feature rows are distinct, so no conflict is possible.
    clean = pd.DataFrame({'category_feature': ['n', 'd', 'p', 'n'], 'numerical_feature': [0, 1, 2, 5], 'prediction': [0, 0, 0, 1]})
    suite = TestSuite(tests=[TestConflictPrediction()])
    suite.run(current_data=clean, reference_data=None, column_mapping=mapping)
    assert suite
    # Rendering paths must also work.
    assert suite.show()
    assert suite.json()
class FBCopyCommand(fb.FBCommand):
    """lldb command that copies an object's data out to the host Mac."""

    def name(self):
        return 'copy'

    def description(self):
        return 'Copy data to your Mac.'

    def options(self):
        # -f/--filename picks the output file; -n/--no-open suppresses auto-open.
        filename_opt = fb.FBCommandArgument(short='-f', long='--filename', arg='filename', help='The output filename.')
        no_open_opt = fb.FBCommandArgument(short='-n', long='--no-open', arg='noOpen', boolean=True, default=False, help='Do not open the file.')
        return [filename_opt, no_open_opt]

    def args(self):
        return [fb.FBCommandArgument(arg='target', type='(id)', help='The object to copy.')]

    def run(self, arguments, options):
        # Delegate the actual transfer to the module-level _copy helper.
        _copy(arguments[0], options.filename, options.noOpen)
class Test_Core_Treeview(unittest.TestCase):
    """Smoke-test for the treeview renderer.

    Builds one master tree whose leaves each carry a TreeFace wrapping one
    of the example trees, then renders the whole thing to PNG, SVG and PDF.
    The point is exercising the rendering pipeline end-to-end, not checking
    pixel output.
    """

    def test_renderer(self):
        main_tree = Tree()
        main_tree.dist = 0
        # Attach each example tree as an aligned TreeFace on its own child node.
        (t, ts) = face_grid.get_example_tree()
        t_grid = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(t_grid, 0, 'aligned')
        (t, ts) = bubble_map.get_example_tree()
        t_bubble = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(t_bubble, 0, 'aligned')
        # NOTE(review): show() opens an interactive viewer and blocks an
        # automated run; confirm this call is intentional in a unit test.
        t.show()
        (t, ts) = item_faces.get_example_tree()
        t_items = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(t_items, 0, 'aligned')
        (t, ts) = node_style.get_example_tree()
        t_nodest = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(t_nodest, 0, 'aligned')
        (t, ts) = node_background.get_example_tree()
        t_bg = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(t_bg, 0, 'aligned')
        (t, ts) = face_positions.get_example_tree()
        t_fpos = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(t_fpos, 0, 'aligned')
        (t, ts) = phylotree_visualization.get_example_tree()
        t_phylo = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(t_phylo, 0, 'aligned')
        (t, ts) = face_rotation.get_example_tree()
        temp_facet = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(temp_facet, 0, 'aligned')
        (t, ts) = seq_motif_faces.get_example_tree()
        temp_facet = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(temp_facet, 0, 'aligned')
        (t, ts) = barchart_and_piechart_faces.get_example_tree()
        temp_facet = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(temp_facet, 0, 'aligned')
        # A small random tree with all branch lengths zeroed, rendered twice:
        # once with the default scale level and once with 'full'.
        (t, ts) = (Tree(), TreeStyle())
        t.populate(5)
        for n in t.traverse():
            n.dist = 0
        temp_tface = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(temp_tface, 0, 'aligned')
        ts.optimal_scale_level = 'full'
        temp_tface = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(temp_tface, 0, 'aligned')
        # Same tree again in circular mode, both scale levels.
        ts = TreeStyle()
        t.populate(5)
        ts.mode = 'c'
        temp_tface = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(temp_tface, 0, 'aligned')
        ts.optimal_scale_level = 'full'
        temp_tface = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(temp_tface, 0, 'aligned')
        # Degenerate cases: single-node and empty trees, rectangular and circular.
        (t, ts) = (Tree(), TreeStyle())
        temp_tface = TreeFace(Tree('node;'), ts)
        n = main_tree.add_child()
        n.add_face(temp_tface, 0, 'aligned')
        (t, ts) = (Tree(), TreeStyle())
        ts.mode = 'c'
        temp_tface = TreeFace(Tree('node;'), ts)
        n = main_tree.add_child()
        n.add_face(temp_tface, 0, 'aligned')
        (t, ts) = (Tree(), TreeStyle())
        ts.mode = 'c'
        temp_tface = TreeFace(Tree(), ts)
        n = main_tree.add_child()
        n.add_face(temp_tface, 0, 'aligned')
        (t, ts) = (Tree(), TreeStyle())
        temp_tface = TreeFace(Tree(), ts)
        n = main_tree.add_child()
        n.add_face(temp_tface, 0, 'aligned')
        # Custom layout exercising text faces; cycles through `chars` using the
        # module-level CONT counter (assumed defined elsewhere -- TODO confirm).
        chars = ['.p', 'j', 'jJ']

        def layout(node):
            global CONT
            if (CONT >= len(chars)):
                CONT = 0
            if node.is_leaf:
                node.img_style['size'] = 0
                F2 = AttrFace('name', tight_text=True)
                F = TextFace(chars[CONT], tight_text=True)
                F.inner_border.width = 0
                F2.inner_border.width = 0
                faces.add_face_to_node(F2, node, 1, position='branch-right')
                CONT += 1
        t = Tree()
        t.populate(20, random_branches=True)
        ts = TreeStyle()
        ts.layout_fn = layout
        ts.mode = 'c'
        ts.show_leaf_name = False
        temp_tface = TreeFace(t, ts)
        n = main_tree.add_child()
        n.add_face(temp_tface, 0, 'aligned')
        # Finally render the composite tree to all three output formats.
        ms = TreeStyle()
        ms.mode = 'r'
        ms.show_leaf_name = False
        main_tree.render('test.png', tree_style=ms)
        main_tree.render('test.svg', tree_style=ms)
        main_tree.render('test.pdf', tree_style=ms)
class KiwoomOpenApiPlusManagerApplication(QObjectLogging):
    """System-tray manager for the Kiwoom OpenAPI+ gRPC server.

    Spawns the server application as a child process, connects a gRPC client
    to it, and exposes connection/restart/update actions through a tray icon
    menu.  Handles automatic reconnection around the broker's maintenance
    windows and version-upgrade restarts.
    """

    class ConnectionStatus(Enum):
        # Login/connection state towards the OpenAPI server.
        DISCONNECTED = 1
        CONNECTED = 2

    class ServerType(Enum):
        # Which broker backend the session is connected to.
        SIMULATION = 1
        REAL = 2
        UNKNOWN = 3

    class RestartType(Enum):
        # How a requested restart should be performed and what to do after.
        NO_RESTART = 1
        RESTART_ONLY = 2
        RESTART_AND_RESTORE = 3
        RESTART_AND_CONNECT = 4
        RESTART_WITH_UPDATE = 5
        RESTART_WITH_UPDATE_AND_RESTORE = 6
        RESTART_WITH_UPDATE_AND_CONNECT = 7

    # Emitted when some component decides the server process must restart.
    shouldRestart = Signal(RestartType)

    def __init__(self, args=()):
        """Parse CLI args, create the Qt application, start the server
        process, connect the gRPC client, and install the tray icon."""
        self._args = list(args)
        self._argument_parser = KiwoomOpenApiPlusManagerApplicationArgumentParser()
        (self._parsed_args, self._remaining_args) = self._argument_parser.parse_known_args(self._args[1:])
        self._verbose = self._parsed_args.verbose
        set_verbosity(self._verbose)
        self._host = self._parsed_args.host
        self._port = self._parsed_args.port
        self._enable_ssl = self._parsed_args.enable_ssl
        self._key_file = self._parsed_args.client_key_file
        self._cert_file = self._parsed_args.client_cert_file
        self._root_certs_file = self._parsed_args.client_root_certs_file
        self.logger.debug('Creating manager application')
        # Reuse an existing QApplication if one is already running.
        self._app = (QApplication.instance() or QApplication((self._args[:1] + self._remaining_args)))
        super().__init__()
        self._signal_handler = KiwoomOpenApiPlusSignalHandler(self, self)
        self._signal_handler.signaled.connect(self._onSignal)
        from koapy.backend.kiwoom_open_api_plus.pyside2.KiwoomOpenApiPlusDialogHandler import KiwoomOpenApiPlusDialogHandler
        self._dialog_handler = KiwoomOpenApiPlusDialogHandler(self, self)
        self._max_workers = config.get_int('koapy.backend.kiwoom_open_api_plus.grpc.client.max_workers', 8)
        self._thread_pool = QThreadPool(self)
        self._thread_pool.setMaxThreadCount(self._max_workers)
        self._thread_pool_executor = QThreadPoolExecutor(self._thread_pool, self)
        if self._enable_ssl:
            # Load optional TLS material for the gRPC channel.
            root_certificates = None
            if self._root_certs_file:
                with open(self._root_certs_file, 'rb') as f:
                    root_certificates = f.read()
            private_key = None
            if self._key_file:
                with open(self._key_file, 'rb') as f:
                    private_key = f.read()
            certificate_chain = None
            if self._cert_file:
                with open(self._cert_file, 'rb') as f:
                    certificate_chain = f.read()
            self._credentials = grpc.ssl_channel_credentials(root_certificates=root_certificates, private_key=private_key, certificate_chain=certificate_chain)
        else:
            self._credentials = None
        self._server_process = None
        self._client = None
        self._reinitializeServerProcessAndGrpcClient()
        self.shouldRestart.connect(self._onShouldRestart)
        self._tray = self._createSystemTrayIcon()
        # Initialize tray/menu state from the current (disconnected) state.
        self._onEventConnect(0)
        self._tray.show()

    def _closeClient(self):
        self._client.close()

    def _closeClientIfExists(self):
        # hasattr-guarded: may be called before __init__ finished assigning.
        if (hasattr(self, '_client') and (self._client is not None)):
            self._closeClient()

    def _closeServerProcess(self):
        self._server_process.close()
        self._server_process.waitForFinished()

    def _closeServerProcessIfExists(self):
        if (hasattr(self, '_server_process') and (self._server_process is not None)):
            self._closeServerProcess()

    def _reinitializeServerProcessAndGrpcClient(self):
        """Tear down and recreate both the server child process and the
        gRPC client, blocking until the client reports readiness."""
        self._closeClientIfExists()
        self._closeServerProcessIfExists()
        self._server_process = KiwoomOpenApiPlusServerApplicationProcess(self._args[1:], self)
        self._server_process.start()
        self._server_process.waitForStarted()
        self._client = KiwoomOpenApiPlusServiceClient(host=self._host, port=self._port, credentials=self._credentials, thread_pool=self._thread_pool_executor)
        self._client_timeout = 30  # seconds to wait for the gRPC channel
        assert self._client.is_ready(self._client_timeout), 'Client is not ready'
        self._client.OnEventConnect.connect(self._onEventConnect)

    def _createIcon(self):
        """Build the tray QIcon with all sizes/modes from the data dir."""
        icon = QIcon()
        filePath = Path(__file__)
        iconDir = (filePath.parent / '../data/icon/manager').resolve()

        def addFilesForMode(icon, iconDir, mode):
            icon.addFile(str((iconDir / 'favicon-16x16.png')), QSize(16, 16), mode)
            icon.addFile(str((iconDir / 'favicon-32x32.png')), QSize(32, 32), mode)
            icon.addFile(str((iconDir / 'apple-touch-icon.png')), QSize(180, 180), mode)
            icon.addFile(str((iconDir / 'android-chrome-192x192.png')), QSize(192, 192), mode)
            icon.addFile(str((iconDir / 'android-chrome-512x512.png')), QSize(512, 512), mode)
        addFilesForMode(icon, (iconDir / 'normal'), QIcon.Normal)
        addFilesForMode(icon, (iconDir / 'disabled'), QIcon.Disabled)
        addFilesForMode(icon, (iconDir / 'active'), QIcon.Active)
        return icon

    def _createToolTip(self):
        toolTip = 'KOAPY Manager Application'
        return toolTip

    def _createContextMenu(self):
        """Assemble the tray context menu; keeps references to the status
        actions so they can be updated on connection events."""
        menu = QMenu()
        menu.addSection('Connection')
        connectAction = menu.addAction('Login and connect')
        connectAction.triggered.connect(self._onConnectActionTriggered)
        showAccountWindowAction = menu.addAction('Show account window')
        showAccountWindowAction.triggered.connect(self._onShowAccountWindowActionTriggered)
        enableAutoLoginAction = menu.addAction('Enable auto login')
        enableAutoLoginAction.triggered.connect(self._onEnableAutoLoginActionTriggered)
        checkForUpdateAction = menu.addAction('Check for update')
        checkForUpdateAction.triggered.connect(self._onCheckForUpdateActionTriggered)
        menu.addSection('Status')
        # Disabled actions double as read-only status labels.
        text = self._getConnectionStatusText(self.ConnectionStatus.DISCONNECTED)
        self._connectionStatusAction = menu.addAction(text)
        self._connectionStatusAction.setEnabled(False)
        text = self._getServerTypeText(self.ServerType.UNKNOWN)
        self._serverStatusAction = menu.addAction(text)
        self._serverStatusAction.setEnabled(False)
        menu.addSection('Links')
        iconDir = (Path(__file__).parent / '../data/icon/external')
        iconDir = iconDir.resolve()
        icon = QIcon(str((iconDir / 'readthedocs.png')))
        documentationAction = menu.addAction(icon, 'Documentation')
        documentationAction.triggered.connect(self._openReadTheDocs)
        icon = QIcon(str((iconDir / 'github.png')))
        githubAction = menu.addAction(icon, 'Github')
        githubAction.triggered.connect(self._openGithub)
        menu.addSection('Kiwoom Links')
        openApiAction = menu.addAction('Kiwoom OpenAPI+ Home')
        openApiAction.triggered.connect(self._openOpenApiHome)
        openApiAction = menu.addAction('Kiwoom OpenAPI+ Document')
        openApiAction.triggered.connect(self._openOpenApiDocument)
        qnaAction = menu.addAction('Kiwoom OpenAPI+ Qna')
        qnaAction.triggered.connect(self._openOpenApiQna)
        menu.addSection('Exit')
        restartAction = menu.addAction('Restart')
        restartAction.triggered.connect(self._onRestartActionTriggered)
        exitAction = menu.addAction('Exit')
        exitAction.triggered.connect(self._onExitActionTriggered)
        return menu

    def _createSystemTrayIcon(self):
        tray = QSystemTrayIcon()
        self._icon = self._createIcon()
        self._tooltip = self._createToolTip()
        self._menu = self._createContextMenu()
        tray.setIcon(self._icon)
        tray.setToolTip(self._tooltip)
        tray.setContextMenu(self._menu)
        tray.activated.connect(self._onTrayIconActivated)
        return tray

    def _updateTrayIconMode(self, mode: QIcon.Mode = QIcon.Normal):
        # Re-render the 16px pixmap in the given mode (normal vs greyed-out).
        icon = QIcon(self._icon.pixmap(16, mode))
        self._tray.setIcon(icon)

    def _getConnectionStatusText(self, status: ConnectionStatus):
        text = {self.ConnectionStatus.DISCONNECTED: 'Status: Disconnected', self.ConnectionStatus.CONNECTED: 'Status: Connected'}[status]
        return text

    def _updateConnectionStatus(self, status: ConnectionStatus):
        text = self._getConnectionStatusText(status)
        self._connectionStatusAction.setText(text)

    def _getServerTypeText(self, server_type: ServerType):
        text = {self.ServerType.SIMULATION: 'Server: Simulation', self.ServerType.REAL: 'Server: Real', self.ServerType.UNKNOWN: 'Server: Unknown'}[server_type]
        return text

    def _updateServerType(self, server_type: ServerType):
        text = self._getServerTypeText(server_type)
        self._serverStatusAction.setText(text)

    def _onTrayIconActivated(self, reason):
        pass

    def _onExitActionTriggered(self):
        self.exit()

    def _onRestartActionTriggered(self):
        self._emitShouldRestart(self.RestartType.RESTART_AND_RESTORE)

    def _onConnectActionTriggered(self):
        self._connect()

    def _onShowAccountWindowActionTriggered(self):
        self._showAccountWindow()

    def _onEnableAutoLoginActionTriggered(self):
        self._enableAutoLogin()

    def _onCheckForUpdateActionTriggered(self):
        self._emitShouldRestart(self.RestartType.RESTART_WITH_UPDATE_AND_RESTORE)

    # NOTE(review): the URL string literals in the following five methods were
    # truncated during extraction of this source; restore them from upstream.
    def _openOpenApiHome(self):
        openApiHomeUrl = '
        url = QUrl(openApiHomeUrl)
        QDesktopServices.openUrl(url)

    def _openOpenApiDocument(self):
        openApiHomeUrl = '
        url = QUrl(openApiHomeUrl)
        QDesktopServices.openUrl(url)

    def _openOpenApiQna(self):
        openApiQnaUrl = '
        url = QUrl(openApiQnaUrl)
        QDesktopServices.openUrl(url)

    def _openGithub(self):
        githubUrl = '
        url = QUrl(githubUrl)
        QDesktopServices.openUrl(url)

    def _openReadTheDocs(self):
        docUrl = '
        url = QUrl(docUrl)
        QDesktopServices.openUrl(url)

    def _ensureConnectedAndThen(self, callback=None):
        # Triggers a login when needed and runs `callback` once connected.
        if (not self._client.IsConnected()):
            self.logger.debug('Connecting to OpenAPI server')
        self._client.EnsureConnectedAndThen(callback)

    def _connect(self):
        self._ensureConnectedAndThen()

    def _showAccountWindow(self):
        self._ensureConnectedAndThen((lambda errcode: self._client.ShowAccountWindow()))

    def _enableAutoLogin(self):
        self._ensureConnectedAndThen((lambda errcode: self._client.EnsureAutoLoginEnabled()))

    def _emitShouldRestart(self, restart_type: Optional[RestartType] = None):
        if (restart_type is None):
            restart_type = self.RestartType.RESTART_ONLY
        self.shouldRestart.emit(restart_type)

    def _emitShouldRestartAndConnect(self):
        self._emitShouldRestart(self.RestartType.RESTART_AND_CONNECT)

    def _onShouldRestart(self, restart_type: RestartType):
        self._restart(restart_type)

    def _onSignal(self, signal, frame):
        self.logger.debug('Received %r for manager application', signal)
        self.exit(signal)

    def _tryReconnect(self):
        """Schedule a reconnect, delaying past the broker's maintenance
        window (05:05-05:10 daily, 04:00-04:30 Sundays, +/- 5 min buffer)
        when the disconnect falls inside one."""
        now = datetime.datetime.now()
        buffer = datetime.timedelta(minutes=5)
        target = now
        timediff = datetime.timedelta()
        maintanance_start_time = now.replace(hour=5, minute=5, second=0, microsecond=0)
        maintanance_end_time = now.replace(hour=5, minute=10, second=0, microsecond=0)
        maintanance_start_time_sunday = now.replace(hour=4, minute=0, second=0, microsecond=0)
        maintanance_end_time_sunday = now.replace(hour=4, minute=30, second=0, microsecond=0)
        is_maintanance = ((maintanance_start_time - buffer) < now < (maintanance_end_time + buffer))
        is_maintanance_sunday = ((maintanance_start_time_sunday - buffer) < now < (maintanance_end_time_sunday + buffer))
        is_sunday = (now.weekday() == 6)
        if is_maintanance:
            target = (maintanance_end_time + buffer)
        elif (is_sunday and is_maintanance_sunday):
            target = (maintanance_end_time_sunday + buffer)
        timediff = (target - now)
        total_seconds = timediff.total_seconds()
        if (total_seconds > 0):
            self.logger.warning('Connection lost due to maintanance, waiting until %s, then will try to reconnect', target)
            timer = Timer(total_seconds, self._emitShouldRestartAndConnect)
            timer.start()
        else:
            self.logger.warning('Connection lost unexpectedly, will try to reconnect right away')
            self._emitShouldRestartAndConnect()

    def _onEventConnect(self, errcode):
        """Update tray icon + status labels on every (dis)connect event and
        kick off reconnection when the socket dropped."""
        state = self._client.GetConnectState()
        if (state == 1):
            self._updateTrayIconMode(QIcon.Normal)
            self._updateConnectionStatus(self.ConnectionStatus.CONNECTED)
            server = self._client.GetServerGubun()
            if (server == '1'):
                self._updateServerType(self.ServerType.SIMULATION)
            else:
                self._updateServerType(self.ServerType.REAL)
        else:
            self._updateTrayIconMode(QIcon.Disabled)
            self._updateConnectionStatus(self.ConnectionStatus.DISCONNECTED)
            self._updateServerType(self.ServerType.UNKNOWN)
            if (errcode == KiwoomOpenApiPlusNegativeReturnCodeError.OP_ERR_SOCKET_CLOSED):
                self.logger.error('Socket closed')
                self._tryReconnect()
            elif (errcode == KiwoomOpenApiPlusNegativeReturnCodeError.OP_ERR_CONNECT):
                self.logger.error('Failed to connect')

    def _close(self):
        self._closeClientIfExists()
        self._closeServerProcessIfExists()

    def close(self):
        return self._close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    # NOTE(review): this generator is used as `with self._execContext():` in
    # _exec(), which only works if a @contextlib.contextmanager decorator was
    # stripped from this source -- confirm against upstream.
    def _execContext(self):
        with ExitStack() as stack:
            stack.enter_context(self._signal_handler)
            stack.enter_context(self._dialog_handler)
            stack.enter_context(self._thread_pool_executor)
            stack.enter_context(self)
            (yield)

    def __getattr__(self, name):
        # Fall back to the wrapped QApplication for unknown attributes.
        return getattr(self._app, name)

    def _exec(self):
        with self._execContext():
            self.logger.debug('Started manager application')
            return self._app.exec_()

    def _exit(self, return_code=0):
        self.logger.debug('Exiting manager application')
        return self._app.exit(return_code)

    def _isConnected(self):
        return (self._client.is_ready() and self._client.IsConnected())

    def _getAPIModulePath(self):
        from koapy.backend.kiwoom_open_api_plus.core.KiwoomOpenApiPlusTypeLibSpec import API_MODULE_PATH
        module_path = API_MODULE_PATH
        return module_path

    def _getAutoLoginDatPath(self):
        # Presence of Autologin.dat in the OpenAPI install marks auto-login.
        module_path = self._getAPIModulePath()
        autologin_dat = ((module_path / 'system') / 'Autologin.dat')
        return autologin_dat

    def _isAutoLoginEnabled(self):
        autologin_dat = self._getAutoLoginDatPath()
        return autologin_dat.exists()

    def _disableAutoLogin(self):
        """Remove Autologin.dat; returns True if it existed and was removed."""
        self.logger.info('Disabling auto login')
        autologin_dat = self._getAutoLoginDatPath()
        if autologin_dat.exists():
            self.logger.info('Removing %s', autologin_dat)
            os.remove(autologin_dat)
            self.logger.info('Disabled auto login')
            return True
        else:
            self.logger.info('Autologin is already disabled')
            return False

    def _restart(self, restart_type: Optional[RestartType] = None):
        """Restart the server process, optionally running the broker's
        version-upgrade flow first and restoring the previous connection."""
        self.logger.debug('Restarting server application')
        if (restart_type is None):
            restart_type = self.RestartType.RESTART_ONLY
        is_connected = self._isConnected()
        is_autologin_enabled = self._isAutoLoginEnabled()
        should_connect_to_restore = ((restart_type in [self.RestartType.RESTART_AND_RESTORE, self.RestartType.RESTART_WITH_UPDATE_AND_RESTORE]) and is_connected)
        should_connect_anyway = (restart_type in [self.RestartType.RESTART_AND_CONNECT, self.RestartType.RESTART_WITH_UPDATE_AND_CONNECT])
        should_connect = (should_connect_to_restore or should_connect_anyway)
        should_update = (restart_type in [self.RestartType.RESTART_WITH_UPDATE, self.RestartType.RESTART_WITH_UPDATE_AND_RESTORE, self.RestartType.RESTART_WITH_UPDATE_AND_CONNECT])
        if should_update:
            # Auto-login must be off during the upgrade dialog handling,
            # then re-enabled afterwards.
            self._reinitializeServerProcessAndGrpcClient()
            if is_autologin_enabled:
                self._disableAutoLogin()
            self._client.CommConnectAndThen()
            is_updated = self._client.HandleVersionUpgradeUsingPywinauto(self._server_process.processId())
            if is_updated:
                self._reinitializeServerProcessAndGrpcClient()
            if is_autologin_enabled:
                self.logger.info('Enabling auto login back')
                self._enableAutoLogin()
                if is_updated:
                    self.logger.info('Done update, enabled auto login')
                else:
                    self.logger.info('There was no version update, enabled auto login')
        else:
            self._reinitializeServerProcessAndGrpcClient()
        if should_connect:
            self.logger.debug('Re-establishing connection')
            self._connect()

    def exec_(self):
        return self._exec()

    def exit(self, return_code=0):
        return self._exit(return_code)

    def restart(self, restart_type: Optional[RestartType] = None):
        return self._restart(restart_type)

    def execAndExit(self):
        code = self.exec_()
        sys.exit(code)

    # NOTE(review): takes `cls` and constructs `cls(args)` -- a @classmethod
    # decorator was most likely stripped from this source; confirm upstream.
    def main(cls, args=None):
        if (args is None):
            args = sys.argv
        app = cls(args)
        app.execAndExit()
class SourceMockup(Source):
    """Test double for a climetlab Source.

    Records the constructor arguments and replays them when the source is
    converted to xarray via TestingDatasetAsXarray.
    """

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs
        # Log exactly what this mock source was constructed with.
        print(f'Climetlab SourceMockup : args={args}, kwargs={kwargs}')
        super().__init__(**kwargs)

    def to_xarray(self, *args, **kwargs):
        # Own call arguments are ignored; the constructor's are replayed.
        return TestingDatasetAsXarray(*self.args, **self.kwargs)
class LegacyWafUpdateStatus(ModelNormal):
    """Generated OpenAPI model for a legacy WAF update status record.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines
    below look like decorators whose leading '@' was stripped during
    extraction -- confirm against the code generator's output.
    """

    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        # Any JSON-compatible value is accepted for unknown keys.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False

    _property
    def openapi_types():
        # Attribute name -> accepted type tuple, per the OpenAPI schema.
        return {'completed_at': (str,), 'created_at': (str,), 'data': (str,), 'message': (str,), 'status': (str,), 'updated_at': (str,)}

    _property
    def discriminator():
        return None

    # Python attribute name -> JSON key (identical here).
    attribute_map = {'completed_at': 'completed_at', 'created_at': 'created_at', 'data': 'data', 'message': 'message', 'status': 'status', 'updated_at': 'updated_at'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialization constructor used when parsing API responses;
        bypasses __init__ and sets attributes straight from kwargs."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop keys the schema does not know about.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; like _from_openapi_data but rejects
        assignment of read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class TestMimeClient(ClientTestBase):
    """Tests for the MIME federated-learning client."""

    def test_reload_server_state(self):
        # A single small batch is enough to exercise state reload.
        dataset = self._fake_data(num_batches=1, batch_size=10)
        client = self._get_mime_client(dataset)
        self._test_reload_server_state(client)

    def test_mime_generate_local_update(self):
        dataset = self._fake_data(num_batches=5, batch_size=10)
        client = self._get_mime_client(dataset)
        model = utils.SampleNet(utils.TwoFC())
        mime_control_variate, _ = client.full_dataset_gradient(model)
        server_opt_state = {}
        _ = client.generate_local_update(message=Message(model=model, mime_control_variate=mime_control_variate, server_opt_state=server_opt_state))
        # The client must retain pristine copies of everything the server sent.
        assertEmpty(utils.verify_models_equivalent_after_training(client.ref_model, model))
        assertEmpty(utils.verify_gradients_equal(client.mime_control_variate, mime_control_variate))
        assertEmpty(utils.verify_optimizer_state_dict_equal(client.server_opt_state, server_opt_state))
class EmissionFactorTestCase(unittest.TestCase):
    """Spot-checks the full emission-factor table for two zones."""

    def test_emission_factors(self):
        kr_expected = {
            'battery charge': 0,
            'battery discharge': 490.,
            'biomass': 230,
            'coal': 820,
            'gas': 490,
            'geothermal': 38,
            'hydro': 24,
            'hydro charge': 0,
            'hydro discharge': 490.,
            'nuclear': 12,
            'oil': 650,
            'solar': 45,
            'unknown': 644,
            'wind': 11,
        }
        self.assertEqual(emission_factors('KR'), kr_expected)
        fr_expected = {
            'battery charge': 0,
            'battery discharge': 66.,
            'biomass': 230.0,
            'coal': 968.,
            'gas': 501.61,
            'geothermal': 38,
            'hydro': 10.7,
            'hydro charge': 0,
            'hydro discharge': 66.,
            'nuclear': 5.13,
            'oil': 999.44,
            'solar': 30.075,
            'unknown': 700,
            'wind': 12.62,
        }
        self.assertEqual(emission_factors('FR'), fr_expected)
class WindllHandler(ClipboardHandlerBase):
    """Clipboard handler that drives the Win32 clipboard API via ctypes.

    Only usable on Windows; ``_is_compatible`` gates platform selection.
    """

    def _copy(self, text: str) -> None:
        """Place *text* on the Windows clipboard as CF_UNICODETEXT.

        NOTE(review): two defects fixed here -- ``self`` was missing from
        the signature (making every instance call raise TypeError), and the
        ``window``/``clipboard`` generators were used as context managers
        without ``@contextmanager`` decorators.
        """
        from contextlib import contextmanager
        from ctypes.wintypes import BOOL, DWORD, HANDLE, HGLOBAL, HINSTANCE, HMENU, HWND, INT, LPCSTR, LPVOID, UINT
        windll = ctypes.windll
        msvcrt = ctypes.CDLL('msvcrt')
        # CheckedCall wrappers raise on failure; plain OpenClipboard is kept
        # unchecked because failure is handled by the retry loop below.
        safeCreateWindowExA = CheckedCall(windll.user32.CreateWindowExA)
        safeCreateWindowExA.argtypes = [DWORD, LPCSTR, LPCSTR, DWORD, INT, INT, INT, INT, HWND, HMENU, HINSTANCE, LPVOID]
        safeCreateWindowExA.restype = HWND
        safeDestroyWindow = CheckedCall(windll.user32.DestroyWindow)
        safeDestroyWindow.argtypes = [HWND]
        safeDestroyWindow.restype = BOOL
        OpenClipboard = windll.user32.OpenClipboard
        OpenClipboard.argtypes = [HWND]
        OpenClipboard.restype = BOOL
        safeCloseClipboard = CheckedCall(windll.user32.CloseClipboard)
        safeCloseClipboard.argtypes = []
        safeCloseClipboard.restype = BOOL
        safeEmptyClipboard = CheckedCall(windll.user32.EmptyClipboard)
        safeEmptyClipboard.argtypes = []
        safeEmptyClipboard.restype = BOOL
        safeGetClipboardData = CheckedCall(windll.user32.GetClipboardData)
        safeGetClipboardData.argtypes = [UINT]
        safeGetClipboardData.restype = HANDLE
        safeSetClipboardData = CheckedCall(windll.user32.SetClipboardData)
        safeSetClipboardData.argtypes = [UINT, HANDLE]
        safeSetClipboardData.restype = HANDLE
        safeGlobalAlloc = CheckedCall(windll.kernel32.GlobalAlloc)
        safeGlobalAlloc.argtypes = [UINT, c_size_t]
        safeGlobalAlloc.restype = HGLOBAL
        safeGlobalLock = CheckedCall(windll.kernel32.GlobalLock)
        safeGlobalLock.argtypes = [HGLOBAL]
        safeGlobalLock.restype = LPVOID
        safeGlobalUnlock = CheckedCall(windll.kernel32.GlobalUnlock)
        safeGlobalUnlock.argtypes = [HGLOBAL]
        safeGlobalUnlock.restype = BOOL
        wcslen = CheckedCall(msvcrt.wcslen)
        wcslen.argtypes = [c_wchar_p]
        wcslen.restype = UINT
        GMEM_MOVEABLE = 2
        CF_UNICODETEXT = 13

        @contextmanager
        def window() -> Iterator[HWND]:
            """Create a throwaway invisible window to own the clipboard."""
            hwnd = safeCreateWindowExA(0, b'STATIC', None, 0, 0, 0, 0, 0, None, None, None, None)
            try:
                (yield hwnd)
            finally:
                safeDestroyWindow(hwnd)

        @contextmanager
        def clipboard(hwnd: HWND) -> Generator:
            """Open the clipboard, retrying for up to 500 ms before failing."""
            t = (time.time() + 0.5)
            success = False
            while (time.time() < t):
                success = OpenClipboard(hwnd)
                if success:
                    break
                time.sleep(0.01)
            if (not success):
                raise RuntimeError('Error calling OpenClipboard')
            try:
                (yield)
            finally:
                safeCloseClipboard()

        def copy_windows(text: str) -> None:
            """Empty the clipboard, then store *text* (if non-empty)."""
            with window() as hwnd, clipboard(hwnd):
                safeEmptyClipboard()
                if text:
                    # +1 for the terminating NUL wide char.
                    count = (wcslen(text) + 1)
                    handle = safeGlobalAlloc(GMEM_MOVEABLE, (count * sizeof(c_wchar)))
                    locked_handle = safeGlobalLock(handle)
                    ctypes.memmove(c_wchar_p(locked_handle), c_wchar_p(text), (count * sizeof(c_wchar)))
                    safeGlobalUnlock(handle)
                    # Ownership of `handle` passes to the system on success.
                    safeSetClipboardData(CF_UNICODETEXT, handle)

        copy_windows(text)

    def _is_compatible(self) -> bool:
        """Return True only on Windows."""
        if (sys.platform != 'win32'):
            logger.debug('%s is incompatible on non-Windows systems', self.name)
            return False
        logger.debug('%s is compatible', self.name)
        return True
@pytest.fixture(params=[('CG', 2, TensorFunctionSpace), ('BDM', 2, VectorFunctionSpace), ('Regge', 2, FunctionSpace)], ids=(lambda x: f'{x[2].__name__}({x[0]}{x[1]})'))
def tfs(request, parentmesh):
    """Parametrized (family, degree, space-constructor) fixture.

    Skips or xfails combinations the element family cannot support on the
    given parent mesh.

    NOTE(review): the fixture decorator had been truncated to its bare
    argument list (a syntax error); reconstructed as ``@pytest.fixture`` --
    confirm against the original source.
    """
    family = request.param[0]
    # Non-CG families here are only defined on simplex cells.
    if ((family != 'CG') and (parentmesh.ufl_cell().cellname() != 'triangle') and (parentmesh.ufl_cell().cellname() != 'tetrahedron')):
        pytest.skip(f'{family} does not support {parentmesh.ufl_cell()} cells')
    if (parentmesh.name == 'immersedsphere'):
        if (family == 'Regge'):
            pytest.xfail(f'{family} does not give correct point evaluation results on immersed manifolds')
        elif (family == 'BDM'):
            pytest.xfail(f'{family} cannot yet perform point evaluation on immersed manifolds')
    return request.param
class scan(_coconut_has_iter):
    """Coconut runtime: inclusive scan over an iterable.

    Behaves like ``itertools.accumulate(iterable, func)`` with an optional
    explicit initial value; ``_coconut_sentinel`` marks "no initial given".
    """
    __slots__ = ('func', 'initial')
    def __new__(cls, function, iterable, initial=_coconut_sentinel):
        # The base class stores the iterable; we add the folding function
        # and the (possibly absent) initial accumulator.
        self = _coconut.super(scan, cls).__new__(cls, iterable)
        self.func = function
        self.initial = initial
        return self
    def __repr__(self):
        # Only show the initial value when one was actually supplied.
        return ('scan(%r, %s%s)' % (self.func, _coconut.repr(self.iter), ('' if (self.initial is _coconut_sentinel) else (', ' + _coconut.repr(self.initial)))))
    def __reduce__(self):
        # Pickle support: rebuild from (func, iterable, initial).
        return (self.__class__, (self.func, self.iter, self.initial))
    def __copy__(self):
        # get_new_iter() re-derives a fresh iterator so the copy is independent.
        return self.__class__(self.func, self.get_new_iter(), self.initial)
    def __iter__(self):
        acc = self.initial
        # An explicit initial value is emitted first, like
        # itertools.accumulate's `initial` argument.
        if (acc is not _coconut_sentinel):
            (yield acc)
        for item in self.iter:
            if (acc is _coconut_sentinel):
                # No initial value: the first item seeds the accumulator.
                acc = item
            else:
                acc = self.func(acc, item)
            (yield acc)
    def __len__(self):
        # Note: only the underlying iterable's length is reported; an
        # explicit initial value is not counted here.
        if (not _coconut.isinstance(self.iter, _coconut.abc.Sized)):
            return _coconut.NotImplemented
        return _coconut.len(self.iter)
@pytest.mark.parametrize(('degree', 'family', 'tdim'), [(1, 'Lagrange', 3), (2, 'Lagrange', 3), (3, 'Lagrange', 3), (0, 'Quadrature', 2), (1, 'Quadrature', 2), (2, 'Quadrature', 2)])
def test_projection_symmetric_tensor(mesh, degree, family, tdim):
    """Project a symmetric tensor into full and symmetry-reduced spaces and
    check the reduced DOFs match the full ones with duplicates removed.

    NOTE(review): the decorator had been truncated to ``.parametrize(...)``;
    reconstructed as ``@pytest.mark.parametrize`` -- confirm.
    """
    shape = (tdim, tdim)
    # Column indices of the duplicated (below-diagonal) entries in the
    # flattened tensor: one for 2x2, three for 3x3.
    remove = (2 if (tdim == 2) else [3, 6, 7])
    if (family == 'Quadrature'):
        Nq = ((2 * degree) + 1)
        Q = FunctionSpace(mesh, TensorElement(family, mesh.ufl_cell(), degree=Nq, quad_scheme='default', shape=shape, symmetry=None))
        Qs = FunctionSpace(mesh, TensorElement(family, mesh.ufl_cell(), degree=Nq, quad_scheme='default', shape=shape, symmetry=True))
        sp = {'mat_type': 'matfree', 'ksp_type': 'preonly', 'pc_type': 'jacobi'}
        fcp = {'quadrature_degree': Nq}
    else:
        Q = TensorFunctionSpace(mesh, family, degree=degree, shape=shape, symmetry=None)
        Qs = TensorFunctionSpace(mesh, family, degree=degree, shape=shape, symmetry=True)
        sp = {'mat_type': 'aij', 'ksp_type': 'preonly', 'pc_type': 'lu'}
        fcp = {}
    (x, y) = SpatialCoordinate(mesh)
    bcomp = [x, y, (x + y)]
    b = as_vector(bcomp[:tdim])
    # G is symmetric by construction: scaled identity plus outer(b, b).
    G = (((x + y) * Identity(tdim)) + outer(b, b))
    P = project(G, Q, solver_parameters=sp, form_compiler_parameters=fcp, use_slate_for_inverse=False)
    Ps = project(G, Qs, solver_parameters=sp, form_compiler_parameters=fcp, use_slate_for_inverse=False)
    X = np.delete(np.reshape(P.dat.data_ro, ((- 1), Q.value_size)), remove, 1)
    assert np.isclose(Ps.dat.data_ro, X).all()
def print_result(data):
    """Pretty-print detected mice as an ASCII table.

    *data* is an iterable of 5-tuples; the first four fields are printed
    (the last is ignored).
    """
    row_fmt = '| %-4s | %-4s | %-30s | %-29s |'
    separator = ('-' * 80)
    print('\n* Found mice:')
    print(separator)
    print((row_fmt % ('VID', 'PID', 'NAME', 'FULL NAME')))
    print(separator)
    for (vid, pid, name, full_name, _) in data:
        print((row_fmt % (vid, pid, name, full_name)))
    print(separator)
    print(('\n==> %i mice found!\n' % len(data)))
def _tf_odapi_client(image, ip, port, model_name, signature_name='detection_signature', input_name='inputs', timeout=300):
    """Run an object-detection inference via TF Serving.

    Returns a ``(boxes, classes, scores)`` tuple of numpy arrays, where
    boxes is shaped (num_detections, 4).
    """
    started = time()
    response = _generic_tf_serving_client(image, ip, port, model_name, signature_name, input_name, timeout)
    log('time taken for image shape {} is {} secs'.format(image.shape, (time() - started)))
    # Pull the flat repeated-float fields out of the protobuf response.
    scores = np.array(response.outputs['detection_scores'].float_val)
    boxes = np.array(response.outputs['detection_boxes'].float_val).reshape((len(scores), 4))
    classes = np.squeeze(np.array(response.outputs['detection_classes'].float_val).astype(np.int32))
    return (boxes, classes, np.squeeze(scores))
class OptionPlotoptionsErrorbarSonificationContexttracksPointgrouping(Options):
    """Point-grouping options for errorbar sonification context tracks.

    NOTE(review): the paired getter/setter defs shared one name with no
    decorators, so each getter was shadowed by its setter; restored as
    ``@property`` pairs -- confirm against the generator's source.
    """

    @property
    def algorithm(self):
        """Grouping algorithm; defaults to 'minmax'."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is enabled; defaults to True."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan of each group; defaults to 15."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property to sonify per group; defaults to 'y'."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def youtube_recon(user_name):
    """Look up the YouTube channel page for *user_name*.

    Returns a dict with site/avatar/title/url keys, or None when the user
    name is empty or the channel does not exist.
    """
    if (not user_name):
        return None
    # NOTE(review): the URL literal was truncated in this revision;
    # reconstructed as the public channel URL -- confirm against the
    # original source.
    url = 'https://www.youtube.com/{}'.format(user_name)
    r = requests.get(url)
    if (b'This channel does not exist.' in r.content):
        return None
    soup = BeautifulSoup(r.content, 'lxml')
    avatar = soup.find('img', class_='channel-header-profile-image').get('src')
    try:
        title = soup.find('span', id='channel-title').contents
    except AttributeError:
        # find() returned None -- the title element is missing from the page.
        title = None
    return {'site': 'YouTube', 'avatar': avatar, 'title': title, 'url': url}
def test_file_format_getting_python_value():
    """A blob literal with format 'txt' must round-trip into a FlyteFile
    whose extension matches."""
    transformer = TypeEngine.get_transformer(FlyteFile)
    context = FlyteContext.current_context()
    blob_meta = BlobMetadata(type=BlobType(format='txt', dimensionality=0))
    literal = Literal(scalar=Scalar(blob=Blob(metadata=blob_meta, uri='file:///tmp/test')))
    value = transformer.to_python_value(context, literal, expected_python_type=FlyteFile['txt'])
    assert isinstance(value, FlyteFile)
    assert (value.extension() == 'txt')
class TestMyRLAgent(GymTestCase):
    """Smoke-tests MyRLAgent.fit with the environment and model mocked out."""

    def test_fit(self):
        # Canned (obs, reward, done, info) tuple returned by every env step.
        step_result = ('obs', 'reward', 'done', 'info')
        with patch.object(ProxyEnv, 'reset') as mocked_reset:
            with patch.object(ProxyEnv, 'step', return_value=step_result) as mocked_step:
                with patch.object(ProxyEnv, 'close') as mocked_close:
                    with patch.object(GoodPriceModel, 'get_price_expectation') as mocked_price_exp:
                        with patch.object(GoodPriceModel, 'update') as mocked_update:
                            with patch.object(self.logger, 'log') as mock_logger:
                                self.my_rl_agent.fit(self.proxy_env, self.nb_steps)
                                # Expected shape of a fit() run: one reset,
                                # then exactly nb_steps of predict/step/update,
                                # logging along the way, and a final close.
                                mocked_reset.assert_called_once()
                                mocked_price_exp.assert_called()
                                assert (mocked_price_exp.call_count == self.nb_steps)
                                mocked_step.assert_called()
                                assert (mocked_step.call_count == self.nb_steps)
                                mocked_update.assert_called()
                                assert (mocked_update.call_count == self.nb_steps)
                                mock_logger.assert_called()
                                mocked_close.assert_called()
class CancelCollection(View):
    """Remove a set of articles from the current user's collection."""

    def post(self, request):
        # nids of the articles the user is un-collecting.
        nid_list = request.POST.getlist('nid')
        request.user.collects.remove(*nid_list)
        articles: QuerySet = Articles.objects.filter(nid__in=nid_list)
        # Atomic decrement via an F expression (no read-modify-write race).
        articles.update(collects_count=(F('collects_count') - 1))
        return redirect('/backend/')
@pytest.mark.benchmark
def test_birkholz_set_synthesis(fixture_store):
    """Aggregate results collected by the Birkholz-set benchmark tests.

    Sums cycle counts and convergence flags from the shared results_bag.

    NOTE(review): the marker had been truncated to ``.benchmark``;
    reconstructed as ``@pytest.mark.benchmark`` -- confirm.
    """
    for (i, fix) in enumerate(fixture_store):
        print(i, fix)
    tot_cycles = 0
    converged = 0
    bags = fixture_store['results_bag']
    for (k, v) in bags.items():
        # Only aggregate bags produced by the Birkholz-set tests.
        if (not k.startswith('test_birkholz_set')):
            continue
        print(k)
        try:
            tot_cycles += v['cycles']
            converged += (1 if v['is_converged'] else 0)
            for (kk, vv) in v.items():
                print('\t', kk, vv)
        except KeyError:
            # A bag without 'cycles'/'is_converged' means the run failed.
            print('\tFailed!')
    print(f'Total cycles: {tot_cycles}')
    print(f'Converged: {converged}/{len(bags)}')
class OptionSeriesTreemapBreadcrumbsPosition(Options):
    """Positioning options for treemap breadcrumbs.

    NOTE(review): the paired getter/setter defs shared one name with no
    decorators, so each getter was shadowed; restored as ``@property``
    pairs -- confirm against the generator's source.
    """

    @property
    def align(self):
        """Horizontal alignment; defaults to 'left'."""
        return self._config_get('left')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def verticalAlign(self):
        """Vertical alignment; defaults to 'top'."""
        return self._config_get('top')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        """Horizontal pixel offset; defaults to 0."""
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """Vertical pixel offset; defaults to 'undefined'."""
        return self._config_get('undefined')

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
def main():
    """Regenerate the rule tables embedded in docs/style_checker.html.

    The HTML contains 'HOOK:'/'END HOOK:' marker comments; the content
    between each pair is dropped and replaced with a freshly generated
    table for the matching rule category. The file is rewritten in place.
    """
    rule_set = mh_style.get_rules()
    # Instantiate every rule class from every category.
    rules = [rule() for rules in rule_set.values() for rule in rules]
    mandatory_rules = [rule for rule in rules if rule.mandatory]
    autofix_rules = [rule for rule in rules if ((not rule.mandatory) and rule.autofix)]
    other_rules = [rule for rule in rules if ((not rule.mandatory) and (not rule.autofix))]
    lines = []
    # in_section is True while we are inside (and skipping) a previously
    # generated section.
    in_section = False
    with open('../docs/style_checker.html', 'r') as fd:
        for raw_line in fd:
            if ('END HOOK: ' in raw_line):
                assert in_section
                in_section = False
                # Emit the regenerated table just before the END marker
                # (the marker line itself is appended by the final check).
                if ('MANDATORY RULES' in raw_line):
                    process(lines, mandatory_rules)
                elif ('AUTOFIX RULES' in raw_line):
                    process(lines, autofix_rules)
                else:
                    process(lines, other_rules)
            elif ('HOOK: ' in raw_line):
                assert (not in_section)
                in_section = True
                lines.append(raw_line.rstrip())
            # Marker lines and everything outside sections are kept as-is.
            if (not in_section):
                lines.append(raw_line.rstrip())
    with open('../docs/style_checker.html', 'w') as fd:
        fd.write(('\n'.join(lines) + '\n'))
class TestDescriptor(Descriptor):
    """Read/write GATT descriptor whose value is the static string 'Test'."""
    TEST_DESC_UUID = '-1234-5678-1234-56789abcdef2'

    def __init__(self, bus, index, characteristic):
        Descriptor.__init__(self, bus, index, self.TEST_DESC_UUID, ['read', 'write'], characteristic)

    def ReadValue(self, options):
        # Return 'Test' one dbus byte at a time.
        return [dbus.Byte(ch) for ch in 'Test']
class KeyBindingCtrl(QtGui.QLineEdit):
    """Read-only line edit that captures a key press for *editor* and
    draws a highlight border while the editor has focus."""

    def __init__(self, editor, parent=None):
        super().__init__(parent)
        self.setFocusPolicy(QtCore.Qt.FocusPolicy.StrongFocus)
        self.setMinimumWidth(160)
        self.setMaximumWidth(160)
        self.setReadOnly(True)
        self.editor = editor
        editor.has_focus = False

    def keyPressEvent(self, event):
        # Bare modifier presses are ignored; anything else becomes the binding.
        if (event.key() in (QtCore.Qt.Key.Key_Control, QtCore.Qt.Key.Key_Shift)):
            return
        self.editor.key = event

    def paintEvent(self, event):
        super().paintEvent(event)
        if (not self.editor.has_focus):
            return
        # Draw a 2px 'tomato' border just inside the widget rect.
        painter = QtGui.QPainter(self)
        painter.setRenderHint(QtGui.QPainter.RenderHint.Antialiasing, True)
        border_pen = QtGui.QPen(QtGui.QColor('tomato'))
        border_pen.setWidth(2)
        painter.setPen(border_pen)
        painter.drawRect(1, 1, (self.width() - 2), (self.height() - 2))
        painter.end()

    def focusInEvent(self, event):
        self.editor.has_focus = True
        self.update()

    def focusOutEvent(self, event):
        self.editor.has_focus = False
        self.update()

    def mouseDoubleClickEvent(self, event):
        # Double-click asks the editor to clear the current binding.
        self.editor.clear = True
class NamespaceVersioning(BaseVersioning):
    """Versioning scheme that derives the API version from the URL
    namespace of the resolved view."""

    invalid_version_message = _('Invalid version in URL path. Does not match any version namespace.')

    def determine_version(self, request, *args, **kwargs):
        resolver_match = getattr(request, 'resolver_match', None)
        if ((resolver_match is not None) and resolver_match.namespace):
            # Nested namespaces arrive colon-separated; the first allowed
            # component wins.
            for candidate in resolver_match.namespace.split(':'):
                if self.is_allowed_version(candidate):
                    return candidate
        # No usable namespace: fall back to the default, if it is allowed.
        if (not self.is_allowed_version(self.default_version)):
            raise exceptions.NotFound(self.invalid_version_message)
        return self.default_version

    def reverse(self, viewname, args=None, kwargs=None, request=None, format=None, **extra):
        if (request.version is not None):
            viewname = self.get_versioned_viewname(viewname, request)
        return super().reverse(viewname, args, kwargs, request, format, **extra)

    def get_versioned_viewname(self, viewname, request):
        # e.g. 'v1' + ':' + 'users-list' -> 'v1:users-list'
        return ((request.version + ':') + viewname)
def clean_title(title, ostype=None) -> str:
    """Sanitize *title* for use as a filename on the given OS.

    Args:
        title: Raw title string.
        ostype: OS name as returned by ``platform.system()`` ('Linux',
            'Darwin', 'Windows'). Defaults to the current OS, preserving
            the original call signature.

    Returns:
        The title with characters illegal on that OS replaced, and all
        newlines removed.
    """
    if (ostype is None):
        ostype = platform.system()
    if (ostype == 'Linux'):
        title = title.replace('/', '_')
    elif (ostype == 'Darwin'):
        title = title.replace(':', ' ')
        title = title.replace('\\', '_').replace('/', '_').replace('|', '_')
    elif (ostype == 'Windows'):
        # Windows forbids the largest character set: \ / | < > : ? " *
        title = title.replace('\\', '_').replace('/', '_').replace('|', '_')
        title = title.replace('<', '-').replace('>', '-').replace(':', ' ')
        title = title.replace('?', '').replace('"', '').replace('*', '')
    # Newlines are never valid in a filename, regardless of OS.
    title = title.replace('\n', '')
    return title
class OptionSeriesPictorialSonificationPointgrouping(Options):
    """Point-grouping options for pictorial series sonification.

    NOTE(review): the paired getter/setter defs shared one name with no
    decorators, so each getter was shadowed; restored as ``@property``
    pairs -- confirm against the generator's source.
    """

    @property
    def algorithm(self):
        """Grouping algorithm; defaults to 'minmax'."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is enabled; defaults to True."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan of each group; defaults to 15."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property to sonify per group; defaults to 'y'."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesDependencywheelLabelStyle(Options):
    """Label style options for dependency-wheel series.

    NOTE(review): the paired getter/setter defs shared one name with no
    decorators, so each getter was shadowed; restored as ``@property``
    pairs -- confirm against the generator's source.
    """

    @property
    def fontSize(self):
        """Label font size; defaults to '0.8em'."""
        return self._config_get('0.8em')

    @fontSize.setter
    def fontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def fontWeight(self):
        """Label font weight; defaults to 'bold'."""
        return self._config_get('bold')

    @fontWeight.setter
    def fontWeight(self, text: str):
        self._config(text, js_type=False)
def get_registry_event_details(io, metadata, event, extra_detail_io):
    """Parse the operation-specific detail block of a registry event.

    Reads the path-string descriptor first, then branches on the registry
    operation to decode its fixed-layout fields, and finally resolves
    ``event.path`` and (when requested) the operation's extra details.

    NOTE(review): the io.seek(n, 1) calls skip what appear to be padding /
    reserved bytes in the record layout -- offsets inferred from the
    surrounding reads, confirm against the format specification.
    """
    path_info = read_detail_string_info(io)
    details_info = dict()
    if (event.operation in [RegistryOperation.RegLoadKey.name, RegistryOperation.RegRenameKey.name]):
        # These operations carry a second (destination) path.
        details_info['new_path_info'] = read_detail_string_info(io)
        extra_detail_io = io
    elif (event.operation in [RegistryOperation.RegOpenKey.name, RegistryOperation.RegCreateKey.name]):
        io.seek(2, 1)
        details_info['desired_access'] = read_u32(io)
    elif (event.operation in [RegistryOperation.RegQueryKey.name, RegistryOperation.RegQueryValue.name]):
        io.seek(2, 1)
        details_info['length'] = read_u32(io)
        details_info['information_class'] = read_u32(io)
    elif (event.operation in [RegistryOperation.RegEnumValue.name, RegistryOperation.RegEnumKey.name]):
        io.seek(2, 1)
        details_info['length'] = read_u32(io)
        details_info['index'] = read_u32(io)
        details_info['information_class'] = read_u32(io)
    elif (event.operation == RegistryOperation.RegSetInfoKey.name):
        io.seek(2, 1)
        details_info['key_set_information_class'] = read_u32(io)
        io.seek(4, 1)
        # Note: length is 16-bit here, unlike the 32-bit lengths above.
        details_info['length'] = read_u16(io)
        io.seek(2, 1)
        extra_detail_io = io
    elif (event.operation == RegistryOperation.RegSetValue.name):
        io.seek(2, 1)
        details_info['reg_type'] = read_u32(io)
        details_info['length'] = read_u32(io)
        details_info['data_length'] = read_u32(io)
        extra_detail_io = io
    # The path string itself follows the per-operation fields.
    event.path = read_detail_string(io, path_info)
    # Delegate operation-specific extra parsing when details are wanted.
    if (metadata.should_get_details and (event.operation in RegistryExtraDetailsHandler)):
        RegistryExtraDetailsHandler[event.operation](metadata, event, extra_detail_io, details_info)
class OptionSeriesScatter3dDataDragdropDraghandle(Options):
    """Drag-handle styling for scatter3d drag & drop.

    NOTE(review): the paired getter/setter defs shared one name with no
    decorators, so each getter was shadowed; restored as ``@property``
    pairs -- confirm against the generator's source.
    """

    @property
    def className(self):
        """CSS class of the handle; defaults to 'highcharts-drag-handle'."""
        return self._config_get('highcharts-drag-handle')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Fill color; defaults to '#fff'."""
        return self._config_get('#fff')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        """Border color; defaults to 'rgba(0, 0, 0, 0.6)'."""
        return self._config_get('rgba(0, 0, 0, 0.6)')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Border width; defaults to 1."""
        return self._config_get(1)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        """Stacking order; defaults to 901."""
        return self._config_get(901)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class OptionSeriesTreemapSonificationContexttracksPointgrouping(Options):
    """Point-grouping options for treemap sonification context tracks.

    NOTE(review): the paired getter/setter defs shared one name with no
    decorators, so each getter was shadowed; restored as ``@property``
    pairs -- confirm against the generator's source.
    """

    @property
    def algorithm(self):
        """Grouping algorithm; defaults to 'minmax'."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is enabled; defaults to True."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan of each group; defaults to 15."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property to sonify per group; defaults to 'y'."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def benchmark():
    """Run the download-method benchmark matrix and dump raw + CSV logs.

    Exercises every (request, range-method, base-url) combination, then
    post-processes the collected transfer statistics into a CSV.
    """
    collect_statistics(True)
    baseurls = [CML_BASEURL_S3, CML_BASEURL_CDS]
    requests = [{'param': 'r', 'time': '1000', 'step': '0'}, {'param': 'r', 'time': '1000'}, {'param': 'r', 'time': ['1100', '1200', '1300', '1400']}, {'param': ['r', 'z'], 'time': ['0200', '1000', '1800', '2300'], 'levelist': ['500', '850']}, {'param': ['r', 'z'], 'levelist': ['500', '850']}, {'param': 'r'}, {'param': ['r', 'z', 't']}]
    methods = get_methods_list()
    failed = []
    successfull = 0
    import tqdm
    from climetlab.indexing import PerUrlIndex
    for request in tqdm.tqdm(requests):
        for range_method in tqdm.tqdm(methods):
            for baseurl in baseurls:
                index = PerUrlIndex(f'{baseurl}/test-data/input/indexed-urls/large_grib_1.grb')
                try:
                    retrieve_and_check(index, request, range_method, force=True)
                    successfull += 1
                except Exception as e:
                    # Record and report, but keep the benchmark running.
                    failed.append((index, request, range_method))
                    print('FAILED for ', index, request, range_method)
                    print(e)
    stats = retrieve_statistics()
    run_id = get_run_id()
    logfiles = []
    path = f'climetlab_benchmark{run_id}.json'
    logfiles.append(path)
    stats.write_to_json(path)
    print(f'BENCHMARK FINISHED. Raw logs saved in {path}')
    df = stats.to_pandas()
    df['server'] = df['url'].apply(url_to_server)
    # Throughput in MB/s.
    df['speed'] = ((df['total'] / df['elapsed']) / (1024 * 1024))
    df['method'] = df['full_method'].apply(radix)
    # Fix: rename must target columns -- a bare mapper renames index
    # labels, leaving 'size_requested'/'size_downloaded' undefined below.
    df = df.rename(columns=dict(size_parts='size_requested', size_blocks='size_downloaded'))
    df['size_ratio'] = (df['size_downloaded'] / df['size_requested'])
    path = f'climetlab_benchmark{run_id}.csv'
    df.to_csv(path)
    logfiles.append(path)
    print(f'Benchmark finished ({successfull} successfull, {len(failed)} failed).')
    print('All data in the log files are anonymous.Only the log file names contain personal data (machine name, IP, etc.).')
    for f in logfiles:
        print(f'Log file: {f}')
def draw_scipy_ui(context: bpy.types.Context, layout: bpy.types.UILayout):
    """Draw the SciPy install status and install button in the add-on UI."""
    has_numpy = moduleutil.is_installed('numpy')
    has_scipy = moduleutil.is_installed('scipy')
    col = layout.column(align=True)
    if has_scipy:
        col.label(text='SciPy is already installed', icon='INFO')
    else:
        col.label(text='SciPy is not installed', icon='ERROR')
    row = col.row()
    # The --no-deps install below requires NumPy to already be present.
    row.enabled = (has_numpy and (not has_scipy))
    op = row.operator(operators.SCRIPT_OT_install_module.bl_idname, text='Install SciPy')
    op.name = 'scipy'
    op.options = '--no-deps'
    op.reload_scripts = True
class LevelModel(proteus.Transport.OneLevelTransport):
nCalls = 0
    def __init__(self, uDict, phiDict, testSpaceDict, matType, dofBoundaryConditionsDict, dofBoundaryConditionsSetterDict, coefficients, elementQuadrature, elementBoundaryQuadrature, fluxBoundaryConditionsDict=None, advectiveFluxBoundaryConditionsSetterDict=None, diffusiveFluxBoundaryConditionsSetterDictDict=None, stressTraceBoundaryConditionsSetterDict=None, stabilization=None, shockCapturing=None, conservativeFluxDict=None, numericalFluxType=None, TimeIntegrationClass=None, massLumping=False, reactionLumping=False, options=None, name='defaultName', reuse_trial_and_test_quadrature=True, sd=True, movingDomain=False, bdyNullSpace=False):
        """Set up the one-level transport model: store inputs, build
        quadrature rules, allocate element/boundary storage, and create the
        time integration, numerical flux, and post-processing objects.

        Follows the standard proteus OneLevelTransport construction
        sequence; statement order matters (quadrature before storage,
        strides before flux objects).
        """
        self.bdyNullSpace = bdyNullSpace
        self.useConstantH = False
        from proteus import Comm
        self.movingDomain = movingDomain
        self.tLast_mesh = None
        self.name = name
        self.sd = sd
        self.Hess = False
        self.lowmem = True
        self.timeTerm = True
        self.testIsTrial = True
        self.phiTrialIsTrial = True
        self.u = uDict
        self.ua = {}
        self.phi = phiDict
        self.dphi = {}
        self.matType = matType
        # Reusing test/trial quadrature requires every component to live in
        # the same kind of FEM space.
        self.reuse_test_trial_quadrature = reuse_trial_and_test_quadrature
        if self.reuse_test_trial_quadrature:
            for ci in range(1, coefficients.nc):
                assert (self.u[ci].femSpace.__class__.__name__ == self.u[0].femSpace.__class__.__name__), 'to reuse_test_trial_quad all femSpaces must be the same!'
        self.mesh = self.u[0].femSpace.mesh
        self.testSpace = testSpaceDict
        self.dirichletConditions = dofBoundaryConditionsDict
        self.dirichletNodeSetList = None
        self.coefficients = coefficients
        self.coefficients.initializeMesh(self.mesh)
        self.nc = self.coefficients.nc
        self.stabilization = stabilization
        self.shockCapturing = shockCapturing
        self.conservativeFlux = conservativeFluxDict
        self.fluxBoundaryConditions = fluxBoundaryConditionsDict
        self.advectiveFluxBoundaryConditionsSetterDict = advectiveFluxBoundaryConditionsSetterDict
        self.diffusiveFluxBoundaryConditionsSetterDictDict = diffusiveFluxBoundaryConditionsSetterDictDict
        # Stabilization is nonlinear if any coefficient term of any
        # component is flagged nonlinear (or any diffusion non-constant).
        self.stabilizationIsNonlinear = False
        if (self.stabilization is not None):
            for ci in range(self.nc):
                if (ci in coefficients.mass):
                    for flag in list(coefficients.mass[ci].values()):
                        if (flag == 'nonlinear'):
                            self.stabilizationIsNonlinear = True
                if (ci in coefficients.advection):
                    for flag in list(coefficients.advection[ci].values()):
                        if (flag == 'nonlinear'):
                            self.stabilizationIsNonlinear = True
                if (ci in coefficients.diffusion):
                    for diffusionDict in list(coefficients.diffusion[ci].values()):
                        for flag in list(diffusionDict.values()):
                            if (flag != 'constant'):
                                self.stabilizationIsNonlinear = True
                if (ci in coefficients.potential):
                    for flag in list(coefficients.potential[ci].values()):
                        if (flag == 'nonlinear'):
                            self.stabilizationIsNonlinear = True
                if (ci in coefficients.reaction):
                    for flag in list(coefficients.reaction[ci].values()):
                        if (flag == 'nonlinear'):
                            self.stabilizationIsNonlinear = True
                if (ci in coefficients.hamiltonian):
                    for flag in list(coefficients.hamiltonian[ci].values()):
                        if (flag == 'nonlinear'):
                            self.stabilizationIsNonlinear = True
        # Element-boundary integrals are needed per component when any flux
        # machinery or outflow-style BC is in play.
        self.elementBoundaryIntegrals = {}
        for ci in range(self.nc):
            self.elementBoundaryIntegrals[ci] = ((self.conservativeFlux is not None) or (numericalFluxType is not None) or (self.fluxBoundaryConditions[ci] == 'outFlow') or (self.fluxBoundaryConditions[ci] == 'mixedFlow') or (self.fluxBoundaryConditions[ci] == 'setFlow'))
        # DOF bookkeeping per component and globally.
        self.nSpace_global = self.u[0].femSpace.nSpace_global
        self.nDOF_trial_element = [u_j.femSpace.max_nDOF_element for u_j in list(self.u.values())]
        self.nDOF_phi_trial_element = [phi_k.femSpace.max_nDOF_element for phi_k in list(self.phi.values())]
        self.n_phi_ip_element = [phi_k.femSpace.referenceFiniteElement.interpolationConditions.nQuadraturePoints for phi_k in list(self.phi.values())]
        self.nDOF_test_element = [femSpace.max_nDOF_element for femSpace in list(self.testSpace.values())]
        self.nFreeDOF_global = [dc.nFreeDOF_global for dc in list(self.dirichletConditions.values())]
        self.nVDOF_element = sum(self.nDOF_trial_element)
        self.nFreeVDOF_global = sum(self.nFreeDOF_global)
        NonlinearEquation.__init__(self, self.nFreeVDOF_global)
        # Build the element quadrature dictionary; a plain rule is applied
        # to every integral key, a dict may specialize per key with a
        # 'default' fallback.
        elementQuadratureDict = {}
        elemQuadIsDict = isinstance(elementQuadrature, dict)
        if elemQuadIsDict:
            for I in self.coefficients.elementIntegralKeys:
                if (I in elementQuadrature):
                    elementQuadratureDict[I] = elementQuadrature[I]
                else:
                    elementQuadratureDict[I] = elementQuadrature['default']
        else:
            for I in self.coefficients.elementIntegralKeys:
                elementQuadratureDict[I] = elementQuadrature
        if (self.stabilization is not None):
            for I in self.coefficients.elementIntegralKeys:
                if elemQuadIsDict:
                    if (I in elementQuadrature):
                        elementQuadratureDict[(('stab',) + I[1:])] = elementQuadrature[I]
                    else:
                        elementQuadratureDict[(('stab',) + I[1:])] = elementQuadrature['default']
                else:
                    elementQuadratureDict[(('stab',) + I[1:])] = elementQuadrature
        if (self.shockCapturing is not None):
            for ci in self.shockCapturing.components:
                if elemQuadIsDict:
                    if (('numDiff', ci, ci) in elementQuadrature):
                        elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature[('numDiff', ci, ci)]
                    else:
                        elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature['default']
                else:
                    elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature
        # Lumping replaces mass/reaction quadrature with Lobatto rules.
        if massLumping:
            for ci in list(self.coefficients.mass.keys()):
                elementQuadratureDict[('m', ci)] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
            for I in self.coefficients.elementIntegralKeys:
                elementQuadratureDict[(('stab',) + I[1:])] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
        if reactionLumping:
            for ci in list(self.coefficients.mass.keys()):
                elementQuadratureDict[('r', ci)] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
            for I in self.coefficients.elementIntegralKeys:
                elementQuadratureDict[(('stab',) + I[1:])] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
        # Same scheme for element-boundary quadrature.
        elementBoundaryQuadratureDict = {}
        if isinstance(elementBoundaryQuadrature, dict):
            for I in self.coefficients.elementBoundaryIntegralKeys:
                if (I in elementBoundaryQuadrature):
                    elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature[I]
                else:
                    elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature['default']
        else:
            for I in self.coefficients.elementBoundaryIntegralKeys:
                elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature
        # Merge per-key rules into a single union point set.
        (self.elementQuadraturePoints, self.elementQuadratureWeights, self.elementQuadratureRuleIndeces) = Quadrature.buildUnion(elementQuadratureDict)
        self.nQuadraturePoints_element = self.elementQuadraturePoints.shape[0]
        self.nQuadraturePoints_global = (self.nQuadraturePoints_element * self.mesh.nElements_global)
        (self.elementBoundaryQuadraturePoints, self.elementBoundaryQuadratureWeights, self.elementBoundaryQuadratureRuleIndeces) = Quadrature.buildUnion(elementBoundaryQuadratureDict)
        self.nElementBoundaryQuadraturePoints_elementBoundary = self.elementBoundaryQuadraturePoints.shape[0]
        self.nElementBoundaryQuadraturePoints_global = ((self.mesh.nElements_global * self.mesh.nElementBoundaries_element) * self.nElementBoundaryQuadraturePoints_elementBoundary)
        # Storage dictionaries for quadrature-point values: q (element),
        # ebq/ebq_global (interior boundaries), ebqe (exterior boundaries),
        # phi_ip (interpolation points).
        self.q = {}
        self.ebq = {}
        self.ebq_global = {}
        self.ebqe = {}
        self.phi_ip = {}
        self.q['x'] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, 3), 'd')
        self.ebqe['x'] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary, 3), 'd')
        self.q[('u', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('grad(u)', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, self.nSpace_global), 'd')
        self.q[('r', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.ebqe[('u', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('grad(u)', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary, self.nSpace_global), 'd')
        self.points_elementBoundaryQuadrature = set()
        self.scalars_elementBoundaryQuadrature = set([('u', ci) for ci in range(self.nc)])
        self.vectors_elementBoundaryQuadrature = set()
        self.tensors_elementBoundaryQuadrature = set()
        log(memory('element and element boundary Jacobians', 'OneLevelTransport'), level=4)
        # Inflow BC helper arrays (per component).
        self.inflowBoundaryBC = {}
        self.inflowBoundaryBC_values = {}
        self.inflowFlux = {}
        for cj in range(self.nc):
            self.inflowBoundaryBC[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global,), 'i')
            self.inflowBoundaryBC_values[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nDOF_trial_element[cj]), 'd')
            self.inflowFlux[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        # Identify interior nodes by removing all nodes touching an
        # exterior boundary face.
        self.internalNodes = set(range(self.mesh.nNodes_global))
        for ebNE in range(self.mesh.nExteriorElementBoundaries_global):
            ebN = self.mesh.exteriorElementBoundariesArray[ebNE]
            eN_global = self.mesh.elementBoundaryElementsArray[(ebN, 0)]
            ebN_element = self.mesh.elementBoundaryLocalElementBoundariesArray[(ebN, 0)]
            for i in range(self.mesh.nNodes_element):
                if (i != ebN_element):
                    I = self.mesh.elementNodesArray[(eN_global, i)]
                    self.internalNodes -= set([I])
        self.nNodes_internal = len(self.internalNodes)
        self.internalNodesArray = np.zeros((self.nNodes_internal,), 'i')
        for (nI, n) in enumerate(self.internalNodes):
            self.internalNodesArray[nI] = n
        del self.internalNodes
        self.internalNodes = None
        log('Updating local to global mappings', 2)
        self.updateLocal2Global()
        log('Building time integration object', 2)
        log(memory('inflowBC, internalNodes,updateLocal2Global', 'OneLevelTransport'), level=4)
        # Gradient-based stabilization needs interpolation-point integration.
        if (self.stabilization and self.stabilization.usesGradientStabilization):
            self.timeIntegration = TimeIntegrationClass(self, integrateInterpolationPoints=True)
        else:
            self.timeIntegration = TimeIntegrationClass(self)
        if (options is not None):
            self.timeIntegration.setFromOptions(options)
        log(memory('TimeIntegration', 'OneLevelTransport'), level=4)
        log('Calculating numerical quadrature formulas', 2)
        self.calculateQuadrature()
        self.setupFieldStrides()
        comm = Comm.get()
        self.comm = comm
        if (comm.size() > 1):
            assert ((numericalFluxType is not None) and numericalFluxType.useWeakDirichletConditions), 'You must use a numerical flux to apply weak boundary conditions for parallel runs'
        log(memory('stride+offset', 'OneLevelTransport'), level=4)
        # Build the numerical flux (optionally with periodic Dirichlet BCs).
        if (numericalFluxType is not None):
            if ((options is None) or (options.periodicDirichletConditions is None)):
                self.numericalFlux = numericalFluxType(self, dofBoundaryConditionsSetterDict, advectiveFluxBoundaryConditionsSetterDict, diffusiveFluxBoundaryConditionsSetterDictDict)
            else:
                self.numericalFlux = numericalFluxType(self, dofBoundaryConditionsSetterDict, advectiveFluxBoundaryConditionsSetterDict, diffusiveFluxBoundaryConditionsSetterDictDict, options.periodicDirichletConditions)
        else:
            self.numericalFlux = None
        # Penalty = constant / h^power on each element boundary.
        if ('penalty' in self.ebq_global):
            for ebN in range(self.mesh.nElementBoundaries_global):
                for k in range(self.nElementBoundaryQuadraturePoints_elementBoundary):
                    self.ebq_global['penalty'][(ebN, k)] = old_div(self.numericalFlux.penalty_constant, (self.mesh.elementBoundaryDiametersArray[ebN] ** self.numericalFlux.penalty_power))
        if ('penalty' in self.ebqe):
            for ebNE in range(self.mesh.nExteriorElementBoundaries_global):
                ebN = self.mesh.exteriorElementBoundariesArray[ebNE]
                for k in range(self.nElementBoundaryQuadraturePoints_elementBoundary):
                    self.ebqe['penalty'][(ebNE, k)] = old_div(self.numericalFlux.penalty_constant, (self.mesh.elementBoundaryDiametersArray[ebN] ** self.numericalFlux.penalty_power))
        log(memory('numericalFlux', 'OneLevelTransport'), level=4)
        self.elementEffectiveDiametersArray = self.mesh.elementInnerDiametersArray
        from proteus import PostProcessingTools
        self.velocityPostProcessor = PostProcessingTools.VelocityPostProcessingChooser(self)
        log(memory('velocity postprocessor', 'OneLevelTransport'), level=4)
        from proteus import Archiver
        self.elementQuadratureDictionaryWriter = Archiver.XdmfWriter()
        self.elementBoundaryQuadratureDictionaryWriter = Archiver.XdmfWriter()
        self.exteriorElementBoundaryQuadratureDictionaryWriter = Archiver.XdmfWriter()
        self.globalResidualDummy = None
        # Bind the compiled PresInit kernel for this discretization.
        compKernelFlag = 0
        self.elementDiameter = self.mesh.elementDiametersArray
        self.presinit = cPresInit.PresInit(self.nSpace_global, self.nQuadraturePoints_element, self.u[0].femSpace.elementMaps.localFunctionSpace.dim, self.u[0].femSpace.referenceFiniteElement.localFunctionSpace.dim, self.testSpace[0].referenceFiniteElement.localFunctionSpace.dim, self.nElementBoundaryQuadraturePoints_elementBoundary, compKernelFlag)
def calculateCoefficients(self):
    """No-op: this model has no per-call coefficient evaluation to perform."""
    pass
def calculateElementResidual(self):
    """Re-evaluate the global residual into the cached dummy vector.

    Does nothing until ``getResidual`` has been called at least once
    (i.e. until ``self.globalResidualDummy`` has been allocated).
    """
    if self.globalResidualDummy is None:
        return
    self.getResidual(self.u[0].dof, self.globalResidualDummy)
def getResidual(self, u, r):
    """Evaluate the nonlinear residual for the pressure-initialization model.

    Parameters
    ----------
    u : array
        Global degree-of-freedom vector; copied into ``self.u`` via
        ``setUnknowns`` before evaluation.
    r : array
        Global residual vector; zeroed here and filled in place by the
        compiled ``cPresInit.calculateResidual`` kernel.

    Side effects: updates ``self.coefficients.massConservationError``,
    increments the nonlinear-evaluation counter, and lazily allocates
    ``self.globalResidualDummy`` on first call.
    """
    # NOTE: the original carried leftover debug imports (pdb, copy); removed.
    r.fill(0.0)
    self.setUnknowns(u)
    # Marshal every mesh/FEM-space/coefficient array the C++ kernel reads.
    argsDict = cArgumentsDict.ArgumentsDict()
    argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
    argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
    argsDict['mesh_dof'] = self.mesh.nodeArray
    argsDict['mesh_l2g'] = self.mesh.elementNodesArray
    argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
    argsDict['u_trial_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
    argsDict['u_test_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
    argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
    argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
    argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
    argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
    argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
    argsDict['nElements_global'] = self.mesh.nElements_global
    argsDict['useMetrics'] = self.coefficients.useMetrics
    argsDict['epsFactHeaviside'] = self.coefficients.epsFactHeaviside
    argsDict['epsFactDirac'] = self.coefficients.epsFactDirac
    argsDict['epsFactDiffusion'] = self.coefficients.epsFactDiffusion
    argsDict['u_l2g'] = self.u[0].femSpace.dofMap.l2g
    argsDict['elementDiameter'] = self.elementDiameter
    argsDict['nodeDiametersArray'] = self.mesh.nodeDiametersArray
    argsDict['u_dof'] = self.u[0].dof
    argsDict['q_phi'] = self.coefficients.q_u_ls
    argsDict['q_normal_phi'] = self.coefficients.q_n_ls
    argsDict['ebqe_phi'] = self.coefficients.ebqe_u_ls
    argsDict['ebqe_normal_phi'] = self.coefficients.ebqe_n_ls
    argsDict['q_H'] = self.coefficients.q_H_vof
    argsDict['q_u'] = self.q[('u', 0)]
    argsDict['q_n'] = self.q[('grad(u)', 0)]
    argsDict['ebqe_u'] = self.ebqe[('u', 0)]
    argsDict['ebqe_n'] = self.ebqe[('grad(u)', 0)]
    argsDict['q_r'] = self.q[('r', 0)]
    argsDict['q_vos'] = self.coefficients.q_vos
    argsDict['offset_u'] = self.offset[0]
    argsDict['stride_u'] = self.stride[0]
    argsDict['globalResidual'] = r
    argsDict['nExteriorElementBoundaries_global'] = self.mesh.nExteriorElementBoundaries_global
    argsDict['exteriorElementBoundariesArray'] = self.mesh.exteriorElementBoundariesArray
    argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
    argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
    self.presinit.calculateResidual(argsDict)
    log('Global residual', level=9, data=r)
    # Global mass-conservation check: sum residual over locally-owned nodes
    # and reduce across processors.
    self.coefficients.massConservationError = fabs(globalSum(sum(r.flat[:self.mesh.nNodes_owned])))
    # Sanity check that the two equivalent summation forms agree
    # (stripped under ``python -O``).
    assert self.coefficients.massConservationError == fabs(globalSum(r[:self.mesh.nNodes_owned].sum()))
    log(' Mass Conservation Error', level=3, data=self.coefficients.massConservationError)
    self.nonlinear_function_evaluations += 1
    if self.globalResidualDummy is None:
        self.globalResidualDummy = np.zeros(r.shape, 'd')
def getJacobian(self, jacobian):
    """Assemble the CSR Jacobian for the pressure-initialization model.

    Zeros the existing CSR storage in place, marshals the mesh/FEM-space
    arrays into an ArgumentsDict, and delegates assembly to the compiled
    ``cPresInit.calculateJacobian`` kernel.

    :param jacobian: sparse matrix with a CSR representation; modified in
        place and also returned.
    """
    cfemIntegrals.zeroJacobian_CSR(self.nNonzerosInJacobian, jacobian)
    # Same marshalling layout as in getResidual, minus the residual-only
    # arrays (ebqe_*, q_u, q_r, offsets).
    argsDict = cArgumentsDict.ArgumentsDict()
    argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
    argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
    argsDict['mesh_dof'] = self.mesh.nodeArray
    argsDict['mesh_l2g'] = self.mesh.elementNodesArray
    argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
    argsDict['u_trial_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
    argsDict['u_test_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
    argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
    argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
    argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
    argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
    argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
    argsDict['nElements_global'] = self.mesh.nElements_global
    argsDict['useMetrics'] = self.coefficients.useMetrics
    argsDict['epsFactHeaviside'] = self.coefficients.epsFactHeaviside
    argsDict['epsFactDirac'] = self.coefficients.epsFactDirac
    argsDict['epsFactDiffusion'] = self.coefficients.epsFactDiffusion
    argsDict['u_l2g'] = self.u[0].femSpace.dofMap.l2g
    argsDict['elementDiameter'] = self.elementDiameter
    argsDict['nodeDiametersArray'] = self.mesh.nodeDiametersArray
    argsDict['u_dof'] = self.u[0].dof
    argsDict['q_phi'] = self.coefficients.q_u_ls
    argsDict['q_normal_phi'] = self.coefficients.q_n_ls
    argsDict['q_H'] = self.coefficients.q_H_vof
    argsDict['q_vos'] = self.coefficients.q_vos
    # CSR sparsity pattern for the (0,0) block plus the raw value array.
    argsDict['csrRowIndeces_u_u'] = self.csrRowIndeces[(0, 0)]
    argsDict['csrColumnOffsets_u_u'] = self.csrColumnOffsets[(0, 0)]
    argsDict['globalJacobian'] = jacobian.getCSRrepresentation()[2]
    self.presinit.calculateJacobian(argsDict)
    log('Jacobian ', level=10, data=jacobian)
    self.nonlinear_function_jacobian_evaluations += 1
    return jacobian
def calculateElementQuadrature(self):
    """Evaluate reference-element basis data at element quadrature points.

    Computes geometry-map and scalar-space basis values/gradients at the
    interpolation and element quadrature points, then lets the
    coefficients, stabilization, and shock-capturing objects initialize
    their per-quadrature storage.
    """
    self.u[0].femSpace.elementMaps.getBasisValuesIP(self.u[0].femSpace.referenceFiniteElement.interpolationConditions.quadraturePointArray)
    self.u[0].femSpace.elementMaps.getBasisValuesRef(self.elementQuadraturePoints)
    self.u[0].femSpace.elementMaps.getBasisGradientValuesRef(self.elementQuadraturePoints)
    self.u[0].femSpace.getBasisValuesRef(self.elementQuadraturePoints)
    self.u[0].femSpace.getBasisGradientValuesRef(self.elementQuadraturePoints)
    self.coefficients.initializeElementQuadrature(self.timeIntegration.t, self.q)
    # Stabilization/shock-capturing are optional; initialize only if present.
    if (self.stabilization is not None):
        self.stabilization.initializeElementQuadrature(self.mesh, self.timeIntegration.t, self.q)
        self.stabilization.initializeTimeIntegration(self.timeIntegration)
    if (self.shockCapturing is not None):
        self.shockCapturing.initializeElementQuadrature(self.mesh, self.timeIntegration.t, self.q)
def calculateElementBoundaryQuadrature(self):
    """No-op: interior element-boundary quadrature is not used by this model."""
    pass
def calculateExteriorElementBoundaryQuadrature(self):
    """Evaluate basis trace data at exterior element-boundary quadrature points."""
    self.u[0].femSpace.elementMaps.getBasisValuesTraceRef(self.elementBoundaryQuadraturePoints)
    self.u[0].femSpace.elementMaps.getBasisGradientValuesTraceRef(self.elementBoundaryQuadraturePoints)
    self.u[0].femSpace.getBasisValuesTraceRef(self.elementBoundaryQuadraturePoints)
    self.u[0].femSpace.getBasisGradientValuesTraceRef(self.elementBoundaryQuadraturePoints)
def estimate_mt(self):
    """No-op hook: no mass-term time-derivative estimate is needed here."""
    pass
def calculateAuxiliaryQuantitiesAfterStep(self):
    """No-op hook invoked after each time step."""
    pass
def calculateSolutionAtQuadrature(self):
    """No-op hook: quadrature-point solution values are not recomputed here."""
    pass
def updateAfterMeshMotion(self):
    """No-op hook invoked after mesh motion."""
    pass
def anonymize_field(field: Dict[(str, Any)], named_schemas: NamedSchemas) -> Dict[(str, Any)]:
    """Return a copy of a record-field definition with identifying strings hashed.

    ``name``, ``doc``, and every alias are replaced by their MD5 digests;
    the ``default`` value is carried over unchanged and the field type is
    anonymized recursively via ``_anonymize_schema``.
    """
    out: Dict[(str, Any)] = {}
    if 'doc' in field:
        out['doc'] = _md5(field['doc'])
    if 'aliases' in field:
        out['aliases'] = list(map(_md5, field['aliases']))
    if 'default' in field:
        out['default'] = field['default']
    out['name'] = _md5(field['name'])
    out['type'] = _anonymize_schema(field['type'], named_schemas)
    return out
class Conv2dBiasAct(Module):
    """2-D convolution with bias, fused with the activation named by *op_name*.

    The concrete fused operator is looked up on ``ops`` by name, so any
    conv+bias+activation variant exposed there can be selected.
    """

    def __init__(self, op_name, in_channels, out_channels, kernel_size, stride, padding=0, dilation=1, groups=1, dtype='float16'):
        super().__init__()
        # Weight layout: [out_channels, kH, kW, in_channels // groups].
        self.weight = Parameter(
            shape=[out_channels, kernel_size, kernel_size, in_channels // groups],
            dtype=dtype,
        )
        self.bias = Parameter(shape=[out_channels], dtype=dtype)
        self.op = getattr(ops, op_name)(stride=stride, pad=padding, dilate=dilation, group=groups)

    def forward(self, *args):
        # Exactly one input tensor is expected.
        assert len(args) == 1
        (x,) = args
        return self.op(x, self.weight.tensor(), self.bias.tensor())
def force_print(is_distributed: bool, *args, **kwargs) -> None:
    """Print unconditionally, tagging output with the CUDA device when distributed.

    In distributed mode this passes ``force=True`` so the message survives
    frameworks that patch ``print`` to silence non-zero ranks; if the local
    ``print`` does not accept ``force`` (plain builtin), the TypeError is
    swallowed and nothing is printed.
    """
    if not is_distributed:
        print(*args, **kwargs)
        return
    try:
        device_tag = f' [device:{torch.cuda.current_device()}]'
        print(*args, device_tag, **kwargs, force=True)
    except TypeError:
        # Builtin print has no ``force`` kwarg -- intentionally best-effort.
        pass
def to_rnn_dict_space_environment(env: str, rnn_steps: int) -> GymMazeEnv:
    """Build a Gym maze environment, stacking observations for RNN input.

    :param env: Name of the gym environment to instantiate.
    :param rnn_steps: Number of consecutive observations to stack; values
        of 1 or less return the plain environment unwrapped.
    :return: The (possibly observation-stacked) environment.
    """
    maze_env = GymMazeEnv(env=env)
    if rnn_steps <= 1:
        return maze_env
    stacking = [{
        'observation': 'observation',
        'keep_original': False,
        'tag': None,
        'delta': False,
        'stack_steps': rnn_steps,
    }]
    return ObservationStackWrapper.wrap(maze_env, stack_config=stacking)
def test_perturbation_medium():
    """Exercise PerturbationMedium / PerturbationPoleResidue perturbed_copy paths:
    no-argument copies, real vs complex perturbations, gain validation, and
    metadata preservation.
    """
    # Real-valued heat perturbation and a complex one combined with charge terms.
    pp_real = td.ParameterPerturbation(heat=td.LinearHeatPerturbation(coeff=(- 0.01), temperature_ref=300, temperature_range=(200, 500)))
    pp_complex = td.ParameterPerturbation(heat=td.LinearHeatPerturbation(coeff=0.01j, temperature_ref=300, temperature_range=(200, 500)), charge=td.LinearChargePerturbation(electron_coeff=(- 1e-21), electron_ref=0, electron_range=(0, 1e+20), hole_coeff=(- 2e-21), hole_ref=0, hole_range=(0, 5e+19)))
    # Uniform 2x2x2 sample fields on a tiny grid.
    coords = dict(x=[1, 2], y=[3, 4], z=[5, 6])
    temperature = td.SpatialDataArray((300 * np.ones((2, 2, 2))), coords=coords)
    electron_density = td.SpatialDataArray((1e+18 * np.ones((2, 2, 2))), coords=coords)
    hole_density = td.SpatialDataArray((2e+18 * np.ones((2, 2, 2))), coords=coords)
    pmed = td.PerturbationMedium(permittivity=3, permittivity_perturbation=pp_real)
    # Copy with no fields degenerates to a plain Medium.
    cmed = pmed.perturbed_copy()
    assert isinstance(cmed, td.Medium)
    cmed = pmed.perturbed_copy(temperature, electron_density)
    cmed = pmed.perturbed_copy(temperature, electron_density, hole_density)
    # Metadata must survive the copy.
    assert (cmed.name == pmed.name)
    assert (cmed.frequency_range == pmed.frequency_range)
    assert (cmed.subpixel == pmed.subpixel)
    assert (cmed.allow_gain == pmed.allow_gain)
    # Doubling temperature pushes the perturbed permittivity out of range.
    with pytest.raises(pydantic.ValidationError):
        _ = pmed.perturbed_copy((2 * temperature))
    pmed = td.PerturbationMedium(conductivity_perturbation=pp_real, subpixel=False)
    cmed = pmed.perturbed_copy((0.9 * temperature))
    assert (not cmed.subpixel)
    # Without allow_gain, a negative conductivity shift must fail validation...
    with pytest.raises(pydantic.ValidationError):
        _ = pmed.perturbed_copy((1.1 * temperature))
    # ...but succeed once gain is explicitly allowed.
    pmed = td.PerturbationMedium(conductivity_perturbation=pp_real, allow_gain=True)
    _ = pmed.perturbed_copy((1.1 * temperature))
    # Complex permittivity perturbation is rejected for a real medium.
    with pytest.raises(pydantic.ValidationError):
        pmed = td.PerturbationMedium(permittivity=3, permittivity_perturbation=pp_complex)
    # Same round-trip checks for the pole-residue variant.
    pmed = td.PerturbationPoleResidue(poles=[(1j, 3), (2j, 4)], poles_perturbation=[(None, pp_real), (pp_complex, None)], subpixel=False, allow_gain=True)
    cmed = pmed.perturbed_copy()
    assert isinstance(cmed, td.PoleResidue)
    cmed = pmed.perturbed_copy(temperature, None, hole_density)
    cmed = pmed.perturbed_copy(temperature, electron_density, hole_density)
    assert (cmed.name == pmed.name)
    assert (cmed.frequency_range == pmed.frequency_range)
    assert (cmed.subpixel == pmed.subpixel)
    assert (cmed.allow_gain == pmed.allow_gain)
    # Perturbation list length must match the number of poles.
    with pytest.raises(pydantic.ValidationError):
        pmed = td.PerturbationPoleResidue(poles=[(1j, 3), (2j, 4)], poles_perturbation=[(None, pp_real)])
def _validate_wh_allocation(warehouse_allocation: WHAllocation):
    """Validate that warehouse allocations match the ordered quantities.

    For every sales order in *warehouse_allocation*, the number of allocated
    rows per item must equal (within 0.1) the total quantity ordered for
    that item; otherwise the request is rejected via ``frappe.throw``.

    :param warehouse_allocation: mapping of sales-order code -> list of
        allocation rows (dicts carrying at least ``item_code``).
    """
    if not warehouse_allocation:
        return
    so_codes = list(warehouse_allocation.keys())
    so_item_data = frappe.db.sql('\n\t\t\tselect item_code, sum(qty) as qty, parent as sales_order\n\t\t\tfrom `tabSales Order Item`\n\t\t\twhere\n\t\t\t\tparent in %s\n\t\t\tgroup by parent, item_code', (so_codes,), as_dict=True)
    # order -> {item_code: ordered qty}
    expected_item_qty = {}
    for item in so_item_data:
        expected_item_qty.setdefault(item.sales_order, {})[item.item_code] = item.qty
    for order, item_details in warehouse_allocation.items():
        item_wise_qty = defaultdict(int)
        for item in item_details:
            # NOTE(review): each allocation row counts as quantity 1 --
            # assumes rows are unit allocations; confirm against the caller.
            item_wise_qty[item['item_code']] += 1
        for item_code, total_qty in item_wise_qty.items():
            expected_qty = expected_item_qty.get(order, {}).get(item_code)
            # Bug fix: an allocated item absent from the order previously
            # crashed with TypeError (abs(total - None)); treat it as a
            # mismatch instead. Also fixed the "exepcted" typo in the message.
            if expected_qty is None or abs(total_qty - expected_qty) > 0.1:
                msg = _('Mismatch in quantity for order {}, item {} expected {} qty, received {}').format(order, item_code, expected_qty, total_qty)
                frappe.throw(msg)
def parse_args(cli_args):
    """Split SageMaker-forwarded CLI arguments into the Flyte command and env vars.

    Unrecognized arguments of the form ``<FLYTE_CMD_PREFIX><order>_<value><FLYTE_ARG_SUFFIX>``
    become ordered command fragments; ``<FLYTE_ENV_VAR_PREFIX><name><FLYTE_ARG_SUFFIX>``
    optionally consumes the following token as the variable's value.

    :param cli_args: raw argv-style list of strings.
    :return: tuple ``(flyte_cmd, env_vars)`` where ``flyte_cmd`` is a list of
        ``(order, value)`` pairs and ``env_vars`` maps names to values.
    """
    parser = argparse.ArgumentParser(description='Running sagemaker task')
    args, unknowns = parser.parse_known_args(cli_args)
    flyte_cmd = []
    env_vars = {}
    i = 0
    while i < len(unknowns):
        unknown = unknowns[i]
        logging.info(f'Processing argument {unknown}')
        if unknown.startswith(FLYTE_CMD_PREFIX) and unknown.endswith(FLYTE_ARG_SUFFIX):
            # Strip the wrapper and split "<order>_<value>" on the first "_".
            processed = unknown[len(FLYTE_CMD_PREFIX):][:(- len(FLYTE_ARG_SUFFIX))]
            parts = processed.split('_', maxsplit=1)
            flyte_cmd.append((parts[0], parts[1]))
            i += 1
        elif unknown.startswith(FLYTE_ENV_VAR_PREFIX) and unknown.endswith(FLYTE_ARG_SUFFIX):
            processed = unknown[len(FLYTE_ENV_VAR_PREFIX):][:(- len(FLYTE_ARG_SUFFIX))]
            i += 1
            # Bug fix: the original indexed unknowns[i] without a bounds
            # check, raising IndexError when the env-var flag was the last
            # token. A trailing flag now simply yields no value.
            if i < len(unknowns) and not unknowns[i].startswith(FLYTE_ARG_PREFIX):
                env_vars[processed] = unknowns[i]
                i += 1
        else:
            i += 1
    return (flyte_cmd, env_vars)
class RealSupportDist(dist.Distribution):
    """Minimal test distribution with unconstrained real support.

    Deterministically samples zeros and assigns log-probability 0 to every
    value; useful as a stub where only support/shape plumbing matters.
    """
    has_enumerate_support = False
    support = dist.constraints.real
    has_rsample = True
    arg_constraints = {}
    def rsample(self, sample_shape):
        # Reparameterized "sample": always the zero tensor of the requested shape.
        return torch.zeros(sample_shape)
    def log_prob(self, value):
        # log p(value) == 0 everywhere (i.e. density 1).
        return torch.zeros(value.shape)
def miller_loop(Q: Point2D[FQ12], P: Point2D[FQ12]) -> FQ12:
    """Run the Miller loop of the ate pairing and apply the final exponentiation.

    :param Q: point in FQ12 coordinates (None is treated as the point at infinity).
    :param P: point in FQ12 coordinates (None likewise).
    :return: the pairing value in FQ12; FQ12.one() if either input is infinity.
    """
    if ((Q is None) or (P is None)):
        return FQ12.one()
    R = Q
    f = FQ12.one()
    # Standard double-and-add Miller loop driven by the bits of ate_loop_count.
    for i in range(log_ate_loop_count, (- 1), (- 1)):
        f = ((f * f) * linefunc(R, R, P))
        R = double(R)
        if (ate_loop_count & (2 ** i)):
            f = (f * linefunc(R, Q, P))
            R = add(R, Q)
    # Frobenius-twisted points: Q1 = pi_p(Q), nQ2 = -pi_p^2(Q)
    # (coordinate-wise powers of the field characteristic).
    Q1 = ((Q[0] ** field_modulus), (Q[1] ** field_modulus))
    nQ2 = ((Q1[0] ** field_modulus), (- (Q1[1] ** field_modulus)))
    f = (f * linefunc(R, Q1, P))
    R = add(R, Q1)
    f = (f * linefunc(R, nQ2, P))
    # Final exponentiation maps the Miller value into the order-n subgroup.
    return (f ** (((field_modulus ** 12) - 1) // curve_order))
class ComponentTestB(Component):
    """Test component exercising DBField defaults and the TagField variants."""
    name = 'test_b'
    # Scalar field with a default value.
    my_int = DBField(default=1)
    # NOTE(review): the default list literal is shared across instances unless
    # DBField copies it internally -- confirm DBField semantics.
    my_list = DBField(default=[])
    # Tag that starts with a preset value.
    default_tag = TagField(default='initial_value')
    # Tag restricted to a single value at a time.
    single_tag = TagField(enforce_single=True)
    # Unrestricted multi-value tag.
    multiple_tags = TagField()
    # Combination: preset value and single-value enforcement.
    default_single_tag = TagField(default='initial_value', enforce_single=True)
class SetVlanPcp(BaseModifyPacketTest):
    """Verify that a set-field action rewrites the VLAN PCP (802.1p priority)."""

    def runTest(self):
        input_pkt = simple_tcp_packet(dl_vlan_enable=True, vlan_pcp=1)
        expected_pkt = simple_tcp_packet(dl_vlan_enable=True, vlan_pcp=2)
        rewrite_actions = [ofp.action.set_field(ofp.oxm.vlan_pcp(2))]
        self.verify_modify(rewrite_actions, input_pkt, expected_pkt)
def plot_roc_auc(*, curr_roc_curve: dict, ref_roc_curve: Optional[dict], color_options: ColorOptions) -> List[Tuple[(str, BaseWidgetInfo)]]:
    """Build one ROC-curve widget per label.

    Current data is always drawn in the left subplot; when reference data is
    provided a second subplot is added for it, sharing the y axis.

    :param curr_roc_curve: label -> dict with 'fpr'/'tpr' arrays (current data).
    :param ref_roc_curve: same structure for reference data, or None.
    :param color_options: palette supplying current/reference trace colors.
    :return: list of (title, widget) tuples, one per label.
    """
    has_reference = ref_roc_curve is not None
    n_cols = 2 if has_reference else 1
    titles = ['current', 'reference'] if has_reference else ['']
    curr_color = color_options.get_current_data_color()
    ref_color = color_options.get_reference_data_color()
    widgets = []
    for label, curve in curr_roc_curve.items():
        fig = make_subplots(rows=1, cols=n_cols, subplot_titles=titles, shared_yaxes=True)
        fig.add_trace(
            go.Scatter(
                x=curve['fpr'],
                y=curve['tpr'],
                mode='lines',
                name='ROC',
                marker=dict(size=6, color=curr_color),
            ),
            1, 1,
        )
        if has_reference:
            ref_curve = ref_roc_curve[label]
            fig.add_trace(
                go.Scatter(
                    x=ref_curve['fpr'],
                    y=ref_curve['tpr'],
                    mode='lines',
                    name='ROC',
                    marker=dict(size=6, color=ref_color),
                ),
                1, 2,
            )
        fig.update_layout(yaxis_title='True Positive Rate', xaxis_title='False Positive Rate', showlegend=True)
        widgets.append((f'ROC Curve for label {label}', plotly_figure(title='', figure=fig)))
    return widgets
def test_optional_prefix_re_cmd(testbot):
    """Check that a regex command matches with and without the bot prefix
    depending on BOT_PREFIX_OPTIONAL_ON_CHAT.
    """
    # Prefix mandatory: only the prefixed form triggers the command.
    testbot.bot_config.BOT_PREFIX_OPTIONAL_ON_CHAT = False
    assert ('bar' in testbot.exec_command('!plz dont match this'))
    # Prefix optional: both forms trigger the command.
    testbot.bot_config.BOT_PREFIX_OPTIONAL_ON_CHAT = True
    assert ('bar' in testbot.exec_command('!plz dont match this'))
    assert ('bar' in testbot.exec_command('plz dont match this'))
def record(oid, tag, value, **context):
    """Record one SNMP variation entry into the configured SQL table (upsert).

    Requires the module-level ``moduleContext`` to have been initialized with
    an open DB connection (``dbConn``) and target table name (``dbTable``).

    :raises error.SnmpsimError: if the variation module was never initialized.
    :raises error.NoDataNotification: to signal "no data" on stop, and to emit
        the final table reference once recording completes.
    """
    if ('dbConn' in moduleContext):
        db_conn = moduleContext['dbConn']
    else:
        raise error.SnmpsimError('variation module not initialized')
    db_table = moduleContext['dbTable']
    if context['stopFlag']:
        raise error.NoDataNotification()
    # Pad each OID sub-identifier to width 10 so lexicographic string
    # ordering in SQL matches numeric OID ordering.
    sql_oid = '.'.join([('%10s' % x) for x in oid.split('.')])
    if ('hexvalue' in context):
        text_tag = context['hextag']
        text_value = context['hexvalue']
    else:
        text_tag = SnmprecGrammar().get_tag_by_type(context['origValue'])
        text_value = str(context['origValue'])
    # NOTE(review): SQL below is built by string interpolation; if tag/value
    # can contain quotes this is injection-prone. Parameterized queries would
    # be safer, but the DB driver's paramstyle is not visible here -- confirm.
    cursor = db_conn.cursor()
    cursor.execute(("select oid from %s where oid='%s' limit 1" % (db_table, sql_oid)))
    if cursor.fetchone():
        # Row exists: update tag/value in place.
        cursor.execute(("update %s set tag='%s',value='%s' where oid='%s'" % (db_table, text_tag, text_value, sql_oid)))
    else:
        cursor.execute(("insert into %s values ('%s', '%s', '%s', 'read-write')" % (db_table, sql_oid, text_tag, text_value)))
    cursor.close()
    if (not context['count']):
        # First call: hand back the ":sql" reference for the recorded table.
        return (str(context['startOID']), ':sql', db_table)
    else:
        raise error.NoDataNotification()
()
class _LiteDRAMPatternChecker(Module, AutoCSR):
    """Read back an (address, data) pattern from DRAM and count mismatches.

    Drives a LiteDRAM DMA reader over the addresses in *init* and compares
    each returned word against the expected data, accumulating ``errors``
    and ``ticks``. ``run_cascade_in``/``run_cascade_out`` let several
    generators/checkers be chained so they take turns issuing commands.
    """
    def __init__(self, dram_port, init=[]):
        (ashift, awidth) = get_ashift_awidth(dram_port)
        self.start = Signal()
        self.done = Signal()
        self.ticks = Signal(32)
        self.errors = Signal(32)
        self.run_cascade_in = Signal(reset=1)
        self.run_cascade_out = Signal()
        # Split the (address, data) pairs into two ROMs read asynchronously.
        (addr_init, data_init) = zip(*init)
        addr_mem = Memory(dram_port.address_width, len(addr_init), init=addr_init)
        data_mem = Memory(dram_port.data_width, len(data_init), init=data_init)
        addr_port = addr_mem.get_port(async_read=True)
        data_port = data_mem.get_port(async_read=True)
        self.specials += (addr_mem, data_mem, addr_port, data_port)
        dma = LiteDRAMDMAReader(dram_port)
        self.submodules += dma
        # Command FSM: pushes one read request per init entry, yielding to
        # the cascade partner whenever run_cascade_in drops.
        cmd_counter = Signal(dram_port.address_width, reset_less=True)
        cmd_fsm = FSM(reset_state='IDLE')
        self.submodules += cmd_fsm
        cmd_fsm.act('IDLE', If(self.start, NextValue(cmd_counter, 0), If(self.run_cascade_in, NextState('RUN')).Else(NextState('WAIT'))))
        cmd_fsm.act('WAIT', If(self.run_cascade_in, NextState('RUN')), NextValue(self.ticks, (self.ticks + 1)))
        cmd_fsm.act('RUN', dma.sink.valid.eq(1), If(dma.sink.ready, self.run_cascade_out.eq(1), NextValue(cmd_counter, (cmd_counter + 1)), If((cmd_counter == (len(init) - 1)), NextState('DONE')).Elif((~ self.run_cascade_in), NextState('WAIT'))))
        cmd_fsm.act('DONE')
        # AXI ports address bytes, so drop the shift bits; native ports
        # address words directly.
        if isinstance(dram_port, LiteDRAMNativePort):
            dma_sink_addr = dma.sink.address
        elif isinstance(dram_port, LiteDRAMAXIPort):
            dma_sink_addr = dma.sink.address[ashift:]
        else:
            raise NotImplementedError
        self.comb += [addr_port.adr.eq(cmd_counter), dma_sink_addr.eq(addr_port.dat_r)]
        # Data FSM: consumes DMA read data and compares against the data ROM.
        data_counter = Signal(dram_port.address_width, reset_less=True)
        expected_data = Signal.like(dma.source.data)
        self.comb += [data_port.adr.eq(data_counter), expected_data.eq(data_port.dat_r)]
        data_fsm = FSM(reset_state='IDLE')
        self.submodules += data_fsm
        data_fsm.act('IDLE', If(self.start, NextValue(data_counter, 0), NextValue(self.errors, 0), NextState('RUN')), NextValue(self.ticks, 0))
        data_fsm.act('RUN', dma.source.ready.eq(1), If(dma.source.valid, NextValue(data_counter, (data_counter + 1)), If((dma.source.data != expected_data), NextValue(self.errors, (self.errors + 1))), If((data_counter == (len(init) - 1)), NextState('DONE'))), NextValue(self.ticks, (self.ticks + 1)))
        data_fsm.act('DONE', self.done.eq(1))
class OperatorListView(OperatorAbstractView):
    """View over a list of model objects, with list-valued field access."""

    def __init__(self, obj, filter_func=None):
        assert isinstance(obj, list)
        obj = RdyToFlattenList(obj)
        super(OperatorListView, self).__init__(obj, filter_func)

    def combine_related(self, field_name):
        """Wrap the related view of *field_name* for every object in the list."""
        f = self._fields[field_name]
        return CombinedViewsWrapper(RdyToFlattenList([f.retrieve_and_wrap(obj) for obj in self.model]))

    def get_field(self, field_name):
        """Return the value of *field_name* from every object in the list."""
        f = self._fields[field_name]
        return RdyToFlattenList([f.get(obj) for obj in self.model])

    def encode(self):
        """Encode each object as a dict of its plain data fields."""
        encoded_list = []
        for obj in self.model:
            encoded_item = {}
            for (field_name, field) in self._fields.items():
                if isinstance(field, fields.DataField):
                    encoded_item[field_name] = field.get(obj)
            encoded_list.append(encoded_item)
        return RdyToFlattenList(encoded_list)

    @property
    def model(self):
        """The (optionally filtered) underlying list of objects.

        Bug fix: restored the @property decorator (evidently lost in
        decompilation) -- combine_related/get_field/encode all iterate
        ``self.model`` as a sequence, which fails on a bare method object.
        """
        if (self._filter_func is not None):
            return RdyToFlattenList(filter(self._filter_func, self._obj))
        else:
            return self._obj
class UtilityFuncs():
    """Thin pymel convenience wrappers used by the rigging tools.

    NOTE(review): methods take no ``self`` -- presumably @staticmethod
    decorators were stripped during decompilation; confirm call sites.
    """
    def selHierarchy(root):
        """Select *root* plus its full hierarchy and return the selection list."""
        pm.select(root, hi=1)
        return pm.ls(sl=1)
    def renameHierarchy(hierarchy, name):
        """Rename every node in *hierarchy* to "<name>#" (Maya auto-numbers)."""
        for s in hierarchy:
            pm.rename(s, (name + '#'))
        return hierarchy
    def duplicateObject(object):
        """Duplicate *object* and return the first duplicate node."""
        dup = pm.duplicate(object)
        return dup[0]
def typeCheck(instanceName, className):
if (not isinstance(instanceName, className)):
raise TypeError('%s should be an instance of %s', (instanceName, className))
    def evaluate(command):
        """Evaluate *command* as a Python expression and return the result.

        NOTE(review): raw ``eval`` -- must never be fed untrusted input;
        confirm all callers pass internally-generated strings only.
        """
        return eval(command)
    def connect(sourceObj, sourceAttr, destObj, destAttr):
        """Connect sourceObj.sourceAttr -> destObj.destAttr in the Maya DG."""
        source = ((sourceObj + '.') + sourceAttr)
        dest = ((destObj + '.') + destAttr)
        pm.connectAttr(source, dest)
    def rename_byType(nodes):
        """Return new names of the form "<node><nodeType>" (does not rename)."""
        temp_list = []
        for nd in nodes:
            temp_name = (nd + pm.nodeType(nd))
            temp_list.append(temp_name)
        return temp_list
    def rename(object, name_in):
        """Rename *object* to *name_in* and return the renamed node."""
        return pm.rename(object, name_in)
    def position(object):
        """Return the world-space translation of *object* as [x, y, z]."""
        return pm.xform(object, q=1, ws=1, t=1)
ctrlShapes = {'circle': 'pm.delete((pm.circle( nr=(0, 1, 0), c=(0, 0, 0), sw=360, r=1)), ch = 1)', 'arrowCtrl': 'pm.curve(per=True, d = 1, p = [ ( -1, -0., 0 ), ( 1, -0., 0 ),\n ( 1, 2.997922, 0 ),( 2, 2.997922, 0 ), ( 0, 4.997922, 0 ), ( -2, 2.997922, 0 ),\n ( -1, 2.997922, 0 ), ( -1, -0., 0 ) ],\n k = ([0 , 1 , 2 , 3 , 4 , 5 , 6 , 7]))', 'fourSidedArrowCtrl': 'pm.curve(per=True, d = 1, p = [(-0.31907, 1.758567, 0),\n (-0.31907, 0.272474, 0), (-1.758567, 0.272474, 0) ,\n (-1.758567, 1.172378, 0), (-2.930946, 0, 0 ), ( -1.758567, -1.172378, 0 ),\n ( -1.758567, -0.272474, 0 ),( -0.31907, -0.272474, 0 ), ( -0.31907, -1.758567, 0 ),\n ( -1.172378, -1.758567, 0 ), ( 0, -2.930946, 0 ), ( 1.172378, -1.758567, 0 ),\n ( 0.31907, -1.758567, 0 ),( 0.31907, -0.272474, 0 ),( 1.758567, -0.272474, 0 ),\n ( 1.758567, -1.172378, 0 ), ( 2.930946, 0, 0 ), ( 1.758567, 1.172378, 0 ),\n ( 1.7585607, 0.272474, 0 ), ( 0.31907, 0.272474, 0 ), ( 0.31907, 1.758567, 0 ),\n ( 1.172378, 1.758567, 0 ), ( 0, 2.930946, 0 ),( -1.172378, 1.758567, 0 ),\n ( -0.31907, 1.758567, 0) ],\n k = ([0 , 1 , 2 , 3 , 4 , 5 , 6 , 7 , 8 , 9 , 10 , 11 , 12 , 13 , 14 , 15 , 16 ,\n 17 , 18 , 19 , 20 , 21 , 22 , 23 , 24]))', 'ikCtrl': 'pm.curve(per=True, d = 1, p = [ ( 0.552734, 0, -0.138183), ( 0.552734, 0, -0.184245),\n ( 0.552734, 0, -0.230306),\n ( 0.552734, 0, -0.276367), ( 0.644856, 0, -0.184245), ( 0.736978, 0, -0.0921223),\n ( 0.829101, 0, 0), ( 0.736978, 0, 0.0921223), ( 0.644856, 0, 0.184245),\n ( 0.552734, 0, 0.276367), ( 0.552734, 0, 0.230306), ( 0.552734, 0, 0.184245),\n ( 0.552734, 0, 0.138183), ( 0.517927, 0, 0.138183), ( 0.48312, 0, 0.138183),\n ( 0.448313, 0, 0.138183), ( 0.444285, 0, 0.150144), ( 0.436622, 0, 0.170644),\n ( 0.419439, 0, 0.209124), ( 0.402845, 0, 0.239713), ( 0.386952, 0, 0.264852),\n ( 0.371754, 0, 0.286013), ( 0.359029, 0, 0.301972), ( 0.342183, 0, 0.321041),\n ( 0.32585, 0, 0.337618), ( 0.305397, 0, 0.356146), ( 0.290641, 0, 0.368196),\n ( 0.270877, 0, 0.382837), ( 0.256838, 0, 
0.392304), ( 0.233632, 0, 0.406427),\n ( 0.208595, 0, 0.419739), ( 0.181267, 0, 0.432208), ( 0.158735, 0, 0.440999),\n ( 0.138233, 0, 0.447895), ( 0.138183, 0, 0.481828), ( 0.138183, 0, 0.517281),\n ( 0.138183, 0, 0.552734), ( 0.184245, 0, 0.552734), ( 0.230306, 0, 0.552734),\n ( 0.276367, 0, 0.552734), ( 0.184245, 0, 0.644856), ( 0.0921223, 0, 0.736978),\n ( 0, 0, 0.829101), ( -0.0921223, 0, 0.736978), ( -0.184245, 0, 0.644856),\n ( -0.276367, 0, 0.552734), ( -0.230306, 0, 0.552734), ( -0.184245, 0, 0.552734),\n ( -0.138183, 0, 0.552734), ( -0.138183, 0, 0.517349), ( -0.138183, 0, 0.481964),\n ( -0.138183, 0, 0.446579), ( -0.157573, 0, 0.440389), ( -0.195184, 0, 0.425554),\n ( -0.226251, 0, 0.41026), ( -0.261537, 0, 0.389117), ( -0.287101, 0, 0.37091),\n ( -0.313357, 0, 0.349202), ( -0.327368, 0, 0.336149), ( -0.344095, 0, 0.318984),\n ( -0.366533, 0, 0.292752), ( -0.382675, 0, 0.271108), ( -0.404132, 0, 0.237612),\n ( -0.417852, 0, 0.212369), ( -0.431433, 0, 0.183106), ( -0.441634, 0, 0.156968),\n ( -0.449357, 0, 0.133453), ( -0.464563, 0, 0.135341), ( -0.489623, 0, 0.137181),\n ( -0.509494, 0, 0.137868), ( -0.526834, 0, 0.138116), ( -0.542441, 0, 0.138179),\n ( -0.552734, 0, 0.138183), ( -0.552734, 0, 0.184245), ( -0.552734, 0, 0.230306),\n ( -0.552734, 0, 0.276367), ( -0.644856, 0, 0.184245), ( -0.736978, 0, 0.0921223),\n ( -0.829101, 0, 0), ( -0.736978, 0, -0.0921223), ( -0.644856, 0, -0.184245),\n ( -0.552734, 0, -0.276367), ( -0.552734, 0, -0.230306), ( -0.552734, 0, -0.184245),\n ( -0.552734, 0, -0.138183), ( -0.518383, 0, -0.138183), ( -0.484033, 0, -0.138183),\n ( -0.448148, 0, -0.137417), ( -0.438965, 0, -0.164253), ( -0.430847, 0, -0.184482),\n ( -0.420951, 0, -0.206126), ( -0.412191, 0, -0.223225), ( -0.395996, 0, -0.251053),\n ( -0.388009, 0, -0.263343), ( -0.36993, 0, -0.288412), ( -0.352908, 0, -0.309157),\n ( -0.331158, 0, -0.33242), ( -0.311574, 0, -0.350787), ( -0.287785, 0, -0.370404),\n ( -0.266573, 0, -0.385789), ( -0.242718, 0, -0.401044), ( 
-0.216381, 0, -0.41566),\n ( -0.190836, 0, -0.427831), ( -0.163247, 0, -0.438946), ( -0.149238, 0, -0.443829),\n ( -0.138183, 0, -0.447335), ( -0.138183, 0, -0.482468), ( -0.138183, 0, -0.517601),\n ( -0.138183, 0, -0.552734), ( -0.184245, 0, -0.552734), ( -0.230306, 0, -0.552734),\n ( -0.276367, 0, -0.552734), ( -0.184245, 0, -0.644856), ( -0.0921223, 0, -0.736978),\n ( 0, 0, -0.829101), ( 0.0921223, 0, -0.736978), ( 0.184245, 0, -0.644856),\n ( 0.276367, 0, -0.552734), ( 0.230306, 0, -0.552734), ( 0.184245, 0, -0.552734),\n ( 0.138183, 0, -0.552734), ( 0.138183, 0, -0.517258), ( 0.138183, 0, -0.481783),\n ( 0.138183, 0, -0.446308), ( 0.168167, 0, -0.436473), ( 0.190718, 0, -0.427463),\n ( 0.207556, 0, -0.419785), ( 0.22845, 0, -0.409061), ( 0.259644, 0, -0.39037),\n ( 0.28708, 0, -0.37093), ( 0.309495, 0, -0.352609), ( 0.341156, 0, -0.322135),\n ( 0.358246, 0, -0.302914), ( 0.375889, 0, -0.280529), ( 0.387391, 0, -0.26426),\n ( 0.402652, 0, -0.240132), ( 0.411495, 0, -0.224515), ( 0.423963, 0, -0.199829),\n ( 0.430266, 0, -0.185834), ( 0.437317, 0, -0.16858), ( 0.444059, 0, -0.150009),\n ( 0.447312, 0, -0.14009), ( 0.480289, 0, -0.138183), ( 0.516511, 0, -0.138183),\n ( 0.552734, 0, -0.138183) ] ,\n k = ( [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,\n 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,\n 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,\n 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,\n 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,\n 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78,\n 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,\n 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,\n 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,\n 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,\n 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136,\n 137, 138, 139, 140, 141, 142, 143, 144] ))', 'bodyCtrl': 'pm.curve(per=True, d = 1, p = [( -1, 0, 1), ( -1, 0, -1), ( 1, 0, -1), ( 1, 0, 1),\n ( -1, 0, 1) ] , k = [0, 1, 2, 
3, 4 ] )', 'elbowCtrl': 'pm.curve(d = 3, p = [ ( 0, -0.0728115, -0.263333), ( 0, 0.0676745, -0.30954),\n ( 0, 0.166422, -0.162811),( 0, 0.316242, 0.066353), ( 0, 0.263828, 0.160055),\n ( 0, 0.0048945, 0.30954), ( 0, -0.117923, 0.298165), ( 0, -0.316242, 0.027507),\n ( 0, -0.265623, -0.052244), ( 0, -0.0394945, -0.211749), ( 0, 0.190873, 0.097192),\n ( 0, -0.139762, 0.142256), ( 0, -0.0829025, 0.013979), ( 0, -0.0666985, -0.054076),\n ( 0, -0.0205975, 0.039797) ],\n k = [0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 12, 12] )'} |
class LeadGenPostSubmissionCheckResult(AbstractObject):
    """Graph API result object for a lead-gen post-submission check
    (auto-generated SDK style: field names mirror the API response keys).
    """

    def __init__(self, api=None):
        super(LeadGenPostSubmissionCheckResult, self).__init__()
        self._isLeadGenPostSubmissionCheckResult = True
        self._api = api

    class Field(AbstractObject.Field):
        api_call_result = 'api_call_result'
        api_error_message = 'api_error_message'
        shown_thank_you_page = 'shown_thank_you_page'

    _field_types = {
        'api_call_result': 'string',
        'api_error_message': 'string',
        'shown_thank_you_page': 'string',
    }

    @classmethod
    def _get_field_enum_info(cls):
        """Return enum metadata for fields (none for this type).

        Bug fix: restored the @classmethod decorator (evidently stripped
        during decompilation; the ``cls`` parameter and SDK convention
        require it).
        """
        field_enum_info = {}
        return field_enum_info
class DualQuaternionOperations(unittest.TestCase):
    """Unit tests for DualQuaternion arithmetic, conjugation, normalization,
    and pose/matrix conversions.
    """
    def test_addition(self):
        """Addition is element-wise over the 8-vector representation."""
        qr_1 = Quaternion(1.0, 2.0, 3.0, 4.0)
        qt_1 = Quaternion(1.0, 3.0, 3.0, 0.0)
        dq_1 = DualQuaternion(qr_1, qt_1)
        qr_2 = Quaternion(3.0, 5.0, 3.0, 2.0)
        qt_2 = Quaternion((- 4.0), 2.0, 3.0, 0.0)
        dq_2 = DualQuaternion(qr_2, qt_2)
        dq_expected = np.array([4.0, 7.0, 6.0, 6.0, (- 3.0), 5.0, 6.0, 0.0]).T
        npt.assert_allclose((dq_1 + dq_2).dq, dq_expected, rtol=1e-06)
    def test_multiplication(self):
        """Product follows (r1*r2, r1*d2 + d1*r2) for real/dual parts."""
        qr_1 = Quaternion(1.0, 2.0, 3.0, 4.0)
        qt_1 = Quaternion(1.0, 3.0, 3.0, 0.0)
        dq_1 = DualQuaternion(qr_1, qt_1)
        qr_2 = Quaternion(1.0, 4.0, 5.0, 1.0)
        qt_2 = Quaternion((- 4.0), 2.0, 3.0, 0.0)
        dq_2 = DualQuaternion(qr_2, qt_2)
        dq_expected = DualQuaternion((dq_1.q_rot * dq_2.q_rot), ((dq_1.q_rot * dq_2.q_dual) + (dq_1.q_dual * dq_2.q_rot)))
        npt.assert_allclose((dq_1 * dq_2).dq, dq_expected.dq)
    def test_multiplication_with_scalar(self):
        """Scalar multiplication scales every component."""
        qr = Quaternion(0.5, 0.5, (- 0.5), 0.5)
        qt = Quaternion(1, 3, 3, 0)
        dq = DualQuaternion(qr, qt)
        dq_expected = np.array([1.25, 1.25, (- 1.25), 1.25, 2.5, 7.5, 7.5, 0]).T
        npt.assert_allclose((dq * 2.5).dq, dq_expected)
    def test_division(self):
        """Dividing a dual quaternion by itself yields the identity."""
        qr_1 = Quaternion(1, 2, 3, 4)
        qt_1 = Quaternion(1, 3, 3, 6)
        dq_1 = DualQuaternion(qr_1, qt_1)
        identity_dq = np.array([0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0]).T
        npt.assert_allclose((dq_1 / dq_1).dq, identity_dq, atol=1e-06)
        # NOTE(review): dq_2 below is constructed but never asserted on --
        # looks like an unfinished second case.
        qr_2 = Quaternion(1, 4, 5, 1)
        qt_2 = Quaternion((- 4), 2, 3, 4)
        dq_2 = DualQuaternion(qr_2, qt_2)
    def test_division_with_scalar(self):
        """Scalar division scales every component."""
        qr = Quaternion(0.5, 0.5, (- 0.5), 0.5)
        qt = Quaternion(1.0, 3.0, 3.0, 0.0)
        dq = DualQuaternion(qr, qt)
        dq_expected = np.array([0.25, 0.25, (- 0.25), 0.25, 0.5, 1.5, 1.5, 0.0]).T
        npt.assert_allclose((dq / 2.0).dq, dq_expected)
    def test_conjugate(self):
        """Conjugation negates the vector parts of both quaternions."""
        qr = Quaternion(1.0, 2.0, 3.0, 4.0)
        qt = Quaternion(1.0, 3.0, 3.0, 5.0)
        dq = DualQuaternion(qr, qt)
        dq_expected = np.array([(- 1.0), (- 2.0), (- 3.0), 4.0, (- 1.0), (- 3.0), (- 3.0), 5.0]).T
        npt.assert_allclose(dq.conjugate().dq, dq_expected, atol=1e-06)
    def test_conjugate_identity(self):
        """dq * dq.conjugate() matches the expansion built from quaternion parts."""
        qr = Quaternion(1, 2, 3, 4)
        qt = Quaternion(1, 3, 3, 0)
        dq = DualQuaternion(qr, qt)
        q_rot_identity = (dq.q_rot * dq.q_rot.conjugate())
        q_dual_identity = ((dq.q_rot.conjugate() * dq.q_dual) + (dq.q_dual.conjugate() * dq.q_rot))
        identity_dq_expected = DualQuaternion(q_rot_identity, q_dual_identity)
        npt.assert_allclose((dq * dq.conjugate()).dq, identity_dq_expected.dq, atol=1e-06)
def test_normalize(self):
qr = Quaternion(1, 2, 3, 4)
qt = Quaternion(1, 3, 3, 0)
dq = DualQuaternion(qr, qt)
dq.normalize()
dq_normalized = np.array([0., 0., 0., 0., 0., 0., 0., 0.0]).T
npt.assert_allclose(dq.dq, dq_normalized)
dq_2 = DualQuaternion.from_pose(1, 2, 3, 1, 1, 1, 1)
dq_2.normalize()
dq_2_normalized = np.array([0.5, 0.5, 0.5, 0.5, 0.0, 1.0, 0.5, (- 1.5)]).T
npt.assert_allclose(dq_2.dq, dq_2_normalized)
def test_scalar(self):
    """scalar() keeps only the w components of both parts, zeroing x, y, z."""
    qr = Quaternion(1, 2, 3, 4)
    qt = Quaternion(1, 3, 3, 1)
    dq = DualQuaternion(qr, qt)
    scalar = dq.scalar()
    # Only qr.w (4.0) and qt.w (1.0) survive.
    scalar_expected = np.array([0.0, 0.0, 0.0, 4.0, 0.0, 0.0, 0.0, 1.0]).T
    npt.assert_allclose(scalar.dq, scalar_expected, atol=1e-06)
def test_inverse(self):
    """dq * dq.inverse() yields the identity dual quaternion."""
    qr = Quaternion(1, 2, 3, 4)
    qt = Quaternion(5, 6, 7, 8)
    dq = DualQuaternion(qr, qt)
    identity_dq = np.array([0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0]).T
    npt.assert_allclose((dq * dq.inverse()).dq, identity_dq, atol=1e-06)
def test_equality(self):
    """Two dual quaternions built from the same parts compare equal."""
    qr = Quaternion(1, 2, 3, 4)
    qt = Quaternion(1, 3, 3, 1)
    dq_1 = DualQuaternion(qr, qt)
    dq_2 = DualQuaternion(qr, qt)
    self.assertEqual(dq_1, dq_2)
def test_conversions(self):
    """Round-trip pose -> dual quaternion -> pose / 4x4 matrix conversions.

    For each fixture pose, checks that the translation round-trips exactly,
    the rotation round-trips up to quaternion sign, and the homogeneous
    transformation matrix matches the hand-computed expectation.
    """
    poses = [[1, 2, 3, 1.0, 0.0, 0.0, 0.0], [1, (- 2), (- 3), ((- 0.5) * math.sqrt(2)), 0.0, 0.0, ((- 0.5) * math.sqrt(2))]]
    expected_matrices = [np.array([[1, 0, 0, 1], [0, (- 1), 0, 2], [0, 0, (- 1), 3], [0, 0, 0, 1]]), np.array([[1, 0, 0, 1], [0, 0, (- 1), (- 2)], [0, 1, 0, (- 3)], [0, 0, 0, 1]])]
    # Iterate the paired fixtures directly instead of indexing range(len(...)).
    # A previous np.set_printoptions(suppress=True) call was removed here: it
    # mutated global NumPy state and leaked into every later test.
    for pose, matrix_expected in zip(poses, expected_matrices):
        dq = DualQuaternion.from_pose_vector(pose)
        pose_out = dq.to_pose()
        matrix_out = dq.to_matrix()
        npt.assert_allclose(pose[0:3], pose_out[0:3])
        # q and -q represent the same rotation; accept either sign.
        if (not np.allclose(pose[3:7], pose_out[3:7])):
            npt.assert_allclose(pose[3:7], (- pose_out[3:7]))
        npt.assert_allclose(matrix_out, matrix_expected, atol=1e-06)
def test_consecutive_transformations(self):
    """Composing T_1_2 * T_2_3 equals the directly constructed T_1_3."""
    dq_1_2 = DualQuaternion.from_pose(0, 10, 1, 1, 0, 0, 0)
    dq_2_3 = DualQuaternion.from_pose(2, 1, 3, 0, 1, 0, 0)
    dq_1_3 = DualQuaternion.from_pose(2, 9, (- 2), 0, 0, 1, 0)
    dq_1_3_computed = (dq_1_2 * dq_2_3)
    npt.assert_allclose(dq_1_3_computed.dq, dq_1_3.dq)
def test_transforming_points(self):
    """Transforming a point via a composed transform matches the direct one."""
    dq_1_2 = DualQuaternion.from_pose(0, 10, 1, 1, 0, 0, 0)
    dq_2_3 = DualQuaternion.from_pose(2, 1, 3, 0, 1, 0, 0)
    dq_1_3 = DualQuaternion.from_pose(2, 9, (- 2), 0, 0, 1, 0)
    dq_1_3_computed = (dq_1_2 * dq_2_3)
    p_1 = np.array([1, 2, 3])
    # Same point mapped through the direct and the composed inverse transform.
    p_3_direct = dq_1_3.inverse().passive_transform_point(p_1)
    p_3_consecutive = dq_1_3_computed.inverse().passive_transform_point(p_1)
    npt.assert_allclose(p_3_direct, p_3_consecutive)
def test_dq_to_matrix(self):
    """Matrix conversion round-trips: dq -> matrix -> dq -> matrix."""
    pose = [1, 2, 3, 4.0, 5.0, 6.0, 7.0]
    dq = DualQuaternion.from_pose_vector(pose)
    # Normalize first: the pose above carries a non-unit quaternion.
    dq.normalize()
    matrix_out = dq.to_matrix()
    dq_from_matrix = DualQuaternion.from_transformation_matrix(matrix_out)
    matrix_out_2 = dq_from_matrix.to_matrix()
    npt.assert_allclose(dq.dq, dq_from_matrix.dq)
    npt.assert_allclose(matrix_out, matrix_out_2)
class OptionSeriesPyramid3dMarker(Options):
    """Generated option mapping for pyramid3d series marker settings.

    Each option is written as a getter/setter pair sharing one method name,
    so at class-creation time the later (setter) ``def`` shadows the earlier
    getter. NOTE(review): presumably a metaclass or post-processing step on
    the ``Options`` base turns these pairs into properties — confirm against
    the framework before relying on attribute access semantics.
    """
    def enabled(self):
        # Default: None (framework decides whether markers are enabled).
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def enabledThreshold(self):
        return self._config_get(2)
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)
    def fillColor(self):
        return self._config_get(None)
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def height(self):
        return self._config_get(None)
    def height(self, num: float):
        self._config(num, js_type=False)
    def lineColor(self):
        # Default marker outline color.
        return self._config_get('#ffffff')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(0)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        return self._config_get(4)
    def radius(self, num: float):
        self._config(num, js_type=False)
    def states(self) -> 'OptionSeriesPyramid3dMarkerStates':
        # Nested 'states' sub-configuration object.
        return self._config_sub_data('states', OptionSeriesPyramid3dMarkerStates)
    def symbol(self):
        return self._config_get(None)
    def symbol(self, text: str):
        self._config(text, js_type=False)
    def width(self):
        return self._config_get(None)
    def width(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): the bare expression below is almost certainly the remnants of
# a ``@_init.register_param_type`` class decorator whose ``@`` was lost in
# extraction — confirm against the original source.
_init.register_param_type
class param_supported_addr(param):
    """Parameter holding a list of 16-bit address-type codes.

    The value is serialized as a parameter header followed by one
    big-endian unsigned short per entry, padded to a 4-byte boundary.
    """
    # struct format for a single 16-bit big-endian value, and its size.
    _VALUE_STR = '!H'
    _VALUE_LEN = struct.calcsize(_VALUE_STR)
    # NOTE(review): takes ``cls`` but carries no ``@classmethod`` decorator —
    # likely stripped in extraction; confirm against the original source.
    def param_type(cls):
        return PTYPE_SUPPORTED_ADDR
    def __init__(self, value=None, length=0):
        # Accept a single int for convenience; normalise to a list of ints.
        if (not isinstance(value, list)):
            value = [value]
        for one in value:
            assert isinstance(one, int)
        super(param_supported_addr, self).__init__(value, length)
    # NOTE(review): takes ``cls`` but carries no ``@classmethod`` decorator —
    # likely stripped in extraction; confirm against the original source.
    def parser(cls, buf):
        # Header gives the total parameter length; values follow the header.
        (_, length) = struct.unpack_from(cls._PACK_STR, buf)
        value = []
        offset = cls._MIN_LEN
        while (offset < length):
            (one,) = struct.unpack_from(cls._VALUE_STR, buf, offset)
            value.append(one)
            offset += cls._VALUE_LEN
        return cls(value, length)
    def serialize(self):
        # Header first (type + current length), then one packed short per value.
        buf = bytearray(struct.pack(self._PACK_STR, self.param_type(), self.length))
        for one in self.value:
            buf.extend(struct.pack(param_supported_addr._VALUE_STR, one))
        # length == 0 means "not yet computed": record it and patch the
        # 16-bit length field at offset 2 of the already-packed header.
        if (0 == self.length):
            self.length = len(buf)
            struct.pack_into('!H', buf, 2, self.length)
        # Zero-pad to a 4-byte boundary (padding is not counted in length).
        mod = (len(buf) % 4)
        if mod:
            buf.extend(bytearray((4 - mod)))
        return six.binary_type(buf)
class OptionSeriesPolygonStatesSelectHalo(Options):
    """Generated option mapping for the select-state halo of polygon series.

    Getter/setter pairs share one method name, so the later (setter) ``def``
    shadows the getter at class-creation time. NOTE(review): presumably the
    ``Options`` base or a metaclass rebuilds these as properties — confirm
    against the framework.
    """
    def attributes(self):
        return self._config_get(None)
    def attributes(self, value: Any):
        self._config(value, js_type=False)
    def opacity(self):
        # Default halo opacity.
        return self._config_get(0.25)
    def opacity(self, num: float):
        self._config(num, js_type=False)
    def size(self):
        # Default halo size in pixels.
        return self._config_get(10)
    def size(self, num: float):
        self._config(num, js_type=False)
class OptionSeriesOrganizationStatesSelect(Options):
    """Generated option mapping for the select state of organization series.

    Getter/setter pairs share one method name, so the later (setter) ``def``
    shadows the getter at class-creation time. NOTE(review): presumably the
    ``Options`` base or a metaclass rebuilds these as properties — confirm
    against the framework.
    """
    def animation(self) -> 'OptionSeriesOrganizationStatesSelectAnimation':
        # Nested 'animation' sub-configuration object.
        return self._config_sub_data('animation', OptionSeriesOrganizationStatesSelectAnimation)
    def borderColor(self):
        return self._config_get('#000000')
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get('#cccccc')
    def color(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        # Select state is enabled by default.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def halo(self) -> 'OptionSeriesOrganizationStatesSelectHalo':
        # Nested 'halo' sub-configuration object.
        return self._config_sub_data('halo', OptionSeriesOrganizationStatesSelectHalo)
    def lineWidth(self):
        return self._config_get(None)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def lineWidthPlus(self):
        # Extra line width added in this state, on top of the normal width.
        return self._config_get(1)
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)
    def marker(self) -> 'OptionSeriesOrganizationStatesSelectMarker':
        # Nested 'marker' sub-configuration object.
        return self._config_sub_data('marker', OptionSeriesOrganizationStatesSelectMarker)
def get_segment_vafs(variants, segments):
    """Yield (segment, median alt-allele frequency) pairs.

    When ``segments`` is given, variants are grouped per segment via
    ``by_ranges``; otherwise all variants form one group keyed by ``None``.
    Within each group the frequencies are split at 0.5, and a median is
    emitted for each side that contains more than one value.
    """
    if segments:
        grouped = variants.by_ranges(segments)
    else:
        grouped = [(None, variants)]
    for segment, snvs in grouped:
        vafs = snvs['alt_freq'].values
        above_half = vafs > 0.5
        for mask in (above_half, ~above_half):
            # Skip sides with fewer than two values: no meaningful median.
            if sum(mask) > 1:
                yield (segment, np.median(vafs[mask]))
# NOTE(review): the bare call below looks like the remnants of a class
# decorator (e.g. ``@requires_toolkit([...])``) whose ``@`` prefix was lost
# in extraction — confirm against the original source.
_toolkit([ToolkitName.qt, ToolkitName.wx])
class TestCustomCheckListEditor(BaseTestMixin, unittest.TestCase):
    """GUI tests for the custom-style CheckListEditor (Qt and Wx)."""
    def setUp(self):
        BaseTestMixin.setUp(self)
    def tearDown(self):
        BaseTestMixin.tearDown(self)
    def setup_gui(self, model, view):
        """Context-manager generator: build the UI and yield the editor."""
        with create_ui(model, dict(view=view)) as ui:
            process_cascade_events()
            editor = ui.get_editors('value')[0]
            (yield editor)
    def test_custom_check_list_editor_button_update(self):
        """Changing the model value updates the checkbox states."""
        list_edit = ListModel()
        with reraise_exceptions(), self.setup_gui(list_edit, get_view('custom')) as editor:
            self.assertEqual(get_all_button_status(editor.control), [False, False, False, False])
            list_edit.value = ['two', 'four']
            process_cascade_events()
            self.assertEqual(get_all_button_status(editor.control), [False, True, False, True])
            list_edit.value = ['one', 'four']
            process_cascade_events()
            self.assertEqual(get_all_button_status(editor.control), [True, False, False, True])
    def test_custom_check_list_editor_click(self):
        """Clicking an item toggles its presence in the model value."""
        list_edit = ListModel()
        tester = UITester()
        with tester.create_ui(list_edit, dict(view=get_view('custom'))) as ui:
            self.assertEqual(list_edit.value, [])
            check_list = tester.find_by_name(ui, 'value')
            item_1 = check_list.locate(Index(1))
            item_1.perform(MouseClick())
            self.assertEqual(list_edit.value, ['two'])
            item_1.perform(MouseClick())
            self.assertEqual(list_edit.value, [])
    def test_custom_check_list_editor_click_initial_value(self):
        """Clicking an item that is already selected deselects it."""
        list_edit = ListModel(value=['two'])
        tester = UITester()
        with tester.create_ui(list_edit, dict(view=get_view('custom'))) as ui:
            self.assertEqual(list_edit.value, ['two'])
            check_list = tester.find_by_name(ui, 'value')
            item_1 = check_list.locate(Index(1))
            item_1.perform(MouseClick())
            self.assertEqual(list_edit.value, [])
    def test_custom_check_list_editor_invalid_current_values_str(self):
        """Unknown entries in a comma-separated Str value are filtered out."""
        class StrModel(HasTraits):
            value = Str()
        # 'alpha' and 'lambda' are not valid choices and get dropped;
        # surrounding whitespace (tabs, newlines) is stripped.
        str_edit = StrModel(value='alpha, \ttwo, three,\n lambda, one')
        tester = UITester()
        with tester.create_ui(str_edit, dict(view=get_view('custom'))) as ui:
            self.assertEqual(str_edit.value, 'two,three,one')
            check_list = tester.find_by_name(ui, 'value')
            item_1 = check_list.locate(Index(1))
            item_1.perform(MouseClick())
            self.assertEqual(str_edit.value, 'three,one')
    def test_custom_check_list_editor_grid_layout(self):
        """Item index 6 maps to the same logical item for any column count."""
        for cols in range(1, 8):
            list_edit = ListModel()
            tester = UITester()
            view = get_view_custom_cols(cols=cols)
            with tester.create_ui(list_edit, dict(view=view)) as ui:
                self.assertEqual(list_edit.value, [])
                check_list = tester.find_by_name(ui, 'value')
                item = check_list.locate(Index(6))
                item.perform(MouseClick())
                self.assertEqual(list_edit.value, ['seven'])
                item.perform(MouseClick())
                self.assertEqual(list_edit.value, [])
def check_heading_slug_func(inst: 'MdParserConfig', field: dc.Field, value: Any) -> None:
    """Validate (and normalise) a heading-slug function option.

    ``value`` may be ``None`` (option unset), an already-callable object, or
    a dotted import string such as ``"package.module.func"``. A string is
    resolved to the object it names and, once validated, written back onto
    ``inst`` under the field's name.

    Raises:
        TypeError: if a string cannot be resolved to an object, or if the
            final value is not callable.
    """
    if (value is None):
        return
    resolved_from_string = isinstance(value, str)
    if resolved_from_string:
        try:
            (module_path, function_name) = value.rsplit('.', 1)
            mod = import_module(module_path)
            value = getattr(mod, function_name)
        # ValueError: no '.' in the string; AttributeError: the module lacks
        # the named attribute. Previously only ImportError was caught, so
        # those failures escaped as the wrong exception type.
        except (ImportError, AttributeError, ValueError) as exc:
            raise TypeError(f"'{field.name}' could not be loaded from string: {value!r}") from exc
    # Validate before writing back: an invalid value must never end up
    # stored on the config instance (previously setattr ran first).
    if (not callable(value)):
        raise TypeError(f"'{field.name}' is not callable: {value!r}")
    if resolved_from_string:
        setattr(inst, field.name, value)
class _DummyService(BaseService):
    """Minimal concrete service used for testing the BaseService machinery."""
    # Name under which this service is identified.
    service_name = 'dummy'
    def __init__(self, exec_args, *args, **kwargs):
        # Fixed 10-second timeout keeps a hung dummy service from blocking
        # a test run indefinitely.
        super().__init__(*args, exec_args=exec_args, timeout=10, **kwargs)
    def start(self):
        # _proc is presumably created by BaseService before start() is
        # called — TODO confirm against the BaseService contract.
        assert (self._proc is not None)
        self._proc.start()
    def join(self):
        # Delegate to wait() so joining blocks until the service finishes.
        self.wait()
def test_session_edit_locked_allow_organizer(db, client, user, jwt):
    """An event organizer can edit a session even while it is locked."""
    session = get_session(db, user, event_owner=True, is_locked=True)
    payload = {
        'data': {
            'type': 'session',
            'id': str(session.id),
            'attributes': {'title': 'Sheesha'},
        }
    }
    response = client.patch(
        f'/v1/sessions/{session.id}',
        content_type='application/vnd.api+json',
        headers=jwt,
        data=json.dumps(payload),
    )
    db.session.refresh(session)
    assert (response.status_code == 200)
    assert (session.title == 'Sheesha')
# NOTE(review): the bare tuple below looks like the remnants of a
# ``@pytest.mark.parametrize('config_type', ['strict'])`` decorator whose
# prefix was lost in extraction — confirm against the original source.
('config_type', ['strict'])
def test_missing_envs_not_required_in_strict_mode(config, json_config_file_3):
    """With envs_required=False, an undefined ${ENV} placeholder loads as ''."""
    with open(json_config_file_3, 'w') as file:
        file.write(json.dumps({'section': {'undefined': '${UNDEFINED}'}}))
    config.from_json(json_config_file_3, envs_required=False)
    assert (config.section.undefined() == '')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.