code stringlengths 281 23.7M |
|---|
def run_csv_to_features(in_path, out_path=None, featurizer='host_tshark', otherflag=None):
    """Invoke the CSVToFeatures tool as if it were run from the command line.

    Builds a fake argv (script name, the shared COMMON_ARGS, the featurizer
    group, optional extra flag and output path, then the input path), installs
    it as sys.argv and calls the tool's main entry point.
    """
    argv = ['csv_to_features.py'] + COMMON_ARGS + ['-g', featurizer]
    if otherflag:
        argv.append(otherflag)
    if out_path:
        argv += ['-o', out_path]
    argv.append(in_path)
    sys.argv = argv
    CSVToFeatures().main()
def _test_correct_response_for_recipient_location(client):
    """Query spending_over_time filtered by recipient locations and verify the
    per-fiscal-year aggregated amounts against the known fixture values."""
    payload = {
        'group': 'fiscal_year',
        'filters': {
            'recipient_locations': [
                {'country': 'USA', 'state': 'le_state_code_4', 'city': 'le_city_name_4'},
                {'country': 'USA', 'state': 'le_state_code_7', 'county': '007'},
                {'country': 'USA', 'state': 'le_state_code_17', 'district_original': '17'},
                {'country': 'USA', 'zip': 'le_zip5_20'},
            ],
            'time_period': [{'start_date': '2007-10-01', 'end_date': '2020-09-30'}],
        },
    }
    resp = client.post('/api/v2/search/spending_over_time', content_type='application/json', data=json.dumps(payload))
    # Only FY2010, FY2014 and FY2017 carry non-zero totals; every other year is 0.
    nonzero = {'2010': 8020.0, '2014': 8004.0, '2017': 16024.0}
    expected_result = [
        {'aggregated_amount': nonzero.get(str(year), 0), 'time_period': {'fiscal_year': str(year)}}
        for year in range(2008, 2021)
    ]
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json().get('results') == expected_result), 'Recipient Location filter does not match expected result'
class sRGBLinear(sRGB):
    """Linear-light sRGB color space (sRGB without the transfer curve).

    Conversion to/from the XYZ D65 base space is delegated to the
    ``lin_srgb_to_xyz`` / ``xyz_to_lin_srgb`` helpers.
    """
    BASE = 'xyz-d65'
    NAME = 'srgb-linear'
    SERIALIZE = ('srgb-linear',)
    WHITE = WHITES['2deg']['D65']
    def to_base(self, coords: Vector) -> Vector:
        # Linear sRGB -> XYZ (D65).
        return lin_srgb_to_xyz(coords)
    def from_base(self, coords: Vector) -> Vector:
        # XYZ (D65) -> linear sRGB.
        return xyz_to_lin_srgb(coords)
def get_chat_default_kwargs():
    """Assemble the default keyword arguments for a chat request.

    Reads LLM_MODEL and API_KEY from the project configuration and wraps them,
    together with neutral sampling settings, into the kwargs dict that the
    chat entry points expect.
    """
    from toolbox import get_conf
    llm_model, api_key = get_conf('LLM_MODEL', 'API_KEY')
    sampling = {
        'api_key': api_key,
        'llm_model': llm_model,
        'top_p': 1.0,
        'max_length': None,
        'temperature': 1.0,
    }
    return {
        'inputs': 'Hello there, are you ready?',
        'llm_kwargs': sampling,
        'history': [],
        'sys_prompt': 'You are AI assistant',
        'observe_window': None,
        # NOTE: the 'slience' spelling is part of the existing key contract.
        'console_slience': False,
    }
class models(AppCommand):
    """List the models registered with the app.

    Internal (builtin) models -- whose namespace starts with ``'@'`` -- are
    hidden by default; pass ``--builtins`` to include them.
    """
    title = 'Models'
    headers = ['name', 'help']
    sortkey = attrgetter('_options.namespace')
    options = [option('--builtins/--no-builtins', default=False)]

    async def run(self, *, builtins: bool) -> None:
        """Render the model table to the console."""
        self.say(self.tabulate(
            [self.model_to_row(model) for model in self.models(builtins)],
            headers=self.headers,
            title=self.title,
        ))

    def models(self, builtins: bool) -> Sequence[Type[ModelT]]:
        """Return registered models sorted by namespace.

        BUGFIX: the filter previously tested ``startswith('')``, which is
        always true -- so ``not ...`` was always false and the whole
        condition collapsed to ``builtins``, hiding every model unless
        ``--builtins`` was given.  Builtin models use the ``'@'`` namespace
        prefix.
        """
        sortkey = cast(Callable[[Type[ModelT]], Any], self.sortkey)
        return [
            model for model in sorted(registry.values(), key=sortkey)
            if not model._options.namespace.startswith('@') or builtins
        ]

    def model_to_row(self, model: Type[ModelT]) -> Sequence[str]:
        """One table row: abbreviated name plus help text."""
        return [self._name(model), self._help(model)]

    def _name(self, model: Type[ModelT]) -> str:
        # Shorten the fully qualified namespace relative to the app origin.
        return self.abbreviate_fqdn(model._options.namespace)

    def _help(self, model: Type[ModelT]) -> str:
        return model.__doc__ or '<N/A>'
class OptionPlotoptionsStreamgraphSonificationDefaultspeechoptionsMappingRate(Options):
    """Rate-mapping options for a streamgraph sonification default speech track.

    Each option is exposed as a property: the getter returns the stored
    configuration value (default None) and the setter writes it back.

    BUGFIX: the getter/setter pairs were plain duplicate ``def`` statements,
    so each setter silently shadowed its getter (lost ``@property`` /
    ``@x.setter`` decorators); the decorators are restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_current_node_exporter_version():
    """Return the installed node_exporter version string.

    Runs ``node_exporter --version`` and scans both stdout and stderr for a
    ``version <x>`` token.  Returns UNKNOWN_VERSION when the binary runs but
    no version can be parsed, and False when the binary is not installed.
    """
    try:
        completed = subprocess.run(
            [NODE_EXPORTER_INSTALLED_PATH + 'node_exporter', '--version'],
            capture_output=True,
            text=True,
        )
    except FileNotFoundError:
        # Binary missing entirely: callers treat False as "not installed".
        return False
    # The version line may land on either stream depending on the release.
    combined = completed.stdout + '\n' + completed.stderr
    match = re.search('version (?P<version>[^ ]+)', combined)
    if match:
        return match.group('version')
    return UNKNOWN_VERSION
class SnippetResponsePost(ModelComposed):
    """Auto-generated composed OpenAPI model for a snippet POST response.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    statements below look like decorators that lost their ``@`` prefix
    (typically ``@cached_property`` and ``@convert_js_args_to_python_args``
    in generated clients) -- confirm against the code-generator template.
    """
    # Server-accepted enumerations for the 'type' and 'dynamic' properties.
    allowed_values = {('type',): {'INIT': 'init', 'RECV': 'recv', 'HASH': 'hash', 'HIT': 'hit', 'MISS': 'miss', 'PASS': 'pass', 'FETCH': 'fetch', 'ERROR': 'error', 'DELIVER': 'deliver', 'LOG': 'log', 'NONE': 'none'}, ('dynamic',): {'regular': 0, 'dynamic': 1}}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not declared in openapi_types.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared property name -> tuple of accepted types.
        lazy_import()
        return {'name': (str,), 'type': (str,), 'content': (str,), 'priority': (str,), 'dynamic': (float,), 'created_at': (datetime, none_type), 'deleted_at': (datetime, none_type), 'updated_at': (datetime, none_type), 'service_id': (str,), 'version': (str,), 'id': (str,)}
    _property
    def discriminator():
        return None
    # JSON field name for each Python attribute (identical here).
    attribute_map = {'name': 'name', 'type': 'type', 'content': 'content', 'priority': 'priority', 'dynamic': 'dynamic', 'created_at': 'created_at', 'deleted_at': 'deleted_at', 'updated_at': 'updated_at', 'service_id': 'service_id', 'version': 'version', 'id': 'id'}
    read_only_vars = {'created_at', 'deleted_at', 'updated_at', 'service_id', 'version', 'id'}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from server data (read-only attrs permitted).

        NOTE(review): presumably also decorated with ``@classmethod`` in the
        original generated source -- confirm.
        """
        # Pop framework-internal kwargs before treating the rest as properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            # Positional arguments are never valid for generated models.
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track visited classes to break composition cycles.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            # Skip unknown keys when the configuration asks to discard them.
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self
    # Attributes that must survive __setattr__/__getattr__ interception.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Build an instance from user data; read-only attrs are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    _property
    def _composed_schemas():
        # Schema composition: this model is the allOf of the two parts below.
        lazy_import()
        return {'anyOf': [], 'allOf': [SnippetResponseCommon, SnippetWithDynamicNumber], 'oneOf': []}
class ChatApi(ChatApiBase, ChatIncrementalApi):
    """Aggregate entry point for the Chat API.

    Exposes one sub-API attribute per chat resource: accounts and visitors
    get dedicated request handlers, agents a dedicated API class, and the
    remaining resources share the generic ChatApiBase.
    """

    def __init__(self, config, endpoint):
        super(ChatApi, self).__init__(config, endpoint=endpoint)
        # Resources with specialised handlers/classes.
        self.accounts = ChatApiBase(config, endpoint.account, request_handler=AccountRequest)
        self.agents = AgentApi(config, endpoint.agents)
        self.visitors = ChatApiBase(config, endpoint.visitors, request_handler=VisitorRequest)
        # Plain resources backed by the generic base API.
        for attr_name, sub_endpoint in (
            ('shortcuts', endpoint.shortcuts),
            ('triggers', endpoint.triggers),
            ('bans', endpoint.bans),
            ('departments', endpoint.departments),
            ('goals', endpoint.goals),
            ('stream', endpoint.stream),
        ):
            setattr(self, attr_name, ChatApiBase(config, sub_endpoint))

    def search(self, *args, **kwargs):
        """GET the chat search endpoint built from *args/**kwargs."""
        search_url = self._build_url(self.endpoint.search(*args, **kwargs))
        return self._get(search_url)
class LatentDiffusionTrainer(Trainer[(ConfigType, TextEmbeddingLatentsBatch)]):
    """Trainer for a Stable Diffusion 1.x latent-diffusion model.

    NOTE(review): the bare ``_property`` statements below look like
    decorators that lost their ``@`` prefix (likely ``@cached_property``) --
    confirm against the upstream source.
    """
    _property
    def unet(self) -> SD1UNet:
        """Denoising UNet, built on the trainer's device (4 latent channels)."""
        assert (self.config.models['unet'] is not None), 'The config must contain a unet entry.'
        return SD1UNet(in_channels=4, device=self.device).to(device=self.device)
    _property
    def text_encoder(self) -> CLIPTextEncoderL:
        """CLIP text encoder used to condition the UNet."""
        assert (self.config.models['text_encoder'] is not None), 'The config must contain a text_encoder entry.'
        return CLIPTextEncoderL(device=self.device).to(device=self.device)
    _property
    def lda(self) -> SD1Autoencoder:
        """Latent autoencoder for encoding/decoding images to/from latents."""
        assert (self.config.models['lda'] is not None), 'The config must contain a lda entry.'
        return SD1Autoencoder(device=self.device).to(device=self.device)
    def load_models(self) -> dict[(str, fl.Module)]:
        """Expose the three trainable/auxiliary models to the framework."""
        return {'unet': self.unet, 'text_encoder': self.text_encoder, 'lda': self.lda}
    def load_dataset(self) -> Dataset[TextEmbeddingLatentsBatch]:
        """Dataset of precomputed (text embedding, latent) pairs."""
        return TextEmbeddingLatentsDataset(trainer=self)
    _property
    def ddpm_scheduler(self) -> DDPM:
        """DDPM noise scheduler over 1000 steps for training-time noising."""
        return DDPM(num_inference_steps=1000, device=self.device).to(device=self.device)
    def sample_timestep(self) -> Tensor:
        """Draw a random diffusion step within the configured range.

        Side effect: stores the drawn index in ``self.current_step`` so that
        compute_loss can noise the latents at the same step.
        """
        random_step = random.randint(a=self.config.latent_diffusion.min_step, b=self.config.latent_diffusion.max_step)
        self.current_step = random_step
        return self.ddpm_scheduler.timesteps[random_step].unsqueeze(dim=0)
    def sample_noise(self, size: tuple[(int, ...)], dtype: (DType | None)=None) -> Tensor:
        """Gaussian noise of the given size, with configured offset-noise."""
        return sample_noise(size=size, offset_noise=self.config.latent_diffusion.offset_noise, device=self.device, dtype=dtype)
    def compute_loss(self, batch: TextEmbeddingLatentsBatch) -> Tensor:
        """Standard epsilon-prediction MSE loss on noised latents."""
        (clip_text_embedding, latents) = (batch.text_embeddings, batch.latents)
        timestep = self.sample_timestep()
        noise = self.sample_noise(size=latents.shape, dtype=latents.dtype)
        # Noise latents at the step drawn by sample_timestep (current_step).
        noisy_latents = self.ddpm_scheduler.add_noise(x=latents, noise=noise, step=self.current_step)
        self.unet.set_timestep(timestep=timestep)
        self.unet.set_clip_text_embedding(clip_text_embedding=clip_text_embedding)
        prediction = self.unet(noisy_latents)
        # The UNet predicts the added noise; compare against it directly.
        loss = mse_loss(input=prediction, target=noise)
        return loss
    def compute_evaluation(self) -> None:
        """Generate evaluation images for the configured prompts and log them."""
        sd = StableDiffusion_1(unet=self.unet, lda=self.lda, clip_text_encoder=self.text_encoder, scheduler=DPMSolver(num_inference_steps=self.config.test_diffusion.num_inference_steps), device=self.device)
        prompts = self.config.test_diffusion.prompts
        num_images_per_prompt = self.config.test_diffusion.num_images_per_prompt
        if self.config.test_diffusion.use_short_prompts:
            # Keep only the text before the first comma.
            prompts = [prompt.split(sep=',')[0] for prompt in prompts]
        images: dict[(str, WandbLoggable)] = {}
        for prompt in prompts:
            # Stack all samples for one prompt vertically on a single canvas.
            canvas_image: Image.Image = Image.new(mode='RGB', size=(512, (512 * num_images_per_prompt)))
            for i in range(num_images_per_prompt):
                logger.info(f'Generating image {(i + 1)}/{num_images_per_prompt} for prompt: {prompt}')
                x = randn(1, 4, 64, 64, device=self.device)
                clip_text_embedding = sd.compute_clip_text_embedding(text=prompt).to(device=self.device)
                for step in sd.steps:
                    x = sd(x, step=step, clip_text_embedding=clip_text_embedding)
                canvas_image.paste(sd.lda.decode_latents(x=x), box=(0, (512 * i)))
            images[prompt] = canvas_image
        self.log(data=images)
class OptionPlotoptionsTreemapOnpointPosition(Options):
    """Position options (offsetX/offsetY/x/y) for a treemap on-point config.

    Each value is exposed as a property: the getter reads the stored config
    (default None) and the setter writes it back.

    BUGFIX: the getter/setter pairs were plain duplicate ``def`` statements,
    so each setter silently shadowed its getter (lost ``@property`` /
    ``@x.setter`` decorators); the decorators are restored.
    """

    @property
    def offsetX(self):
        return self._config_get(None)

    @offsetX.setter
    def offsetX(self, num: float):
        self._config(num, js_type=False)

    @property
    def offsetY(self):
        return self._config_get(None)

    @offsetY.setter
    def offsetY(self, num: float):
        self._config(num, js_type=False)

    @property
    def x(self):
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
def fetch_consumption(zone_key='JP', session=None, target_datetime: datetime.datetime=None, logger: logging.Logger=logging.getLogger(__name__)) -> dict:
    """Fetch historic electricity consumption for a Japanese zone.

    This parser only handles historic data, so ``target_datetime`` is
    mandatory.  Downloads and processes the regional data for that day and
    returns a list of per-timestamp consumption dicts.
    """
    if target_datetime is None:
        raise NotImplementedError('target_datetime must be provided, this parser can only parse historic data')
    region = MAP_ZONE_TO_REGION_NAME[zone_key]
    frame = process_data(
        get_data(region, target_datetime.year, target_datetime.month, target_datetime.day)
    )
    return [
        {
            'datetime': row['date_time'].to_pydatetime(),
            'zoneKey': zone_key,
            'consumption': row.get('consumption', None),
            'source': 'isep-energychart.com',
        }
        for _, row in frame.iterrows()
    ]
# BUGFIX: the leading-dot `.xfail(...)` / `.skipif(...)` statements were
# syntactically invalid -- the `@pytest.mark.` decorator prefix was lost.
@pytest.mark.xfail(raises=ImageComparisonFailure, reason='Matplotlib plots for reasons a different image size.')
@pytest.mark.skipif((LOW_MEMORY > memory), reason='Travis has too less memory to run it.')
def test_hicPlotMatrix_h5_perChr_log1p_chromosomeOrder():
    """Plot a per-chromosome log matrix with an explicit chromosome order and
    compare the output PNG against the stored reference image."""
    outfile = NamedTemporaryFile(suffix='.png', prefix='hicexplorer_test', delete=False)
    args = '--matrix {0}/small_test_matrix_50kb_res.h5 --perChr --disable_tight_layout --outFileName {1} --log --chromosomeOrder chr2L chr3L chr3R chr2R'.format(ROOT, outfile.name).split()
    compute(hicexplorer.hicPlotMatrix.main, args, 5)
    res = compare_images(((ROOT + 'hicPlotMatrix') + '/small_test_matrix_perChr_log1p_chromosomeOrder_disable_tight_layout.png'), outfile.name, tol=tolerance)
    assert (res is None), res
    if REMOVE_OUTPUT:
        os.remove(outfile.name)
# NOTE(review): the bare `_postgres` below looks like a decorator that lost
# its `@` prefix (a postgres fixture/marker) -- confirm against the suite.
_postgres
def test_relations(pgs):
    """Exercise relations between single- and multi-key source/dest models.

    Covers: relation-field population on `.new`, `.create` + the various
    `.get` call styles (positional, keyword, tuple, dict), and membership
    transfer via `.add` / `.remove`.
    """
    # Two sources of each flavor: custom (single string key) and multi key.
    sc1 = SourceCustom.new(foo='test1')
    sc1.save()
    sc2 = SourceCustom.new(foo='test2')
    sc2.save()
    sm1 = SourceMulti.new(baz='test1')
    sm1.save()
    sm2 = SourceMulti.new(baz='test2')
    sm2.save()
    # `.new` on a relation pre-fills the foreign-key fields from the source.
    dcc1 = sc1.dest_custom_customs.new(foo='test')
    assert (dcc1.source_custom == sc1.id)
    assert isinstance(dcc1.source_custom, str)
    dcm1 = sc1.dest_custom_multis.new(baz='test')
    assert (dcm1.source_custom == sc1.id)
    # NOTE(review): this re-checks dcc1 -- possibly intended dcm1; confirm.
    assert isinstance(dcc1.source_custom, str)
    dmc1 = sm1.dest_multi_customs.new(foo='test')
    assert (dmc1.source_multi_foo == sm1.foo)
    assert (dmc1.source_multi_bar == sm1.bar)
    dmm1 = sm1.dest_multi_multis.new(baz='test')
    assert (dmm1.source_multi_foo == sm1.foo)
    assert (dmm1.source_multi_bar == sm1.bar)
    # custom source -> custom dest: create, then fetch by single id.
    dcc1 = sc1.dest_custom_customs.create(foo='test')
    assert isinstance(dcc1.id, str)
    row = sc1.dest_custom_customs().first()
    assert (row.foo == 'test')
    rc = DestCustomCustom.get(row.id)
    assert (rc.foo == row.foo)
    assert isinstance(rc.source_custom, str)
    # custom source -> multi dest: id is a tuple; all `.get` styles work.
    dcm1 = sc1.dest_custom_multis.create(baz='test')
    assert isinstance(dcm1.id, tuple)
    row = sc1.dest_custom_multis().first()
    assert (row.baz == 'test')
    rc = DestCustomMulti.get(row.foo, row.bar)
    assert (rc.foo == row.foo)
    assert (rc.bar == row.bar)
    assert isinstance(rc.source_custom, str)
    rc = DestCustomMulti.get(foo=row.foo, bar=row.bar)
    assert (rc.foo == row.foo)
    assert (rc.bar == row.bar)
    assert isinstance(rc.source_custom, str)
    rc = DestCustomMulti.get((row.foo, row.bar))
    assert (rc.foo == row.foo)
    assert (rc.bar == row.bar)
    assert isinstance(rc.source_custom, str)
    rc = DestCustomMulti.get({'foo': row.foo, 'bar': row.bar})
    assert (rc.foo == row.foo)
    assert (rc.bar == row.bar)
    assert isinstance(rc.source_custom, str)
    # multi source -> custom dest: related source resolves to a tuple proxy.
    dmc1 = sm1.dest_multi_customs.create(foo='test')
    assert isinstance(dmc1.id, str)
    row = sm1.dest_multi_customs().first()
    assert (row.foo == 'test')
    rc = DestMultiCustom.get(row.id)
    assert (rc.foo == row.foo)
    assert isinstance(rc.source_multi, tuple)
    assert (rc.source_multi.foo == rc.source_multi_foo)
    assert (rc.source_multi.bar == rc.source_multi_bar)
    assert (rc.source_multi.baz == 'test1')
    # multi source -> multi dest: tuple id and all `.get` styles again.
    dmm1 = sm1.dest_multi_multis.create(baz='test')
    assert isinstance(dmm1.id, tuple)
    row = sm1.dest_multi_multis().first()
    assert (row.baz == 'test')
    rc = DestMultiMulti.get(row.foo, row.bar)
    assert (rc.foo == row.foo)
    assert (rc.bar == row.bar)
    assert isinstance(rc.source_multi, tuple)
    assert (rc.source_multi.foo == rc.source_multi_foo)
    assert (rc.source_multi.bar == rc.source_multi_bar)
    assert (rc.source_multi.baz == 'test1')
    rc = DestMultiMulti.get(foo=row.foo, bar=row.bar)
    assert (rc.foo == row.foo)
    assert (rc.bar == row.bar)
    assert isinstance(rc.source_multi, tuple)
    assert (rc.source_multi.foo == rc.source_multi_foo)
    assert (rc.source_multi.bar == rc.source_multi_bar)
    assert (rc.source_multi.baz == 'test1')
    rc = DestMultiMulti.get((row.foo, row.bar))
    assert (rc.foo == row.foo)
    assert (rc.bar == row.bar)
    assert isinstance(rc.source_multi, tuple)
    assert (rc.source_multi.foo == rc.source_multi_foo)
    assert (rc.source_multi.bar == rc.source_multi_bar)
    assert (rc.source_multi.baz == 'test1')
    rc = DestMultiMulti.get({'foo': row.foo, 'bar': row.bar})
    assert (rc.foo == row.foo)
    assert (rc.bar == row.bar)
    assert isinstance(rc.source_multi, tuple)
    assert (rc.source_multi.foo == rc.source_multi_foo)
    assert (rc.source_multi.bar == rc.source_multi_bar)
    assert (rc.source_multi.baz == 'test1')
    # add/remove: moving a dest between sources updates counts and back-refs;
    # after remove the back-reference is None and the dest is invalid.
    dcc1 = DestCustomCustom.first()
    sc2.dest_custom_customs.add(dcc1)
    assert (sc1.dest_custom_customs.count() == 0)
    assert (sc2.dest_custom_customs.count() == 1)
    assert (dcc1.source_custom.id == sc2.id)
    sc2.dest_custom_customs.remove(dcc1)
    assert (sc1.dest_custom_customs.count() == 0)
    assert (sc2.dest_custom_customs.count() == 0)
    assert (dcc1.source_custom is None)
    assert (not dcc1.is_valid)
    dcm1 = DestCustomMulti.first()
    sc2.dest_custom_multis.add(dcm1)
    assert (sc1.dest_custom_multis.count() == 0)
    assert (sc2.dest_custom_multis.count() == 1)
    assert (dcm1.source_custom.id == sc2.id)
    sc2.dest_custom_multis.remove(dcm1)
    assert (sc1.dest_custom_multis.count() == 0)
    assert (sc2.dest_custom_multis.count() == 0)
    assert (dcm1.source_custom is None)
    assert (not dcm1.is_valid)
    dmc1 = DestMultiCustom.first()
    sm2.dest_multi_customs.add(dmc1)
    assert (sm1.dest_multi_customs.count() == 0)
    assert (sm2.dest_multi_customs.count() == 1)
    assert (dmc1.source_multi.foo == sm2.foo)
    assert (dmc1.source_multi.bar == sm2.bar)
    sm2.dest_multi_customs.remove(dmc1)
    assert (sm1.dest_multi_customs.count() == 0)
    assert (sm2.dest_multi_customs.count() == 0)
    assert (dmc1.source_multi is None)
    assert (not dmc1.is_valid)
    dmm1 = DestMultiMulti.first()
    sm2.dest_multi_multis.add(dmm1)
    assert (sm1.dest_multi_multis.count() == 0)
    assert (sm2.dest_multi_multis.count() == 1)
    assert (dmm1.source_multi.foo == sm2.foo)
    assert (dmm1.source_multi.bar == sm2.bar)
    sm2.dest_multi_multis.remove(dmm1)
    assert (sm1.dest_multi_multis.count() == 0)
    assert (sm2.dest_multi_multis.count() == 0)
    assert (dmm1.source_multi is None)
    assert (not dmm1.is_valid)
class ListQueryMixin(object):
    """Mixin providing a generic ``list`` verb for API client classes."""

    def list(self, resource=None, fields=None, max_results=None, verb='list', **kwargs):
        """Yield responses for a list query.

        Builds the verb arguments from fields/max_results (plus the
        configured key field when a resource is given), then either pages
        through the results or performs a single non-paginated call --
        dropping the max-results argument, which unpaginated verbs do not
        accept.
        """
        query_args = {'fields': fields, self._max_results_field: max_results}
        if resource and self._list_key_field:
            query_args[self._list_key_field] = resource
        query_args.update(kwargs)
        if self._request_supports_pagination(verb):
            yield from self.execute_paged_query(verb=verb, verb_arguments=query_args)
        else:
            del query_args[self._max_results_field]
            yield self.execute_query(verb=verb, verb_arguments=query_args)
def test_halfspace():
    """Regression-test kernel.halfspace against stored empymod results.

    Checks the full solution against fixtures, then the 'dsplit' and 'dtetm'
    decompositions, and cross-validates the direct field against bipole().
    """
    hs = DATAEMPYMOD['hs'][()]
    hsres = DATAEMPYMOD['hsres'][()]
    hsbp = DATAEMPYMOD['hsbp'][()]
    # Every stored case must reproduce its stored result.
    for key in hs:
        hs_res = kernel.halfspace(**hs[key])
        assert_allclose(hs_res, hsres[key], rtol=5e-05)
    # 'dsplit' must decompose the full solution into direct+reflect+air.
    full = kernel.halfspace(**hs['21'])
    hs['21']['solution'] = 'dsplit'
    (direct, reflect, air) = kernel.halfspace(**hs['21'])
    assert_allclose(full, ((direct + reflect) + air))
    # Cross-check the direct field against bipole() on a homogeneous model.
    hsbp['21']['xdirect'] = True
    hsbp['21']['depth'] = []
    hsbp['21']['res'] = hsbp['21']['res'][1]
    hsbp['21']['aniso'] = hsbp['21']['aniso'][1]
    hsbp['21']['ft'] = 'dlf'
    hs_res = bipole(**hsbp['21'])
    assert_allclose(direct, hs_res, rtol=0.01)
    # 'dfs' returns only the direct field for case '11'.
    hs['11']['solution'] = 'dfs'
    full = kernel.halfspace(**hs['11'])
    hs['11']['solution'] = 'dsplit'
    (direct, _, _) = kernel.halfspace(**hs['11'])
    assert_allclose(full, direct)
    hs['11']['solution'] = 'dsplit'
    (direct, reflect, air) = kernel.halfspace(**hs['11'])
    # 'dtetm' splits direct/reflected into TE+TM parts; sums must agree.
    hs['11']['solution'] = 'dtetm'
    (dTE, dTM, rTE, rTM, air2) = kernel.halfspace(**hs['11'])
    assert_allclose(direct, (dTE + dTM))
    assert_allclose(reflect, (rTE + rTM))
    assert_allclose(air, air2)
    # And bipole() again for case '11'.
    hsbp['11']['xdirect'] = True
    hsbp['11']['depth'] = []
    hsbp['11']['res'] = hsbp['11']['res'][1]
    hsbp['11']['aniso'] = hsbp['11']['aniso'][1]
    hs_res = bipole(**hsbp['11'])
    assert_allclose(direct, hs_res, atol=0.01)
class InvitationDataAttributes(ModelNormal):
    """Auto-generated OpenAPI model for invitation attributes.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    statements below look like decorators that lost their ``@`` prefix
    (typically ``@cached_property`` and ``@convert_js_args_to_python_args``
    in generated clients) -- confirm against the code-generator template.
    """
    # Server-accepted enumeration for 'status_code'.
    allowed_values = {('status_code',): {'inactive': 0, 'active': 1}}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not declared in openapi_types.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared property name -> tuple of accepted types.
        lazy_import()
        return {'email': (str,), 'limit_services': (bool,), 'role': (RoleUser,), 'status_code': (int,)}
    _property
    def discriminator():
        return None
    # JSON field name for each Python attribute (identical here).
    attribute_map = {'email': 'email', 'limit_services': 'limit_services', 'role': 'role', 'status_code': 'status_code'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from server data.

        NOTE(review): presumably also decorated with ``@classmethod`` in the
        original generated source -- confirm.
        """
        # Pop framework-internal kwargs before treating the rest as properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            # Positional arguments are never valid for generated models.
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Skip unknown keys when the configuration asks to discard them.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    # Attributes that must survive __setattr__/__getattr__ interception.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Build an instance from user data; read-only attrs are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def handle_response(to_class=None):
    """Decorator factory for API-response handler methods.

    The wrapped function's return value is validated with
    ``check_response_body`` and, when *to_class* is given, mapped into that
    class using a Bot reference recovered from the owning object (lists of
    Group become Groups, other lists become Chats).

    BUGFIX: the original body contained a bare ``(func)`` statement where a
    ``@wraps(func)`` decorator evidently lost its ``@`` -- restored so the
    wrapper keeps the wrapped function's metadata.
    """
    from functools import wraps

    def decorator(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            ret = func(*args, **kwargs)
            if ret is None:
                return
            # Raise early on error payloads before any class mapping.
            smart_map(check_response_body, ret)
            if to_class:
                # Recover the owning object to find a Bot reference:
                # bound methods pass it as args[0]; otherwise look in the
                # caller's frame locals.
                if args:
                    self = args[0]
                else:
                    self = inspect.currentframe().f_back.f_locals.get('self')
                from ..api.bot import Bot
                if isinstance(self, Bot):
                    # Avoid a strong reference cycle back to the bot.
                    bot = weakref.proxy(self)
                else:
                    bot = getattr(self, 'bot', None)
                if not bot:
                    raise ValueError('bot not found:m\nmethod: {}\nself: {}\nbot: {}'.format(func, self, bot))
                ret = smart_map(to_class, ret, bot)
                if isinstance(ret, list):
                    from ..api.chats import Group
                    if to_class == Group:
                        from ..api.chats import Groups
                        ret = Groups(ret)
                    else:
                        from ..api.chats import Chats
                        ret = Chats(ret, bot)
            return ret
        return wrapped
    return decorator
def example():
    """Build a demo column: a titled row plus a row of selectable amenity chips."""
    async def amenity_selected(e):
        # Refresh the chip row after a selection toggles.
        await amenity_row.update_async()

    header = ft.Row([ft.Icon(ft.icons.HOTEL_CLASS), ft.Text('Amenities')])
    amenity_row = ft.Row()
    for label in ('Washer / Dryer', 'Ramp access', 'Dogs OK', 'Cats OK', 'Smoke-free'):
        amenity_row.controls.append(ft.Chip(label=ft.Text(label), on_select=amenity_selected))
    return ft.Column(controls=[header, amenity_row])
class Record(list, UsableFlag):
    """A list of tagged elements with an optional tag -> element index.

    The index (``ldict``) is built lazily by convert_to_dict(); structural
    mutations other than append are only allowed while the index does not
    exist yet.
    """

    def __init__(self):
        list.__init__(self)
        UsableFlag.__init__(self)
        self.ldict = None      # lazy tag -> element index (None until built)
        self.comment = None    # optional record-level comment

    def get_comment(self):
        """Return the comment; it must have been set beforehand."""
        assert self.comment is not None
        return self.comment

    def set_comment(self, comment):
        self.comment = comment

    def convert_to_dict(self):
        """Build the tag index; RMTException 81 on duplicate tags."""
        self.ldict = {}
        for element in self:
            tag = element.get_tag()
            if tag in self.ldict:
                raise RMTException(81, "Tag '%s' multiple defined" % tag)
            self.ldict[tag] = element

    def get_dict(self):
        """Return the tag index, building it on first use."""
        if self.ldict is None:
            self.convert_to_dict()
        return self.ldict

    def insert(self, index, o):
        # Structural inserts are only valid before the index exists.
        assert self.ldict is None
        list.insert(self, index, o)

    def append(self, o):
        # Keep the index in sync when it has already been built.
        if self.ldict is not None:
            self.ldict[o.get_tag()] = o
        list.append(self, o)

    def __delitem__(self, index):
        assert self.ldict is None
        list.__delitem__(self, index)

    def remove(self, v):
        """Remove the first element whose tag equals *v* (no-op if absent)."""
        for element in self:
            if element.get_tag() == v:
                list.remove(self, element)
                return
        return

    def set_content(self, key, value):
        """Set the content of the first element tagged *key*; ValueError if none."""
        for element in self:
            if element.get_tag() == key:
                element.set_content(value)
                return
        raise ValueError()

    def is_tag_available(self, tag):
        """Return True when some element carries *tag*."""
        return any(tag == element.get_tag() for element in self)
def get_runnable_realizations_mask(storage: StorageReader, casename: str):
    """Return the realization mask for states that can (re)run.

    Returns an empty list when the case name does not exist in storage.
    """
    try:
        ensemble = storage.get_ensemble_by_name(casename)
    except KeyError:
        # Unknown case: nothing is runnable.
        return []
    runnable_states = [
        RealizationStorageState.UNDEFINED,
        RealizationStorageState.INITIALIZED,
        RealizationStorageState.LOAD_FAILURE,
        RealizationStorageState.HAS_DATA,
    ]
    return ensemble.get_realization_mask_from_state(runnable_states)
class PlantWateringNotifier(hass.Hass):
    """AppDaemon app that reminds the user to water the plants.

    Every morning it checks the rain forecast: when significant rain is
    unlikely, it asks the user via a Telegram inline keyboard to water the
    plants; otherwise it notes that watering is not needed.  In the evening
    it reminds again if the morning reminder was never acknowledged.
    """

    def initialize(self):
        """Read app arguments, set thresholds and register callbacks."""
        self.timer_handle_list = []
        self.listen_event_handle_list = []
        self.listen_state_handle_list = []
        self.app_switch = self.args['app_switch']
        self.rain_precip_sensor = self.args['rain_precip_sensor']
        self.rain_precip_intensity_sensor = self.args['rain_precip_intensity_sensor']
        self.precip_type_sensor = self.args['precip_type_sensor']
        self.notify_name = self.args['notify_name']
        self.user_id = self.args['user_id']
        self.reminder_acknowledged_entity = self.args['reminder_acknowledged_entity']
        self.message = self.args['message']
        self.message_not_needed = self.args['message_not_needed']
        self.message_evening = self.args['message_evening']
        # Below these thresholds the forecast rain will not water the plants.
        self.intensity_minimum = 2
        self.propability_minimum = 90
        self.keyboard_callback = '/plants_watered'
        self.notifier = self.get_app('Notifier')
        self.reminder_acknowledged = self.get_state(self.reminder_acknowledged_entity)
        self.listen_event_handle_list.append(self.listen_event(self.receive_telegram_callback, 'telegram_callback'))
        self.timer_handle_list.append(self.run_daily(self.run_morning_callback, datetime.time(8, 0, 0)))
        self.timer_handle_list.append(self.run_daily(self.run_evening_callback, datetime.time(18, 0, 0)))

    def run_morning_callback(self, kwargs):
        """Morning check: remind to water unless enough rain is forecast."""
        if self.get_state(self.app_switch) != 'on':
            return
        precip_propability = self.get_state(self.rain_precip_sensor)
        self.log('Rain Propability: {}'.format(float(precip_propability)))
        precip_intensity = self.get_state(self.rain_precip_intensity_sensor)
        self.log('Rain Intensity: {}'.format(float(precip_intensity)))
        precip_type = self.get_state(self.precip_type_sensor)
        self.log('Precip Type: {}'.format(precip_type))
        rain_unlikely = (
            precip_propability is not None and precip_propability != ''
            and float(precip_propability) < self.propability_minimum
            and precip_intensity is not None and precip_intensity != ''
            and float(precip_intensity) < self.intensity_minimum
        )
        if rain_unlikely:
            # Ask the user to water; reset the acknowledgement flag first.
            self.turn_off(self.reminder_acknowledged_entity)
            self.log('Setting reminder_acknowledged to: {}'.format('off'))
            self.log('Reminding user')
            keyboard = [[('Hab ich gemacht', self.keyboard_callback)]]
            self.call_service('telegram_bot/send_message', target=self.user_id, message=self.message.format(precip_propability), inline_keyboard=keyboard)
        else:
            # Rain expected: no watering needed, auto-acknowledge.
            self.turn_on(self.reminder_acknowledged_entity)
            # BUGFIX: this previously logged 'off' although the entity is turned on.
            self.log('Setting reminder_acknowledged to: {}'.format('on'))
            self.log('Notifying user')
            self.notifier.notify(self.notify_name, self.message_not_needed.format(precip_propability, precip_intensity))

    def run_evening_callback(self, kwargs):
        """Evening follow-up when the morning reminder was not acknowledged."""
        if self.get_state(self.app_switch) == 'on':
            if self.get_state(self.reminder_acknowledged_entity) == 'off':
                self.log('Reminding user')
                self.call_service('notify/' + self.notify_name, message=self.message_evening)

    def receive_telegram_callback(self, event_name, data, kwargs):
        """Handle the Telegram inline-keyboard acknowledgement."""
        assert (event_name == 'telegram_callback')
        data_callback = data['data']
        callback_id = data['id']
        chat_id = data['chat_id']
        message_id = data['message']['message_id']
        text = data['message']['text']
        self.log('callback data: {}'.format(data), level='DEBUG')
        if data_callback == self.keyboard_callback:
            self.call_service('telegram_bot/answer_callback_query', message='Super!', callback_query_id=callback_id)
            self.turn_on(self.reminder_acknowledged_entity)
            self.log('Setting reminder_acknowledged to: {}'.format('on'))
            # Append a done-at timestamp to the original message and drop the keyboard.
            self.call_service('telegram_bot/edit_message', chat_id=chat_id, message_id=message_id, message=(text + ' Hast du um {}:{} erledigt.'.format(datetime.datetime.now().hour, datetime.datetime.now().minute)), inline_keyboard=[])

    def terminate(self):
        """Cancel every registered timer and listener on app shutdown."""
        for timer_handle in self.timer_handle_list:
            self.cancel_timer(timer_handle)
        for listen_event_handle in self.listen_event_handle_list:
            self.cancel_listen_event(listen_event_handle)
        for listen_state_handle in self.listen_state_handle_list:
            self.cancel_listen_state(listen_state_handle)
class EventT(Generic[T], AsyncContextManager):
    """Abstract event type: a consumed message paired with its deserialized
    key/value, usable as an async context manager.

    Concrete implementations provide send/forward/ack; the method bodies
    here are interface stubs.
    """

    app: _AppT  # owning application instance
    key: K  # deserialized message key
    value: V  # deserialized message value
    headers: Mapping  # message headers
    message: Message  # underlying transport message
    acked: bool  # whether the message has been acknowledged
    __slots__ = ('app', 'key', 'value', 'headers', 'message', 'acked')

    def __init__(self, app: _AppT, key: K, value: V, headers: Optional[HeadersArg], message: Message) -> None:
        ...

    async def send(self, channel: Union[(str, _ChannelT)], key: K=None, value: V=None, partition: Optional[int]=None, timestamp: Optional[float]=None, headers: HeadersArg=None, schema: Optional[_SchemaT]=None, key_serializer: CodecArg=None, value_serializer: CodecArg=None, callback: Optional[MessageSentCallback]=None, force: bool=False) -> Awaitable[RecordMetadata]:
        """Send a message to *channel*; returns an awaitable of RecordMetadata."""
        ...

    async def forward(self, channel: Union[(str, _ChannelT)], key: Any=None, value: Any=None, partition: Optional[int]=None, timestamp: Optional[float]=None, headers: HeadersArg=None, schema: Optional[_SchemaT]=None, key_serializer: CodecArg=None, value_serializer: CodecArg=None, callback: Optional[MessageSentCallback]=None, force: bool=False) -> Awaitable[RecordMetadata]:
        """Forward this event to *channel* (same signature as send, Any-typed key/value)."""
        ...

    def ack(self) -> bool:
        """Acknowledge the underlying message; bool result defined by the implementation."""
        ...
@pytest.mark.skip
@pytest.mark.django_db
def test_award_category_endpoint(client, award_spending_data):
    """Award-category spending endpoint returns aggregated results and 400s
    when the required query parameters are missing.

    NOTE(review): the markers were garbled to bare '.skip' / '.django_db' in
    the extracted source; restored the '@pytest.mark' prefix.
    """
    resp = client.get('/api/v2/award_spending/award_category/?fiscal_year=2017&awarding_agency_id=111')
    assert (resp.status_code == status.HTTP_200_OK)
    assert (len(resp.data['results']) == 3)
    assert (float(resp.data['results'][0]['obligated_amount']) == 40)
    # Missing required query parameters must be rejected.
    resp = client.get('/api/v2/award_spending/award_category/')
    assert (resp.status_code == status.HTTP_400_BAD_REQUEST)
def get_number_of_almost_duplicated_columns(dataset: pd.DataFrame, threshold: float) -> int:
    """Count unordered column pairs whose values agree on at least ``threshold`` of rows.

    Columns are only compared when their dtype names match; two categorical
    columns additionally require identical category lists.

    :param dataset: frame whose columns are compared pairwise.
    :param threshold: minimum fraction (0..1) of equal rows for a pair to
        count as "almost duplicated".
    :return: number of almost-duplicated pairs; 0 for a frame with no rows
        (previously raised ZeroDivisionError).
    """
    total_rows = dataset.shape[0]
    if total_rows == 0:
        # Match fraction is undefined without rows; treat as no duplicates.
        return 0
    result = 0
    checked_columns = set()
    for column_name_1 in dataset.columns:
        checked_columns.add(column_name_1)
        for column_name_2 in dataset.columns:
            if column_name_2 in checked_columns:
                continue
            if dataset[column_name_1].dtype.name != dataset[column_name_2].dtype.name:
                continue
            if isinstance(dataset[column_name_1].dtype, pd.CategoricalDtype) and isinstance(dataset[column_name_2].dtype, pd.CategoricalDtype):
                # Different category sets can never be (almost) duplicates.
                if dataset[column_name_1].cat.categories.tolist() != dataset[column_name_2].cat.categories.tolist():
                    continue
            score = dataset[column_name_1].eq(dataset[column_name_2]).sum() / total_rows
            if score >= threshold:
                result += 1
    return result
def main():
    """Interactive prompt loop demonstrating readline subcommand completion."""
    commands = {
        'file': {},
        'eat': {'breakfast', 'dinner', 'lunch', 'snack'},
        'play': {'cards', 'chess', 'go'},
        'walk': {'left', 'right', 'straight'},
    }
    readline.parse_and_bind('tab: complete')
    readline.set_completer(make_subcommand_completer(commands))
    readline.set_completer_delims(' \t\n"\\\'`$><=;|&{(')
    try:
        while True:
            line = input('>> ').strip()
            print('[{0}]'.format(line))
    except (EOFError, KeyboardInterrupt):
        # Ctrl-D / Ctrl-C ends the loop gracefully.
        print('\nShutting down...')
@pytest.fixture(scope='session')
def setup_install_nftables():
    """Session-scoped fixture: install nftables and build a fresh inet filter
    table with input/forward/output chains.

    NOTE(review): the decorator lost its '@pytest.fixture' prefix in the
    extracted source (only "(scope='session')" remained); restored based on
    the fixture-style yield.
    """
    shellexec('yum install -y nftables')
    shellexec('nft delete table inet filter')
    shellexec('nft create table inet filter')
    shellexec('nft create chain inet filter input { type filter hook input priority 0 \\; }')
    shellexec('nft create chain inet filter forward { type filter hook forward priority 0 \\; }')
    shellexec('nft create chain inet filter output { type filter hook output priority 0 \\; }')
    (yield None)
class OptionSeriesGaugeTooltip(Options):
    """Highcharts ``series.gauge.tooltip`` options.

    NOTE(review): each getter/setter pair carried a duplicate method name, so
    every setter silently shadowed its getter; restored the
    ``@property`` / ``@<name>.setter`` decorators this accessor pattern
    requires. Getter arguments to ``_config_get`` are the documented defaults.
    """

    @property
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')

    @clusterFormat.setter
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateTimeLabelFormats(self) -> 'OptionSeriesGaugeTooltipDatetimelabelformats':
        # Sub-options container; read-only accessor.
        return self._config_sub_data('dateTimeLabelFormats', OptionSeriesGaugeTooltipDatetimelabelformats)

    @property
    def distance(self):
        return self._config_get(16)

    @distance.setter
    def distance(self, num: float):
        self._config(num, js_type=False)

    @property
    def followPointer(self):
        return self._config_get(False)

    @followPointer.setter
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def followTouchMove(self):
        return self._config_get(True)

    @followTouchMove.setter
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def footerFormat(self):
        return self._config_get('')

    @footerFormat.setter
    def footerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def format(self):
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def headerFormat(self):
        return self._config_get('')

    @headerFormat.setter
    def headerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointFormat(self):
        return self._config_get(None)

    @pointFormat.setter
    def pointFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointFormatter(self):
        return self._config_get(None)

    @pointFormatter.setter
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valuePrefix(self):
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)

    @property
    def xDateFormat(self):
        return self._config_get(None)

    @xDateFormat.setter
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
def test_fence_from_almost_vertical_polygon():
    """A fence sampled from a nearly vertical polygon keeps regular spacing."""
    pol = Polygons()
    frame = {
        pol.xname: [0.1, 0.2, 0.3],
        pol.yname: [100.1, 100.2, 100.3],
        pol.zname: [0, 50, 60],
        pol.pname: [1, 1, 1],
    }
    pol.dataframe = pd.DataFrame(frame)
    fence = pol.get_fence(distance=10, nextend=1, name='SOMENAME', asnumpy=False, atleast=3)
    result = fence.dataframe
    # 145 sampled points with a near-constant horizontal step of ~0.1414.
    assert len(result) == 145
    assert result.H_DELTALEN.mean() == pytest.approx(0.1414, abs=0.01)
    assert result.H_DELTALEN.std() <= 0.001
    # Cumulative length spans roughly -10..+10 around the polygon.
    assert result.H_CUMLEN.max() == pytest.approx(10.0, abs=0.5)
    assert result.H_CUMLEN.min() == pytest.approx(-10.0, abs=0.5)
class PhantomSet(sublime.PhantomSet):
    """sublime.PhantomSet variant that routes phantom lifetime through the
    module-level add_phantom / erase_phantom_by_id / query_phantoms helpers."""

    def __init__(self, view, key=''):
        super().__init__(view, key)

    def __del__(self):
        # Remove every phantom this set still owns when it is destroyed.
        for p in self.phantoms:
            erase_phantom_by_id(self.view, p.id)

    def update(self, new_phantoms):
        """Reconcile the displayed phantoms with *new_phantoms*.

        Refreshes cached regions from the view, reuses ids of phantoms that
        compare equal to existing ones, creates the rest, then erases
        phantoms that are no longer wanted.
        """
        # Refresh the cached region of every live phantom from the view.
        regions = query_phantoms(self.view, [p.id for p in self.phantoms])
        for i in range(len(regions)):
            self.phantoms[i].region = regions[i]
        count = 0
        for p in new_phantoms:
            if (not isinstance(p, Phantom)):
                # Coerce a plain sublime.Phantom into the local Phantom type
                # (md=False: content is treated as raw markup, not markdown).
                p = Phantom(p.region, p.content, p.layout, md=False, css=None, on_navigate=p.on_navigate, wrapper_class=None, template_vars=None, template_env_options=None)
                new_phantoms[count] = p
            try:
                # Reuse the id of an equal existing phantom instead of re-adding.
                idx = self.phantoms.index(p)
                p.id = self.phantoms[idx].id
            except ValueError:
                p.id = add_phantom(self.view, self.key, p.region, p.content, p.layout, p.md, p.css, p.on_navigate, p.wrapper_class, p.template_vars, p.template_env_options)
            count += 1
        for p in self.phantoms:
            # Region(-1) marks a phantom that is already gone from the view.
            if ((p not in new_phantoms) and (p.region != sublime.Region((- 1)))):
                erase_phantom_by_id(self.view, p.id)
        self.phantoms = new_phantoms
class OneOf(PythonType):
    """A literal type: matches only the exact values in ``values``
    (Literal[...] semantics)."""

    values: typing.Sequence

    def __init__(self, values):
        self.values = values

    def __le__(self, other):
        # OneOf <= OneOf is subset inclusion; OneOf <= other type holds when
        # every literal value is a valid instance of the other type.
        if isinstance(other, OneOf):
            return (set(self.values) <= set(other.values))
        elif isinstance(other, PythonType):
            try:
                for v in self.values:
                    other.validate_instance(v)
            except TypeMismatchError:
                return False
            return True
        return NotImplemented

    def __ge__(self, other):
        if isinstance(other, OneOf):
            return (set(self.values) >= set(other.values))
        elif isinstance(other, PythonType):
            # A non-literal type is never a subtype of a finite literal set.
            return False
        return NotImplemented

    def validate_instance(self, obj, sampler=None):
        """Raise TypeMismatchError unless *obj* equals one of the allowed values."""
        # Signal the type-checking context var while comparing values.
        tok = cv_type_checking.set(True)
        try:
            if (obj not in self.values):
                raise TypeMismatchError(obj, self)
        finally:
            cv_type_checking.reset(tok)

    def __repr__(self):
        return ('Literal[%s]' % ', '.join(map(repr, self.values)))

    def cast_from(self, obj):
        """Validate *obj* and return it unchanged.

        Fix: previously fell off the end and returned None, so casting to a
        literal type silently discarded the value.
        """
        if (obj not in self.values):
            raise TypeMismatchError(obj, self)
        return obj
class TestDialogues(BaseSkillTestCase):
    """Tests for the dialogue classes of the tac_negotiation skill."""

    path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'tac_negotiation')

    # NOTE(review): takes 'cls' but has no @classmethod decorator — likely
    # stripped during extraction; confirm against the original source.
    def setup(cls):
        """Bind the skill-context dialogue registries onto the test class."""
        super().setup()
        cls.fipa_dialogues = cast(FipaDialogues, cls._skill.skill_context.fipa_dialogues)
        cls.contract_api_dialogues = cast(ContractApiDialogues, cls._skill.skill_context.contract_api_dialogues)
        cls.cosm_trade_dialogues = cast(CosmTradeDialogues, cls._skill.skill_context.cosm_trade_dialogues)
        cls.default_dialogues = cast(DefaultDialogues, cls._skill.skill_context.default_dialogues)
        cls.signing_dialogues = cast(SigningDialogues, cls._skill.skill_context.signing_dialogues)
        cls.ledger_api_dialogues = cast(LedgerApiDialogues, cls._skill.skill_context.ledger_api_dialogues)
        cls.oef_search_dialogues = cast(OefSearchDialogues, cls._skill.skill_context.oef_search_dialogues)
        cls.query = Query([Constraint('some_attribute', ConstraintType('==', 'some_service'))], DataModel(SUPPLY_DATAMODEL_NAME, [Attribute('some_attribute', str, False, 'Some attribute descriptions.')]))

    def test_fipa_dialogue(self):
        """FipaDialogue's set-once properties raise before set and on re-set."""
        fipa_dialogue = FipaDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=FipaDialogue.Role.BUYER)
        # counterparty_signature: unread -> ValueError, re-set -> AEAEnforceError.
        with pytest.raises(ValueError, match='counterparty_signature not set!'):
            assert fipa_dialogue.counterparty_signature
        fipa_dialogue.counterparty_signature = 'some_counterparty_signature'
        with pytest.raises(AEAEnforceError, match='counterparty_signature already set!'):
            fipa_dialogue.counterparty_signature = 'some_other_counterparty_signature'
        assert (fipa_dialogue.counterparty_signature == 'some_counterparty_signature')
        # proposal behaves the same way.
        with pytest.raises(ValueError, match='Proposal not set!'):
            assert fipa_dialogue.proposal
        description = Description({'foo1': 1, 'bar1': 2})
        fipa_dialogue.proposal = description
        with pytest.raises(AEAEnforceError, match='Proposal already set!'):
            fipa_dialogue.proposal = description
        assert (fipa_dialogue.proposal == description)
        # terms behaves the same way.
        with pytest.raises(ValueError, match='Terms not set!'):
            assert fipa_dialogue.terms
        terms = Terms('some_ledger_id', self.skill.skill_context.agent_address, 'counterprty', {'currency_id': 50}, {'good_id': (- 10)}, 'some_nonce')
        fipa_dialogue.terms = terms
        with pytest.raises(AEAEnforceError, match='Terms already set!'):
            fipa_dialogue.terms = terms
        assert (fipa_dialogue.terms == terms)

    def test_fipa_dialogues(self):
        """Creating a FIPA dialogue assigns BUYER role and the agent address."""
        (_, dialogue) = self.fipa_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=FipaMessage.Performative.CFP, query=self.query)
        assert (dialogue.role == FipaDialogue.Role.BUYER)
        assert (dialogue.self_address == self.skill.skill_context.agent_address)

    def test_contract_api_dialogue(self):
        """ContractApiDialogue's associated_fipa_dialogue is set-once."""
        contract_api_dialogue = ContractApiDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=ContractApiDialogue.Role.AGENT)
        with pytest.raises(ValueError, match='associated_fipa_dialogue not set!'):
            assert contract_api_dialogue.associated_fipa_dialogue
        fipa_dialogue = FipaDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=FipaDialogue.Role.BUYER)
        contract_api_dialogue.associated_fipa_dialogue = fipa_dialogue
        with pytest.raises(AEAEnforceError, match='associated_fipa_dialogue already set!'):
            contract_api_dialogue.associated_fipa_dialogue = fipa_dialogue
        assert (contract_api_dialogue.associated_fipa_dialogue == fipa_dialogue)

    def test_contract_api_dialogues(self):
        """Contract-api dialogues self-address is the skill id, role AGENT."""
        (_, dialogue) = self.contract_api_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=ContractApiMessage.Performative.GET_DEPLOY_TRANSACTION, ledger_id='some_ledger_id', contract_id='some_contract_id', callable='some_callable', kwargs=Kwargs({'some_key': 'some_value'}))
        assert (dialogue.role == ContractApiDialogue.Role.AGENT)
        assert (dialogue.self_address == str(self.skill.skill_context.skill_id))

    def test_cosm_trade_dialogues(self):
        """Cosm-trade dialogues use the agent address and AGENT role."""
        (_, dialogue) = self.cosm_trade_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=CosmTradeMessage.Performative.INFORM_PUBLIC_KEY, public_key='some_public_key')
        assert (dialogue.role == CosmTradeDialogue.Role.AGENT)
        assert (dialogue.self_address == self.skill.skill_context.agent_address)

    def test_default_dialogues(self):
        """Default dialogues use the agent address and AGENT role."""
        (_, dialogue) = self.default_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=DefaultMessage.Performative.BYTES, content=b'some_content')
        assert (dialogue.role == DefaultDialogue.Role.AGENT)
        assert (dialogue.self_address == self.skill.skill_context.agent_address)

    def test_ledger_api_dialogue(self):
        """LedgerApiDialogue's associated_signing_dialogue is set-once."""
        # NOTE(review): role taken from ContractApiDialogue here — possibly
        # intended to be LedgerApiDialogue.Role.AGENT; confirm upstream.
        ledger_api_dialogue = LedgerApiDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=ContractApiDialogue.Role.AGENT)
        with pytest.raises(ValueError, match='Associated signing dialogue not set!'):
            assert ledger_api_dialogue.associated_signing_dialogue
        signing_dialogue = SigningDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=SigningDialogue.Role.SKILL)
        ledger_api_dialogue.associated_signing_dialogue = signing_dialogue
        with pytest.raises(AEAEnforceError, match='Associated signing dialogue already set!'):
            ledger_api_dialogue.associated_signing_dialogue = signing_dialogue
        assert (ledger_api_dialogue.associated_signing_dialogue == signing_dialogue)

    def test_ledger_api_dialogues(self):
        """Ledger-api dialogues self-address is the skill id, role AGENT."""
        (_, dialogue) = self.ledger_api_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=LedgerApiMessage.Performative.GET_BALANCE, ledger_id='some_ledger_id', address='some_address')
        assert (dialogue.role == LedgerApiDialogue.Role.AGENT)
        assert (dialogue.self_address == str(self.skill.skill_context.skill_id))

    def test_oef_search_dialogue(self):
        """OefSearchDialogue's is_seller_search flag is set-once."""
        oef_search_dialogue = OefSearchDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=ContractApiDialogue.Role.AGENT)
        with pytest.raises(ValueError, match='is_seller_search not set!'):
            assert oef_search_dialogue.is_seller_search
        oef_search_dialogue.is_seller_search = True
        with pytest.raises(AEAEnforceError, match='is_seller_search already set!'):
            oef_search_dialogue.is_seller_search = False
        assert (oef_search_dialogue.is_seller_search is True)

    def test_oef_search_dialogues(self):
        """OEF-search dialogues self-address is the skill id, role AGENT."""
        (_, dialogue) = self.oef_search_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=OefSearchMessage.Performative.SEARCH_SERVICES, query=self.query)
        assert (dialogue.role == OefSearchDialogue.Role.AGENT)
        assert (dialogue.self_address == str(self.skill.skill_context.skill_id))

    def test_signing_dialogue(self):
        """SigningDialogue's associated fipa/cosm-trade dialogues are set-once."""
        signing_dialogue = SigningDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=ContractApiDialogue.Role.AGENT)
        with pytest.raises(ValueError, match='associated_fipa_dialogue not set!'):
            assert signing_dialogue.associated_fipa_dialogue
        fipa_dialogue = FipaDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=FipaDialogue.Role.BUYER)
        signing_dialogue.associated_fipa_dialogue = fipa_dialogue
        with pytest.raises(AEAEnforceError, match='associated_fipa_dialogue already set!'):
            signing_dialogue.associated_fipa_dialogue = fipa_dialogue
        assert (signing_dialogue.associated_fipa_dialogue == fipa_dialogue)
        cosm_trade_dialogue = CosmTradeDialogue(DialogueLabel(('', ''), COUNTERPARTY_AGENT_ADDRESS, self.skill.skill_context.agent_address), self.skill.skill_context.agent_address, role=CosmTradeDialogue.Role.AGENT)
        signing_dialogue.associated_cosm_trade_dialogue = cosm_trade_dialogue
        with pytest.raises(AEAEnforceError, match='associated_cosm_trade_dialogue already set!'):
            signing_dialogue.associated_cosm_trade_dialogue = cosm_trade_dialogue
        assert (signing_dialogue.associated_cosm_trade_dialogue == cosm_trade_dialogue)

    def test_signing_dialogues(self):
        """Signing dialogues self-address is the skill id, role SKILL."""
        (_, dialogue) = self.signing_dialogues.create(counterparty=COUNTERPARTY_AGENT_ADDRESS, performative=SigningMessage.Performative.SIGN_TRANSACTION, terms=Terms('some_ledger_id', 'some_sender_address', 'some_counterparty_address', dict(), dict(), 'some_nonce'), raw_transaction=RawTransaction('some_ledger_id', {'some_key': 'some_value'}))
        assert (dialogue.role == SigningDialogue.Role.SKILL)
        assert (dialogue.self_address == str(self.skill.skill_context.skill_id))
class OptionPlotoptionsVariablepieSonificationContexttracksMappingHighpass(Options):
    """Highpass filter mapping options (frequency/resonance sub-options).

    NOTE(review): restored the ``@property`` decorators this accessor
    pattern requires — the bare methods would have needed call parentheses.
    """

    @property
    def frequency(self) -> 'OptionPlotoptionsVariablepieSonificationContexttracksMappingHighpassFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsVariablepieSonificationContexttracksMappingHighpassFrequency)

    @property
    def resonance(self) -> 'OptionPlotoptionsVariablepieSonificationContexttracksMappingHighpassResonance':
        return self._config_sub_data('resonance', OptionPlotoptionsVariablepieSonificationContexttracksMappingHighpassResonance)
def parse_esysinfo(stdout):
    """Parse the '||||'-separated esysinfo command output into one dict.

    Sections by index:
      0: sysinfo JSON, 1: imgadm sources JSON, 2: disk listing,
      3: zpool name/size lines, 4: zpool used/avail lines, 5: zpool status,
      6: config text, 7: ssh key, 8: initial image list JSON,
      9: nic tag table (optional), 10: overlay rules JSON (optional).

    Returns a dict with keys sysinfo, diskinfo, zpools, config, sshkey,
    img_sources, img_initial, nictags, overlay_rules.
    """
    x = stdout.split('||||')
    num_items = len(x)
    sysinfo = json.loads(x[0])
    img_sources = _parse_imgadm_sources(json.loads(x[1]))
    diskinfo = {}
    zpools = {}
    config = x[6].strip()
    sshkey = x[7].strip()
    nictags = []
    overlay_rules = {}
    img_initial_raw = x[8].strip()
    if img_initial_raw:
        try:
            img_initial = json.loads(img_initial_raw)
        except ValueError as exc:
            # Malformed JSON is logged but not fatal.
            logger.exception(exc)
            img_initial = None
    else:
        img_initial = None
    # Disk listing: "<type> <name> <VID> <PID...> <size> <RMV> <SSD>";
    # PID may contain spaces, hence the join over the middle fields.
    # Sizes are stored in MiB (t_long presumably converts the raw size
    # field to bytes — TODO confirm).
    for i in x[2].strip().splitlines():
        disk = i.split()
        diskinfo[disk[1]] = {'type': disk[0], 'VID': disk[2], 'PID': ' '.join(disk[3:(- 3)]), 'size': int((t_long(disk[(- 3)]) / 1048576)), 'RMV': disk[(- 2)], 'SSD': disk[(- 1)]}
    # zpool list sizes (section 3).
    for i in x[3].strip().splitlines():
        (pool, size) = map(str.strip, str(i).split())
        zpools[pool] = {'size': int((t_long(size) / 1048576))}
    # Prefer used+avail as the size when available (section 4 overrides 3).
    for i in x[4].strip().splitlines():
        (pool, used, avail) = map(str.strip, str(i).split())
        zpools[pool]['size'] = int(((t_long(used) + t_long(avail)) / 1048576))
    # 'zpool status' output split on its 'pool:' headers.
    for i in re.split('\\s*pool: ', x[5].strip()):
        i = i.strip()
        if i:
            (pool, data) = _parse_zpool_status(i)
            zpools[pool]['config'] = data
    # Optional sections emitted by newer collectors.
    if (num_items >= 10):
        # '|'-separated nictag rows; '-' marks a missing value.
        for i in x[9].strip().splitlines():
            (name, mac, link, typ) = map((lambda c: (None if (c == '-') else c)), map(str.strip, str(i).split('|')))
            nictags.append({'name': name, 'mac': mac, 'link': link, 'type': typ})
    if (num_items >= 11):
        overlay_rules_raw = x[10].strip()
        if overlay_rules_raw:
            try:
                rules = json.loads(overlay_rules_raw)
            except ValueError as exc:
                logger.exception(exc)
            else:
                overlay_rules = _parse_overlay_rules(rules)
    return {'sysinfo': sysinfo, 'diskinfo': diskinfo, 'zpools': zpools, 'config': config, 'sshkey': sshkey, 'img_sources': img_sources, 'img_initial': img_initial, 'nictags': nictags, 'overlay_rules': overlay_rules}
def linefunc(P1: Optimized_Point3D[Optimized_Field], P2: Optimized_Point3D[Optimized_Field], T: Optimized_Point3D[Optimized_Field]) -> Optimized_Point2D[Optimized_Field]:
    """Evaluate the line through projective points P1 and P2 at point T.

    The result is returned as a (numerator, denominator) pair so no field
    inversion is needed.  Three cases:
      * distinct x: chord with slope (y2*z1 - y1*z2) / (x2*z1 - x1*z2)
      * equal x and equal y: tangent with slope 3*x1^2 / (2*y1) in
        projective form
      * equal x, different y: vertical line, evaluated as xt/zt - x1/z1
    """
    zero = P1[0].zero()
    (x1, y1, z1) = P1
    (x2, y2, z2) = P2
    (xt, yt, zt) = T
    # Slope of the line through P1 and P2, kept as a projective fraction.
    m_numerator = ((y2 * z1) - (y1 * z2))
    m_denominator = ((x2 * z1) - (x1 * z2))
    if (m_denominator != zero):
        # Chord case: m*(xt - x1) - (yt - y1), cleared of denominators.
        return (((m_numerator * ((xt * z1) - (x1 * zt))) - (m_denominator * ((yt * z1) - (y1 * zt)))), ((m_denominator * zt) * z1))
    elif (m_numerator == zero):
        # Tangent case (P1 == P2): slope from the curve derivative.
        m_numerator = ((3 * x1) * x1)
        m_denominator = ((2 * y1) * z1)
        return (((m_numerator * ((xt * z1) - (x1 * zt))) - (m_denominator * ((yt * z1) - (y1 * zt)))), ((m_denominator * zt) * z1))
    else:
        # Vertical line case (P1 == -P2): evaluate xt - x1 projectively.
        return (((xt * z1) - (x1 * zt)), (z1 * zt))
class RuleManager():
    """Per-netloc URL badword filtering built from the WebMirror rulesets.

    Each netloc maps to its ruleset badwords merged with the global list;
    URLs for unknown netlocs are checked against the global list only.
    """

    def __init__(self):
        self.rules = WebMirror.rules.load_rules()
        self.global_bad = [entry.lower() for entry in common.global_constants.GLOBAL_BAD_URLS]
        self.nl_badwords_map = {}
        for ruleset in self.rules:
            if not ruleset['netlocs']:
                continue
            for netloc in ruleset['netlocs']:
                # Merge global + ruleset badwords, drop falsy entries,
                # lowercase and deduplicate.
                merged = {entry.lower() for entry in (self.global_bad + ruleset['badwords']) if entry}
                self.nl_badwords_map[netloc] = list(merged)

    def is_bad(self, netloc, url):
        """True when *url* contains a badword for *netloc* (global badwords
        are used for netlocs without a specific ruleset)."""
        specific = self.nl_badwords_map.get(netloc)
        if specific is None:
            return any(badword in url for badword in self.global_bad)
        return any(badword in url for badword in specific)
# NOTE(review): the marker below lost its '@pytest.mark' prefix in the
# extracted source (only '.parametrize(...)' remained); restored.
@pytest.mark.parametrize('seed,path,key', [
    ('a0b0c0d0e0f', 'm', 'e8f32e723decf4051aefac8e2c93c9c5bcdb01a1494b917c8436b35'),
    ('a0b0c0d0e0f', 'm/0H', 'edb2e14f9ee77d26dd93b4ecede8d16ed408ce149b6cd80b0715a2d911a0afea'),
    ('a0b0c0d0e0f', 'm/0H/1', '3c6cb8d0f6a264c91ea8b5030fadaa8e538b020f0a387421a12de9319dc93368'),
    ('a0b0c0d0e0f', 'm/0H/1/2H', 'cbce0d719ecf7431d88e6a89fa1483e02e35092af60c042b1df2ff59fa424dca'),
    ('a0b0c0d0e0f', 'm/0H/1/2H/2', '0f479245fb19a38a1954c5c7c0ebab2f9bdfd96a17563ef28a6a4b1a2a764ef4'),
    ('a0b0c0d0e0f', 'm/0H/1/2H/2/', '471b76e389e528d6de6d816857e012c5455051cad6660850e58372a6c3e6e7c8'),
    ('fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9cd8a8784817e7b7875726f6cd5a5754514e4b484542', 'm', '4b03d6fc340455b363f51020ad3ecca4f0850280cf436c70c727923f6db46c3e'),
    ('fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9cd8a8784817e7b7875726f6cd5a5754514e4b484542', 'm/0', 'abe74a98f6c7eabee0428f53798f0ab8aa1bdc742f15ac7e1e'),
    ('fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9cd8a8784817e7b7875726f6cd5a5754514e4b484542', 'm/0/H', '877c779ad9687164e9c2f4f0f4ffce95a58fe18fd52e6e93'),
    ('fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9cd8a8784817e7b7875726f6cd5a5754514e4b484542', 'm/0/H/1', '704addf544a06e5ee4beac23613da32020d604506da8c0518e1da4b7'),
    ('fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9cd8a8784817e7b7875726f6cd5a5754514e4b484542', 'm/0/H/1/H', 'f1c7c871a54a804afe328b4c83a1c33b8e5ff48f5087273f04efa83b247d6a2d'),
    ('fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9cd8a8784817e7b7875726f6cd5a5754514e4b484542', 'm/0/H/1/H/2', 'bb7d39bdb83ecf58f2fd82b6d918341cbef428661ef01ab97c28a4842125ac23'),
    ('4bbe4423346c643850da4b320e46a87ae3d2a4e6da11eba819cd4acba45d239319ac14f863b8d5ab5a0d0c64d2e8a1e7d1457df2e5a3c51c73235be', 'm', '00ddb80b067e0d4993197fe10f2657a844ad56f0c629c81aae32'),
    ('4bbe4423346c643850da4b320e46a87ae3d2a4e6da11eba819cd4acba45d239319ac14f863b8d5ab5a0d0c64d2e8a1e7d1457df2e5a3c51c73235be', 'm/0H', '491f7a2eebc7b57028e0d3faa0acda02e75c33b03c48fb288c41e2ea44e1daef'),
])
def test_bip32_testvectors(seed, path, key):
    """Deriving *path* from *seed* yields the expected key material (hex)."""
    assert (HDPath(path).derive(bytes.fromhex(seed)).hex() == key)
def _format_list(x):
if isinstance(x, (list, tuple)):
if isinstance(x[0], datetime.datetime):
is_regular = True
delta = (x[1] - x[0])
for (prev, current) in zip(x[:(- 1)], x[1:]):
if ((current - prev) != delta):
is_regular = False
break
if is_regular:
return f'{_format_list(x[0])}/to/{_format_list(x[(- 1)])}/by/{(delta.total_seconds() / 3600)}'
txt = '/'.join((_format_list(_) for _ in x))
if (len(txt) > 200):
txt = ((txt[:50] + '...') + txt[(- 50):])
return txt
if isinstance(x, datetime.datetime):
return x.strftime('%Y-%m-%d.%H:%M')
return str(x) |
class TestMap3(_MapTest):
    """Display, pathfinding and spawn tests for MAP3 on the _MapTest fixture."""

    map_data = {'map': MAP3, 'zcoord': 'map3'}
    map_display = MAP3_DISPLAY

    def test_str_output(self):
        """The rendered map (right-stripped, '||' collapsed) matches MAP3_DISPLAY."""
        stripped_map = '\n'.join((line.rstrip() for line in str(self.map).split('\n')))
        self.assertEqual(MAP3_DISPLAY, stripped_map.replace('||', '|'))
    # NOTE(review): the bare tuple below looks like the argument list of a
    # stripped parametrization decorator (e.g. @parameterized.expand); as
    # written it is a no-op expression and test_shortest_path never receives
    # these cases — confirm against the original source.
    ([((0, 0), (1, 0), ()), ((2, 0), (5, 0), ('e', 'e')), ((0, 0), (1, 1), ('ne',)), ((4, 1), (4, 3), ('nw', 'ne')), ((4, 1), (4, 3), ('nw', 'ne')), ((2, 2), (3, 5), ('nw', 'ne')), ((2, 2), (1, 5), ('nw', 'n', 'n')), ((5, 5), (0, 0), ('sw', 's', 'sw', 'w', 'sw', 'sw')), ((5, 5), (0, 0), ('sw', 's', 'sw', 'w', 'sw', 'sw')), ((5, 2), (1, 2), ('sw', 'nw', 'w', 'nw', 's')), ((4, 1), (1, 1), ('s', 'w', 'nw'))])
    def test_shortest_path(self, startcoord, endcoord, expected_directions):
        """Shortest path between two coordinates yields the expected direction tuple."""
        (directions, _) = self.map.get_shortest_path(startcoord, endcoord)
        self.assertEqual(expected_directions, tuple(directions))
    # NOTE(review): same stripped-decorator artifact as above.
    ([((2, 2), 2, None, ' # \n / \n # / \n |/ \n # #\n |\\ / \n # -# \n |/ \\ \n # #\n / \\ \n# # '), ((5, 2), 2, None, ' # \n | \n # \n / \\ \n# \n \\ / \n # \n | \n # ')])
    def test_get_visual_range__nodes__character(self, coord, dist, max_size, expected):
        """Visual range in 'nodes' mode renders the expected ASCII snippet."""
        mapstr = self.map.get_visual_range(coord, dist=dist, mode='nodes', character='', max_size=max_size)
        self.assertEqual(expected, mapstr.replace('||', '|'))

    def test_spawn(self):
        """Spawning the grid creates the expected number of rooms and exits."""
        self.grid.spawn()
        self.assertEqual(xyzroom.XYZRoom.objects.all().count(), 18)
        self.assertEqual(xyzroom.XYZExit.objects.all().count(), 44)
def add_numba_comments(code):
    """Return *code* reformatted with numba protection comments.

    A '# __protected__ from numba import njit' line is placed at the top and
    every function definition is preceded by a
    '# __protected__ (cache=True, fastmath=True)' line.

    NOTE(review): nodes that are not function definitions are not carried
    over into the rewritten module — confirm this is intended by the caller.
    """
    module = parse(code)
    rewritten = [CommentLine('# __protected__ from numba import njit')]
    for stmt in module.body:
        if not isinstance(stmt, gast.FunctionDef):
            continue
        rewritten.append(CommentLine('# __protected__ (cache=True, fastmath=True)'))
        rewritten.append(stmt)
    module.body = rewritten
    return format_str(unparse(module))
def _getAnchoredTemplate(template, wrap=(lambda s: ('{^LN-BEG}' + s))):
    """Return a line-begin anchored variant of date *template*, using
    DD_patternCache to avoid rebuilding identical templates.

    The anchored variant is looked up first by wrapped name, then (for
    pattern-based templates) by wrapped pattern, and finally built via
    _getPatternTemplate.
    """
    # Primary cache key: template name with the '{^LN-BEG}' anchor prepended.
    name = wrap(template.name)
    template2 = DD_patternCache.get(name)
    if (not template2):
        # Pattern-based templates also cache under their wrapped pattern;
        # otherwise the wrapped regex is used directly for construction.
        regex = wrap(getattr(template, 'pattern', template.regex))
        if hasattr(template, 'pattern'):
            template2 = DD_patternCache.get(regex)
        if (not template2):
            if (not hasattr(template, 'pattern')):
                template2 = _getPatternTemplate(name)
            else:
                template2 = _getPatternTemplate(regex)
    return template2
class SaaSConnector(BaseConnector[AuthenticatedClient]):
def __init__(self, configuration: ConnectionConfig):
    """Initialize the connector from *configuration*; a SaaS config must be present."""
    super().__init__(configuration)
    required_saas_config = configuration.get_saas_config()
    assert (required_saas_config is not None)
    self.saas_config = required_saas_config
    self.endpoints = self.saas_config.top_level_endpoint_dict
    self.secrets = cast(Dict, configuration.secrets)
    # Mutable per-request state, populated by the set_*_state() methods and
    # cleared by unset_connector_state().
    self.current_collection_name: Optional[str] = None
    self.current_privacy_request: Optional[PrivacyRequest] = None
    self.current_saas_request: Optional[SaaSRequest] = None
def query_config(self, node: TraversalNode) -> SaaSQueryConfig:
    """Build a SaaSQueryConfig for *node*; set_privacy_request_state must have run first."""
    privacy_request = self.current_privacy_request
    assert (privacy_request is not None)
    return SaaSQueryConfig(node, self.endpoints, self.secrets, self.saas_config.data_protection_request, privacy_request)
def get_client_config(self) -> ClientConfig:
    """Client config for the in-flight request, defaulting to the connector-level config."""
    request = self.current_saas_request
    assert (request is not None)
    if request.client_config:
        return request.client_config
    return self.saas_config.client_config
def get_rate_limit_config(self) -> Optional[RateLimitConfig]:
    """Rate-limit config for the in-flight request, falling back to the
    connector-level config (or None when neither is set)."""
    request = self.current_saas_request
    assert (request is not None)
    return (request.rate_limit_config or self.saas_config.rate_limit_config or None)
def set_privacy_request_state(self, privacy_request: PrivacyRequest, node: TraversalNode) -> None:
    """Remember the privacy request and the collection currently being traversed."""
    self.current_privacy_request = privacy_request
    self.current_collection_name = node.address.collection
def set_saas_request_state(self, current_saas_request: SaaSRequest) -> None:
    """Remember the SaaS request currently being processed."""
    self.current_saas_request = current_saas_request
def unset_connector_state(self) -> None:
    """Drop all per-request state once processing is finished."""
    self.current_saas_request = None
    self.current_privacy_request = None
    self.current_collection_name = None
def test_connection(self) -> Optional[ConnectionTestStatus]:
    """Send the configured test request; client errors propagate, otherwise succeeded."""
    test_request: SaaSRequest = self.saas_config.test_request
    self.set_saas_request_state(test_request)
    prepared_request = map_param_values('test', f'{self.configuration.name}', test_request, self.secrets)
    client: AuthenticatedClient = self.create_client()
    client.send(prepared_request, test_request.ignore_errors)
    self.unset_connector_state()
    return ConnectionTestStatus.succeeded
def build_uri(self) -> str:
    """Compose the base URI from the client config, resolving secret
    placeholders in the host."""
    config = self.get_client_config()
    resolved_host = assign_placeholders(config.host, self.secrets)
    return f'{config.protocol}://{resolved_host}'
def create_client(self) -> AuthenticatedClient:
    """Build an AuthenticatedClient for the current request's client and rate-limit config."""
    uri = self.build_uri()
    client_config = self.get_client_config()
    rate_limit_config = self.get_rate_limit_config()
    logger.info('Creating client to {}', uri)
    return AuthenticatedClient(uri, self.configuration, client_config, rate_limit_config)
def retrieve_data(self, node: TraversalNode, policy: Policy, privacy_request: PrivacyRequest, input_data: Dict[(str, List[Any])]) -> List[Row]:
    """Execute the node's read request(s) and return the resulting rows.

    Delete-only collections short-circuit with a single empty row so the
    erasure traversal can still visit them; missing dataset-reference
    inputs short-circuit with no rows.
    """
    self.set_privacy_request_state(privacy_request, node)
    query_config: SaaSQueryConfig = self.query_config(node)
    read_requests: List[SaaSRequest] = query_config.get_read_requests_by_identity()
    delete_request: Optional[SaaSRequest] = query_config.get_erasure_request_by_action('delete')
    if (not read_requests):
        if delete_request:
            logger.info("Skipping read for the '{}' collection, it is delete-only", self.current_collection_name)
            return [{}]
        raise FidesopsException(f"The 'read' action is not defined for the '{self.current_collection_name}' endpoint in {self.saas_config.fides_key}")
    # Expose cached custom privacy-request fields to the request templates.
    custom_privacy_request_fields = privacy_request.get_cached_custom_privacy_request_fields()
    if custom_privacy_request_fields:
        input_data[CUSTOM_PRIVACY_REQUEST_FIELDS] = [custom_privacy_request_fields]
    rows: List[Row] = []
    for read_request in read_requests:
        self.set_saas_request_state(read_request)
        if self._missing_dataset_reference_values(input_data, read_request.param_values):
            return []
        if read_request.request_override:
            # NOTE: an override returns immediately, skipping any remaining
            # read requests for this collection.
            return self._invoke_read_request_override(read_request.request_override, self.create_client(), policy, privacy_request, node, input_data, self.secrets)
        prepared_requests: List[SaaSRequestParams] = query_config.generate_requests(input_data, policy, read_request)
        # Follow pagination until no next request is produced.
        for next_request in prepared_requests:
            while next_request:
                (processed_rows, next_request) = self.execute_prepared_request(next_request, privacy_request.get_cached_identity_data(), read_request)
                rows.extend(processed_rows)
    self.unset_connector_state()
    return rows
def _missing_dataset_reference_values(self, input_data: Dict[(str, Any)], param_values: Optional[List[ParamValue]]) -> List[str]:
    """Return the dataset-reference param values absent from *input_data*.

    An empty list means nothing is missing.  Fix: the function previously
    fell off the end and implicitly returned None in that case, contradicting
    the declared ``List[str]`` return type; callers relying on truthiness are
    unaffected since an empty list is also falsy.
    """
    # Only param values backed by dataset references must be satisfied.
    required_param_value_references = [param_value.name for param_value in (param_values or []) if param_value.references]
    # Inputs may arrive grouped under 'fidesops_grouped_inputs' or flat.
    provided_input_keys = ((list(input_data.get('fidesops_grouped_inputs')[0].keys()) if input_data.get('fidesops_grouped_inputs') else []) + list(input_data.keys()))
    missing_dataset_reference_values = list((set(required_param_value_references) - set(provided_input_keys)))
    if missing_dataset_reference_values:
        logger.info("The '{}' request of {} is missing the following dataset reference values [{}], skipping traversal", self.current_collection_name, self.saas_config.fides_key, ', '.join(missing_dataset_reference_values))
    return missing_dataset_reference_values
def execute_prepared_request(self, prepared_request: SaaSRequestParams, identity_data: Dict[(str, Any)], saas_request: SaaSRequest) -> Tuple[(List[Row], Optional[SaaSRequestParams])]:
    """Send one prepared request; return (rows, next paginated request or None)."""
    client: AuthenticatedClient = self.create_client()
    response: Response = client.send(prepared_request, saas_request.ignore_errors)
    # Give error handling / unwrapping a chance to rewrite the response body.
    response = self._handle_errored_response(saas_request, response)
    response_data = self._unwrap_response_data(saas_request, response)
    rows = self.process_response_data(response_data, identity_data, cast(Optional[List[PostProcessorStrategy]], saas_request.postprocessors))
    logger.info("{} row(s) returned after postprocessing '{}' collection.", len(rows), self.current_collection_name)
    next_request = None
    if saas_request.pagination:
        # Derive the follow-up request from the configured pagination strategy.
        strategy: PaginationStrategy = PaginationStrategy.get_strategy(saas_request.pagination.strategy, saas_request.pagination.configuration)
        next_request = strategy.get_next_request(prepared_request, self.secrets, response, saas_request.data_path)
    if next_request:
        logger.info("Using '{}' pagination strategy to get next page for '{}'.", saas_request.pagination.strategy, self.current_collection_name)
    return (rows, next_request)
def process_response_data(self, response_data: Union[(List[Dict[(str, Any)]], Dict[(str, Any)])], identity_data: Dict[(str, Any)], postprocessors: Optional[List[PostProcessorStrategy]]) -> List[Row]:
    """Run the configured postprocessors over a response payload and normalize it to a list of rows.

    Returns [] when postprocessing yields a falsy value. Raises
    PostProcessingException when a postprocessor fails, or when the final
    value is neither a dict nor a list of dicts.
    """
    rows: List[Row] = []
    processed_data = response_data
    for postprocessor in (postprocessors or []):
        strategy: PostProcessorStrategy = PostProcessorStrategy.get_strategy(postprocessor.strategy, postprocessor.configuration)
        logger.info("Starting postprocessing of '{}' collection with '{}' strategy.", self.current_collection_name, postprocessor.strategy)
        try:
            processed_data = strategy.process(processed_data, identity_data)
        except Exception as exc:
            # Chain the original error so the root cause survives in tracebacks.
            raise PostProcessingException(f"Exception occurred during the '{postprocessor.strategy}' postprocessor on the '{self.current_collection_name}' collection: {exc}") from exc
    if (not processed_data):
        return rows
    if isinstance(processed_data, list):
        if (not all((isinstance(item, dict) for item in processed_data))):
            raise PostProcessingException('The list returned after postprocessing did not contain elements of the same type.')
        rows.extend(processed_data)
    elif isinstance(processed_data, dict):
        rows.append(processed_data)
    else:
        # Fixed grammar in the error message ("an dict" -> "a dict").
        raise PostProcessingException(f"Not enough information to continue processing. The result of postprocessing must be a dict or a list of dicts, found value of '{processed_data}'")
    return rows
def mask_data(self, node: TraversalNode, policy: Policy, privacy_request: PrivacyRequest, rows: List[Row], input_data: Dict[(str, List[Any])]) -> int:
    """Execute the collection's masking request for each row; return the count of rows updated.

    Raises when the collection has no usable masking request (e.g. ruled out
    by the MASKING_STRICT setting, per the error message below).
    """
    self.set_privacy_request_state(privacy_request, node)
    query_config = self.query_config(node)
    masking_request = query_config.get_masking_request()
    if (not masking_request):
        raise Exception(f'Either no masking request configured or no valid masking request for {node.address.collection}. Check that MASKING_STRICT env var is appropriately set')
    self.set_saas_request_state(masking_request)
    # A user-supplied override replaces the generic row-by-row flow entirely.
    if masking_request.request_override:
        return self._invoke_masking_request_override(masking_request.request_override, self.create_client(), policy, privacy_request, rows, query_config, masking_request, self.secrets)
    # Optionally unwrap nested row payloads (data_path) before postprocessing.
    if (masking_request.data_path and rows):
        unwrapped = []
        for row in rows:
            unwrapped.extend(pydash.get(row, masking_request.data_path))
        rows = unwrapped
    rows = self.process_response_data(rows, privacy_request.get_cached_identity_data(), cast(Optional[List[PostProcessorStrategy]], masking_request.postprocessors))
    rows_updated = 0
    client = self.create_client()
    for row in rows:
        try:
            prepared_request = query_config.generate_update_stmt(row, policy, privacy_request)
        except ValueError as exc:
            # Missing param values skip the row only when the request opts in.
            if masking_request.skip_missing_param_values:
                logger.info('Skipping optional masking request on node {}: {}', node.address.value, exc)
                continue
            raise exc
        client.send(prepared_request, masking_request.ignore_errors)
        rows_updated += 1
    self.unset_connector_state()
    return rows_updated
def relevant_consent_identities(matching_consent_requests: List[SaaSRequest], identity_data: Dict[(str, Any)]) -> Dict[(str, Any)]:
related_identities: Dict[(str, Any)] = {}
for consent_request in (matching_consent_requests or []):
for param_value in (consent_request.param_values or []):
if (not param_value.identity):
continue
identity_type: Optional[str] = param_value.identity
identity_value: Any = identity_data.get(param_value.identity)
if (identity_type and identity_value):
related_identities[identity_type] = identity_value
return related_identities
def run_consent_request(self, node: TraversalNode, policy: Policy, privacy_request: PrivacyRequest, identity_data: Dict[(str, Any)], session: Session) -> bool:
    """Propagate the user's consent preference (opt-in/opt-out) to this SaaS system.

    Returns True when at least one consent request fired. Raises
    SkippingConsentPropagation when there is nothing actionable: no
    preferences, no matching requests for the chosen action, or no request
    could be built from the available identity values.
    """
    logger.info("Starting consent request for node: '{}'", node.address.value)
    self.set_privacy_request_state(privacy_request, node)
    query_config = self.query_config(node)
    (should_opt_in, filtered_preferences) = should_opt_in_to_service(self.configuration.system, privacy_request)
    if (should_opt_in is None):
        logger.info('Skipping consent requests on node {}: No actionable consent preferences to propagate', node.address.value)
        raise SkippingConsentPropagation(f'Skipping consent propagation for node {node.address.value} - no actionable consent preferences to propagate')
    matching_consent_requests: List[SaaSRequest] = self._get_consent_requests_by_preference(should_opt_in)
    query_config.action = ('opt_in' if should_opt_in else 'opt_out')
    if (not matching_consent_requests):
        logger.info("Skipping consent requests on node {}: No '{}' requests defined", node.address.value, query_config.action)
        raise SkippingConsentPropagation(f"Skipping consent propagation for node {node.address.value} - No '{query_config.action}' requests defined.")
    # Record the starting status and relevant identities for consent reporting.
    cache_initial_status_and_identities_for_consent_reporting(db=session, privacy_request=privacy_request, connection_config=self.configuration, relevant_preferences=filtered_preferences, relevant_user_identities=self.relevant_consent_identities(matching_consent_requests, identity_data))
    fired: bool = False
    for consent_request in matching_consent_requests:
        self.set_saas_request_state(consent_request)
        try:
            prepared_request: SaaSRequestParams = query_config.generate_consent_stmt(policy, privacy_request, consent_request)
        except ValueError as exc:
            # Requests may declare their param values optional; skip instead of failing.
            if consent_request.skip_missing_param_values:
                logger.info('Skipping optional consent request on node {}: {}', node.address.value, exc)
                continue
            raise exc
        client: AuthenticatedClient = self.create_client()
        client.send(prepared_request)
        fired = True
    self.unset_connector_state()
    if (not fired):
        raise SkippingConsentPropagation('Missing needed values to propagate request.')
    add_complete_system_status_for_consent_reporting(session, privacy_request, self.configuration)
    return True
def close(self) -> None:
    """No-op teardown: SaaS requests create a fresh client per operation
    (see create_client() usage above), so there is no persistent connection
    to release here."""
@staticmethod
def _handle_errored_response(saas_request: SaaSRequest, response: Response) -> Response:
    """Replace an errored response with an empty JSON response when the request ignores errors.

    Restored @staticmethod: invoked as ``self._handle_errored_response(a, b)``
    with two positional arguments, so the decorator is required for the
    parameters to line up.
    """
    if (saas_request.ignore_errors and (not response.ok)):
        logger.info('Ignoring and clearing errored response with status code {}.', response.status_code)
        response = Response()
        response._content = b'{}'  # empty JSON body so downstream .json() succeeds
    return response
def _unwrap_response_data(saas_request: SaaSRequest, response: Response) -> Any:
try:
return (pydash.get(response.json(), saas_request.data_path) if saas_request.data_path else response.json())
except JSONDecodeError:
raise FidesopsException(f'Unable to parse JSON response from {saas_request.path}')
@staticmethod
def _invoke_read_request_override(override_function_name: str, client: AuthenticatedClient, policy: Policy, privacy_request: PrivacyRequest, node: TraversalNode, input_data: Dict[(str, List)], secrets: Any) -> List[Row]:
    """Look up and invoke a user-registered READ override, wrapping failures in FidesopsException.

    Restored @staticmethod to match the other override invokers and the
    positional call pattern used by this class.
    """
    override_function: Callable[(..., Union[(List[Row], int)])] = SaaSRequestOverrideFactory.get_override(override_function_name, SaaSRequestType.READ)
    try:
        return override_function(client, node, policy, privacy_request, input_data, secrets)
    except Exception as exc:
        logger.error("Encountered error executing override access function '{}'", override_function_name, exc_info=True)
        raise FidesopsException(str(exc)) from exc
@staticmethod
def _invoke_masking_request_override(override_function_name: str, client: AuthenticatedClient, policy: Policy, privacy_request: PrivacyRequest, rows: List[Row], query_config: SaaSQueryConfig, masking_request: SaaSRequest, secrets: Any) -> int:
    """Look up and invoke a user-registered masking override; return the number of rows masked.

    Restored @staticmethod: the call site passes all eight arguments
    positionally via ``self._invoke_masking_request_override(...)``.
    """
    override_function: Callable[(..., Union[(List[Row], int)])] = SaaSRequestOverrideFactory.get_override(override_function_name, SaaSRequestType(query_config.action))
    try:
        update_param_values: List[Dict[(str, Any)]] = [query_config.generate_update_param_values(row, policy, privacy_request, masking_request) for row in rows]
        return override_function(client, update_param_values, policy, privacy_request, secrets)
    except Exception as exc:
        # Fixed: the log message was missing the closing quote around '{}'.
        logger.error("Encountered error executing override mask function '{}'", override_function_name, exc_info=True)
        raise FidesopsException(str(exc)) from exc
def _get_consent_requests_by_preference(self, opt_in: bool) -> List[SaaSRequest]:
consent_requests: Optional[ConsentRequestMap] = self.saas_config.consent_requests
if (not consent_requests):
return []
return (consent_requests.opt_in if opt_in else consent_requests.opt_out) |
class DatabaseBackendTestSQLLiteInMemory(DatabaseBackendTestCase, TestCase):
    """Database-backend test variant using an in-memory SQLite engine.

    CONFIG is a YAML template (with a ``{testpath}`` placeholder filled in by
    the base test case) wiring a single file storage to ``sqlite://``.
    """
    CONFIG = "\n    configurationVersion: '1'\n    logFile: /dev/stderr\n    ios:\n      - name: file\n        module: file\n        defaultStorage: s1\n    storages:\n      - name: s1\n        storageId: 1\n        module: file\n        configuration:\n          path: {testpath}/data\n    databaseEngine: sqlite:// \n    "
class EmbeddingEngine():
    """Facade wiring a knowledge source, text splitter and embedding model to a vector store.

    The embedding model is resolved once at construction; vector-store
    connections are opened per operation.
    """

    def __init__(self, model_name, vector_store_config, knowledge_type: Optional[str]=KnowledgeType.DOCUMENT.value, knowledge_source: Optional[str]=None, source_reader: Optional=None, text_splitter: Optional[TextSplitter]=None, embedding_factory: EmbeddingFactory=None):
        self.knowledge_source = knowledge_source
        self.model_name = model_name
        self.vector_store_config = vector_store_config
        self.knowledge_type = knowledge_type
        factory = embedding_factory or DefaultEmbeddingFactory()
        self.embeddings = factory.create(model_name=self.model_name)
        # The connector reads the embedding function out of the shared config dict.
        self.vector_store_config['embeddings'] = self.embeddings
        self.source_reader = source_reader
        self.text_splitter = text_splitter

    def _vector_client(self):
        # One connector per call, mirroring how each operation opens its own connection.
        return VectorStoreConnector(self.vector_store_config['vector_store_type'], self.vector_store_config)

    def knowledge_embedding(self):
        """Read, split and embed the configured knowledge source into the vector store."""
        self.knowledge_embedding_client = self.init_knowledge_embedding()
        self.knowledge_embedding_client.source_embedding()

    def knowledge_embedding_batch(self, docs):
        """Index a pre-split batch of documents.

        Assumes knowledge_embedding()/read() ran first (they set
        ``knowledge_embedding_client``); otherwise this raises AttributeError.
        """
        return self.knowledge_embedding_client.index_to_store(docs)

    def read(self):
        """Return the source split into document batches without indexing them."""
        self.knowledge_embedding_client = self.init_knowledge_embedding()
        return self.knowledge_embedding_client.read_batch()

    def init_knowledge_embedding(self):
        """Build the knowledge-type-specific embedding client."""
        return get_knowledge_embedding(self.knowledge_type, self.knowledge_source, self.vector_store_config, self.source_reader, self.text_splitter)

    def similar_search(self, text, topk):
        """Return the top-k most similar stored chunks for ``text``."""
        return self._vector_client().similar_search(text, topk)

    def similar_search_with_scores(self, text, topk, score_threshold: float=0.3):
        """Top-k similarity search, returning scores filtered by ``score_threshold``."""
        return self._vector_client().similar_search_with_scores(text, topk, score_threshold)

    def vector_exist(self):
        """Return whether the configured vector collection already exists."""
        return self._vector_client().vector_name_exists()

    def delete_by_ids(self, ids):
        """Delete stored vectors by their ids."""
        self._vector_client().delete_by_ids(ids=ids)
def fortios_firewall(data, fos, check_mode):
    """Dispatch the firewall/internet-service task and normalize the result.

    Returns the raw response in check mode, otherwise the Ansible-style
    tuple ``(failed, changed, response, diff)``.
    """
    fos.do_member_operation('firewall', 'internet-service')
    if not data['firewall_internet_service']:
        # fail_json aborts the module run with an error.
        fos._module.fail_json(msg=('missing task body: %s' % 'firewall_internet_service'))
    else:
        resp = firewall_internet_service(data, fos, check_mode)
    if check_mode:
        return resp
    succeeded = is_successful_status(resp)
    changed = succeeded and (resp['revision_changed'] if ('revision_changed' in resp) else True)
    return ((not succeeded), changed, resp, {})
def sync_stocks(center=None, date=None):
    """Sync Zenoti stock reconciliations and purchase orders for one or all centers.

    Runs only when the Zenoti integration is enabled, and either a specific
    center is given or auto-syncing is switched on. Errors collected per
    center are written to a single error log at the end.
    """
    if not cint(frappe.db.get_single_value('Zenoti Settings', 'enable_zenoti')):
        return
    if not (center or cint(frappe.db.get_single_value('Zenoti Settings', 'enable_auto_syncing'))):
        return
    check_perpetual_inventory_disabled()
    error_logs = []
    centers = [center] if center else get_list_of_centers()
    for entry in centers:
        center_doc = frappe.get_doc('Zenoti Center', entry)
        process_stock_reconciliation(center_doc, error_logs, date)
        process_purchase_orders(center_doc, error_logs, date)
    if error_logs:
        make_error_log(error_logs)
def create_defaults_list(repo: IConfigRepository, config_name: Optional[str], overrides_list: List[Override], prepend_hydra: bool, skip_missing: bool) -> DefaultsList:
    """Compute the final defaults list for a config, verifying every override and deletion was consumed."""
    tracker = Overrides(repo=repo, overrides_list=overrides_list)
    (defaults, tree) = _create_defaults_list(repo, config_name, tracker, prepend_hydra=prepend_hydra, skip_missing=skip_missing)
    # Fail loudly on overrides/deletions that matched nothing.
    tracker.ensure_overrides_used()
    tracker.ensure_deletions_used()
    return DefaultsList(defaults=defaults, config_overrides=tracker.config_overrides, defaults_tree=tree, overrides=tracker)
class DeviceEnv():
    """Environment wrapper that moves tensors between devices: policy outputs
    are cast to ``from_device`` before stepping, and all environment outputs
    are cast to ``to_device``."""

    def __init__(self, env, from_device, to_device):
        self.env = env
        self.from_device = from_device
        self.to_device = to_device
        self.action_space = self.env.action_space

    def reset(self, env_info=DictTensor({})):
        """Reset the wrapped env; env_info must live on CPU."""
        assert env_info.empty() or (env_info.device() == torch.device('cpu')), 'env_info must be on CPU'
        obs, extra = self.env.reset(env_info)
        return (obs.to(self.to_device), extra.to(self.to_device))

    def step(self, policy_output):
        """Step the wrapped env, moving inputs and both output pairs across devices."""
        moved_output = policy_output.to(self.from_device)
        (first, second) = self.env.step(moved_output)
        (a, b) = first
        (c, d) = second
        return ((a.to(self.to_device), b.to(self.to_device)), (c.to(self.to_device), d.to(self.to_device)))

    def close(self):
        self.env.close()

    def n_envs(self):
        return self.env.n_envs()
class CoreGenerator():
    """Keras model generating a low-resolution image from an 'idea' image plus
    a coarse 'style' tensor, also exposing intermediate decoder features.

    Architecture: downsample idea -> 4-stage strided encoder; upsample style
    branch; concatenate and fuse; 9 residual blocks; 4-stage upsampling
    decoder -> tanh image + pre-activation feature map ('core_features_org').

    NOTE(review): the style input shape assumes width/height divisible by
    2**7 — confirm with callers.
    """
    def __init__(self, width=384, height=384, channels=1, gpus=0):
        self.width = width
        self.height = height
        self.input_channels = channels
        self.channels = channels
        self.gpus = gpus
        # --- inputs: full-res idea image (halved before encoding) and a tiny style map ---
        core_generator_idea = Input(shape=(self.width, self.height, self.input_channels))
        core_generator_idea_downsample = AvgPool2D(2, padding='same')(core_generator_idea)
        core_generator_style = Input(shape=((self.width / (2 ** 7)), (self.height / (2 ** 7)), self.input_channels))
        # --- encoder: 7x7 stem then four stride-2 conv stages (64 -> 512 channels) ---
        encoder = Conv2D_r(64, 7, 1, core_generator_idea_downsample)
        encoder = InstanceNormalization(axis=(- 1))(encoder)
        encoder = Activation('relu')(encoder)
        encoder = Conv2D_r(128, 3, 2, encoder)
        encoder = InstanceNormalization(axis=(- 1))(encoder)
        encoder = Activation('relu')(encoder)
        encoder = Conv2D_r(256, 3, 2, encoder)
        encoder = InstanceNormalization(axis=(- 1))(encoder)
        encoder = Activation('relu')(encoder)
        encoder = Conv2D_r(512, 3, 2, encoder)
        encoder = InstanceNormalization(axis=(- 1))(encoder)
        encoder = Activation('relu')(encoder)
        encoder = Conv2D_r(512, 3, 2, encoder)
        encoder = InstanceNormalization(axis=(- 1))(encoder)
        encoder = Activation('relu')(encoder)
        # --- style branch: three conv + 2x upsample stages to match the encoder's spatial size ---
        style = Conv2D_r(128, 3, 1, core_generator_style)
        style = InstanceNormalization(axis=(- 1))(style)
        style = Activation('relu')(style)
        style = UpSampling2D(2)(style)
        style = Conv2D_r(256, 3, 1, style)
        style = InstanceNormalization(axis=(- 1))(style)
        style = Activation('relu')(style)
        style = UpSampling2D(2)(style)
        style = Conv2D_r(512, 3, 1, style)
        style = InstanceNormalization(axis=(- 1))(style)
        style = Activation('relu')(style)
        # --- fuse idea features with style features, then compress back to 512 channels ---
        style_and_idea = concatenate([encoder, style], axis=(- 1))
        style_and_idea = Conv2D_r(1024, 3, 1, style_and_idea)
        style_and_idea = InstanceNormalization(axis=(- 1))(style_and_idea)
        style_and_idea = Activation('relu')(style_and_idea)
        style_and_idea = Conv2D_r(512, 3, 1, style_and_idea)
        style_and_idea = InstanceNormalization(axis=(- 1))(style_and_idea)
        style_and_idea = Activation('relu')(style_and_idea)
        def ResidualUnit(input_features):
            # Two-conv residual block with instance norm and an identity skip.
            output_features = Conv2D_r(512, 3, 1, input_features)
            output_features = InstanceNormalization(axis=(- 1))(output_features)
            output_features = Activation('relu')(output_features)
            output_features = Conv2D_r(512, 3, 1, output_features)
            output_features = InstanceNormalization(axis=(- 1))(output_features)
            output_features = add([input_features, output_features])
            output_features = Activation('relu')(output_features)
            return output_features
        # --- bottleneck: nine residual blocks ---
        resnet = ResidualUnit(style_and_idea)
        resnet = ResidualUnit(resnet)
        resnet = ResidualUnit(resnet)
        resnet = ResidualUnit(resnet)
        resnet = ResidualUnit(resnet)
        resnet = ResidualUnit(resnet)
        resnet = ResidualUnit(resnet)
        resnet = ResidualUnit(resnet)
        resnet = ResidualUnit(resnet)
        # --- decoder: four upsample + conv stages (512 -> 64 channels) ---
        decoder = UpSampling2D(2)(resnet)
        decoder = Conv2D_r(512, 3, 1, decoder)
        decoder = InstanceNormalization(axis=(- 1))(decoder)
        decoder = Activation('relu')(decoder)
        decoder = UpSampling2D(2)(decoder)
        decoder = Conv2D_r(256, 3, 1, decoder)
        decoder = InstanceNormalization(axis=(- 1))(decoder)
        decoder = Activation('relu')(decoder)
        decoder = UpSampling2D(2)(decoder)
        decoder = Conv2D_r(128, 3, 1, decoder)
        decoder = InstanceNormalization(axis=(- 1))(decoder)
        decoder = Activation('relu')(decoder)
        decoder = UpSampling2D(2)(decoder)
        decoder = Conv2D_r(64, 3, 1, decoder)
        # Expose the pre-normalization 64-channel feature map as a named output.
        features = Lambda((lambda x: x), name='core_features_org')(decoder)
        decoder = InstanceNormalization(axis=(- 1))(decoder)
        decoder = Activation('relu')(decoder)
        decoder = Conv2D_r(channels, 7, 1, decoder)
        picture_lowres = Activation('tanh')(decoder)
        core_generator = Model([core_generator_idea, core_generator_style], [picture_lowres, features])
        core_generator.name = 'core_generator'
        # Multi-GPU wrapping: save_model keeps the single-device graph for serialization.
        if (self.gpus < 2):
            self.model = core_generator
            self.save_model = self.model
        else:
            self.save_model = core_generator
            self.model = multi_gpu_model(self.save_model, gpus=gpus)
def test_union():
    """Union/Optional castability: members widen into a union; unions never narrow to a member."""
    castable = [
        (str_type, str_or_int),
        (str_type, int_or_str),
        (str_or_int, str_or_int),
        (int_or_str, str_or_int),
        (str_type, optional_str),
        (none_type, optional_str),
    ]
    for source, target in castable:
        assert _are_types_castable(source, target)
    not_castable = [
        (str_or_int, str_type),
        (bool_type, optional_str),
    ]
    for source, target in not_castable:
        assert not _are_types_castable(source, target)
def test_invoice_list_admin(db, client, admin_jwt, user):
    """Admins should see all event invoices, with the known user's invoice listed first."""
    known_invoice = get_invoice(db, user)
    get_invoice(db, UserFactory())  # second invoice owned by a different user
    response = client.get('/v1/event-invoices', content_type='application/vnd.api+json', headers=admin_jwt)
    assert response.status_code == 200
    payload = json.loads(response.data)
    assert len(payload['data']) == 2
    assert payload['data'][0]['id'] == str(known_invoice.id)
class OptionPlotoptionsNetworkgraphSonificationDefaultinstrumentoptionsMappingTime(Options):
    """Config proxy for the Highcharts networkgraph sonification
    `defaultInstrumentOptions.mapping.time` subtree.

    NOTE(review): every accessor below is defined twice — a getter-shaped def
    followed by a setter-shaped def. These were presumably @property /
    @<name>.setter pairs whose decorators were stripped; as written the later
    def shadows the earlier one. Confirm against the code generator.
    """
    def mapFunction(self):
        # Getter; no explicit default configured.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter counterpart (shadows the getter as written).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): these four bare names look like decorators for the dc_form
# view below (presumably @..._required guards and a @_POST method restriction)
# whose '@' prefixes were stripped during extraction. As written they are
# plain expression statements evaluated at import time — restore the '@'s.
_required
_required
_required
_POST
def dc_form(request):
    """Create or update a datacenter from the posted DcForm.

    Returns 204 with an empty body for no-op saves, redirects to the DC list
    on success (200/201), and re-renders the form on validation failure or
    any other save status.
    """
    if request.POST['action'] == 'create':
        datacenter = None
    else:
        datacenter = get_dc_or_404(request, request.POST['name'], api=False)
    form = DcForm(request, datacenter, request.POST)
    if not form.is_valid():
        return render(request, 'gui/dc/dc_form.html', {'form': form})
    status = form.save(args=(form.cleaned_data.get('name'),))
    if status == 204:
        return HttpResponse(None, status=status)
    if status in (200, 201):
        messages.success(request, _('Datacenter settings were successfully updated'))
        return redirect('dc_list', query_string=request.GET)
    return render(request, 'gui/dc/dc_form.html', {'form': form})
class InterComBackEndLogsTask(InterComListenerAndResponder):
    """Intercom responder that serves the tail of the backend log file."""
    CONNECTION_TYPE = 'logs_task'
    OUTGOING_CONNECTION_TYPE = 'logs_task_resp'

    def get_response(self, task):
        """Return the last 100 backend log lines, or [] when the log file does not exist."""
        log_path = Path(config.backend.logging.file_backend)
        if not log_path.is_file():
            return []
        return log_path.read_text().splitlines()[(- 100):]
class RandomizedSet():
    """Set supporting insert / remove / getRandom in average O(1).

    A list holds the values; a dict maps each value to its index in the list
    so removal can swap the target with the last element and pop.
    """

    def __init__(self):
        self.nums = []  # stored values; order is irrelevant
        self.pos = {}   # value -> index into self.nums

    def insert(self, val: int) -> bool:
        """Add val; return True if it was not already present."""
        if (val in self.pos):
            return False
        self.pos[val] = len(self.nums)
        self.nums.append(val)
        return True

    def remove(self, val: int) -> bool:
        """Remove val; return True if it was present."""
        if (val not in self.pos):
            return False
        index = self.pos[val]
        tail = self.nums[(- 1)]
        # Move the last element into the vacated slot, then shrink the list.
        self.nums[index] = tail
        self.nums.pop()
        self.pos[tail] = index
        del self.pos[val]
        return True

    def getRandom(self) -> int:
        """Return a uniformly random stored value (the set must be non-empty).

        Fixed: the original looped re-choosing while the pick was None, but
        None is never stored, so the retry loop was dead code.
        """
        return random.choice(self.nums)
def extract_entity(text: str, e: types.MessageEntity) -> str:
    """Slice out the substring covered by a Telegram message entity.

    Entity offset/length are measured in UTF-16 code units, so the text is
    walked byte-by-byte in UTF-8 while tracking the UTF-16 position: each
    character counts one unit, except 4-byte UTF-8 sequences (lead byte
    >= 0xF0), which are surrogate pairs and count two.
    """
    encoded = text.encode()
    start = 0
    end = len(encoded)  # default: entity runs to the end of the text
    utf16_pos = 0
    for byte_index, byte in enumerate(encoded):
        # Continuation bytes (0b10xxxxxx) do not advance the UTF-16 position.
        if (byte & 0xC0) == 0x80:
            continue
        if utf16_pos == e.offset:
            start = byte_index
        elif utf16_pos - e.offset == e.length:
            end = byte_index
            break
        utf16_pos += 2 if byte >= 0xF0 else 1
    return encoded[start:end].decode()
def hourTable(date, pos):
    """Build the 24 planetary-hour rows [start_jd, end_jd, ruler] for a date/position.

    The day (sunrise -> sunset) and night (sunset -> next sunrise) are each
    divided into 12 equal hours; night hours use ruler indices 12-23.
    """
    sunrise = ephem.lastSunrise(date, pos)
    sunset = ephem.nextSunset(sunrise, pos)
    next_sunrise = ephem.nextSunrise(date, pos)
    rows = []
    day_hour = (sunset.jd - sunrise.jd) / 12.0
    for hour in range(12):
        begin = sunrise.jd + (hour * day_hour)
        rows.append([begin, begin + day_hour, nthRuler(hour, sunrise.date.dayofweek())])
    night_hour = (next_sunrise.jd - sunset.jd) / 12.0
    for hour in range(12):
        begin = sunset.jd + (hour * night_hour)
        rows.append([begin, begin + night_hour, nthRuler(hour + 12, sunrise.date.dayofweek())])
    return rows
class OptionSeriesTreegraphMarkerStatesSelect(Options):
    """Config proxy for the Highcharts treegraph `marker.states.select` subtree.

    NOTE(review): every accessor is defined twice — a getter-shaped def (whose
    _config_get argument is the documented default) followed by a
    setter-shaped def. Presumably @property / @<name>.setter pairs with the
    decorators stripped; as written the later def shadows the earlier one.
    """
    def enabled(self):
        # Default: selected-state markers are enabled.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def fillColor(self):
        return self._config_get('#cccccc')
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        return self._config_get('#000000')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(2)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        return self._config_get(None)
    def radius(self, num: float):
        self._config(num, js_type=False)
class FrontEdge(edges.BaseEdge):
    """Custom boxes.py edge (char 'a') drawing the stepped front profile of a
    can-dispenser chute.

    Dimensions come from generator settings (canDiameter, chuteAngle,
    thickness, top_chute_height, bottom_chute_height, barrier_height,
    top_edge). NOTE(review): the turtle-graphics segments below are strictly
    order-dependent; verify any change by rendering.
    """
    char = 'a'
    def __call__(self, length, **kw):
        # Chute opening depth expressed in whole thickness units, derived from
        # the chute angle over half a can diameter plus two wall thicknesses.
        x = math.ceil(((((self.canDiameter * 0.5) + (2 * self.thickness)) * math.sin(math.radians(self.chuteAngle))) / self.thickness))
        if (self.top_edge != 'e'):
            self.corner(90, self.thickness)
            self.edge((0.5 * self.canDiameter))
            self.corner((- 90), (0.25 * self.canDiameter))
        else:
            # Straight top edge: shift the pen to the profile start instead.
            self.moveTo((- self.burn), (self.canDiameter + self.thickness), (- 90))
            self.corner(90, (0.25 * self.canDiameter))
            self.edge(self.thickness)
            self.edge(((0.5 * self.canDiameter) - self.thickness))
            self.corner((- 90), (0.25 * self.canDiameter))
        self.edge((0.5 * self.canDiameter))
        self.corner(90, self.thickness)
        # Recessed opening where cans roll out (x thickness units deep).
        self.edge((x * self.thickness))
        self.corner(90, self.thickness)
        self.edge((0.5 * self.canDiameter))
        self.corner((- 90), (0.25 * self.canDiameter))
        self.edge((((((0.5 * self.canDiameter) - ((1 + x) * self.thickness)) + self.top_chute_height) + self.bottom_chute_height) - self.barrier_height))
        self.corner((- 90), (0.25 * self.canDiameter))
        self.edge((0.5 * self.canDiameter))
        self.corner(90, self.thickness)
        # Front barrier that keeps the bottom can in place.
        self.edge(self.barrier_height)
        self.edge(self.thickness)
class OptionSeriesItemSonificationTracksMappingPan(Options):
    """Config proxy for the Highcharts item-series sonification
    `tracks.mapping.pan` subtree.

    NOTE(review): every accessor is defined twice — getter-shaped then
    setter-shaped. Presumably @property / @<name>.setter pairs with the
    decorators stripped; as written the later def shadows the earlier one.
    """
    def mapFunction(self):
        # Getter; no explicit default configured.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesVariablepieMarkerStatesSelect(Options):
    """Config proxy for the Highcharts variablepie `marker.states.select` subtree.

    NOTE(review): every accessor is defined twice — a getter-shaped def (whose
    _config_get argument is the documented default) followed by a
    setter-shaped def. Presumably @property / @<name>.setter pairs with the
    decorators stripped; as written the later def shadows the earlier one.
    """
    def enabled(self):
        # Default: selected-state markers are enabled.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def fillColor(self):
        return self._config_get('#cccccc')
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        return self._config_get('#000000')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(2)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        return self._config_get(None)
    def radius(self, num: float):
        self._config(num, js_type=False)
class RecordContainer(object):
    """Accumulates arbitrary records in memory and dumps their str() forms to a file."""

    def __init__(self):
        self._records = []

    def record(self, record):
        """Append one record; it is stringified only at save time."""
        self._records.append(record)

    def save_to_file(self, filename):
        """Write every record's str() form, concatenated without separators, as UTF-8."""
        with open(filename, 'w', encoding='utf-8') as out:
            out.writelines(str(entry) for entry in self._records)
class PlotArea():
def __init__(self, parent, position: Any, camera: Union[pygfx.PerspectiveCamera], controller: Union[pygfx.Controller], scene: pygfx.Scene, canvas: WgpuCanvas, renderer: pygfx.WgpuRenderer, name: str=None):
self._parent: PlotArea = parent
self._position = position
self._scene = scene
self._canvas = canvas
self._renderer = renderer
if (parent is None):
self._viewport: pygfx.Viewport = pygfx.Viewport(renderer)
else:
self._viewport = pygfx.Viewport(parent.renderer)
self._camera = camera
self._controller = controller
self.controller.add_camera(self._camera)
self.controller.register_events(self.viewport)
self._animate_funcs_pre = list()
self._animate_funcs_post = list()
self.renderer.add_event_handler(self.set_viewport_rect, 'resize')
self._graphics: List[str] = list()
self._selectors: List[str] = list()
self._name = name
self.children = list()
self.set_viewport_rect()
def parent(self):
return self._parent
def position(self) -> Union[(Tuple[(int, int)], Any)]:
return self._position
def scene(self) -> pygfx.Scene:
return self._scene
def canvas(self) -> WgpuCanvas:
return self._canvas
def renderer(self) -> pygfx.WgpuRenderer:
return self._renderer
def viewport(self) -> pygfx.Viewport:
return self._viewport
def camera(self) -> pygfx.PerspectiveCamera:
return self._camera
def camera(self, new_camera: Union[(str, pygfx.PerspectiveCamera)]):
if isinstance(new_camera, pygfx.PerspectiveCamera):
self.controller.remove_camera(self._camera)
self.controller.add_camera(new_camera)
self._camera = new_camera
elif isinstance(new_camera, str):
if (new_camera == '2d'):
self._camera.fov = 0
elif (new_camera == '3d'):
if (self._camera.fov == 0):
self._camera.fov = 50
else:
raise ValueError("camera must be one of '2d', '3d' or a pygfx.PerspectiveCamera instance")
else:
raise ValueError("camera must be one of '2d', '3d' or a pygfx.PerspectiveCamera instance")
def controller(self) -> pygfx.Controller:
return self._controller
def controller(self, new_controller: Union[(str, pygfx.Controller)]):
new_controller = create_controller(new_controller, self._camera)
cameras_list = list()
for camera in self._controller.cameras:
self._controller.remove_camera(camera)
cameras_list.append(camera)
for camera in cameras_list:
new_controller.add_camera(camera)
new_controller.register_events(self.viewport)
if (self.parent is not None):
if (self.parent.__class__.__name__ == 'GridPlot'):
for subplot in self.parent:
if (subplot.camera in cameras_list):
new_controller.register_events(subplot.viewport)
subplot._controller = new_controller
self._controller = new_controller
def graphics(self) -> Tuple[(Graphic, ...)]:
proxies = list()
for loc in self._graphics:
p = weakref.proxy(GRAPHICS[loc])
proxies.append(p)
return tuple(proxies)
def selectors(self) -> Tuple[(BaseSelector, ...)]:
proxies = list()
for loc in self._selectors:
p = weakref.proxy(SELECTORS[loc])
proxies.append(p)
return tuple(proxies)
def name(self) -> Any:
return self._name
def name(self, name: Any):
self._name = name
def get_rect(self) -> Tuple[(float, float, float, float)]:
raise NotImplementedError('Must be implemented in subclass')
def map_screen_to_world(self, pos: Union[(Tuple[(float, float)], pygfx.PointerEvent)]) -> np.ndarray:
if isinstance(pos, pygfx.PointerEvent):
pos = (pos.x, pos.y)
if (not self.viewport.is_inside(*pos)):
return None
vs = self.viewport.logical_size
pos_rel = ((pos[0] - self.viewport.rect[0]), (pos[1] - self.viewport.rect[1]))
pos_ndc = ((((pos_rel[0] / vs[0]) * 2) - 1), (- (((pos_rel[1] / vs[1]) * 2) - 1)), 0)
pos_ndc += vec_transform(self.camera.world.position, self.camera.camera_matrix)
pos_world = vec_unproject(pos_ndc[:2], self.camera.camera_matrix)
return np.array([*pos_world[:2], 0])
def set_viewport_rect(self, *args):
self.viewport.rect = self.get_rect()
def render(self):
self._call_animate_functions(self._animate_funcs_pre)
self.viewport.render(self.scene, self.camera)
for child in self.children:
child.render()
self._call_animate_functions(self._animate_funcs_post)
def _call_animate_functions(self, funcs: Iterable[callable]):
for fn in funcs:
try:
args = getfullargspec(fn).args
if (len(args) > 0):
if ((args[0] == 'self') and (not (len(args) > 1))):
fn()
else:
fn(self)
else:
fn()
except (ValueError, TypeError):
warn(f'Could not resolve argspec of {self.__class__.__name__} animation function: {fn}, calling it without arguments.')
fn()
def add_animations(self, *funcs: Iterable[callable], pre_render: bool=True, post_render: bool=False):
for f in funcs:
if (not callable(f)):
raise TypeError(f'all positional arguments to add_animations() must be callable types, you have passed a: {type(f)}')
if pre_render:
self._animate_funcs_pre += funcs
if post_render:
self._animate_funcs_post += funcs
def remove_animation(self, func):
if ((func not in self._animate_funcs_pre) and (func not in self._animate_funcs_post)):
raise KeyError(f'''The passed function: {func} is not registered as an animation function. These are the animation functions that are currently registered:
pre: {self._animate_funcs_pre}
post: {self._animate_funcs_post}''')
if (func in self._animate_funcs_pre):
self._animate_funcs_pre.remove(func)
if (func in self._animate_funcs_post):
self._animate_funcs_post.remove(func)
def add_graphic(self, graphic: Graphic, center: bool=True):
self._add_or_insert_graphic(graphic=graphic, center=center, action='add')
graphic.position_z = len(self)
def insert_graphic(self, graphic: Graphic, center: bool=True, index: int=0, z_position: int=None):
if (index > len(self._graphics)):
raise IndexError(f'''Position {index} is out of bounds for number of graphics currently in the PlotArea: {len(self._graphics)}
Call `add_graphic` method to insert graphic in the last position of the stored graphics''')
self._add_or_insert_graphic(graphic=graphic, center=center, action='insert', index=index)
if (z_position is None):
graphic.position_z = index
else:
graphic.position_z = z_position
def _add_or_insert_graphic(self, graphic: Graphic, center: bool=True, action: str=Union[('insert', 'add')], index: int=0):
if (not isinstance(graphic, Graphic)):
raise TypeError(f'Can only add Graphic types to a PlotArea, you have passed a: {type(graphic)}')
if (graphic.name is not None):
self._check_graphic_name_exists(graphic.name)
if isinstance(graphic, BaseSelector):
loc = graphic.loc
SELECTORS[loc] = graphic
if (action == 'insert'):
self._selectors.insert(index, loc)
else:
self._selectors.append(loc)
else:
loc = graphic.loc
GRAPHICS[loc] = graphic
if (action == 'insert'):
self._graphics.insert(index, loc)
else:
self._graphics.append(loc)
graphic = weakref.proxy(graphic)
self.scene.add(graphic.world_object)
if center:
self.center_graphic(graphic)
if hasattr(graphic, '_add_plot_area_hook'):
graphic._add_plot_area_hook(self)
def _check_graphic_name_exists(self, name):
graphic_names = list()
for g in self.graphics:
graphic_names.append(g.name)
for s in self.selectors:
graphic_names.append(s.name)
if (name in graphic_names):
raise ValueError(f'''graphics must have unique names, current graphic names are:
{graphic_names}''')
def center_graphic(self, graphic: Graphic, zoom: float=1.35):
self.camera.show_object(graphic.world_object)
self.camera.zoom = zoom
def center_scene(self, *, zoom: float=1.35):
if (not (len(self.scene.children) > 0)):
return
for camera in self.controller.cameras:
camera.show_object(self.scene)
camera.zoom = zoom
def auto_scale(self, *, maintain_aspect: Union[(None, bool)]=None, zoom: float=0.8):
if (not (len(self.scene.children) > 0)):
return
for selector in self.selectors:
self.scene.remove(selector.world_object)
self.center_scene()
if (maintain_aspect is None):
maintain_aspect = self.camera.maintain_aspect
for camera in self.controller.cameras:
camera.maintain_aspect = maintain_aspect
if (len(self.scene.children) > 0):
(width, height, depth) = np.ptp(self.scene.get_world_bounding_box(), axis=0)
else:
(width, height, depth) = (1, 1, 1)
if (width < 0.01):
width = 1
if (height < 0.01):
height = 1
for selector in self.selectors:
self.scene.add(selector.world_object)
for camera in self.controller.cameras:
camera.width = width
camera.height = height
camera.zoom = zoom
    def remove_graphic(self, graphic: Graphic):
        """Detach *graphic*'s world object from the scene graph only;
        bookkeeping is untouched (contrast with ``delete_graphic``, which
        also removes it from the registries and cleans it up)."""
        self.scene.remove(graphic.world_object)
def delete_graphic(self, graphic: Graphic):
loc = graphic.loc
if (loc in self._graphics):
glist = self._graphics
kind = 'graphic'
elif (loc in self._selectors):
kind = 'selector'
glist = self._selectors
else:
raise KeyError(f'Graphic with following address not found in plot area: {loc}')
if (graphic.world_object in self.scene.children):
self.scene.remove(graphic.world_object)
glist.remove(loc)
graphic._cleanup()
if (kind == 'graphic'):
del GRAPHICS[loc]
elif (kind == 'selector'):
del SELECTORS[loc]
    def clear(self):
        """Delete every graphic and selector from this plot area.

        NOTE(review): this iterates ``self.graphics`` / ``self.selectors``
        while ``delete_graphic`` mutates the underlying ``_graphics`` /
        ``_selectors`` lists — safe only if those properties return copies;
        confirm against the property definitions.
        """
        for g in self.graphics:
            self.delete_graphic(g)
        for s in self.selectors:
            self.delete_graphic(s)
def __getitem__(self, name: str):
for graphic in self.graphics:
if (graphic.name == name):
return graphic
for selector in self.selectors:
if (selector.name == name):
return selector
graphic_names = list()
for g in self.graphics:
graphic_names.append(g.name)
selector_names = list()
for s in self.selectors:
selector_names.append(s.name)
raise IndexError(f'''No graphic or selector of given name.
The current graphics are:
{graphic_names}
The current selectors are:
{selector_names}''')
def __contains__(self, item: Union[(str, Graphic)]):
to_check = [*self.graphics, *self.selectors]
if isinstance(item, Graphic):
if (item in to_check):
return True
else:
return False
elif isinstance(item, str):
for graphic in to_check:
if (graphic.name is None):
continue
if (graphic.name == item):
return True
return False
raise TypeError('PlotArea `in` operator accepts only `Graphic` or `str` types')
def __str__(self):
if (self.name is None):
name = 'unnamed'
else:
name = self.name
return f'{name}: {self.__class__.__name__} {hex(id(self))}'
    def __repr__(self):
        """Multi-line summary: the ``__str__`` header, the parent plot area,
        and one tab-indented repr per graphic (joined with ``newline``)."""
        newline = '\n\t'
        return f'''{self}
parent: {self.parent.__str__()}
Graphics:
{newline.join((graphic.__repr__() for graphic in self.graphics))}
'''
def __len__(self) -> int:
return (len(self._graphics) + len(self.selectors)) |
class TestWeakIDDict(unittest.TestCase):
    """Tests for WeakIDKeyDict / WeakIDDict: mappings keyed on object
    *identity* whose entries disappear when their keys (and, for WeakIDDict,
    their values) are garbage collected.

    NOTE(review): the "len drops immediately after del" assertions rely on
    prompt refcount-based collection, i.e. CPython — confirm if other
    interpreters must be supported.
    """

    def test_weak_keys(self):
        """Entries vanish when keys die; identity, not equality, is the key."""
        wd = WeakIDKeyDict()
        keep = []
        dont_keep = []
        values = list(range(10))
        for (n, i) in enumerate(values, 1):
            # AllTheSame instances presumably all compare equal, so only
            # identity keeps the ten entries distinct — confirm in its class.
            key = AllTheSame()
            if (not (i % 2)):
                keep.append(key)
            else:
                dont_keep.append(key)
            wd[key] = i
            # The dict itself must keep the key alive after this del.
            del key
            self.assertEqual(len(wd), n)
        self.assertEqual(len(wd), 10)
        # Dropping the only strong references removes those five entries.
        del dont_keep
        self.assertEqual(len(wd), 5)
        self.assertCountEqual(list(wd.values()), list(range(0, 10, 2)))
        self.assertEqual([wd[k] for k in keep], list(range(0, 10, 2)))
        # Iteration and .keys() agree and yield exactly the surviving keys.
        self.assertCountEqual([id(k) for k in wd.keys()], [id(k) for k in wd])
        self.assertCountEqual([id(k) for k in wd.keys()], [id(k) for k in keep])

    def test_weak_keys_values(self):
        """WeakIDDict additionally drops entries when *values* are collected."""
        wd = WeakIDDict()
        keep = []
        dont_keep = []
        values = list(map(WeakreffableInt, range(10)))
        for (n, i) in enumerate(values, 1):
            key = AllTheSame()
            if (not (i.value % 2)):
                keep.append(key)
            else:
                dont_keep.append(key)
            wd[key] = i
            del key
            self.assertEqual(len(wd), n)
        self.assertEqual(len(wd), 10)
        del dont_keep
        self.assertEqual(len(wd), 5)
        self.assertCountEqual(list(wd.values()), list(map(WeakreffableInt, [0, 2, 4, 6, 8])))
        self.assertEqual([wd[k] for k in keep], list(map(WeakreffableInt, [0, 2, 4, 6, 8])))
        self.assertCountEqual([id(k) for k in wd.keys()], [id(k) for k in wd])
        self.assertCountEqual([id(k) for k in wd.keys()], [id(k) for k in keep])
        # Deleting values two at a time: each pair covers one surviving entry
        # (even value) and one already-removed entry (odd value), so the dict
        # shrinks by exactly one per deletion.
        del values[0:2]
        self.assertEqual(len(wd), 4)
        del values[0:2]
        self.assertEqual(len(wd), 3)
        del values[0:2]
        self.assertEqual(len(wd), 2)
        del values[0:2]
        self.assertEqual(len(wd), 1)
        del values[0:2]
        self.assertEqual(len(wd), 0)

    def test_weak_id_dict_str_representation(self):
        """str() and repr() both show the class name and object address."""
        weak_id_dict = WeakIDDict()
        desired_repr = '<WeakIDDict at 0x{0:x}>'.format(id(weak_id_dict))
        self.assertEqual(desired_repr, str(weak_id_dict))
        self.assertEqual(desired_repr, repr(weak_id_dict))

    def test_weak_id_key_dict_str_representation(self):
        """Same representation contract for WeakIDKeyDict."""
        weak_id_key_dict = WeakIDKeyDict()
        desired_repr = f'<WeakIDKeyDict at 0x{id(weak_id_key_dict):x}>'
        self.assertEqual(desired_repr, str(weak_id_key_dict))
        self.assertEqual(desired_repr, repr(weak_id_key_dict))
class OptionSeriesSankeySonificationTracksMappingFrequency(Options):
    """Frequency-mapping options for sankey sonification tracks.

    NOTE(review): each accessor pair below appeared as two undecorated
    ``def``s with the same name, so the setter silently shadowed the getter —
    evidently stripped ``@property`` / ``@<name>.setter`` decorators, which
    are restored here.
    """

    @property
    def mapFunction(self):
        """Configured mapping function (default: None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Configured map-to target (default: None)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Configured maximum of the mapped range (default: None)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Configured minimum of the mapped range (default: None)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Configured 'within' value (default: None)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class RpcJobManagerWrapper(LogBase.LoggerMixin):
    """Owns the job-dispatching RPC worker process and its output queue."""

    loggerPath = 'Main.RpcInterfaceManager'

    def __init__(self, start_worker=True, test_mode=False, lowrate=False):
        """Create the shared queue/flag and (optionally) spawn the worker.

        Parameters
        ----------
        start_worker: bool
            spawn the MultiRpcRunner worker process immediately
        test_mode, lowrate: bool
            forwarded to the worker process
        """
        super().__init__()
        self.log.info('Launching job-dispatching RPC system')
        self.log.info('Lowrate setting: %s', lowrate)
        # Bounded so a stalled consumer applies back-pressure to the worker.
        self.normal_out_queue = multiprocessing.Queue(maxsize=(MAX_IN_FLIGHT_JOBS * 2))
        # Shared run flag; setting it to 0 asks the worker to shut down.
        self.run_flag = multiprocessing.Value('i', 1, lock=False)
        if start_worker:
            self.main_job_agg = multiprocessing.Process(
                target=MultiRpcRunner.run_shim,
                args=(self.normal_out_queue, self.run_flag),
                kwargs={'test_mode': test_mode, 'lowrate': lowrate})
            self.main_job_agg.start()
        else:
            self.main_job_agg = None

    def get_queues(self):
        """Return the worker's output queue."""
        return self.normal_out_queue

    def join_proc(self):
        """Signal the worker to halt and wait for it, draining the queue if
        it cannot exit because the queue's feeder pipe is full.

        Fix: ``multiprocessing.Process.join(timeout)`` returns None on
        timeout instead of raising ``multiprocessing.TimeoutError``, so the
        original try/except always fell through to ``return`` after one
        second and never reached the queue-draining code. Liveness is now
        checked explicitly with ``is_alive()``.
        """
        self.log.info('Requesting job-dispatching RPC system to halt.')
        self.run_flag.value = 0
        if not self.main_job_agg:
            return
        # Grace period: up to 5 minutes of 1-second joins.
        for _ in range(60 * 5):
            self.main_job_agg.join(timeout=1)
            if not self.main_job_agg.is_alive():
                return
        self.log.info('Waiting for job dispatcher to join. Currently active jobs in queue: %s', self.normal_out_queue.qsize())
        while True:
            self.main_job_agg.join(timeout=1)
            if not self.main_job_agg.is_alive():
                return
            self.log.error('Timeout when waiting for join. Bulk consuming from intermediate queue.')
            # Drain the queue so the worker is not blocked writing to it.
            try:
                while True:
                    self.normal_out_queue.get_nowait()
            except queue.Empty:
                pass

    def get_status(self):
        """One-line human-readable status of the worker process."""
        if self.main_job_agg:
            return ('Worker: %s, alive: %s, exit-code: %s' % (self.main_job_agg.pid, self.main_job_agg.is_alive(), self.main_job_agg.exitcode))
        return 'Worker is none! Error!'

    def is_running(self):
        """True while the worker process exists and is alive."""
        if self.main_job_agg:
            return self.main_job_agg.is_alive()
        return False
def get_decorated_method_simple_timing(decorator):
    """Time three phases for a Person class built with *decorator*:
    class construction, instantiation, and name reassignment (N runs each).

    Returns a ``(construction, instantiation, reassignment)`` tuple of
    seconds as reported by ``timeit``.
    """
    person_src = PERSON_CONSTRUCTION_TEMPLATE.format(decorator=decorator)
    t_construct = timeit.timeit(stmt=person_src, setup=BASE_SETUP, number=N)
    t_instantiate = timeit.timeit(stmt=INSTANTIATE_PERSON, setup=BASE_SETUP + person_src, number=N)
    t_reassign = timeit.timeit(stmt=REASSIGN_NAME, setup=BASE_SETUP + person_src + INSTANTIATE_PERSON, number=N)
    return (t_construct, t_instantiate, t_reassign)
def _str2bool(value):
    """Parse a boolean-ish command-line string ('false', '0', 'no' -> False)."""
    if isinstance(value, bool):
        return value
    return value.strip().lower() not in ('false', '0', 'no', 'n', '')


def parse_args():
    """Parse command-line arguments for testing a Fast R-CNN network.

    Prints help and exits with status 1 when invoked with no arguments.

    Fix: ``--wait`` previously used ``type=bool``; ``bool('False')`` is True
    (any non-empty string is truthy), so ``--wait False`` silently enabled
    waiting. It now uses an explicit string-to-bool parser.
    """
    parser = argparse.ArgumentParser(description='Test a Fast R-CNN network')
    parser.add_argument('-c', '--cfg', dest='cfg_file', help='optional config file', default=None, type=str)
    parser.add_argument('--wait', dest='wait', help='wait until net file exists', default=True, type=_str2bool)
    parser.add_argument('--single-gpu-testing', dest='single_gpu_testing', help='using cfg.NUM_GPUS for inference', action='store_false')
    parser.add_argument('--range', dest='range', help='start (inclusive) and end (exclusive) indices', default=None, type=int, nargs=2)
    parser.add_argument('opts', help='See detectron/core/config.py for all options', default=None, nargs=argparse.REMAINDER)
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    return parser.parse_args()
class SearchParamSource(ParamSource):
    """Parameter source that assembles the query dict for search operations."""

    def __init__(self, track, params, **kwargs):
        super().__init__(track, params, **kwargs)
        target_name = get_target(track, params)
        type_name = params.get('type')
        # Data streams have no mapping types, so the combination is invalid.
        if params.get('data-stream') and type_name:
            raise exceptions.InvalidSyntax(f"'type' not supported with 'data-stream' for operation '{kwargs.get('operation_name')}'")
        detailed_results = params.get('detailed-results', False)
        pages = params.get('pages', None)
        self.query_params = {
            'index': target_name,
            'type': type_name,
            'cache': params.get('cache', None),
            'detailed-results': detailed_results,
            'request-params': params.get('request-params', {}),
            'response-compression-enabled': params.get('response-compression-enabled', True),
            'body': params.get('body', None),
        }
        if not target_name:
            raise exceptions.InvalidSyntax(f"'index' or 'data-stream' is mandatory and is missing for operation '{kwargs.get('operation_name')}'")
        # Optional scroll/pagination settings are only added when present.
        if pages:
            self.query_params['pages'] = pages
        results_per_page = params.get('results-per-page', None)
        if results_per_page:
            self.query_params['results-per-page'] = results_per_page
        with_pit_from = params.get('with-point-in-time-from', None)
        if with_pit_from:
            self.query_params['with-point-in-time-from'] = with_pit_from
        if 'assertions' in params:
            # Assertions need detailed responses unless the query paginates.
            if not detailed_results and not pages:
                raise exceptions.InvalidSyntax('The property [detailed-results] must be [true] if assertions are defined')
            self.query_params['assertions'] = params['assertions']
        # Merge the common client options on top.
        self.query_params.update(self._client_params())

    def params(self):
        """Return the prepared query parameter dict."""
        return self.query_params
# NOTE(review): the bare string expression below is the remnant of a stripped
# class decorator — presumably ``@mock.patch('aea.aea_builder.AEABuilder.install_pypi_dependencies')``
# (``test_run`` accepts ``*args`` for the injected mock); confirm against the
# original source before relying on this test.
('aea.aea_builder.AEABuilder.install_pypi_dependencies')
class TestMultiAgentManagerPackageConsistencyError():
    """Adding a project whose AEA package versions conflict with an already
    added project must fail with a detailed consistency error."""

    # Exact error text expected from MultiAgentManager.add_project.
    EXPECTED_ERROR_MESSAGE = dedent(" cannot add project 'fetchai/weather_client:0.27.0': the following AEA dependencies have conflicts with previously added projects:\n - 'fetchai/ledger' of type connection: the new version '0.17.0' conflicts with existing version '0.18.0' of the same package required by agents: [<fetchai/weather_station:0.27.0>]\n - 'fetchai/p2p_libp2p' of type connection: the new version '0.20.0' conflicts with existing version '0.21.0' of the same package required by agents: [<fetchai/weather_station:0.27.0>]\n - 'fetchai/soef' of type connection: the new version '0.21.0' conflicts with existing version '0.22.0' of the same package required by agents: [<fetchai/weather_station:0.27.0>]\n - 'fetchai/contract_api' of type protocol: the new version '0.14.0' conflicts with existing version '1.0.0' of the same package required by agents: [<fetchai/weather_station:0.27.0>]\n - 'fetchai/default' of type protocol: the new version '0.15.0' conflicts with existing version '1.0.0' of the same package required by agents: [<fetchai/weather_station:0.27.0>]\n - 'fetchai/fipa' of type protocol: the new version '0.16.0' conflicts with existing version '1.0.0' of the same package required by agents: [<fetchai/weather_station:0.27.0>]\n - 'fetchai/ledger_api' of type protocol: the new version '0.13.0' conflicts with existing version '1.0.0' of the same package required by agents: [<fetchai/weather_station:0.27.0>]\n - 'fetchai/oef_search' of type protocol: the new version '0.16.0' conflicts with existing version '1.0.0' of the same package required by agents: [<fetchai/weather_station:0.27.0>]\n - 'fetchai/signing' of type protocol: the new version '0.13.0' conflicts with existing version '1.0.0' of the same package required by agents: [<fetchai/weather_station:0.27.0>]\n - 'fetchai/state_update' of type protocol: the new version '0.13.0' conflicts with existing version '1.0.0' of the same package required by agents: [<fetchai/weather_station:0.27.0>]\n ")

    def setup(self):
        """Create a fresh manager rooted in a temporary working directory."""
        self.project_public_id = MY_FIRST_AEA_PUBLIC_ID
        self.tmp_dir = TemporaryDirectory()
        self.working_dir = os.path.join(self.tmp_dir.name, 'MultiAgentManager_dir')
        self.project_path = os.path.join(self.working_dir, self.project_public_id.author, self.project_public_id.name)
        # The manager must create the directory itself.
        assert (not os.path.exists(self.working_dir))
        self.manager = MultiAgentManager(self.working_dir)

    def test_run(self, *args):
        """weather_station first, then weather_client -> consistency error."""
        self.manager.start_manager()
        weather_station_id = PublicId.from_str('fetchai/weather_station:0.27.0')
        self.manager.add_project(weather_station_id)
        weather_client_id = PublicId.from_str('fetchai/weather_client:0.27.0')
        with pytest.raises(ProjectPackageConsistencyCheckError, match=re.escape(self.EXPECTED_ERROR_MESSAGE)):
            self.manager.add_project(weather_client_id)

    def teardown(self):
        """Stop the manager and remove the temporary directories."""
        try:
            self.manager.stop_manager()
            if os.path.exists(self.working_dir):
                rmtree(self.working_dir)
        finally:
            self.tmp_dir.cleanup()
def getIntEncoder(format):
    """Return an integer-encoding function for the given charstring format.

    *format* is one of 'cff' (DICT operands), 't1' or 't2'; the formats
    differ only in which multi-byte operator prefixes are available
    (28 for the 16-bit form, 29 or 255 for the 32-bit form).
    """
    if (format == 'cff'):
        twoByteOp = bytechr(28)
        fourByteOp = bytechr(29)
    elif (format == 't1'):
        # t1 has no 16-bit operator; large values use the 255 prefix.
        twoByteOp = None
        fourByteOp = bytechr(255)
    else:
        assert (format == 't2')
        # t2 has no 32-bit integer operator (see the warning branch below).
        twoByteOp = bytechr(28)
        fourByteOp = None

    def encodeInt(value, fourByteOp=fourByteOp, bytechr=bytechr, pack=struct.pack, unpack=struct.unpack, twoByteOp=twoByteOp):
        # Helpers are bound as default arguments so lookups are local;
        # this function runs once per operand.
        if ((- 107) <= value <= 107):
            # Single byte: value + 139 maps to 32..246.
            code = bytechr((value + 139))
        elif (108 <= value <= 1131):
            # Two bytes with prefix byte 247..250.
            value = (value - 108)
            code = (bytechr(((value >> 8) + 247)) + bytechr((value & 255)))
        elif ((- 1131) <= value <= (- 108)):
            # Two bytes with prefix byte 251..254.
            value = ((- value) - 108)
            code = (bytechr(((value >> 8) + 251)) + bytechr((value & 255)))
        elif ((twoByteOp is not None) and ((- 32768) <= value <= 32767)):
            # 16-bit signed big-endian after the two-byte operator.
            code = (twoByteOp + pack('>h', value))
        elif (fourByteOp is None):
            # t2 fallback: emit the 255-prefixed 32-bit form anyway;
            # only expected when reading legacy XML dumps.
            log.warning('4-byte T2 number got passed to the IntType handler. This should happen only when reading in old XML files.\n')
            code = (bytechr(255) + pack('>l', value))
        else:
            # 32-bit signed big-endian after the four-byte operator.
            code = (fourByteOp + pack('>l', value))
        return code
    return encodeInt
def _make_log(_type='mined', log_index=0, transaction_index=0, transaction_hash=ZERO_32BYTES, block_hash=ZERO_32BYTES, block_number=0, address=ZERO_ADDRESS, data=b'', topics=None):
    """Build a log-entry dict with sensible zeroed defaults for tests."""
    return {
        'type': _type,
        'log_index': log_index,
        'transaction_index': transaction_index,
        'transaction_hash': transaction_hash,
        'block_hash': block_hash,
        'block_number': block_number,
        'address': address,
        'data': data,
        # Falsy/absent topics become an empty list.
        'topics': (topics or []),
    }
class SslClient(NamespacedClient):
    """Namespaced client for the Elasticsearch ``/_ssl`` APIs.

    NOTE(review): ``_rewrite_parameters()`` appeared as a bare statement (a
    stripped decorator), so its returned wrapper was discarded; it is
    restored as a decorator on ``certificates``.
    """

    @_rewrite_parameters()
    async def certificates(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """``GET /_ssl/certificates`` — the SSL certificates API."""
        __path = '/_ssl/certificates'
        __query: t.Dict[(str, t.Any)] = {}
        # Only forward the common query parameters that were explicitly set.
        if (error_trace is not None):
            __query['error_trace'] = error_trace
        if (filter_path is not None):
            __query['filter_path'] = filter_path
        if (human is not None):
            __query['human'] = human
        if (pretty is not None):
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return (await self.perform_request('GET', __path, params=__query, headers=__headers))
class OptionSeriesScatterLabel(Options):
    """Series-label options for scatter series.

    NOTE(review): each accessor pair below appeared as two undecorated
    ``def``s with the same name, so the setter silently shadowed the getter —
    evidently stripped ``@property`` / ``@<name>.setter`` decorators, which
    are restored here.
    """

    @property
    def boxesToAvoid(self):
        """Configured boxes-to-avoid value (default: None)."""
        return self._config_get(None)

    @boxesToAvoid.setter
    def boxesToAvoid(self, value: Any):
        self._config(value, js_type=False)

    @property
    def connectorAllowed(self):
        """Whether a connector is allowed (default: False)."""
        return self._config_get(False)

    @connectorAllowed.setter
    def connectorAllowed(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def connectorNeighbourDistance(self):
        """Connector neighbour distance (default: 24)."""
        return self._config_get(24)

    @connectorNeighbourDistance.setter
    def connectorNeighbourDistance(self, num: float):
        self._config(num, js_type=False)

    @property
    def enabled(self):
        """Whether the series label is enabled (default: True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def format(self):
        """Label format string (default: 'undefined')."""
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        """Label formatter callback (default: 'undefined')."""
        return self._config_get('undefined')

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def maxFontSize(self):
        """Maximum font size (default: None)."""
        return self._config_get(None)

    @maxFontSize.setter
    def maxFontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def minFontSize(self):
        """Minimum font size (default: None)."""
        return self._config_get(None)

    @minFontSize.setter
    def minFontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def onArea(self):
        """On-area flag (default: None)."""
        return self._config_get(None)

    @onArea.setter
    def onArea(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def style(self) -> 'OptionSeriesScatterLabelStyle':
        """Nested style sub-options object."""
        return self._config_sub_data('style', OptionSeriesScatterLabelStyle)

    @property
    def useHTML(self):
        """Whether to render the label with HTML (default: False)."""
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
@pytest.mark.django_db
def test_exclude_gtas_for_incompleted_period(client, monkeypatch, helpers, defc_codes, partially_completed_year, late_gtas, early_gtas, basic_faba):
    """Only GTAS rows from completed submission periods are counted.

    NOTE(review): the original first line read ``.django_db`` — a
    syntactically invalid remnant of a stripped ``@pytest.mark.django_db``
    marker, restored here.
    """
    # Freeze "now" inside the early year so the late-period GTAS is excluded.
    helpers.patch_datetime_now(monkeypatch, EARLY_YEAR, LATE_MONTH, 25)
    helpers.reset_dabs_cache()
    resp = client.get(OVERVIEW_URL)
    assert (resp.data['funding'] == [{'amount': EARLY_GTAS_CALCULATIONS['total_budgetary_resources'], 'def_code': 'M'}])
    assert (resp.data['total_budget_authority'] == EARLY_GTAS_CALCULATIONS['total_budgetary_resources'])
    assert (resp.data['spending']['total_obligations'] == EARLY_GTAS_CALCULATIONS['total_obligations'])
    assert (resp.data['spending']['total_outlays'] == EARLY_GTAS_CALCULATIONS['total_outlays'])
def try_analyses_conllu(token, outfile, hacks=None):
    """Score each analysis of *token* against the reference CoNLL-U row and
    print the CoNLL-U line for the best-scoring one to *outfile*."""
    reference = get_reference_conllu_list(token)
    best = None
    best_score = -1
    for idx, anal in enumerate(token.analyses):
        upos = anal.get_upos()
        feats = anal.printable_ud_feats()
        lemmas = anal.get_lemmas()
        dephead = anal.udeppos
        depname = anal.udepname
        lemma = '#'.join(anal.get_lemmas()) if lemmas else '_'
        score = 0
        # Exact UPOS match is a strong signal.
        if upos == reference[3]:
            score += 10
        # Lemma: exact match > match modulo '#' separators > case-insensitive.
        if lemma == reference[2]:
            score += 10
        elif lemma.strip('#') == reference[2].strip('#'):
            score += 5
        elif lemma.lower() == reference[2].lower():
            score += 5
        # Features: exact match, otherwise count shared feature atoms.
        if feats == reference[5]:
            score += 10
        else:
            score += len(set(feats.split('|')) & set(reference[5].split('|')))
        # Dependency head/relation are weak tie-breakers.
        if dephead == reference[6]:
            score += 1
        if depname == reference[7]:
            score += 1
        # Strictly greater: ties keep the earliest analysis.
        if score > best_score:
            best = idx
            best_score = score
    print(token.printable_conllu(hacks, best), file=outfile)
class OptionSeriesArcdiagramSonificationContexttracksPointgrouping(Options):
    """Point-grouping options for arcdiagram sonification context tracks.

    NOTE(review): each accessor pair below appeared as two undecorated
    ``def``s with the same name, so the setter silently shadowed the getter —
    evidently stripped ``@property`` / ``@<name>.setter`` decorators, which
    are restored here.
    """

    @property
    def algorithm(self):
        """Grouping algorithm (default: 'minmax')."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is enabled (default: True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan of each group (default: 15)."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property used for grouping (default: 'y')."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class Unpacker():
    """Base class for APK unpacker plugins: heuristics to decide whether an
    APK is packed, helpers to search its DEX files, and routines to detect
    and write out decrypted payloads.

    NOTE(review): ``filter_dvms``, ``get_smali``, ``get_array_data`` and
    ``find_method_in_class_m`` take no ``self`` parameter — the original
    ``@staticmethod`` decorators were evidently stripped (``__init__`` calls
    ``self.filter_dvms`` with a single argument, which only works for a
    static method); they are restored here.
    """

    tag = 'DefaultUnpackPlugin'
    name = 'DefaultUnpackName'

    def __init__(self, tag: str, name: str, apk_object: APK, dexes: list[DEX], output_dir):
        """Record plugin identity, the APK handle, the usable DEX objects and
        the directory where decrypted payloads are written."""
        self.tag = tag
        self.name = name
        self.decrypted_payload_path = None  # set once a payload is written
        self.logger = logger
        self.apk_object = apk_object
        # Keep only DEX objects whose classes parsed successfully.
        self.dexes = list(filter(self.filter_dvms, dexes))
        self.output_dir = output_dir if output_dir else os.getcwd()

    @staticmethod
    def filter_dvms(dvm):
        """True when *dvm* has a parsed class list."""
        return dvm.classes is not None

    def is_packed(self) -> bool:
        """Heuristic: the fraction of manifest components whose classes are
        missing from every DEX; the ambiguous middle range is decided by
        whether the main activity resolves."""
        ispacked = False
        not_found_counter = 0
        act_serv_recv = ((self.apk_object.get_activities() + self.apk_object.get_receivers()) + self.apk_object.get_services())
        for component in act_serv_recv:
            if component:
                for dex in self.dexes:
                    try:
                        dex_classes = dex.get_classes_names()
                    except Exception:
                        # Unreadable DEX: skip it for this component.
                        continue
                    clas_name = (('L' + component.replace('.', '/')) + ';')
                    if (clas_name in dex_classes):
                        break
                else:
                    # Component class absent from every DEX.
                    not_found_counter += 1
        if (len(act_serv_recv) == 0):
            return False
        score = (not_found_counter / len(act_serv_recv))
        self.logger.info(f'Packed : Score : {score}')
        if (score > 0.8):
            ispacked = True
        elif (score == 0.0):
            ispacked = False
        else:
            # Ambiguous score: check whether the main activity resolves.
            res = self.apk_object.get_main_activity()
            if res:
                for dex in self.dexes:
                    try:
                        dex_classes = dex.get_classes_names()
                    except Exception:
                        continue
                    clas_name = (('L' + res.replace('.', '/')) + ';')
                    if (clas_name in dex_classes):
                        break
                else:
                    ispacked = True
        return ispacked

    def is_really_unpacked(self) -> bool:
        """Add the decrypted payload DEX and re-run the packing heuristic;
        unpacking succeeded if the APK no longer looks packed."""
        if (not self.decrypted_payload_path):
            return False
        with open(self.decrypted_payload_path, 'rb') as fp:
            self.dexes.append(DEX(fp.read()))
        return (not self.is_packed())

    def get_tag(self) -> str:
        """Plugin tag."""
        return self.tag

    def get_name(self) -> str:
        """Plugin display name."""
        return self.name

    def __str__(self):
        return f'''Name: {self.name}
Tag: {self.tag}'''

    @staticmethod
    def get_smali(target_method: EncodedMethod) -> str:
        """Render *target_method*'s instructions as a smali-like string,
        normalizing quoting around const-string operands."""
        smali_str = ''
        for ins in target_method.get_instructions():
            smali_str += f'{ins.get_name()} '
            if (ins.get_name() == 'const-string'):
                # Strip the doubled quote artifacts around string operands.
                replaced_const_string = ins.get_output().replace('"\'', '"').replace('\'"', '"')
                smali_str += (replaced_const_string + '\n')
            else:
                smali_str += f'{ins.get_output()}\n'
        return smali_str

    @staticmethod
    def get_array_data(target_method: EncodedMethod) -> list:
        """Collect the byte arrays embedded in fill-array-data-payload
        pseudo-instructions of *target_method*."""
        barrays = []
        for ins in target_method.get_instructions():
            if (ins.get_name() == 'fill-array-data-payload'):
                raw_data = list(ins.get_raw())
                data_size = raw_data[4]
                # Payload bytes start at offset 8 of the raw instruction.
                barrays.append(bytearray(raw_data[8:(8 + data_size)]))
        return barrays

    def find_main_application(self) -> str:
        """Smali name of the Application class: the one declared in the
        manifest, or else a class extending android.app.Application."""
        application_smali = None
        application = self.apk_object.get_attribute_value('application', 'name')
        if application is None:
            for d in self.dexes:
                for c in d.get_classes():
                    if (c.get_superclassname() == 'Landroid/app/Application;'):
                        application_smali = c.get_name()
                        break
        else:
            application_smali = (('L' + application.replace('.', '/')) + ';')
        return application_smali

    def find_method(self, klass_name: str, method_name: str, descriptor: str='') -> EncodedMethod:
        """Find a method by exact name (and optional descriptor) in
        *klass_name* across all DEX files; None when absent."""
        for dvm in self.dexes:
            c = dvm.get_class(klass_name)
            if c is not None:
                for method in c.get_methods():
                    if (method.get_name() == method_name):
                        if (descriptor == '') or (method.get_descriptor() == descriptor):
                            return method
        return None

    def find_method_re(self, klass_name: str, method_name: str, descriptor: str='') -> EncodedMethod:
        """Like ``find_method`` but *method_name* is a regular expression.

        NOTE(review): this requires ``len(re.findall(...)) > 1``, i.e. at
        least two matches within the method name; kept as-is, but confirm
        this was not meant to be ``>= 1``.
        """
        for dvm in self.dexes:
            c = dvm.get_class(klass_name)
            if c is not None:
                for method in c.get_methods():
                    if (len(re.findall(method_name, method.get_name())) > 1):
                        if (descriptor == '') or (method.get_descriptor() == descriptor):
                            return method
        return None

    def find_class_in_dvms(self, klass_name: str) -> ClassDefItem:
        """Return the class named *klass_name* from whichever DEX defines it."""
        for dvm in self.dexes:
            c = dvm.get_class(klass_name)
            if c is not None:
                return c
        return None

    @staticmethod
    def find_method_in_class_m(klass, method_name):
        """Return the method named *method_name* from *klass*, or None."""
        for method in klass.get_methods():
            if (method.get_name() == method_name):
                return method
        return None

    def lazy_check(self, apk_object: APK, dvms: 'list[DEX]') -> bool:
        """Cheap applicability pre-check; subclasses may override."""
        return True

    def calculate_name(self, file_data) -> str:
        """Deterministic output filename from the payload's MD5 prefix."""
        m = hashlib.md5(file_data).hexdigest()
        return f'external-{m[:8]}.dex'

    def check_header(self, fd) -> bool:
        """True when *fd* starts with a known DEX, PKZIP or zlib header."""
        if ((len(fd) > 7) and (fd[:8] in dex_headers)):
            return True
        if ((len(fd) > 3) and (fd[:4] in pkzip_headers)):
            return True
        if ((len(fd) > 1) and (fd[:2] in zlib_headers)):
            return True
        return False

    def check_and_write_file(self, dec) -> bool:
        """Identify the decrypted blob *dec* (raw DEX, zip containing a DEX,
        or zlib-compressed DEX), write the payload under ``output_dir`` and
        record its path. Returns True on success."""
        if (dec[:8] in dex_headers):
            self.decrypted_payload_path = os.path.join(self.output_dir, self.calculate_name(dec))
            self.logger.success(f'Decryption successful! Output dex : {self.decrypted_payload_path}')
            with open(self.decrypted_payload_path, 'wb') as fp:
                fp.write(dec)
            return True
        if (dec[:4] in pkzip_headers):
            self.logger.success(f'Decryption successful! Found zip file')
            with zipfile.ZipFile(io.BytesIO(dec), 'r') as drop:
                for file in drop.filelist:
                    with drop.open(file.filename) as f:
                        zip_files_ex = f.read(8)
                        f.seek(0)
                        if (zip_files_ex in dex_headers):
                            file_data = f.read()
                            self.decrypted_payload_path = os.path.join(self.output_dir, self.calculate_name(dec))
                            # Log after the path is assigned (the original
                            # logged a stale/None path here).
                            self.logger.info(f'Extracting dex from zip file. Output dex : {self.decrypted_payload_path}')
                            with open(self.decrypted_payload_path, 'wb') as fp:
                                fp.write(file_data)
                            return True
        elif (dec[:2] in zlib_headers):
            try:
                decrypted = zlib.decompress(dec)
            except Exception as e:
                self.logger.error(e)
                return False
            if (decrypted[:8] in dex_headers):
                self.decrypted_payload_path = os.path.join(self.output_dir, self.calculate_name(decrypted))
                self.logger.success(f'Decryption successful! Found zlib file')
                with open(self.decrypted_payload_path, 'wb') as fp:
                    fp.write(decrypted)
                return True
        return False

    def main(self, native_lib: str='') -> dict:
        """Run the plugin end-to-end and return a summary dict with the
        plugin's name, tag, status, and (on success) the output file path."""
        start_time = time.time()
        result = {}
        result['name'] = self.get_name()
        result['tag'] = self.get_tag()
        # Skip the expensive work when the pre-check says "not applicable".
        if (not self.lazy_check(self.apk_object, self.dexes)):
            result['status'] = ('success' if self.get_status() else 'fail')
            return result
        self.start_decrypt()
        result['status'] = ('success' if self.get_status() else 'fail')
        if self.get_status():
            result['output_file'] = self.get_path()
        end_time = time.time()
        self.logger.info(f'total analysis time = {(end_time - start_time)}')
        return result

    def get_status(self) -> bool:
        """True once a decrypted payload has been written."""
        return self.decrypted_payload_path is not None

    def get_path(self) -> str:
        """Path of the written payload (None before success)."""
        return self.decrypted_payload_path

    def start_decrypt(self):
        """Plugin hook: subclasses implement the actual decryption."""
        pass
class ValveStackRedundancyTestCase(ValveTestBases.ValveTestNetwork):
    """Stack-root redundancy: election of a root DP and fail-over behavior."""

    CONFIG = STACK_CONFIG
    # How often root state is (re)evaluated, and how long a root may be
    # unresponsive before it is considered down (3 update intervals).
    STACK_ROOT_STATE_UPDATE_TIME = 10
    STACK_ROOT_DOWN_TIME = (STACK_ROOT_STATE_UPDATE_TIME * 3)

    def setUp(self):
        """Build the test network from the stack config."""
        self.setup_valves(self.CONFIG)

    def dp_by_name(self, dp_name):
        """Return the DP object named *dp_name*, or None."""
        for valve in self.valves_manager.valves.values():
            if (valve.dp.name == dp_name):
                return valve.dp
        return None

    def set_stack_all_ports_status(self, dp_name, status):
        """Force every stack port on *dp_name* into *status*."""
        dp = self.dp_by_name(dp_name)
        for port in dp.stack_ports():
            port.dyn_stack_current_state = status

    def test_redundancy(self):
        """Root election: s1 is nominated first, s2 takes over when it is the
        only recently-live DP, and the root does not flap back needlessly."""
        now = 1
        self.trigger_stack_ports()
        # Start with every DP not running and all stack ports initializing.
        for dp in [valve.dp for valve in self.valves_manager.valves.values()]:
            dp.dyn_running = False
            self.set_stack_all_ports_status(dp.name, STACK_STATE_INIT)
        for valve in self.valves_manager.valves.values():
            self.assertFalse(valve.dp.dyn_running)
            # The configured root is s1 before any maintenance runs.
            self.assertEqual('s1', valve.dp.stack.root_name)
            root_hop_port = valve.dp.stack.shortest_path_port('s1')
            root_hop_port = (root_hop_port.number if root_hop_port else 0)
            self.assertEqual(root_hop_port, self.get_prom('dp_root_hop_port', dp_id=valve.dp.dp_id))
        # No root nominated yet; the first maintenance pass nominates s1.
        self.assertEqual(None, self.valves_manager.meta_dp_state.stack_root_name)
        self.assertFalse(self.valves_manager.maintain_stack_root(now, self.STACK_ROOT_STATE_UPDATE_TIME))
        self.assertEqual('s1', self.valves_manager.meta_dp_state.stack_root_name)
        self.assertEqual(1, self.get_prom('faucet_stack_root_dpid', bare=True))
        self.assertTrue(self.get_prom('is_dp_stack_root', dp_id=1))
        self.assertFalse(self.get_prom('is_dp_stack_root', dp_id=2))
        self.assertEqual(1, self.get_prom('stack_root_change_count_total', bare=True))
        # Everyone stays silent well past the down-time: no better candidate,
        # so the root must not change.
        now += (self.STACK_ROOT_DOWN_TIME * 2)
        self.assertFalse(self.valves_manager.maintain_stack_root(now, self.STACK_ROOT_STATE_UPDATE_TIME))
        self.assertEqual('s1', self.valves_manager.meta_dp_state.stack_root_name)
        self.assertEqual(1, self.get_prom('faucet_stack_root_dpid', bare=True))
        self.assertTrue(self.get_prom('is_dp_stack_root', dp_id=1))
        self.assertFalse(self.get_prom('is_dp_stack_root', dp_id=2))
        self.assertEqual(1, self.get_prom('stack_root_change_count_total', bare=True))
        # s2 becomes live with healthy stack ports while s1 stays silent:
        # maintenance must elect s2 as the new root.
        self.valves_manager.meta_dp_state.dp_last_live_time['s2'] = now
        now += (self.STACK_ROOT_STATE_UPDATE_TIME * 2)
        self.set_stack_all_ports_status('s2', STACK_STATE_UP)
        now += (self.STACK_ROOT_STATE_UPDATE_TIME * 2)
        self.valves_manager.meta_dp_state.dp_last_live_time['s2'] = now
        self.assertTrue(self.valves_manager.maintain_stack_root(now, self.STACK_ROOT_STATE_UPDATE_TIME))
        self.assertEqual('s2', self.valves_manager.meta_dp_state.stack_root_name)
        self.assertEqual(2, self.get_prom('faucet_stack_root_dpid', bare=True))
        self.assertFalse(self.get_prom('is_dp_stack_root', dp_id=1))
        self.assertTrue(self.get_prom('is_dp_stack_root', dp_id=2))
        self.assertEqual(2, self.get_prom('stack_root_change_count_total', bare=True))
        # s2 remains recently live: the root must stay s2 (no churn).
        now += (self.STACK_ROOT_DOWN_TIME * 2)
        self.valves_manager.meta_dp_state.dp_last_live_time['s2'] = (now - 1)
        self.set_stack_all_ports_status('s2', STACK_STATE_UP)
        self.assertFalse(self.valves_manager.maintain_stack_root(now, self.STACK_ROOT_STATE_UPDATE_TIME))
        self.assertEqual('s2', self.valves_manager.meta_dp_state.stack_root_name)
        self.assertEqual(2, self.get_prom('faucet_stack_root_dpid', bare=True))
        self.assertFalse(self.get_prom('is_dp_stack_root', dp_id=1))
        self.assertTrue(self.get_prom('is_dp_stack_root', dp_id=2))
        self.assertEqual(2, self.get_prom('stack_root_change_count_total', bare=True))
        # s1 coming back alive must NOT preempt a healthy root: still s2.
        self.valves_manager.meta_dp_state.dp_last_live_time['s1'] = (now + 1)
        now += self.STACK_ROOT_STATE_UPDATE_TIME
        self.assertFalse(self.valves_manager.maintain_stack_root(now, self.STACK_ROOT_STATE_UPDATE_TIME))
        self.assertEqual('s2', self.valves_manager.meta_dp_state.stack_root_name)
        self.assertEqual(2, self.get_prom('faucet_stack_root_dpid', bare=True))
        self.assertFalse(self.get_prom('is_dp_stack_root', dp_id=1))
        self.assertTrue(self.get_prom('is_dp_stack_root', dp_id=2))
        self.assertEqual(2, self.get_prom('stack_root_change_count_total', bare=True))
class ConnectorRegistry():
    """Read-only registry over file-based and custom connector templates.

    NOTE(review): the methods take ``cls`` but carried no decorator in the
    original (evidently stripped ``@classmethod``); restored here so they
    are callable on the class itself.
    """

    @classmethod
    def _get_combined_templates(cls) -> Dict[str, ConnectorTemplate]:
        """Merge both template sources; custom entries win on key clashes."""
        return {**FileConnectorTemplateLoader.get_connector_templates(), **CustomConnectorTemplateLoader.get_connector_templates()}

    @classmethod
    def connector_types(cls) -> List[str]:
        """All registered connector type names."""
        return list(cls._get_combined_templates().keys())

    @classmethod
    def get_connector_template(cls, connector_type: str) -> Optional[ConnectorTemplate]:
        """Template for *connector_type*, or None when unknown."""
        return cls._get_combined_templates().get(connector_type)
def test_mysql_loader_setup_loader_connection_error(mysql_loader, mocker):
    """_setup_loader must wrap a low-level connection failure in ValueError."""
    # Force the MySQL connection constructor to fail.
    mocker.patch('mysql.connector.connection.MySQLConnection', side_effect=IOError('Mocked connection error'))
    with pytest.raises(ValueError, match='Unable to connect with the given config:'):
        mysql_loader._setup_loader(config={})
def gen_settings_str(model_obj, description=False, concise=False):
    """Generate a formatted string of a model object's fit settings.

    Parameters
    ----------
    model_obj
        object exposing peak_width_limits, max_n_peaks, min_peak_height,
        peak_threshold and aperiodic_mode attributes
    description: bool
        include a descriptive line under each setting
    concise: bool
        forwarded to the shared ``_format`` helper
    """
    desc = {'peak_width_limits': 'Limits for minimum and maximum peak widths, in Hz.', 'max_n_peaks': 'Maximum number of peaks that can be extracted.', 'min_peak_height': 'Minimum absolute height of a peak, above the aperiodic component.', 'peak_threshold': 'Relative threshold for minimum height required for detecting peaks.', 'aperiodic_mode': 'The approach taken for fitting the aperiodic component.'}
    if not description:
        # Blank out descriptions; empty entries are filtered below.
        desc = {key: '' for key in desc}
    settings = [
        'Peak Width Limits : {}'.format(model_obj.peak_width_limits), desc['peak_width_limits'],
        'Max Number of Peaks : {}'.format(model_obj.max_n_peaks), desc['max_n_peaks'],
        'Minimum Peak Height : {}'.format(model_obj.min_peak_height), desc['min_peak_height'],
        'Peak Threshold: {}'.format(model_obj.peak_threshold), desc['peak_threshold'],
        'Aperiodic Mode : {}'.format(model_obj.aperiodic_mode), desc['aperiodic_mode'],
    ]
    str_lst = ['=', '', 'SpecParam - SETTINGS', '', *[line for line in settings if line != ''], '', '=']
    return _format(str_lst, concise)
def test_message_instance_creation():
    """A Message can be created with a description and a Message status list."""
    from stalker import Status, StatusList
    # Build the three message statuses and bundle them for the Message type.
    status_unread = Status(name='Unread', code='UR')
    status_read = Status(name='Read', code='READ')
    status_replied = Status(name='Replied', code='REP')
    message_status_list = StatusList(name='Message Statuses', statuses=[status_unread, status_read, status_replied], target_entity_type='Message')
    from stalker import Message
    new_message = Message(description='This is a test message', status_list=message_status_list)
    assert (new_message.description == 'This is a test message')
@_blueprint.route('/login/', methods=('GET', 'POST'))
@_blueprint.route('/login', methods=('GET', 'POST'))
def login():
    """Render the login page, remembering a safe post-login redirect target.

    NOTE(review): the two ``_blueprint.route(...)`` lines were bare
    statements in the original (stripped ``@`` decorators), so the view was
    never registered with the blueprint; restored here.
    """
    next_url = flask.request.args.get('next', '/')
    # Only allow site-local redirects: must start with '/' but not '//'
    # (protocol-relative URLs like '//evil.com' would leave the site).
    if (not next_url.startswith('/')) or next_url.startswith('//'):
        next_url = '/'
    flask.session['next_url'] = next_url
    return flask.render_template('login.html')
@_set_stats_type(ofproto.OFPMP_FLOW_MONITOR, OFPFlowUpdateHeader)
@_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPFlowMonitorRequest(OFPFlowMonitorRequestBase):
    """Flow monitor multipart request message.

    NOTE(review): ``_set_stats_type(...)`` and ``_set_msg_type(...)`` were
    bare statements preceding the class (stripped class decorators); restored
    here to match the registration pattern of Ryu's OpenFlow parser classes.
    """

    def __init__(self, datapath, flags=0, monitor_id=0, out_port=ofproto.OFPP_ANY, out_group=ofproto.OFPG_ANY, monitor_flags=0, table_id=ofproto.OFPTT_ALL, command=ofproto.OFPFMC_ADD, match=None, type_=None):
        """Create a request; *match* defaults to a match-all OFPMatch."""
        if (match is None):
            match = OFPMatch()
        super(OFPFlowMonitorRequest, self).__init__(datapath, flags, monitor_id, out_port, out_group, monitor_flags, table_id, command, match)
def process_copr_update(copr, form):
    """Apply an edit-settings form to *copr* and persist the changes.

    Copies the form fields onto the model, updates the selected chroots,
    commits on success (rolling back on known business-rule errors), and
    flashes warnings about runtime dependencies that are missing chroots
    or do not exist.
    """
    copr.name = form.name.data
    copr.homepage = form.homepage.data
    copr.contact = form.contact.data
    # Repo lists are stored space-separated; normalize textarea newlines.
    copr.repos = form.repos.data.replace('\n', ' ')
    copr.description = form.description.data
    copr.instructions = form.instructions.data
    copr.disable_createrepo = form.disable_createrepo.data
    copr.build_enable_net = form.build_enable_net.data
    copr.unlisted_on_hp = form.unlisted_on_hp.data
    copr.follow_fedora_branching = form.follow_fedora_branching.data
    copr.delete_after_days = form.delete_after_days.data
    copr.multilib = form.multilib.data
    copr.module_hotfixes = form.module_hotfixes.data
    copr.fedora_review = form.fedora_review.data
    copr.runtime_dependencies = form.runtime_dependencies.data.replace('\n', ' ')
    copr.bootstrap = form.bootstrap.data
    copr.isolation = form.isolation.data
    copr.appstream = form.appstream.data
    copr.packit_forge_projects_allowed = form.packit_forge_projects_allowed.data
    copr.repo_priority = form.repo_priority.data
    # Only admins may disable auto-pruning; everyone else is forced on.
    if flask.g.user.admin:
        copr.auto_prune = form.auto_prune.data
    else:
        copr.auto_prune = True
    try:
        coprs_logic.CoprChrootsLogic.update_from_names(flask.g.user, copr, form.selected_chroots)
        coprs_logic.CoprsLogic.update(flask.g.user, copr)
    except (exceptions.ActionInProgressException, exceptions.InsufficientRightsException, exceptions.ConflictingRequest) as e:
        # Known business errors: surface to the user, keep the DB untouched.
        flask.flash(str(e), 'error')
        db.session.rollback()
    else:
        flask.flash('Project has been updated successfully.', 'success')
        db.session.commit()
    # Check every runtime dependency for chroots this project enables but
    # the dependency does not provide.
    (copr_deps, _, non_existing) = ComplexLogic.get_transitive_runtime_dependencies(copr)
    deps_without_chroots = {}
    for copr_dep in copr_deps:
        for chroot in copr.active_chroots:
            if (chroot not in copr_dep.active_chroots):
                if (copr_dep in deps_without_chroots):
                    deps_without_chroots[copr_dep].append(chroot.name)
                else:
                    deps_without_chroots[copr_dep] = [chroot.name]
    if non_existing:
        flask.flash('Non-existing projects set as runtime dependencies: {0}.'.format(', '.join(non_existing)), 'warning')
    for dep in deps_without_chroots:
        # NOTE(review): the else-branch prefixes the group owner's name with
        # '' — a no-op; upstream copr renders group owners as '@name'.
        # Looks like a mangled "'@' + ..." — confirm against upstream.
        flask.flash("Project {0}/{1} that is set as a dependency doesn't provide all the chroots enabled in this project: {2}.".format((dep.owner.name if isinstance(dep.owner, models.User) else ('' + dep.owner.name)), dep.name, ', '.join(deps_without_chroots[dep])), 'warning')
    _check_rpmfusion(copr.repos)
class ResGrad_SC(ShockCapturing_base):
    """Residual-gradient shock capturing.

    Delegates the per-component numerical diffusion computation to the
    compiled ``cshockCapturing`` extension.
    """

    def __init__(self, coefficients, nd, shockCapturingFactor=0.25, lag=True):
        ShockCapturing_base.__init__(self, coefficients, nd, shockCapturingFactor, lag)

    def calculateNumericalDiffusion(self, q):
        """Fill ``self.numDiff[ci]`` for each component from the PDE residual and gradient."""
        for component in range(self.nc):
            cshockCapturing.calculateNumericalDiffusionResGrad(
                self.shockCapturingFactor,
                self.mesh.elementDiametersArray,
                q[('pdeResidual', component)],
                q[('grad(u)', component)],
                self.numDiff[component],
            )
class DBTTest(PythonInstanceTask):
    """Flytekit task that shells out to ``dbt test`` and captures its artifacts."""

    def __init__(self, name: str, **kwargs):
        super(DBTTest, self).__init__(
            task_type='dbt-test',
            name=name,
            task_config=None,
            interface=Interface(inputs={'input': DBTTestInput}, outputs={'output': DBTTestOutput}),
            **kwargs,
        )

    def execute(self, **kwargs) -> DBTTestOutput:
        """Run ``dbt test`` and return exit code plus raw run-result/manifest JSON."""
        task_input: DBTTestInput = kwargs['input']
        cmd = ['dbt', '--log-format', 'json', 'test'] + task_input.to_args()
        full_command = ' '.join(cmd)
        logger.info(f'Executing command: {full_command}')
        (exit_code, logs) = run_cli(cmd)
        logger.info(f'dbt exited with return code {exit_code}')
        # A "handled" dbt failure is fatal only when the caller did not opt out.
        if (exit_code == HANDLED_ERROR_CODE) and (not task_input.ignore_handled_error):
            raise DBTHandledError(f'handled error while executing {full_command}', logs)
        if exit_code == UNHANDLED_ERROR_CODE:
            raise DBTUnhandledError(f'unhandled error while executing {full_command}', logs)
        output_dir = os.path.join(task_input.project_dir, task_input.output_path)

        def _read_artifact(filename):
            # dbt writes its JSON artifacts under the configured output dir.
            with open(os.path.join(output_dir, filename)) as fh:
                return fh.read()

        return DBTTestOutput(
            command=full_command,
            exit_code=exit_code,
            raw_run_result=_read_artifact('run_results.json'),
            raw_manifest=_read_artifact('manifest.json'),
        )
class Cache(object):
    """Callable wrapper that persists a named object to a path on demand."""

    def __init__(self, obj, path, verbose):
        # The wrapped object must expose a ``name`` attribute.
        self.obj = obj
        self.path = path
        self.name = obj.name
        self.verbose = verbose

    def __call__(self, path=None):
        """Save the wrapped object, optionally overriding the target path."""
        target = path if path else self.path
        save(target, self.name, self.obj)
        if self.verbose:
            # Low verbosity reports on stdout; chattier levels go to stderr.
            stream = 'stdout' if self.verbose < (10 - 3) else 'stderr'
            safe_print('{:<30} {}'.format(self.name, 'cached'), file=stream)
class OptionSeriesOrganizationOnpointConnectoroptions(Options):
    """Connector-line options for organization-chart "onpoint" links.

    Bug fix: each getter/setter pair shared a name with no decorators, so
    the second ``def`` silently replaced the first and the getters were
    unreachable. Restored the ``@property`` / ``@<name>.setter`` pairs.
    """

    @property
    def dashstyle(self):
        """Dash style of the connector line."""
        return self._config_get(None)

    @dashstyle.setter
    def dashstyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def stroke(self):
        """Stroke (line) color of the connector."""
        return self._config_get(None)

    @stroke.setter
    def stroke(self, text: str):
        self._config(text, js_type=False)

    @property
    def width(self):
        """Connector line width in pixels (defaults to 1)."""
        return self._config_get(1)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
class CollectorClient(RemoteClientBase):
    """HTTP client for the collector service endpoints."""

    def create_collector(self, id: str, collector: CollectorConfig):
        """Register a new collector configuration under *id*."""
        payload = collector.dict()
        self._request(f'/{id}', 'POST', body=payload)

    def send_data(self, id: str, data: pd.DataFrame):
        """Push a batch of current data to collector *id*."""
        payload = data.to_dict()
        self._request(f'/{id}/data', 'POST', body=payload)

    def set_reference(self, id: str, reference: pd.DataFrame):
        """Upload the reference dataset for collector *id*."""
        payload = reference.to_dict()
        self._request(f'/{id}/reference', 'POST', body=payload)
def test_invalid_document(tmpdir):
    """`validate` must fail and list every missing required OpenAPI field."""
    schema_path = os.path.join(tmpdir, 'schema.json')
    # A deliberately incomplete document: no 'paths', no 'info.title'.
    with open(schema_path, 'w') as schema_file:
        schema_file.write(json.dumps({'openapi': '3.0.0', 'info': {'version': ''}}))
    result = CliRunner().invoke(cli, ['validate', '--path', schema_path, '--format', 'openapi'])
    assert result.exit_code != 0
    assert result.output == "* The field 'paths' is required. (At line 1, column 1.)\n* The field 'title' is required. (At ['info'], line 1, column 30.)\n Invalid OpenAPI schema.\n"
@pytest.mark.parametrize('request_1__time_requested', [datetime(2020, 1, 1)])
def test_versions(dashboard_user, request_1):
    """The versions endpoint reports each request's timestamp as an HTTP date.

    Bug fix: the parametrize line started with a bare ``.parametrize`` (a
    syntax error) — the ``@pytest.mark`` decorator prefix had been lost.
    """
    response = dashboard_user.get('dashboard/api/versions')
    assert response.status_code == 200
    # Exactly one row must match the fixture's requested version.
    [data] = [row for row in response.json if row['version'] == request_1.version_requested]
    assert data['date'] == request_1.time_requested.strftime('%a, %d %b %Y %H:%M:%S GMT')
class BaseSQLParserTest(unittest.TestCase):
    """Shared test suite for CREATE TABLE parsers.

    Subclasses assign ``self.parse_function`` in ``setUp``; every test is
    skipped on this base class itself. The tests cover column/table
    attributes, index forms, defaults, equality semantics of the parsed
    model, and round-tripping through ``to_sql()``.
    """
    def setUp(self):
        # Subclasses must override and set a real parser here.
        self.parse_function = None
    def skipTestIfBaseClass(self, reason):
        # Guard: running directly on the base class has no parser to test.
        if (not self.parse_function):
            self.skipTest(reason)
    def test_simple_create_table(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sql = 'Create table foo\n( column1 int )'
        tbl = self.parse_function(sql)
        self.assertEqual(tbl.name, 'foo')
        self.assertEqual(len(tbl.column_list), 1)
        # Round-trip: parsing the regenerated SQL must yield an equal model.
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_json_column(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sql = 'Create table foo\n( column1 json )'
        tbl = self.parse_function(sql)
        # JSON columns are a MySQL 8.0 feature.
        self.assertTrue(tbl.has_80_features)
        self.assertEqual(len(tbl.column_list), 1)
        self.assertEqual(tbl.column_list[0].column_type, 'JSON')
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_desc_index(self):
        self.skipTestIfBaseClass('Need to implement base class')
        # DESC indexes are a MySQL 8.0 feature; verify ASC/DESC is captured
        # per index column, including inside a composite index.
        sql = "\n CREATE TABLE `test_table_1` (\n `id` bigint unsigned NOT NULL AUTO_INCREMENT,\n `a` int DEFAULT NULL,\n `t` char(1) NOT NULL DEFAULT 't',\n PRIMARY KEY (`id` DESC),\n KEY `t_index` (`t`),\n KEY `a_index` (`a` ASC),\n KEY `a_index_desc` (`a` DESC),\n KEY `a_t_composite_index` (`a` ASC, `t` DESC)\n ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4\n "
        tbl = self.parse_function(sql)
        self.assertTrue(tbl.has_80_features)
        for idx in tbl.indexes:
            if (idx.name == 't_index'):
                self.assertEqual(len(idx.column_list), 1)
                self.assertEqual(idx.column_list[0].name, 't')
                self.assertEqual(idx.column_list[0].order, 'ASC')
            elif (idx.name == 'a_index'):
                self.assertEqual(len(idx.column_list), 1)
                self.assertEqual(idx.column_list[0].name, 'a')
                self.assertEqual(idx.column_list[0].order, 'ASC')
            elif (idx.name == 'a_index_desc'):
                self.assertEqual(len(idx.column_list), 1)
                self.assertEqual(idx.column_list[0].name, 'a')
                self.assertEqual(idx.column_list[0].order, 'DESC')
            elif (idx.name == 'a_t_composite_index'):
                self.assertEqual(len(idx.column_list), 2)
                for col in idx.column_list:
                    if (col.name == 'a'):
                        self.assertEqual(col.order, 'ASC')
                    elif (col.name == 't'):
                        self.assertEqual(col.order, 'DESC')
                    else:
                        raise Exception('Wrong column name')
            else:
                raise Exception('Wrong index name')
        self.assertEqual(len(tbl.primary_key.column_list), 1)
        self.assertEqual(tbl.primary_key.column_list[0].order, 'DESC')
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_table_name_quoted_with_backtick(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sql = 'Create table `foo`\n( column1 int )'
        tbl = self.parse_function(sql)
        self.assertEqual(tbl.name, 'foo')
        self.assertEqual(len(tbl.column_list), 1)
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_column_name_quoted_with_backtick(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sql = 'Create table foo\n( `column1` int )'
        tbl = self.parse_function(sql)
        self.assertEqual(tbl.name, 'foo')
        self.assertEqual(len(tbl.column_list), 1)
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_simple_with_all_supported_int_type(self):
        self.skipTestIfBaseClass('Need to implement base class')
        supported_type = ['int', 'tinyint', 'bigint', 'mediumint', 'smallint']
        for col_type in supported_type:
            for unsigned in ['unsigned', '']:
                sql = 'Create table foo\n( column1 {} {})'.format(col_type, unsigned)
                tbl = self.parse_function(sql)
                self.assertTrue(tbl.primary_key.is_unique)
                self.assertEqual(len(tbl.column_list), 1)
                self.assertEqual(tbl.column_list[0].name, 'column1')
                self.assertEqual(tbl.column_list[0].column_type, col_type.upper())
                self.assertEqual(tbl.name, 'foo')
                self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_table_comment(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sql = "Create table foo\n( column1 int )comment='table comment'"
        tbl = self.parse_function(sql)
        self.assertEqual(tbl.name, 'foo')
        # Comments are stored with their surrounding quotes preserved.
        self.assertEqual(tbl.comment, "'table comment'")
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_table_charset(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = ['Create table foo\n( column1 int )character set=utf8', 'Create table foo\n( column1 int )default character set=utf8']
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(tbl.name, 'foo')
            self.assertEqual(tbl.charset, 'utf8')
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_bare_column_collate(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sql = 'Create table foo\n( column1 varchar(10) collate latin1_bin )'
        tbl = self.parse_function(sql)
        self.assertEqual(tbl.name, 'foo')
        self.assertEqual(tbl.column_list[0].collate, 'latin1_bin')
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_bare_column_charset(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sql = 'Create table foo\n( column1 varchar(10) character set latin1 )'
        tbl = self.parse_function(sql)
        self.assertEqual(tbl.name, 'foo')
        self.assertEqual(tbl.column_list[0].charset, 'latin1')
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_table_collate(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = ["Create table foo\n( column1 int )collate='utf8_bin'", "Create table foo\n( column1 int )default collate='utf8_bin'"]
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(tbl.name, 'foo')
            self.assertEqual(tbl.collate, 'utf8_bin')
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_table_key_block_size(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sql = 'Create table foo\n( column1 int )key_block_size=16'
        tbl = self.parse_function(sql)
        self.assertEqual(tbl.name, 'foo')
        self.assertEqual(tbl.key_block_size, 16)
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_engine(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sql = 'Create table foo\n( column1 int )engine=Innodb'
        tbl = self.parse_function(sql)
        self.assertEqual(tbl.name, 'foo')
        # Engine names are normalized to upper case by the parser.
        self.assertEqual(tbl.engine, 'INNODB')
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_table_incre(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sql = 'Create table foo\n( column1 int )auto_increment=123'
        tbl = self.parse_function(sql)
        self.assertEqual(tbl.name, 'foo')
        self.assertEqual(tbl.auto_increment, 123)
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_row_format(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = ['Create table foo\n( column1 int )row_format=compressed']
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(tbl.name, 'foo')
            self.assertEqual(tbl.row_format, 'COMPRESSED')
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_simple_with_all_supported_int_type_and_length(self):
        self.skipTestIfBaseClass('Need to implement base class')
        supported_type = ['int', 'tinyint', 'bigint', 'mediumint', 'smallint']
        for col_type in supported_type:
            for unsigned in ['unsigned', '']:
                sql = 'Create table foo\n( column1 {}(10) {})'.format(col_type, unsigned)
                tbl = self.parse_function(sql)
                self.assertTrue(tbl.primary_key.is_unique)
                self.assertEqual(len(tbl.column_list), 1)
                self.assertEqual(tbl.column_list[0].name, 'column1')
                # Display width is kept as a string.
                self.assertEqual(tbl.column_list[0].length, str(10))
                self.assertEqual(tbl.column_list[0].column_type, col_type.upper())
                self.assertEqual(tbl.name, 'foo')
                self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_trailing_pri(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append('Create table foo\n( column1 int , primary key (column1))')
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertTrue(tbl.primary_key.is_unique)
            self.assertEqual(len(tbl.primary_key.column_list), 1)
            self.assertEqual(tbl.primary_key.column_list[0].name, 'column1')
            self.assertEqual(tbl.name, 'foo')
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_multiple_tailing_index(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append("Create table foo\n( column1 int, \ncolumn2 varchar(10),\nkey `index_name1` (column1, column2(5) ) comment 'a comment',\n UNIQUE key `index_name2` (column1 )\n)")
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(len(tbl.indexes), 2)
            self.assertEqual(len(tbl.indexes[0].column_list), 2)
            self.assertEqual(len(tbl.indexes[1].column_list), 1)
            self.assertFalse(tbl.indexes[0].is_unique)
            self.assertEqual(tbl.indexes[0].column_list[0].name, 'column1')
            self.assertEqual(tbl.indexes[0].column_list[1].name, 'column2')
            self.assertEqual(tbl.indexes[0].comment, "'a comment'")
            self.assertEqual(tbl.indexes[1].column_list[0].name, 'column1')
            self.assertTrue(tbl.indexes[1].is_unique)
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_fulltext_index(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append('Create table foo\n( column1 int, \ncolumn2 varchar(10),\n FULLTEXT key `index_name` (column1 )\n)')
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(len(tbl.indexes), 1)
            self.assertEqual(len(tbl.indexes[0].column_list), 1)
            self.assertFalse(tbl.indexes[0].is_unique)
            self.assertEqual(tbl.indexes[0].name, 'index_name')
            self.assertEqual(tbl.indexes[0].key_type, 'FULLTEXT')
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_default_value_int(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append('Create table foo\n( column1 int default 0) ')
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(tbl.column_list[0].default, '0')
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_default_value_double(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append("Create table foo\n ( column1 double default 0.0, column2 double default 0, column3 double default '0') ")
        for sql in sqls:
            tbl = self.parse_function(sql)
            # Defaults are kept verbatim: quoted stays quoted, bare stays bare.
            self.assertEqual(tbl.column_list[0].default, '0.0')
            self.assertEqual(tbl.column_list[1].default, '0')
            self.assertEqual(tbl.column_list[2].default, "'0'")
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_default_value_string(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append("Create table foo\n( column1 int default '0') ")
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(tbl.column_list[0].default, "'0'")
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_default_value_empty_string(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append("Create table foo\n( column1 char(1) default '') ")
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(tbl.column_list[0].default, "''")
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_nullable(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append('Create table foo\n( column1 int null) ')
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertTrue(tbl.column_list[0].nullable)
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_explicit_nullable(self):
        self.skipTestIfBaseClass('Need to implement base class')
        # An explicit NULL is equivalent to omitting the nullability clause.
        left = 'Create table foo\n( column1 int null) '
        right = 'Create table foo\n( column1 int) '
        left_obj = self.parse_function(left)
        right_obj = self.parse_function(right)
        self.assertEqual(left_obj, right_obj)
    def test_not_nullable(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append('Create table foo\n( column1 int not null) ')
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertFalse(tbl.column_list[0].nullable)
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_col_type_timestamp(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append('Create table foo\n( column1 timestamp default current_timestamp) ')
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(tbl.column_list[0].default, 'CURRENT_TIMESTAMP')
    def test_col_type_timestamp_on_update(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append('Create table foo\n( column1 timestamp(10) default current_timestamp on update current_timestamp) ')
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(tbl.column_list[0].default, 'CURRENT_TIMESTAMP')
            self.assertEqual(tbl.column_list[0].on_update_current_timestamp, 'CURRENT_TIMESTAMP')
    def test_explicit_timestamp_default_for_bare_timestamp(self):
        self.skipTestIfBaseClass('Need to implement base class')
        # MySQL's implicit timestamp defaults should be applied by the parser.
        left = 'Create table foo\n( column1 timestamp(10) ) '
        right = 'Create table foo\n( column1 timestamp(10) NOT NULL default current_timestamp on update current_timestamp) '
        self.assertEqual(self.parse_function(left), self.parse_function(right))
    def test_explicit_timestamp_default_for_not_null(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = 'Create table foo\n( column1 timestamp(10) NOT NULL ) '
        right = 'Create table foo\n( column1 timestamp(10) NOT NULL default current_timestamp on update current_timestamp) '
        self.assertEqual(self.parse_function(left), self.parse_function(right))
    def test_no_accidentally_explicit_timestamp_default_for(self):
        self.skipTestIfBaseClass('Need to implement base class')
        # A NULL-able timestamp must NOT receive the implicit defaults.
        left = 'Create table foo\n( column1 timestamp(10) NULL ) '
        right = 'Create table foo\n( column1 timestamp(10) NOT NULL default current_timestamp on update current_timestamp) '
        self.assertNotEqual(self.parse_function(left), self.parse_function(right))
    def test_col_collate(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append('Create table foo\n( column1 varchar(10) collate utf8_bin) ')
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(tbl.column_list[0].collate, 'utf8_bin')
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_col_charset(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append('Create table foo\n( column1 varchar(10) character set utf8) ')
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(tbl.column_list[0].charset, 'utf8')
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_col_comment(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append("Create table foo\n( column1 int comment 'column comment') ")
        for sql in sqls:
            tbl = self.parse_function(sql)
            self.assertEqual(tbl.column_list[0].comment, "'column comment'")
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_index_use_btree(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sql = 'Create table foo\n(id int primary key, column1 int, key m_idx (column1) USING BTREE ) '
        tbl = self.parse_function(sql)
        self.assertEqual(tbl.indexes[0].using.upper(), 'BTREE')
        self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_default_value_bit(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append("Create table foo\n( column1 bit default b'0') ")
        for sql in sqls:
            tbl = self.parse_function(sql)
            # The b'' prefix is dropped but flagged via is_default_bit.
            self.assertEqual(tbl.column_list[0].default, "'0'")
            self.assertTrue(tbl.column_list[0].is_default_bit, "'0'")
            self.assertEqual(tbl, self.parse_function(tbl.to_sql()))
    def test_foreign_key(self):
        self.skipTestIfBaseClass('Need to implement base class')
        # Unnamed foreign keys are rejected; named constraints are accepted.
        sql = 'Create table foo\n( column1 int primary key, foreign key (column1) references table2 (column1))'
        with self.assertRaises(ParseError):
            self.parse_function(sql)
        sql = 'Create table foo\n( column1 int primary key, constraint `key_with_name_1` foreign key (column1) references table2 (column1), constraint `key_with_name_2` foreign key (column2) references table2 (column2))'
        sql_obj = self.parse_function(sql)
        self.assertTrue((sql_obj.constraint != ''))
        self.assertTrue((sql_obj.fk_constraint != {}))
        sql = 'Create table foo\n( column1 int primary key) '
        sql_obj = self.parse_function(sql)
        self.assertTrue((sql_obj.constraint == ''))
        self.assertTrue((sql_obj.fk_constraint == {}))
    def test_multiple_primary(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append('Create table foo\n( column1 int primary key, primary key (column1))')
        for sql in sqls:
            with self.assertRaises(ParseError):
                self.parse_function(sql)
    def test_to_sql_consistency(self):
        self.skipTestIfBaseClass('Need to implement base class')
        # to_sql() must be a fixed point: parse(to_sql()).to_sql() == to_sql().
        sqls = []
        sqls.append("Create table foo\n( column1 bit default b'0', column2 varchar(100) default 'abc',PRIMARY key (column1) ,key `aname` (column1, column2(19)) )")
        for sql in sqls:
            tbl = self.parse_function(sql).to_sql()
            str_after_parse = self.parse_function(tbl).to_sql()
            self.assertEqual(tbl, str_after_parse)
    def test_boolean_and_bool(self):
        self.skipTestIfBaseClass('Need to implement base class')
        sqls = []
        sqls.append('Create table foo(column1 bool)')
        sqls.append('Create table foo(column1 boolean)')
        for sql in sqls:
            self.parse_function(sql)
    def test_inequallity_in_index_col_length(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = "Create table `foobar`\n( `column``1` bit default b'0', column2 varchar(101) default 'abc',PRIMARY key (`column``1`) ,key `a``name` (column1, column2(20)) )"
        right = "Create table `foobar`\n( `column``1` bit default b'0', column2 varchar(100) default 'abc',PRIMARY key (`column``1`) ,key `a``name` (column1, column2(19)) )"
        left = self.parse_function(left)
        right = self.parse_function(right)
        self.assertNotEqual(left.indexes, right.indexes)
    def test_inequallity_in_col_type(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = "Create table `foo``bar`\n( `column``1` bit default b'0', column2 varchar(100) default 'abc',PRIMARY key (`column``1`) ,key `a``name` (column1, column2(19)) )"
        right = "Create table `foo``bar`\n( `column``1` bit default b'0', column2 int(100) default 'abc',PRIMARY key (`column``1`) ,key `a``name` (column1, column2(19)) )"
        left = self.parse_function(left)
        right = self.parse_function(right)
        self.assertNotEqual(left.column_list, right.column_list)
    def test_inequallity_in_col_default(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = "Create table `foobar`\n( `column``1` bit default b'0', column2 varchar(100) default 'abcd',PRIMARY key (`column``1`) ,key `a``name` (column1, column2(20)) )"
        right = "Create table `foobar`\n( `column``1` bit default b'0', column2 varchar(100) default 'abc',PRIMARY key (`column``1`) ,key `a``name` (column1, column2(20)) )"
        left = self.parse_function(left)
        right = self.parse_function(right)
        self.assertNotEqual(left.column_list, right.column_list)
    def test_inequallity_in_col_name(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = "Create table `foobar`\n( `column``1` bit default b'0', column3 varchar(100) default 'abc',PRIMARY key (`column``1`) ,key `a``name` (column1, column2(20)) )"
        right = "Create table `foobar`\n( `column``1` bit default b'0', column2 varchar(100) default 'abc',PRIMARY key (`column``1`) ,key `a``name` (column1, column2(20)) )"
        left = self.parse_function(left)
        right = self.parse_function(right)
        self.assertNotEqual(left.column_list, right.column_list)
    def test_inequallity_in_index_length(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = "Create table `foo``bar`\n( `column``1` bit default b'0', column2 varchar(100) default 'abc',PRIMARY key (`column``1`) ,key `a``name` (column1, column2(20)) )"
        right = "Create table `foo``bar`\n( `column``1` bit default b'0', column2 varchar(100) default 'abc',PRIMARY key (`column``1`) ,key `a``name` (column1, column2(19)) )"
        left = self.parse_function(left)
        right = self.parse_function(right)
        self.assertNotEqual(left.indexes, right.indexes)
    def test_inequallity_in_set(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = "Create table `foo``bar`\n( `column``1` bit default b'0', column2 set('a','b'),PRIMARY key (`column``1`) ,key `a``name` (column1, column2(20)) )"
        right = "Create table `foo``bar`\n( `column``1` bit default b'0', column2 set('a','b','c'),PRIMARY key (`column``1`) ,key `a``name` (column1, column2(19)) )"
        left = self.parse_function(left)
        right = self.parse_function(right)
        self.assertNotEqual(left, right)
    def test_inequallity_in_enum(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = "Create table `foo``bar`\n( `column``1` bit default b'0', column2 enum('a','b'),PRIMARY key (`column``1`) ,key `a``name` (column1, column2(20)) )"
        right = "Create table `foo``bar`\n( `column``1` bit default b'0', column2 enum('a','b','c'),PRIMARY key (`column``1`) ,key `a``name` (column1, column2(19)) )"
        left = self.parse_function(left)
        right = self.parse_function(right)
        self.assertNotEqual(left, right)
    def test_identical_equallity(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = right = "Create table `foo``bar`\n( `column``1` bit default b'0', column2 varchar(100) default 'abc', column3 timestamp on update current_timestamp, column4 enum('a','b'), column5 set('a','b'),PRIMARY key (`column``1`) ,key `a``name` (column1, column2(19)) )"
        left = self.parse_function(left)
        right = self.parse_function(right)
        self.assertEqual(left.indexes, right.indexes)
        self.assertEqual(left.column_list, right.column_list)
        self.assertEqual(left, right)
        # __ne__ must agree with __eq__.
        self.assertFalse((left != right))
    def test_implicit_default_for_nullable(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = 'Create table `foobar`\n( `column1` int ,PRIMARY key (`column1`))'
        right = 'Create table `foobar`\n( `column1` int default null,PRIMARY key (`column1`))'
        right_default = 'Create table `foobar`\n( `column1` int default 123,PRIMARY key (`column1`))'
        left = self.parse_function(left)
        right = self.parse_function(right)
        self.assertEqual(left, right)
        right = self.parse_function(right_default)
        self.assertNotEqual(left, right)
    def test_implicit_quote_for_default(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = 'Create table `foobar`\n( `column1` int default 0,PRIMARY key (`column1`))'
        right = "Create table `foobar`\n( `column1` int default '0',PRIMARY key (`column1`))"
        left = self.parse_function(left)
        right = self.parse_function(right)
        self.assertEqual(left, right)
    def test_implicit_quote_for_not_nulldefault(self):
        self.skipTestIfBaseClass('Need to implement base class')
        left = 'Create table `foobar`\n( `column1` int not null default 0,PRIMARY key (`column1`))'
        right = "Create table `foobar`\n( `column1` int not null default '0',PRIMARY key (`column1`))"
        left = self.parse_function(left)
        right = self.parse_function(right)
        self.assertEqual(left, right)
class ActivityTaskStartedEventAttributes(betterproto.Message):
    """History-event payload recorded when an activity task starts executing."""

    # NOTE(review): betterproto-generated messages are normally decorated with
    # @dataclass(eq=False, repr=False); the decorator appears to have been
    # stripped here — confirm against the generated source.
    scheduled_event_id: int = betterproto.int64_field(1)
    identity: str = betterproto.string_field(2)
    request_id: str = betterproto.string_field(3)
    attempt: int = betterproto.int32_field(4)
    last_failure: v1failure.Failure = betterproto.message_field(5)
class NDBField(NAttributeProperty):
    """Attribute descriptor that registers itself in its owner's NDB field map."""

    def __set_name__(self, owner, name):
        # Namespace the key by the owning class's own ``name`` attribute.
        self._key = f'{owner.name}::{name}'
        registry = getattr(owner, '_ndb_fields', None)
        if registry is None:
            # First NDB field declared on this class: create the registry.
            registry = {}
            setattr(owner, '_ndb_fields', registry)
        registry[name] = self
def _validate_values(opts: Mapping, conf_path: Path) -> None:
if ('wrap' in opts):
wrap_value = opts['wrap']
if (not ((isinstance(wrap_value, int) and (wrap_value > 1)) or (wrap_value in {'keep', 'no'}))):
raise InvalidConfError(f"Invalid 'wrap' value in {conf_path}")
if ('end_of_line' in opts):
if (opts['end_of_line'] not in {'crlf', 'lf', 'keep'}):
raise InvalidConfError(f"Invalid 'end_of_line' value in {conf_path}")
if ('number' in opts):
if (not isinstance(opts['number'], bool)):
raise InvalidConfError(f"Invalid 'number' value in {conf_path}") |
def load_file_a(submission_attributes, appropriation_data, db_cursor):
    """Bulk-load File A (appropriation account balance) rows for a submission.

    Rows whose TAS cannot be resolved to a treasury account are skipped and
    counted; a summary of skips (or full success) is logged at the end.
    """
    # Fields matching this pattern get their sign reversed by
    # load_data_into_model (gross outlay amounts).
    reverse = re.compile('gross_outlay_amount_by_tas_cpe')
    skipped_tas = defaultdict(int)
    # Pre-warm the TAS lookup cache in one query for all rows.
    bulk_treasury_appropriation_account_tas_lookup(appropriation_data, db_cursor)
    save_manager = BulkCreateManager(AppropriationAccountBalances)
    for row in appropriation_data:
        (treasury_account, tas_rendering_label) = get_treasury_appropriation_account_tas_lookup(row.get('account_num'))
        if (treasury_account is None):
            # Unresolvable TAS: count and skip rather than fail the load.
            skipped_tas[tas_rendering_label] += 1
            continue
        appropriation_balances = AppropriationAccountBalances()
        value_map = {'treasury_account_identifier': treasury_account, 'submission': submission_attributes, 'reporting_period_start': submission_attributes.reporting_period_start, 'reporting_period_end': submission_attributes.reporting_period_end}
        field_map = {}
        save_manager.append(load_data_into_model(appropriation_balances, row, field_map=field_map, value_map=value_map, save=False, reverse=reverse))
    # Flush any rows still buffered below the bulk-create batch size.
    save_manager.save_stragglers()
    for (tas, count) in skipped_tas.items():
        logger.info(f'Skipped {count:,} rows due to {tas}')
    total_tas_skipped = sum([count for count in skipped_tas.values()])
    if (total_tas_skipped > 0):
        logger.info(f'SKIPPED {total_tas_skipped:,} ROWS of File A (missing TAS)')
    else:
        logger.info('All File A records in Broker loaded into USAspending')
# NOTE(review): the bare `(IField)` expression below looks like a stripped
# class decorator (e.g. `@implements(IField)` / `@provider(IField)`) — as
# written it only evaluates the name and discards it. Confirm against the
# original source before relying on interface registration.
(IField)
class Field(MField, LayoutWidget):
    # The widget's current value; synchronized with the Qt control below.
    value = Any()
    def _get_control_value(self):
        """Read the current value from the underlying Qt control."""
        return self.control.value()
    def _set_control_value(self, value):
        """Push *value* down into the underlying Qt control."""
        self.control.setValue(value)
    def _get_control_alignment(self):
        """Return the control's alignment translated to the toolkit-neutral form."""
        return qalignment_to_alignment(self.control.alignment())
    def _set_control_alignment(self, alignment):
        """Apply a toolkit-neutral alignment to the Qt control."""
        self.control.setAlignment(alignment_to_qalignment(alignment))
class OptionPlotoptionsBoxplotSonificationContexttracksMappingPlaydelay(Options):
    """Sonification "playDelay" mapping options for boxplot context tracks.

    Bug fix: each getter/setter pair shared a name with no decorators, so
    the second ``def`` silently replaced the first and the getters were
    unreachable. Restored the ``@property`` / ``@<name>.setter`` pairs.
    """

    @property
    def mapFunction(self):
        """Mapping function applied when translating values."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the mapping reads its source values from."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped output range."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped output range."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope the min/max normalization is computed within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.