code stringlengths 281 23.7M |
|---|
def window_function_test(file_path, args):
    """Run the window-function generator and verify its output file.

    Ensures the output directory exists and is clean of any previous result,
    invokes the generator, checks that the produced file is mentioned on
    stdout, exists on disk, and mentions the window name and length, then
    extracts and returns the resulting array.
    """
    target_dir = os.path.dirname(file_path)
    os.makedirs(target_dir, exist_ok=True)
    if os.path.exists(file_path):
        os.remove(file_path)

    stdout = run_window_function(args)
    assert file_path in stdout, 'output filename should be mentioned on stdout'
    assert os.path.exists(file_path), 'output file should have been written'

    with open(file_path, 'r') as handle:
        contents = handle.read()
    assert args['name'] in contents
    assert str(args['length']) in contents

    return run_extract(os.path.abspath(file_path), args['name'], args['length'], target_dir)
class OptionPlotoptionsParetoStatesSelectMarker(Options):
    """Accessors for `plotOptions.pareto.states.select.marker` chart options.

    Methods come in getter/setter pairs sharing one name.
    NOTE(review): the pairs look like `@property` / `@<name>.setter`
    definitions whose decorators were stripped during extraction; as written
    each second def shadows the first. Confirm against the generator output.
    """

    def enabled(self):
        # Default None: fall through to Highcharts' own default.
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        # Non-None default of 2.
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Default marker line color is white.
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        # Default line width of 0.
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        # Default marker radius of 4.
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
def test_index_merge(plugdir_and_storage):
    """Merging two repo index files keeps plugins from both files and lets
    the later file override entries from the earlier one."""
    plugdir, storage = plugdir_and_storage
    repo_files = (
        os.path.join(assets, 'repos', 'b.json'),
        os.path.join(assets, 'repos', 'a.json'),
    )
    manager = repo_manager.BotRepoManager(storage, plugdir, repo_files)
    manager.index_update()

    merged = manager[repo_manager.REPO_INDEX]
    for num in (1, 2, 3):
        assert f'pluginname{num}' in merged[f'name{num}/err-reponame{num}']
    # The later index file must win for duplicated entries.
    assert merged['name2/err-reponame2']['pluginname2']['name'] == 'NewPluginName2'
def config_wizard():
    """Interactively build a lastcast configuration and optionally write it
    to ~/.lastcast.toml.

    Prompts for last.fm and/or Libre.fm credentials, discovers available
    chromecast devices, and lets the user pick devices and (optionally)
    override the app whitelist.
    """
    config = {'chromecast': {}}

    if click.confirm('Set up last.fm account?', default=True):
        click.echo("\nYou'll need to create a last.fm API application first. Do so here:\n\n you fill in doesn't matter at all, just make sure to save the API\nKey and Shared Secret.\n")
        config['lastfm'] = {
            key: click.prompt(key, type=str, hide_input=hidden)
            for (key, hidden) in [('user_name', False), ('password', True),
                                  ('api_key', False), ('api_secret', True)]
        }

    if click.confirm('Set up Libre.fm account?'):
        libre_conf = {
            key: click.prompt(key, type=str, hide_input=hidden)
            for (key, hidden) in [('user_name', False), ('password', True)]
        }
        libre = pylast.LibreFMNetwork(
            username=libre_conf['user_name'],
            password_hash=pylast.md5(libre_conf['password']))
        skg = pylast.SessionKeyGenerator(libre)
        url = skg.get_web_auth_url()
        click.echo('Please grant lastcast access to your Libre.fm account:\n\n %s\n' % url)
        click.echo('Hit enter when ready')
        click.getchar()
        libre_conf['session_key'] = skg.get_web_auth_session_key(url)
        config['librefm'] = libre_conf

    (devices, _browser) = pychromecast.get_chromecasts()
    available = [cc.cast_info.friendly_name for cc in devices]
    if len(available) == 1:
        config['chromecast']['devices'] = [available[0]]
    if (len(available) > 1) or click.confirm('Manually specify cast device?', default=True):
        click.echo('\n\nAvailable cast devices: %s' % ', '.join(available))
        device_names = click.prompt('Which device(s) should be used? (comma separated)')
        # BUGFIX: was `d.strip != ''` — comparing the bound method itself to a
        # string, which is always True, so empty entries were never filtered.
        device_names = [d.strip() for d in device_names.split(',') if d.strip() != '']
        config['chromecast']['devices'] = device_names

    click.echo('\n\nDefault chromecast apps to scrobble from: %s' % ', '.join(APP_WHITELIST))
    apps = click.prompt('Comma separated apps [blank for default]', default='', show_default=False)
    apps = [app.strip() for app in apps.split(',') if app.strip() != '']
    if apps:
        config['chromecast']['app_whitelist'] = apps

    generated = toml.dumps(config)
    click.echo('Generated config:\n\n%s' % generated)
    if click.confirm('Write to ~/.lastcast.toml?', default=True):
        with open(os.path.expanduser('~/.lastcast.toml'), 'w') as fp:
            fp.write(generated)
class Overworld(wilderness.WildernessScript):
    """Singleton wilderness script representing the overworld map.

    NOTE(review): `get_instance`, `enter` and `create` all take `cls` as the
    first parameter — presumably `@classmethod` decorators were stripped
    during extraction; confirm against the original source.
    """

    # Cached singleton script instance.
    _INSTANCE = None
    # Database key under which the script is stored.
    _NAME = 'overworld'

    def get_instance(cls):
        """Return (and cache) the single Overworld script, creating it on
        first use and starting it if it is not active."""
        if cls._INSTANCE:
            return cls._INSTANCE
        if (not Overworld.objects.filter(db_key=cls._NAME).exists()):
            script = cls.create()
        else:
            script = Overworld.objects.get(db_key=cls._NAME)
        if (not script.is_active):
            script.start()
        cls._INSTANCE = script
        return script

    def enter(cls, obj, coordinates=(0, 0)):
        """Move *obj* into the overworld at *coordinates*.

        Returns True on success, False if the coordinates are invalid.
        """
        script = cls.get_instance()
        if script.is_valid_coordinates(coordinates):
            script.move_obj(obj, coordinates)
            return True
        else:
            return False

    def create(cls):
        """Create, configure and start a new Overworld script.

        Returns None if one already exists under the expected db key.
        """
        if Overworld.objects.filter(db_key=cls._NAME).exists():
            return
        logger.info('Creating new instance of Overworld script...')
        script = create_script(Overworld, key=cls._NAME, persistent=True, autostart=True)
        # The map provider supplies the wilderness terrain.
        script.db.mapprovider = OverworldMapProvider()
        script.start()
        return script
def check_model(model, args):
    """Assert that *model* matches *args* and has its server-populated
    fields (id, timestamps, etag) set, and is not locked."""
    assert model.display_name == args.get('display_name')
    assert model.tags == args.get('tags')
    for attr in ('model_id', 'create_time', 'update_time'):
        assert getattr(model, attr) is not None
    assert model.locked is False
    assert model.etag is not None
class DoDeletePass(Cursor_Rewrite):
    """Rewrite pass removing `LoopIR.Pass` statements and pruning any loop
    whose body becomes empty as a result."""

    def __init__(self, proc_cursor):
        super().__init__(proc_cursor)

    def map_s(self, sc):
        stmt = sc._node
        if isinstance(stmt, LoopIR.Pass):
            # Drop the no-op statement entirely.
            return []
        if isinstance(stmt, LoopIR.For):
            new_body = self.map_stmts(sc.body())
            if new_body is None:
                # No change anywhere in the body.
                return None
            # A loop left with no statements is itself deleted.
            return [stmt.update(body=new_body)] if new_body else []
        return super().map_s(sc)
class Mob(NPC):
    """Hostile NPC with simple random combat AI and loot-stealing behaviour."""

    # Percent chance that this mob attempts to loot a defeated target.
    loot_chance = AttributeProperty(75, autocreate=False)

    def ai_combat_next_action(self, combathandler):
        """Return the next combat action as (action_key, args, kwargs).

        Idle mobs do nothing; otherwise randomly attack or pass against a
        randomly chosen enemy target.
        """
        from .combat_turnbased import CombatActionAttack, CombatActionDoNothing
        if self.is_idle:
            return (CombatActionDoNothing.key, (), {})
        target = choice(combathandler.get_enemy_targets(self))
        action = choice((CombatActionAttack, CombatActionDoNothing))
        return (action.key, (target,), {})

    def at_defeat(self):
        # Mobs die outright when defeated.
        self.at_death()

    def at_do_loot(self, looted):
        """Maybe steal coins or an item from the defeated *looted* object.

        A 1d100 roll against `loot_chance` gates the whole attempt. Coins are
        preferred; otherwise an item is taken from the backpack or, failing
        that, from the first occupied equipment slot.
        """
        if dice.roll('1d100') > self.loot_chance:
            # Failed the loot-chance roll.
            return

        if looted.coins:
            loot = dice.roll('1d20')
            if looted.coins < loot:
                self.location.msg_location('$You(looter) loots $You() for all coin!', from_obj=looted, mapping={'looter': self})
            else:
                # BUGFIX: was a plain string, so '{loot}' was shown verbatim;
                # now an f-string interpolating the looted amount.
                self.location.msg_location(f'$You(looter) loots $You() for |y{loot}|n coins!', from_obj=looted, mapping={'looter': self})
        elif hasattr(looted, 'equipment'):
            stealable = looted.equipment.get_usable_objects_from_backpack()
            if not stealable:
                stealable = looted.equipment.get_wieldable_objects_from_backpack()
            if not stealable:
                stealable = looted.equipment.get_wearable_objects_from_backpack()
            if not stealable:
                # Fall back to worn/wielded slots, skipping empty ones.
                # BUGFIX: previously each empty slot produced a truthy [None],
                # which stopped the fallback chain and could later pass None
                # to equipment.remove().
                for slot in (WieldLocation.SHIELD_HAND, WieldLocation.HEAD,
                             WieldLocation.ARMOR, WieldLocation.WEAPON_HAND,
                             WieldLocation.TWO_HANDS):
                    slot_obj = looted.equipment.slots[slot]
                    if slot_obj:
                        stealable = [slot_obj]
                        break
            if not stealable:
                # BUGFIX: guard against choice([]) raising IndexError when
                # there is nothing at all to steal.
                return
            stolen = looted.equipment.remove(choice(stealable))
            stolen.location = self
            # BUGFIX: f-string so the stolen item's key is interpolated.
            self.location.msg_location(f'$You(looter) steals {stolen.key} from $You()!', from_obj=looted, mapping={'looter': self})
class Price(Event):
    """Price event for a zone at a point in time.

    NOTE(review): the bare string lines before each `_validate_*` method look
    like stripped pydantic `@validator(...)` decorators, and `create` appears
    to be missing a `@staticmethod` decorator — confirm against the original
    source.
    """

    # Price value; the validator rejects None despite the Optional annotation.
    price: (float | None)
    currency: str

    ('currency')  # NOTE(review): likely a stripped `@validator('currency')`.
    def _validate_currency(cls, v: str) -> str:
        """Reject currencies not listed in VALID_CURRENCIES."""
        if (v not in VALID_CURRENCIES):
            raise ValueError(f'Unknown currency: {v}')
        return v

    ('datetime')  # NOTE(review): likely a stripped `@validator('datetime')`.
    def _validate_datetime(cls, v: dt.datetime) -> datetime:
        """Require a timezone-aware datetime not before the plausibility bound."""
        if (v.tzinfo is None):
            raise ValueError(f'Missing timezone: {v}')
        if (v < LOWER_DATETIME_BOUND):
            raise ValueError(f'Date is before 2000, this is not plausible: {v}')
        return v

    ('price')  # NOTE(review): likely a stripped `@validator('price')`.
    def _validate_price(cls, v: (float | None)) -> float:
        """Reject missing prices."""
        if (v is None):
            raise ValueError(f'Price cannot be None: {v}')
        return v

    def create(logger: Logger, zoneKey: ZoneKey, datetime: datetime, source: str, price: (float | None), currency: str, sourceType: EventSourceType=EventSourceType.measured) -> Optional['Price']:
        """Build a validated Price event; on validation failure, log the
        error and return None instead of raising."""
        try:
            return Price(zoneKey=zoneKey, datetime=datetime, source=source, price=price, currency=currency, sourceType=sourceType)
        except ValidationError as e:
            logger.error(f'Error(s) creating price Event {datetime}: {e}', extra={'zoneKey': zoneKey, 'datetime': datetime.strftime('%Y-%m-%dT%H:%M:%SZ'), 'kind': 'price'})

    def to_dict(self) -> dict[(str, Any)]:
        """Serialize the event to a plain dict."""
        return {'datetime': self.datetime, 'zoneKey': self.zoneKey, 'currency': self.currency, 'price': self.price, 'source': self.source, 'sourceType': self.sourceType}
def test_entity_storage_remove_entity_task(create_test_db, create_project, prepare_entity_storage):
    """Removing a task must keep its parent asset's entry in the storage but
    drop the task from the parent's collection."""
    from stalker import Asset, Task, Version

    project = create_project
    char1 = Asset.query.filter(Asset.project == project).filter(Asset.name == 'Char1').first()
    model_task = Task.query.filter(Task.parent == char1).filter(Task.name == 'Model').first()
    assert model_task is not None

    first_version = model_task.versions[0]
    assert first_version is not None
    assert isinstance(first_version, Version)

    storage = EntityStorage()
    storage.add_entity(first_version)
    storage.remove_entity(model_task)

    assert char1 in storage.storage
    assert model_task not in storage.storage[char1]
(frozen=True)  # NOTE(review): likely a stripped `@dataclass(frozen=True)`-style decorator — confirm.
class EQLRuleData(QueryRuleData):
    """EQL-specific rule data: version-compat metadata plus lookback helpers.

    NOTE(review): the bare `_property` lines below look like stripped
    property/cached_property decorators — confirm against the original.
    """
    type: Literal['eql']
    language: Literal['eql']
    # Nested `min_compat` metadata marks fields requiring stack >= 8.0.
    timestamp_field: Optional[str] = field(metadata=dict(metadata=dict(min_compat='8.0')))
    event_category_override: Optional[str] = field(metadata=dict(metadata=dict(min_compat='8.0')))
    tiebreaker_field: Optional[str] = field(metadata=dict(metadata=dict(min_compat='8.0')))

    def convert_relative_delta(self, lookback: str) -> int:
        """Convert a relative expression like 'now-5m' (or a bare span like
        '5m') into a signed duration via convert_time_span.

        NOTE(review): the unit of the returned int is whatever
        convert_time_span produces — not visible here; confirm.
        """
        now = len('now')
        # Shortest valid 'now'-relative form is e.g. 'now+5m'.
        min_length = (now + len('+5m'))
        if (lookback.startswith('now') and (len(lookback) >= min_length)):
            lookback = lookback[len('now'):]
            sign = lookback[0]
            span = lookback[1:]
            amount = convert_time_span(span)
            # 'now-…' offsets yield negative durations.
            return (amount * ((- 1) if (sign == '-') else 1))
        else:
            return convert_time_span(lookback)

    _property  # NOTE(review): likely a stripped property decorator.
    def is_sample(self) -> bool:
        """Whether the parsed query is an EQL sample query."""
        return (eql.utils.get_query_type(self.ast) == 'sample')

    _property  # NOTE(review): likely a stripped property decorator.
    def is_sequence(self) -> bool:
        """Whether the parsed query is an EQL sequence query."""
        return (eql.utils.get_query_type(self.ast) == 'sequence')

    _property  # NOTE(review): likely a stripped property decorator.
    def max_span(self) -> Optional[int]:
        """Sequence maxspan in milliseconds, or None when not set."""
        if (self.is_sequence and hasattr(self.ast.first, 'max_span')):
            return (self.ast.first.max_span.as_milliseconds() if self.ast.first.max_span else None)

    _property  # NOTE(review): likely a stripped property decorator.
    def look_back(self) -> Optional[Union[(int, Literal['unknown'])]]:
        """Rule lookback window (to - from), or 'unknown' when both are zero."""
        to = (self.convert_relative_delta(self.to) if self.to else 0)
        from_ = self.convert_relative_delta((self.from_ or 'now-6m'))
        if (not (to or from_)):
            return 'unknown'
        else:
            return (to - from_)

    _property  # NOTE(review): likely a stripped property decorator.
    def interval_ratio(self) -> Optional[float]:
        """Ratio of the rule's run interval to the sequence maxspan."""
        if self.max_span:
            interval = convert_time_span((self.interval or '5m'))
            return (interval / self.max_span)
def ensure_permissions(path: str, r: bool=True, w: bool=False, x: bool=False) -> None:
    """Log an error and exit with status 1 unless *path* exists and grants
    the requested read/write/execute access to the current process."""
    if not os.path.exists(path):
        log.error(f"Path: '{path}' does not exist.")
        raise SystemExit(1)
    # Check each requested access mode in turn.
    checks = (
        (r, os.R_OK, 'readable'),
        (w, os.W_OK, 'writable'),
        (x, os.X_OK, 'executable'),
    )
    for wanted, mode, label in checks:
        if wanted and not os.access(path, mode):
            log.error(f"Path: '{path}' is not {label}.")
            raise SystemExit(1)
def fofa_search_all(client, query, fields, num):
    """Page through the FOFA search API until *num* results are collected.

    Returns a dict with accumulated 'results', 'size', 'consumed_fpoint' and
    the last reported 'query'. Stops when enough results are gathered or a
    short page signals the end of data. API errors become ClickExceptions.
    """
    size = 10000  # Maximum page size requested per call.
    page = 1
    result = {'size': 0, 'results': [], 'consumed_fpoint': 0}
    total = 0
    while True:
        remain_num = num - total
        if remain_num <= 0:
            # ROBUSTNESS: requested amount already satisfied (or num <= 0);
            # previously a wasted size-0 API call was still issued here.
            break
        if remain_num < size:
            # Last page: only request what is still needed.
            size = remain_num
        try:
            r = client.search(query, fields=fields, page=page, size=size)
        except fofa.FofaError as e:
            raise click.ClickException(u'search page {}, error: {}'.format(page, e.message))
        data = r['results']
        total += len(data)
        result['results'] += data
        result['size'] += r['size']
        result['consumed_fpoint'] += r['consumed_fpoint']
        result['query'] = r['query']
        # A short page means no more data is available server-side.
        if (len(data) < size) or (total >= num):
            break
        page += 1
    return result
((not is_qt), 'This test is for qt.')  # NOTE(review): likely a stripped `@unittest.skipIf`-style decorator — confirm.
class TestApiQt(unittest.TestCase):
    """Smoke tests that the qt-backed `pyface.api` surface is importable."""

    def test_importable_items_minimal(self):
        """All names supported under the qt backend import without error."""
        from pyface.api import AboutDialog, Alignment, Application, ApplicationWindow, Border, BaseDropHandler, CANCEL, Clipboard, ConfirmationDialog, Dialog, DirectoryDialog, ExpandablePanel, FileDialog, FileDropHandler, Filter, GUI, GUIApplication, HasBorder, HasMargin, HeadingText, Image, ImageCache, ImageResource, ImageWidget, KeyPressedEvent, LayeredPanel, MDIApplicationWindow, MDIWindowMenu, Margin, MessageDialog, MultiToolbarWindow, NO, OK, ProgressDialog, SingleChoiceDialog, Sorter, SplashScreen, SplitApplicationWindow, SplitDialog, SplitPanel, SystemMetrics, Widget, Window, YES, beep, choose_one, clipboard, confirm, error, information, warning, IAboutDialog, IApplicationWindow, IClipboard, IConfirmationDialog, IDialog, IDirectoryDialog, IDropHandler, IFileDialog, IGUI, IHeadingText, IImage, IImageResource, ILayeredPanel, ILayoutItem, ILayoutWidget, IMessageDialog, IPILImage, IProgressDialog, IPythonEditor, IPythonShell, ISingleChoiceDialog, ISplashScreen, ISplitWidget, ISystemMetrics, IWidget, IWindow

    def test_python_editor_python_shell_importable(self):
        """Editor/shell widgets import only when pygments is available."""
        try:
            import pygments
        except ImportError:
            raise self.skipTest('This test requires pygments.')
        from pyface.api import PythonEditor, PythonShell
def add_et_column(trace_df: pd.DataFrame, et: ExecutionTrace, column: str) -> None:
    """Add *column* to *trace_df*, populated from the correlated ET nodes.

    Requires correlate_execution_trace() to have been run first so that an
    'et_node' column exists. Rows whose et_node is NaN get np.nan. Unknown
    column names are reported and ignored.
    """
    if 'et_node' not in trace_df:
        logger.error('Please run correlate_execution_trace() first')
        return
    # IDIOM: the former six-way if/elif chain of identical closures is
    # replaced by a single column-name -> node-attribute table.
    attr_by_column = {
        'op_schema': 'op_schema',
        'input_shapes': 'input_shapes',
        'input_types': 'input_types',
        'output_shapes': 'output_shapes',
        'output_types': 'output_types',
        'et_node_name': 'name',
    }
    attr = attr_by_column.get(column)
    if attr is None:
        logger.error(f'Unknown column {column}')
        return
    trace_df[column] = trace_df.apply(
        (lambda row: (getattr(et.nodes[row.et_node], attr) if pd.notna(row.et_node) else np.nan)),
        axis=1,
    )
def test_manual_flow_with_or_without_hinting(testbot):
    """After starting flow w4 and running !a then !b, the bot must hint that
    !c continues the flow."""
    assert 'Flow w4 started' in testbot.exec_command('!flows start w4')
    for command, expected in (('!a', 'a'), ('!b', 'b')):
        assert expected in testbot.exec_command(command)
    hint = testbot.pop_message()
    assert 'You are in the flow w4, you can continue with' in hint
    assert '!c' in hint
class OptionPlotoptionsTreegraphMarkerStates(Options):
    """Sub-option accessors for `plotOptions.treegraph.marker.states`.

    NOTE(review): these look like `@property` getters whose decorators were
    stripped during extraction — confirm against the generator output.
    """

    def hover(self) -> 'OptionPlotoptionsTreegraphMarkerStatesHover':
        """Sub-options for the hover state."""
        return self._config_sub_data('hover', OptionPlotoptionsTreegraphMarkerStatesHover)

    def normal(self) -> 'OptionPlotoptionsTreegraphMarkerStatesNormal':
        """Sub-options for the normal state."""
        return self._config_sub_data('normal', OptionPlotoptionsTreegraphMarkerStatesNormal)

    def select(self) -> 'OptionPlotoptionsTreegraphMarkerStatesSelect':
        """Sub-options for the select state."""
        return self._config_sub_data('select', OptionPlotoptionsTreegraphMarkerStatesSelect)
class VersionStatus(enum.Enum):
    """Lifecycle status of a stored version."""

    incomplete = 1
    valid = 2
    invalid = 3
    # Aliases marking the range of defined values.
    min = incomplete
    max = invalid

    def __str__(self):
        return self.name

    def is_valid(self):
        """Only fully valid versions pass."""
        return self is VersionStatus.valid

    def is_deep_scrubbable(self):
        """Both valid and invalid versions may be deep-scrubbed."""
        return self in (VersionStatus.invalid, VersionStatus.valid)

    def is_scrubbable(self):
        """Only valid versions may be (regular-)scrubbed."""
        return self is VersionStatus.valid

    def is_removable(self):
        """Everything except an incomplete version may be removed."""
        return self is not VersionStatus.incomplete
class DistributedShampooTest(unittest.TestCase):
    """Tests for DistributedShampoo state dicts and quantized communication."""

    def _train_quadratic_with_comms_dtype(self, communication_dtype: CommunicationDType=CommunicationDType.DEFAULT) -> Tuple[(nn.Module, DistributedShampoo)]:
        """Run one optimizer step of a 1-layer linear model using the given
        communication dtype; return (model, optimizer)."""
        data = torch.arange(10, dtype=torch.float)
        model = nn.Sequential(nn.Linear(10, 1, bias=False))
        model[0].weight.data.fill_(1.0)
        loss = nn.MSELoss()
        optimizer = DistributedShampoo(model.parameters(), lr=0.01, betas=(0.9, 1.0), epsilon=1e-12, momentum=0.0, weight_decay=0.0, max_preconditioner_dim=10, precondition_frequency=1, start_preconditioning_step=(- 1), num_trainers_per_group=1, communication_dtype=communication_dtype)
        loss(model(data), torch.tensor([0.0])).backward()
        optimizer.step()
        return (model, optimizer)

    def test_distributed_state_dict(self) -> None:
        """The distributed state dict exposes exactly the expected keys."""
        (model, optimizer) = self._train_quadratic_with_comms_dtype()
        state_dict = optimizer.distributed_state_dict(model.named_parameters())
        true_keys = {'step', 'preconditioners._split_preconditioners.0._dist_buffer', 'preconditioners._split_preconditioners.0._preconditioners.0.factor_matrix', 'preconditioners._split_preconditioners.0._preconditioners.0.inv_factor_matrix', 'preconditioners._split_preconditioners.0._grafting._preconditioner._dist_buffer', 'preconditioners._split_preconditioners.0._grafting._preconditioner._preconditioner', 'preconditioners._split_sizes.0', 'preconditioners._split_preconditioners.0._filtered_grad'}
        assert (len(state_dict['state']['0.weight'].keys()) == len(true_keys))
        self.assertEqual(set(state_dict['state']['0.weight'].keys()), true_keys)

    def test_load_distributed_state_dict(self) -> None:
        """Loading a saved distributed state dict round-trips all entries."""
        (model, old_optimizer) = self._train_quadratic_with_comms_dtype()
        old_state_dict = old_optimizer.distributed_state_dict(model.named_parameters())
        new_optimizer = DistributedShampoo(model.parameters(), lr=0.01, betas=(0.9, 1.0), epsilon=1e-12, momentum=0.0, weight_decay=0.0, max_preconditioner_dim=10, precondition_frequency=1, start_preconditioning_step=(- 1), num_trainers_per_group=1, communication_dtype=CommunicationDType.DEFAULT)
        new_optimizer.load_distributed_state_dict(old_state_dict, model.named_parameters())
        new_state_dict = new_optimizer.distributed_state_dict(model.named_parameters())
        self.assertEqual(set(old_state_dict['state']['0.weight'].keys()), set(new_state_dict['state']['0.weight'].keys()))
        for key in new_state_dict['state']['0.weight'].keys():
            # FIX: torch.testing.assert_allclose is deprecated; use
            # assert_close for consistency with the other test below.
            torch.testing.assert_close(old_state_dict['state']['0.weight'][key], new_state_dict['state']['0.weight'][key])

    def test_quantized_comms_on_small_model(self) -> None:
        """Lower-precision communication dtypes stay within machine epsilon
        of the full-precision baseline on a tiny model."""
        (baseline_model, _) = self._train_quadratic_with_comms_dtype(communication_dtype=CommunicationDType.DEFAULT)
        baseline_params = baseline_model.parameters()
        communication_dtypes = [CommunicationDType.FP32, CommunicationDType.FP16, CommunicationDType.BF16]
        for communication_dtype in communication_dtypes:
            with self.subTest(f'Compare against {communication_dtype} communications:'):
                # Tolerance tied to the communicated dtype's precision.
                machine_epsilon = torch.finfo(dtype_mapping[communication_dtype.value]).eps
                (low_precision_model, _) = self._train_quadratic_with_comms_dtype(communication_dtype=communication_dtype)
                low_precision_params = low_precision_model.parameters()
                for (baseline_param, low_precision_param) in zip(baseline_params, low_precision_params):
                    torch.testing.assert_close(baseline_param, low_precision_param, rtol=machine_epsilon, atol=machine_epsilon)
class MessagingConfigBase(BaseModel):
    """Base pydantic schema for messaging service configuration."""

    # Which messaging provider this configuration targets.
    service_type: MessagingServiceType
    # Provider-specific connection details; may be absent.
    details: Optional[Union[(MessagingServiceDetailsMailgun, MessagingServiceDetailsTwilioEmail, MessagingServiceDetailsMailchimpTransactional)]]

    class Config():
        """Pydantic config: keep enum members (not raw values), allow ORM
        loading, reject unknown keys."""
        use_enum_values = False
        orm_mode = True
        extra = Extra.forbid
def create_transaction_signature(unsigned_txn: UnsignedTransactionAPI, private_key: datatypes.PrivateKey, chain_id: int=None) -> VRS:
    """Sign *unsigned_txn* with *private_key* and return its (v, r, s) triple.

    When a truthy *chain_id* is given, the EIP-155 replay-protected scheme is
    used: the chain id (plus two empty fields) is folded into the signed
    payload and into the final v value.
    """
    decoded_parts = rlp.decode(rlp.encode(unsigned_txn))
    if chain_id:
        # EIP-155: append chain id and two empty placeholders before signing.
        parts_for_signature = decoded_parts + [int_to_big_endian(chain_id), b'', b'']
    else:
        parts_for_signature = decoded_parts
    signature = private_key.sign_msg(rlp.encode(parts_for_signature))
    canonical_v, r, s = signature.vrs
    # Offset v per EIP-155 when chain-bound, else apply the legacy offset.
    if chain_id:
        v = canonical_v + chain_id * 2 + EIP155_CHAIN_ID_OFFSET
    else:
        v = canonical_v + V_OFFSET
    return VRS((v, r, s))
_checkable  # NOTE(review): likely a stripped `@runtime_checkable` decorator — confirm against the original source.
class Cache(Protocol):
    """Structural interface for document caches used by LLM components."""

    def initialize(self, vocab: Vocab, task: LLMTask) -> None:
        """Prepare the cache for the given vocab and task."""

    def add(self, doc: Doc) -> None:
        """Store *doc* in the cache."""

    def prompt_template(self) -> Optional[str]:
        """Return the configured prompt template, if any.

        NOTE(review): probably a stripped `@property` — confirm.
        """

    _template.setter  # NOTE(review): looks like a stripped `@prompt_template.setter` decorator line — confirm.
    def prompt_template(self, prompt_template: str) -> None:
        """Set the prompt template."""

    def __contains__(self, doc: Doc) -> bool:
        """Whether *doc* has a cached result."""

    def __getitem__(self, doc: Doc) -> Optional[Doc]:
        """Return the cached result for *doc*, if present."""
.parametrize('reader_cls', [SeekingReader, NonSeekingReader])  # NOTE(review): likely a stripped `@pytest.mark.parametrize` decorator — confirm.
def test_crc_chunk_validation(reader_cls: Union[(Type[SeekingReader], Type[NonSeekingReader])]):
    """Reading a chunk-corrupted MCAP with CRC validation enabled must raise
    CRCValidationError while iterating messages."""
    content = produce_corrupted_mcap(DEMO_MCAP, 'chunk')
    reader = reader_cls(BytesIO(content), validate_crcs=True)
    with pytest.raises(CRCValidationError):
        # Drain the iterator; the error is expected to surface during iteration.
        for _ in reader.iter_messages():
            pass
def test_get_authenticator_deviceflow():
    """Device-flow auth needs a device authorization endpoint: without one
    get_authenticator fails; with one it yields a DeviceCodeAuthenticator."""
    cfg = PlatformConfig(auth_mode=AuthType.DEVICEFLOW)
    with pytest.raises(AuthenticationError):
        get_authenticator(cfg, get_client_config())
    client_cfg = get_client_config(device_authorization_endpoint=DEVICE_AUTH_ENDPOINT)
    authenticator = get_authenticator(cfg, client_cfg)
    assert isinstance(authenticator, DeviceCodeAuthenticator)
class TestGetNormalizedBoundingBoxListForLayoutGraphic():
    """Tests for get_normalized_bounding_box_list_for_layout_graphic."""

    def test_should_scale_coordinates(self):
        page_coordinates = LayoutPageCoordinates(x=0, y=0, width=100, height=1000, page_number=0)
        graphic = LayoutGraphic(
            coordinates=LayoutPageCoordinates(x=10, y=10, width=20, height=20, page_number=0),
            page_meta=LayoutPageMeta.for_coordinates(page_coordinates),
        )
        result = get_normalized_bounding_box_list_for_layout_graphic(graphic)
        LOGGER.debug('result: %r', result)
        assert len(result) == 1
        # Coordinates are normalized by the page width and height.
        assert result[0] == LayoutPageCoordinates(x=0.1, y=0.01, width=0.2, height=0.02, page_number=0)

    def test_should_scale_coordinates_and_adjust_page_number_to_y(self):
        page_coordinates = LayoutPageCoordinates(x=0, y=0, width=100, height=1000, page_number=5)
        graphic = LayoutGraphic(
            coordinates=LayoutPageCoordinates(x=10, y=10, width=20, height=20, page_number=5),
            page_meta=LayoutPageMeta.for_coordinates(page_coordinates),
        )
        result = get_normalized_bounding_box_list_for_layout_graphic(graphic)
        LOGGER.debug('result: %r', result)
        assert len(result) == 1
        # The page number is folded into the normalized y coordinate.
        assert result[0] == LayoutPageCoordinates(x=0.1, y=5.01, width=0.2, height=0.02, page_number=5)
def make_stub_module(clsid):
    """Build an `ast.Module` of type-stub class definitions for the latest
    typelib registered under *clsid*.

    The module starts with the imports every generated stub needs, followed
    by one class definition per OLE item.
    """
    spec = GetLatestTypelibSpec(clsid)
    (ole_items, _, _, _) = BuildOleItems(spec)
    # Imports required by the generated stub classes.
    import_froms = [
        ast.ImportFrom('collections.abc', [ast.alias('Iterator')], 0),
        ast.ImportFrom('typing', [ast.alias('Any'), ast.alias('Callable'), ast.alias('Union')], 0),
        ast.ImportFrom('pythoncom', [ast.alias('Empty'), ast.alias('Missing')], 0),
        ast.ImportFrom('pywintypes', [ast.alias('IID'), ast.alias('Time')], 0),
        ast.ImportFrom('koapy.common', [ast.alias('EventInstance')], 0),
    ]
    class_defs = []
    # BUGFIX: the loop previously unpacked `.items()` into a variable named
    # `clsid`, shadowing the function parameter; the key was never used.
    for ole_item in ole_items.values():
        class_defs.extend(make_class_defs(ole_item))
    mod = ast.Module(import_froms + class_defs, [])
    return mod
class VideoChatParticipantsInvited(JsonDeserializable):
    """Telegram `VideoChatParticipantsInvited` service-message object.

    NOTE(review): `de_json` takes `cls` as its first parameter — presumably
    a stripped `@classmethod` decorator; confirm against the original.
    """

    def de_json(cls, json_string):
        """Build an instance from raw JSON; None input passes through."""
        if (json_string is None):
            return None
        obj = cls.check_json(json_string)
        if ('users' in obj):
            # Deserialize the nested user objects.
            obj['users'] = [User.de_json(u) for u in obj['users']]
        return cls(**obj)

    def __init__(self, users=None, **kwargs):
        # Invited users; extra API fields are accepted and ignored via kwargs.
        self.users: List[User] = users
class OptionSeriesDumbbellDataEvents(Options):
    """Per-point event callback options for `series.dumbbell.data.events`.

    All options default to None (no handler configured). Methods come in
    getter/setter pairs sharing one name.
    NOTE(review): the pairs look like `@property` / `@<name>.setter`
    definitions whose decorators were stripped during extraction; as written
    each setter def shadows its getter. Confirm against the generator output.
    """

    def click(self):
        return self._config_get(None)

    def click(self, value: Any):
        self._config(value, js_type=False)

    def drag(self):
        return self._config_get(None)

    def drag(self, value: Any):
        self._config(value, js_type=False)

    def dragStart(self):
        return self._config_get(None)

    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    def drop(self):
        return self._config_get(None)

    def drop(self, value: Any):
        self._config(value, js_type=False)

    def mouseOut(self):
        return self._config_get(None)

    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    def mouseOver(self):
        return self._config_get(None)

    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    def remove(self):
        return self._config_get(None)

    def remove(self, value: Any):
        self._config(value, js_type=False)

    def select(self):
        return self._config_get(None)

    def select(self, value: Any):
        self._config(value, js_type=False)

    def unselect(self):
        return self._config_get(None)

    def unselect(self, value: Any):
        self._config(value, js_type=False)

    def update(self):
        return self._config_get(None)

    def update(self, value: Any):
        self._config(value, js_type=False)
def reply(fn):
    """Decorator: wrap *fn* so it receives a `render` helper bound to the
    current user, rendering templates from `messages/<tpl>.txt`, on top of
    the user resolution done by `with_user`.
    """
    from functools import wraps

    # FIX: preserve fn's name/docstring on the wrapper for introspection.
    @wraps(fn)
    def _call(*args, user, **kwargs):
        def render(tpl: str, **render_kwargs):
            # Message templates live under messages/ with a .txt suffix.
            template = get_template(('messages/' + tpl) + '.txt')
            return template.render(user=user, **render_kwargs)
        return fn(*args, **kwargs, user=user, render=render)
    return with_user(_call)
def probe_context(transport_domain, transport_address, context_engine_id, context_name):
    """Yield candidate context lookup paths, most specific first.

    Builds a candidate path from (optional) engine id, context name,
    transport domain and a transport-specific address component, then yields
    progressively shorter variants by dropping the last component each turn.
    When an engine id was given, also yields the engine-id-less candidates
    afterwards.
    """
    if context_engine_id:
        candidate = [context_engine_id, context_name, '.'.join([str(x) for x in transport_domain])]
    else:
        candidate = [context_name, '.'.join([str(x) for x in transport_domain])]
    # Append the transport address in a path-friendly textual form.
    if (transport_domain[:len(udp.domainName)] == udp.domainName):
        candidate.append(transport_address[0])
    elif (udp6 and (transport_domain[:len(udp6.domainName)] == udp6.domainName)):
        # IPv6 colons are replaced so the address can be used in a path.
        candidate.append(str(transport_address[0]).replace(':', '_'))
    elif (unix and (transport_domain[:len(unix.domainName)] == unix.domainName)):
        candidate.append(transport_address)
    # Drop empty components and coerce everything to str.
    candidate = [str(x) for x in candidate if x]
    while candidate:
        # Normalize and always emit '/' separators regardless of platform.
        (yield rfc1902.OctetString(os.path.normpath(os.path.sep.join(candidate)).replace(os.path.sep, '/')).asOctets())
        del candidate[(- 1)]
    if context_engine_id:
        # Recurse without the engine id for broader matches.
        for candidate in probe_context(transport_domain, transport_address, None, context_name):
            (yield candidate)
class TraitsDockPane(DockPane):
    """Dock pane whose contents are a TraitsUI view of `model`."""

    # The model object edited by the TraitsUI view.
    model = Instance(HasTraits)

    # The TraitsUI UI object built for the pane contents.
    ui = Instance('traitsui.ui.UI')

    def trait_context(self):
        """Expose 'object' (the model) and 'pane' (self) to the view context
        when a model is set; otherwise defer to the base implementation."""
        if not self.model:
            return super().trait_context()
        return {'object': self.model, 'pane': self}

    def destroy(self):
        """Dispose of the TraitsUI UI (if any) before tearing down the pane."""
        ui = self.ui
        if ui is not None:
            ui.dispose()
            self.ui = None
        super().destroy()

    def create_contents(self, parent):
        """Build the pane contents as a TraitsUI subpanel and return its
        toolkit control."""
        self.ui = self.edit_traits(kind='subpanel', parent=parent)
        return self.ui.control
class TaskState(HasStrictTraits):
    """Bundles a Task together with the UI state built for it."""

    task = Instance(Task)
    layout = Instance(TaskLayout)
    # Whether the task's UI has been fully constructed.
    initialized = Bool(False)
    central_pane = Instance(ITaskPane)
    dock_panes = List(Instance(IDockPane))
    menu_bar_manager = Instance(IMenuBarManager)
    status_bar_manager = Instance(IStatusBarManager)
    tool_bar_managers = List(Instance(IToolBarManager))

    def get_dock_pane(self, id):
        """Return the dock pane with the given id, or None if absent."""
        return next((pane for pane in self.dock_panes if pane.id == id), None)
class SecureAggregator():
    """Fixed-point secure-aggregation helper.

    Converts model parameters to/from a fixed-point representation using a
    per-parameter converter, and tracks overflow counts both during
    conversion and during aggregation.
    """

    def __init__(self, config: Dict[(str, FixedPointConfig)]):
        """Instantiate one fixed-point converter per configured parameter name."""
        self.converters = {}
        for key in config.keys():
            self.converters[key] = instantiate(config[key])
        # Running count of overflows observed during aggregation.
        self._aggregate_overflows = 0

    def _check_converter_dict_items(self, model: nn.Module) -> None:
        """Raise ValueError if any trainable parameter lacks a converter."""
        unset_configs = {param_name for (param_name, _param) in FLModelParamUtils.get_trainable_named_parameters(model) if (param_name not in self.converters.keys())}
        if unset_configs:
            error_msg = f'Not all layers have their corresponding fixed point config. The layers {unset_configs} do not have configs.'
            raise ValueError(error_msg)

    def params_to_fixedpoint(self, model: nn.Module) -> None:
        """Convert all trainable parameters of *model* to fixed point, in place."""
        self._check_converter_dict_items(model)
        state_dict = model.state_dict()
        for (name, _) in FLModelParamUtils.get_trainable_named_parameters(model):
            converter = self.converters[name]
            state_dict[name] = converter.to_fixedpoint(state_dict[name])
            converter.logger.debug(f'{name} has {converter.get_convert_overflow(reset=False)} overflow(s)during fixed point conversion')
        model.load_state_dict(state_dict)

    def params_to_float(self, model: nn.Module) -> None:
        """Convert all trainable parameters of *model* back to float, in place."""
        self._check_converter_dict_items(model)
        state_dict = model.state_dict()
        for (name, _) in FLModelParamUtils.get_trainable_named_parameters(model):
            state_dict[name] = self.converters[name].to_float(state_dict[name])
        model.load_state_dict(state_dict)

    def get_aggregate_overflow(self, reset: bool=False):
        """Return the aggregation overflow count, optionally resetting it."""
        overflow = self._aggregate_overflows
        if reset:
            self._aggregate_overflows = 0
        return overflow

    def _generate_noise_mask(self, update_params: Iterator[Tuple[(str, nn.Parameter)]]) -> Iterator[Tuple[(str, nn.Parameter)]]:
        """Hook for subclasses to produce a noise mask (no-op here)."""
        pass

    def apply_noise_mask(self, update_params: Iterator[Tuple[(str, nn.Parameter)]]) -> None:
        """Hook for subclasses to apply a noise mask (no-op here)."""
        pass

    def _get_denoise_mask(self) -> Iterator[Tuple[(str, nn.Parameter)]]:
        """Hook for subclasses to produce the denoise mask (no-op here)."""
        pass

    def apply_denoise_mask(self, model_aggregate_params: Iterator[Tuple[(str, nn.Parameter)]]) -> None:
        """Hook for subclasses to apply the denoise mask (no-op here)."""
        pass

    def update_aggr_overflow_and_model(self, model: nn.Module):
        """Wrap aggregated fixed-point values back into the representable
        range, counting positive-direction overflows, and write the wrapped
        values into *model*."""
        state_dict = model.state_dict()
        for (name, _) in FLModelParamUtils.get_trainable_named_parameters(model):
            numbers = state_dict[name]
            converter = self.converters[name]
            # Per-entry count of how often the max representable value was exceeded.
            overflow_matrix = torch.div(numbers, (converter.max_value + 1), rounding_mode='floor')
            # Only positive-direction overflows are counted.
            overflow_matrix = torch.where((overflow_matrix < 0), torch.zeros_like(overflow_matrix), overflow_matrix)
            self._aggregate_overflows += int(torch.sum(overflow_matrix).item())
            converter.logger.debug(f'{name} has {self._aggregate_overflows} overflow(s) during aggregation')
            # Wrap values into the representable range via modular arithmetic,
            # handling the positive and negative sides separately.
            numbers = torch.where((numbers >= 0), torch.remainder(numbers, (converter.max_value + 1)), numbers)
            numbers = torch.where((numbers < 0), torch.remainder(numbers, converter.min_value), numbers)
            state_dict[name] = numbers
        model.load_state_dict(state_dict)

    def calc_avg_overflow_percentage(self, users_per_round: int, model: nn.Module, report_rounds: int) -> Tuple[(float, float)]:
        """Return average (conversion, aggregation) overflow percentages per
        trainable parameter per user per reporting round. Resets both
        overflow counters as a side effect."""
        num_params = sum((p.numel() for p in model.parameters() if p.requires_grad))
        convert_overflow_perc = (sum(((converter.get_convert_overflow(reset=True) * 100) for converter in self.converters.values())) / ((num_params * users_per_round) * report_rounds))
        aggregate_overflow_perc = ((self.get_aggregate_overflow(reset=True) * 100) / ((num_params * users_per_round) * report_rounds))
        return (convert_overflow_perc, aggregate_overflow_perc)
(auto_attribs=True)  # NOTE(review): likely a stripped `@attr.s(auto_attribs=True)` decorator — confirm.
class RequestIO():
    """Generated API model capturing a request's input, output and metadata.

    NOTE(review): `from_dict` takes `cls` first and `additional_keys` reads
    like a property — presumably `@classmethod` / `@property` decorators
    were stripped during extraction; confirm against the generator output.
    """
    request_id: str
    date: datetime.datetime
    json_input: 'RequestIOJsonInput'
    json_output: 'RequestIOJsonOutput'
    status_code: int
    logs: str
    duration_in_seconds: int
    # Unknown keys from the source payload, preserved for round-tripping.
    additional_properties: Dict[(str, Any)] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[(str, Any)]:
        """Serialize to a plain dict, with known fields overriding any
        colliding additional properties."""
        request_id = self.request_id
        date = self.date.isoformat()
        json_input = self.json_input.to_dict()
        json_output = self.json_output.to_dict()
        status_code = self.status_code
        logs = self.logs
        duration_in_seconds = self.duration_in_seconds
        field_dict: Dict[(str, Any)] = {}
        field_dict.update(self.additional_properties)
        field_dict.update({'request_id': request_id, 'date': date, 'json_input': json_input, 'json_output': json_output, 'status_code': status_code, 'logs': logs, 'duration_in_seconds': duration_in_seconds})
        return field_dict

    def from_dict(cls: Type[T], src_dict: Dict[(str, Any)]) -> T:
        """Deserialize from a dict; leftover keys become additional_properties."""
        from ..models.request_io_json_input import RequestIOJsonInput
        from ..models.request_io_json_output import RequestIOJsonOutput
        d = src_dict.copy()
        request_id = d.pop('request_id')
        date = isoparse(d.pop('date'))
        json_input = RequestIOJsonInput.from_dict(d.pop('json_input'))
        json_output = RequestIOJsonOutput.from_dict(d.pop('json_output'))
        status_code = d.pop('status_code')
        logs = d.pop('logs')
        duration_in_seconds = d.pop('duration_in_seconds')
        request_io = cls(request_id=request_id, date=date, json_input=json_input, json_output=json_output, status_code=status_code, logs=logs, duration_in_seconds=duration_in_seconds)
        # Whatever was not popped above is an unknown extra key.
        request_io.additional_properties = d
        return request_io

    def additional_keys(self) -> List[str]:
        """List the names of the preserved extra keys."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return (key in self.additional_properties)
class OptionPlotoptionsStreamgraphSonificationContexttracksMappingPan(Options):
    """Accessors for the streamgraph sonification context-track pan mapping.

    All options default to None. Methods come in getter/setter pairs sharing
    one name.
    NOTE(review): the pairs look like `@property` / `@<name>.setter`
    definitions whose decorators were stripped during extraction; as written
    each setter def shadows its getter. Confirm against the generator output.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def test_preference_module(graph, bus, config, plugin_engine):
    """The preference dialog is registered in the DI graph as a singleton."""
    for name, dependency in (
        ('tomate.bus', bus),
        ('tomate.plugin', plugin_engine),
        ('tomate.config', config),
    ):
        graph.register_instance(name, dependency)
    scan_to_graph(['tomate.ui.dialogs.preference'], graph)
    dialog = graph.get('tomate.ui.preference')
    assert isinstance(dialog, PreferenceDialog)
    # Resolving twice must yield the very same object (singleton semantics).
    assert graph.get('tomate.ui.preference') is dialog
def ast_reinit_in_condition_true() -> AbstractSyntaxTree:
    """Build a fixture AST: an init block, an ``if`` re-initialization, and a loop.

    Roughly corresponds to::

        x = 1; i = 0
        if (x == 1): i = 1
        while (i < 10): x = x * 2; i = i + 1

    where condition ``a`` maps to ``i < 10`` and ``b`` to ``x == 1``.
    """
    true_value = LogicCondition.initialize_true((context := LogicCondition.generate_new_context()))
    ast = AbstractSyntaxTree((root := SeqNode(true_value)), condition_map={logic_cond('a', context): Condition(OperationType.less, [Variable('i'), Constant(10)]), logic_cond('b', context): Condition(OperationType.equal, [Variable('x'), Constant(1)])})
    # x = 1; i = 0
    code_node = ast._add_code_node(instructions=[Assignment(Variable('x'), Constant(1)), Assignment(Variable('i'), Constant(0))])
    # if (b): i = 1
    code_node_true = ast._add_code_node([Assignment(Variable('i'), Constant(1))])
    condition_node = ast._add_condition_node_with(logic_cond('b', context), code_node_true)
    # while (a): x = x * 2; i = i + 1
    loop_node = ast.factory.create_while_loop_node(condition=logic_cond('a', context))
    loop_node_body = ast._add_code_node([Assignment(Variable('x'), BinaryOperation(OperationType.multiply, [Variable('x'), Constant(2)])), Assignment(Variable('i'), BinaryOperation(OperationType.plus, [Variable('i'), Constant(1)]))])
    # Nodes must exist in the graph before edges are drawn between them.
    ast._add_nodes_from((condition_node, loop_node))
    ast._add_edges_from(((root, code_node), (root, condition_node), (root, loop_node), (loop_node, loop_node_body)))
    # Record that the init block executes before the loop body.
    ast._code_node_reachability_graph.add_reachability(code_node, loop_node_body)
    root._sorted_children = (code_node, loop_node)
    return ast
class tFAWController(Module):
    """Migen gateware tracking the DRAM tFAW (four-activate-window) limit.

    ``valid`` is pulsed for each activate command; ``count`` sums the last
    ``tfaw`` cycles of that pulse history and ``ready`` is deasserted when a
    further activate could exceed four inside the window.  With ``tfaw=None``
    the check is elided and ``ready`` stays at its reset value of 1.
    """
    def __init__(self, tfaw):
        self.valid = valid = Signal()
        self.ready = ready = Signal(reset=1)
        # Keep the ready flop in place so timing tools don't move it.
        ready.attr.add('no_retiming')
        if (tfaw is not None):
            # Activates observed in the sliding window.
            # NOTE(review): Signal(max=n) holds 0..n-1, yet the popcount of a
            # tfaw-bit window can reach tfaw -- confirm the intended width.
            count = Signal(max=max(tfaw, 2))
            # One bit of history per cycle; shifted every clock.
            window = Signal(tfaw)
            self.sync += window.eq(Cat(valid, window))
            self.comb += count.eq(reduce(add, [window[i] for i in range(tfaw)]))
            # At count==3 a simultaneous valid would be the 4th activate, so
            # ready mirrors ~valid; below that the controller stays ready.
            self.sync += If((count < 4), If((count == 3), ready.eq((~ valid))).Else(ready.eq(1)))
def lambda_handler(event, context):
    """Cognito "Verify Auth Challenge Response" Lambda trigger.

    Compares the caller's challenge answer with the expected answer from the
    private challenge parameters.  On a match, marks the user's ``email_verified``
    attribute true and flags the answer as correct; otherwise only flags it as
    incorrect.  Returns the mutated *event*, as Cognito triggers require.
    """
    response = event.get('response')
    request = event.get('request')
    expected_answer = request.get('privateChallengeParameters').get('answer')
    challenge_answer = request.get('challengeAnswer')
    if (expected_answer == challenge_answer):
        # Side effect: persist email_verified=true on the Cognito user record.
        cognito_client.admin_update_user_attributes(
            UserPoolId=event.get('userPoolId'),
            Username=event.get('userName'),
            UserAttributes=[{'Name': 'email_verified', 'Value': 'true'}],
        )
        response.update({'answerCorrect': True})
    else:
        response.update({'answerCorrect': False})
    # NOTE(review): this logs the full event, including the private challenge
    # answer -- consider redacting before this ships to production logs.
    print(event)
    return event
class OpenAIChatMessage(LLMChatMessage):
    """A chat message that may additionally carry an OpenAI function call."""

    function_call: Optional[FunctionCall] = None

    def __init__(self, role: str, content: Optional[str]=None, function_call: Optional[FunctionCall]=None) -> None:
        super().__init__(role=role, content=content)
        self.function_call = function_call

    def __str__(self) -> str:
        return f'OpenAIChatMessage(role={self.role}, content={self.content}, function_call={self.function_call})'

    def to_dict(self) -> Dict[(str, Any)]:
        """Serialize for the OpenAI API; the function_call key is omitted when absent."""
        payload = {'role': self.role, 'content': self.content}
        if self.function_call is not None:
            payload['function_call'] = self.function_call.to_dict()
        return payload

    def from_completion_result(cls, completion_result: OpenAIChatCompletionResult) -> 'OpenAIChatMessage':
        """Alternate constructor: build a message from a chat completion result."""
        return cls(role=completion_result.get_role(), content=completion_result.get_content(), function_call=completion_result.get_function_call())
def extractFrolicsaboundWordpressCom(item):
    """Map a parsed feed item to a release message.

    Returns None for previews or items without any volume/chapter marker,
    a release message for known tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_tags = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesColumnSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Highcharts option wrapper: low-pass filter resonance mapping for the
    default instrument of a sonified column series.

    Each option is a getter (configured value or the default passed to
    ``_config_get``) followed by a same-named setter recording the raw value.
    All defaults here are ``None``.
    NOTE(review): these look like ``@property``/``@<name>.setter`` pairs whose
    decorators were lost in extraction -- confirm against the generated
    original.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class Calendar(Html.Html):
    """HTML monthly calendar rendered as a table, with per-day task capacity
    bars, weekend handling and overload tooltips.

    Day data comes from ``self._vals`` / ``self.val``: a list of dicts where a
    real day carries at least ``date``, ``number``, ``weekend`` and a
    ``tasks`` list, while padding cells carry none of these.
    """
    name = 'Calendar'
    requirements = ('jquery',)
    _option_cls = OptCalendars.OptionDays
    tag = 'table'

    def __init__(self, page: primitives.PageModel, content: Optional[str], width: tuple, height: tuple, align: Optional[str], options: Optional[dict], html_code: Optional[str], profile: Optional[Union[(bool, dict)]], verbose: bool=False):
        super(Calendar, self).__init__(page, content, html_code, css_attrs={'width': width, 'height': height}, profile=profile, options=options, verbose=verbose)
        # Column headers; rendering assumes a 7-day (Monday-first) week.
        self.labels = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
        (self.tasks, self.caption) = ({}, '')
        self.style.css.border_collapse = 'collapse'
        self.style.css.border_spacing = 0
        if (align is not None):
            if (align == 'center'):
                # Center the whole table via auto margins.
                self.style.css.margin_left = 'auto'
                self.style.css.margin_right = 'auto'
            else:
                self.style.css.text_align = align

    def options(self) -> OptCalendars.OptionDays:
        # NOTE(review): looks like an @property whose decorator was lost in
        # extraction (it is used as `self.options.unit` below) -- confirm.
        return super().options

    def click(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None, source_event: str=None, on_ready: bool=False):
        """Attach JavaScript click handlers (stored on a name-mangled attribute)."""
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        self.__click = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        return self

    def task(self, name: str, start: str, capacity: Union[(List[float], float)], end: Optional[str]=None, weekend: bool=False, options: dict=None):
        """Add or update a task's daily capacity between *start* and *end*.

        *capacity* is either a single percentage applied to every day or a
        list consumed one value per working day (the last value repeats).
        Weekends are skipped unless *weekend* is true.  A non-100 unit in the
        options rescales capacities to percentages.
        """
        if ((self.options.unit != 100) and (options is None)):
            options = {'unit': self.options.unit}
        if ((options is not None) and ('unit' in options)):
            # Rescale capacities expressed in a custom unit to percentages.
            if isinstance(capacity, list):
                capacity = [((100 * c) / options['unit']) for c in capacity]
            else:
                capacity = ((100 * capacity) / options['unit'])
        if (name not in self.tasks):
            # New task: pick the next theme colour and append daily entries.
            self.tasks[name] = self.page.theme.charts[len(self.tasks)]
            i = 0
            for dt in self._vals:
                if ('date' in dt):
                    if (dt['weekend'] and (not weekend)):
                        continue
                    if (start <= dt['date']):
                        if (not isinstance(capacity, list)):
                            value = capacity
                        elif (i >= len(capacity)):
                            # List exhausted: repeat the last capacity value.
                            value = capacity[(- 1)]
                        else:
                            value = capacity[i]
                        if ((end is not None) and (end >= dt['date'])):
                            dt['tasks'].append({'name': name, 'capacity': value, 'color': self.tasks[name]})
                            i += 1
                        elif (end is None):
                            # Open-ended task: runs to the end of the month.
                            dt['tasks'].append({'name': name, 'capacity': value, 'color': self.tasks[name]})
                            i += 1
        else:
            # Existing task: update the capacity of its per-day entries.
            i = 0
            for dt in self._vals:
                if ('date' in dt):
                    if (dt['weekend'] and (not weekend)):
                        continue
                    if (start <= dt['date']):
                        if (not isinstance(capacity, list)):
                            value = capacity
                        elif (i >= len(capacity)):
                            value = capacity[(- 1)]
                        else:
                            value = capacity[i]
                        if ((end is not None) and (end >= dt['date'])):
                            for t in dt['tasks']:
                                if (t['name'] == name):
                                    t['capacity'] = value
                                    i += 1
                                    break
                        elif (end is None):
                            for t in dt['tasks']:
                                if (t['name'] == name):
                                    t['capacity'] = value
                                    i += 1
                                    break

    def weekly(self, name, start, capacity, frequency: int=1, weekend: bool=False, options: Optional[dict]=None):
        """Add a task that occurs once every *frequency* weeks within *start*'s month."""
        dt = datetime.date(*map((lambda x: int(x)), start.split('-')))
        c = []
        month = dt.month
        while (dt.month == month):
            # One working week spans 5 entries; only the first day of each
            # period gets the capacity, the rest get zero.
            if ((len(c) % (frequency * 5)) == 0):
                c.append(capacity)
            else:
                c.append(0)
            dt += datetime.timedelta(days=1)
        self.task(name, start, c, weekend=weekend, options=options)

    def __str__(self):
        """Render the table: header row, day cells with capacity bars and tooltips."""
        header = [("<th style='width:%s%%;%s'>%s</th>" % ((100 / len(self.labels)), Defaults.inline(self.options.header), d)) for d in self.labels]
        (body, row) = ([], [])
        for (i, day) in enumerate(self.val):
            if ('number' in day):
                # Real day cell: total up the capacity and build the tooltip.
                (total_capacity, tooltip) = (0, [('<b>%s</b>' % day['date'])])
                for t in day.get('tasks', []):
                    c = t.get('capacity', 0)
                    total_capacity += c
                    tooltip.append(('<div>%s: %s%%</div>' % (t['name'], c)))
                if (total_capacity > 100):
                    # Over-allocated day: highlight and expose the overload.
                    day['total_capacity'] = total_capacity
                    day['style'] = Defaults.inline(self.options.overload)
                    numer_day = ("<div style='%(style)s' data-html='true' data-toggle='tooltip' title='overload: %(total_capacity)s%%'>%(number)s</div>" % day)
                else:
                    day['style'] = Defaults.inline(self.options.number)
                    numer_day = ("<div style='%(style)s'>%(number)s</div>" % day)
                tasks = ('<div>%s</div>' % ''.join([("<div style='width:100%%;height:20px;display:block;vertical-align:middle'><div style='background:%(color)s;width:100%%;height:%(capacity)s%%;display:inline-block' title='%(name)s: %(capacity)s%%'></div></div>" % t) for t in day.get('tasks', [])]))
                cell_style = Defaults.inline(self.options.today)
                if day.get('today', False):
                    row.append(("<td data-placement='right' data-toggle='tooltip' data-html='true' title='<div>%s</div>' style='%s;background:%s'>%s%s</td>" % (''.join(tooltip), cell_style, self.page.theme.success.light, numer_day, tasks)))
                else:
                    row.append(("<td data-placement='right' data-toggle='tooltip' data-html='true' title='<div>%s</div>' style='%s'>%s%s</td>" % (''.join(tooltip), cell_style, numer_day, tasks)))
            else:
                # Padding cell before/after the month's days.
                row.append("<td style='padding:0'></td>")
            if ((i % len(self.labels)) == 0):
                # Week boundary: flush the accumulated row.
                body.append(('<tr>%s</tr>' % ''.join(row)))
                row = []
        if row:
            # Pad and flush the final partial week.
            for i in range((7 - len(row))):
                row.append("<td style='padding:0'></td>")
            body.append(('<tr>%s</tr>' % ''.join(row)))
        return ('<%(tag)s %(strAttr)s><caption style="text-align:right">%(caption)s</caption><tr>%(header)s</tr>%(content)s</%(tag)s>' % {'strAttr': self.get_attrs(css_class_names=self.style.get_classes()), 'caption': self.caption, 'header': ''.join(header), 'content': ''.join(body), 'tag': self.tag})
class TestWhiteBoxRWLockReadD(unittest.TestCase):
    """White-box checks: downgrading a writer must leave a lock's internals
    indistinguishable from those of a lock holding a plain reader."""

    def _check_read_vs_downgrade(self, factory, count_attrs, lock_attrs):
        """Drive two fresh locks in lockstep -- one via gen_rlock, one via
        gen_wlock followed by downgrade() -- comparing internal counters
        (*count_attrs*) and mutex states (*lock_attrs*) at each step."""
        via_read = factory()
        via_downgrade = factory()

        def assert_same_internals() -> None:
            for attr in count_attrs:
                self.assertEqual(int(getattr(via_read, attr)), int(getattr(via_downgrade, attr)))
            for attr in lock_attrs:
                self.assertEqual(bool(getattr(via_read, attr).locked()), bool(getattr(via_downgrade, attr).locked()))

        assert_same_internals()
        reader = via_read.gen_rlock()
        reader.acquire()
        downgradable = via_downgrade.gen_wlock()
        downgradable.acquire()
        assert isinstance(downgradable, rwlock.LockableD)
        downgradable = downgradable.downgrade()
        assert_same_internals()
        reader.release()
        downgradable.release()
        assert_same_internals()

    def test_read_vs_downgrade_read(self) -> None:
        self._check_read_vs_downgrade(
            rwlock.RWLockReadD,
            ('v_read_count',),
            ('c_resource', 'c_lock_read_count'),
        )

    def test_read_vs_downgrade_write(self) -> None:
        self._check_read_vs_downgrade(
            rwlock.RWLockWriteD,
            ('v_read_count', 'v_write_count'),
            ('c_lock_read_count', 'c_lock_write_count', 'c_lock_read_entry', 'c_lock_read_try', 'c_resource'),
        )

    def test_read_vs_downgrade_fair(self) -> None:
        self._check_read_vs_downgrade(
            rwlock.RWLockFairD,
            ('v_read_count',),
            ('c_lock_read_count', 'c_lock_read', 'c_lock_write'),
        )
def setup_remote_hmmdb(db, dbtype, qtype):
    """Resolve a remote HMM database spec into connection parameters.

    *db* is either a bare database name or a ``name:host:port`` triplet.
    Known database names are resolved through get_db_info(); anything else is
    treated as a file path.  Returns ``(dbname, dbpath, host, port,
    idmap_file)`` and raises EmapperException when the ``.idmap`` companion
    file is missing.
    """
    dbpath = host = port = None
    if ':' in db:
        dbname, host, port = (part.strip() for part in db.split(':'))
        dbpath = host
        port = int(port)
    else:
        dbname = db
    if dbname in get_hmmer_databases():
        # Registered database: its file and serving port are looked up.
        dbfile, port = get_db_info(dbname)
        db = dbname
    else:
        dbfile = dbname
    idmap_file = dbfile + '.idmap'
    if not pexists(idmap_file):
        raise EmapperException(f'idmap file {idmap_file} not found')
    return (dbname, dbpath, host, port, idmap_file)
class TestPerturbText(unittest.TestCase):
    """Smoke tests for PerturbText: each perturbation is run end-to-end.

    NOTE(review): these tests only print the results -- they assert nothing,
    so they can only fail when a perturbation raises.
    """
    def setUp(self) -> None:
        ner_pipeline = get_ner_pipeline()
        self.perturber = PerturbText(INTENT_DATASET, ner_pipeline=ner_pipeline, batch_size=8, perturbations_per_sample=5)
    def test_perturb_names(self):
        print(self.perturber.perturb_names())
    def test_perturb_location(self):
        print(self.perturber.perturb_location())
    def test_perturb_number(self):
        print(self.perturber.perturb_number())
    def test_perturb_typos(self):
        # 5% per-character typo probability.
        print(self.perturber.perturb_typos(typo_probability=0.05))
    def test_paraphrase(self):
        # Temperature 0 keeps the LLM paraphrases as deterministic as possible.
        similar_sentences = self.perturber.paraphrase(model='gpt-3.5-turbo', temperature=0.0)
        print(similar_sentences)
@pytest.fixture(scope='function')
def erasure_policy_hmac(db: Session, oauth_client: ClientDetail, storage_config: StorageConfig) -> Generator:
    """Yield an erasure Policy wired to an HMAC masking rule on user.name.

    The bare ``(scope='function')`` left here was the argument list of a
    stripped decorator and is a syntax error on its own; the
    ``@pytest.fixture`` decorator is restored.  Teardown deletes the target,
    rule and policy, tolerating records already removed by the test.
    """
    erasure_policy = Policy.create(db=db, data={'name': 'hmac policy', 'key': 'hmac_policy', 'client_id': oauth_client.id})
    erasure_rule = Rule.create(db=db, data={'action_type': ActionType.erasure.value, 'client_id': oauth_client.id, 'name': 'hmac erasure rule', 'policy_id': erasure_policy.id, 'masking_strategy': {'strategy': HmacMaskingStrategy.name, 'configuration': {}}})
    erasure_rule_target = RuleTarget.create(db=db, data={'client_id': oauth_client.id, 'data_category': DataCategory('user.name').value, 'rule_id': erasure_rule.id})
    yield erasure_policy
    # Best-effort cleanup, child records first.
    for record in (erasure_rule_target, erasure_rule, erasure_policy):
        try:
            record.delete(db)
        except ObjectDeletedError:
            pass
class TestStringRelatedField(APISimpleTestCase):
    """StringRelatedField should render the related object via str()."""

    def setUp(self):
        self.field = serializers.StringRelatedField()
        self.instance = MockObject(pk=1, name='foo')

    def test_string_related_representation(self):
        rendered = self.field.to_representation(self.instance)
        assert rendered == '<MockObject name=foo, pk=1>'
class build_ext_options():
    """Mixin applying per-compiler compile/link flags to every extension."""

    def build_options(self):
        # MSVC exposes initialize(); calling it populates the compiler state
        # before we record a truncated platform tag on it.
        if hasattr(self.compiler, 'initialize'):
            self.compiler.initialize()
        self.compiler.platform = sys.platform[:6]
        ctype = self.compiler.compiler_type
        for extension in self.extensions:
            extension.extra_compile_args = COMPILE_OPTIONS.get(ctype, COMPILE_OPTIONS['other'])
            extension.extra_link_args = LINK_OPTIONS.get(ctype, LINK_OPTIONS['other'])
# Registers the push-VLAN action type/size with the OFPAction machinery.
# NOTE(review): this looks like the residue of a decorator (e.g.
# `@OFPAction.register_action_type(...)`) on the class below whose name was
# lost in extraction -- confirm against the original source.
_action_type(ofproto.OFPAT_PUSH_VLAN, ofproto.OFP_ACTION_PUSH_SIZE)
class OFPActionPushVlan(OFPAction):
    """OpenFlow push-VLAN action (OFPAT_PUSH_VLAN).

    ``ethertype`` selects the VLAN tag type, 802.1Q by default.
    """
    def __init__(self, ethertype=ether.ETH_TYPE_8021Q, type_=None, len_=None):
        super(OFPActionPushVlan, self).__init__()
        self.ethertype = ethertype
    def parser(cls, buf, offset):
        # NOTE(review): takes `cls` -- presumably an @classmethod whose
        # decorator was lost in extraction; confirm against the original.
        # type_/len_ are unpacked but only the ethertype is needed here.
        (type_, len_, ethertype) = struct.unpack_from(ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset)
        return cls(ethertype)
    def serialize(self, buf, offset):
        # self.type / self.len are not set in __init__ -- presumably provided
        # by the OFPAction registration machinery; confirm.
        msg_pack_into(ofproto.OFP_ACTION_PUSH_PACK_STR, buf, offset, self.type, self.len, self.ethertype)
class OptionPlotoptionsBellcurveStatesSelect(Options):
    """Highcharts options for the 'select' state of a bell-curve series.

    Scalar options are getter/setter pairs sharing one name; sub-option
    objects (animation, halo, marker) are exposed via ``_config_sub_data``.
    NOTE(review): the pairs look like ``@property``/``@<name>.setter`` pairs
    whose decorators were lost in extraction -- confirm against the generated
    original.
    """
    def animation(self) -> 'OptionPlotoptionsBellcurveStatesSelectAnimation':
        # Nested animation sub-options.
        return self._config_sub_data('animation', OptionPlotoptionsBellcurveStatesSelectAnimation)
    def enabled(self):
        # Default: True.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def halo(self) -> 'OptionPlotoptionsBellcurveStatesSelectHalo':
        # Nested halo sub-options.
        return self._config_sub_data('halo', OptionPlotoptionsBellcurveStatesSelectHalo)
    def lineWidth(self):
        # Default: None.
        return self._config_get(None)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def lineWidthPlus(self):
        # Default: 1.
        return self._config_get(1)
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)
    def marker(self) -> 'OptionPlotoptionsBellcurveStatesSelectMarker':
        # Nested marker sub-options.
        return self._config_sub_data('marker', OptionPlotoptionsBellcurveStatesSelectMarker)
def update_mask(db_root, mask_db, src_dbs, offset=0):
    """Merge the set bits of several segbits databases into a mask database.

    Bits already in ``mask_<mask_db>.db`` are kept; non-inverted bits found in
    each ``segbits_<src_db>.db`` are added (with the second coordinate shifted
    by *offset*) and the mask file is rewritten, sorted, when any bits exist.
    """
    mask_path = '%s/mask_%s.db' % (db_root, mask_db)
    known_bits = set()
    # Seed with whatever the mask file already records.
    if os.path.exists(mask_path):
        with util.OpenSafeFile(mask_path, 'r') as f:
            for raw in f:
                fields = raw.split()
                assert len(fields) == 2
                assert fields[0] == 'bit'
                known_bits.add(fields[1])
    for src_db in src_dbs:
        seg_path = '%s/segbits_%s.db' % (db_root, src_db)
        if not os.path.exists(seg_path):
            continue
        with util.OpenSafeFile(seg_path, 'r') as f:
            for raw in f:
                # First token is the feature name; the rest are bits.
                for bit in raw.split()[1:]:
                    if bit[0] == '!':
                        # Inverted bits do not contribute to the mask.
                        continue
                    if offset != 0:
                        m = re.match('(\\d+)_(\\d+)', bit)
                        bit = '%02d_%02d' % (int(m.group(1)), int(m.group(2)) + offset)
                    known_bits.add(bit)
    if len(known_bits) > 0:
        with util.OpenSafeFile(mask_path, 'w') as f:
            for bit in sorted(known_bits):
                print('bit %s' % bit, file=f)
def calculate_message_call_gas(state: State, gas: Uint, to: Address, value: U256) -> MessageCallGas:
    """Compute the gas cost and stipend of a CALL-style message.

    The base CALL cost plus the forwarded *gas* is surcharged when the target
    account does not exist yet and when a non-zero *value* is transferred; a
    non-zero transfer also grants the callee the call stipend on top of the
    forwarded gas.
    """
    if account_exists(state, to):
        create_gas_cost = Uint(0)
    else:
        create_gas_cost = GAS_NEW_ACCOUNT
    if value == 0:
        transfer_gas_cost = Uint(0)
        stipend = gas
    else:
        transfer_gas_cost = GAS_CALL_VALUE
        stipend = GAS_CALL_STIPEND + gas
    cost = GAS_CALL + gas + create_gas_cost + transfer_gas_cost
    return MessageCallGas(cost, stipend)
class RemoteControlledTank(MoveTank):
    """Tank drive controlled by the EV3 infrared remote.

    The remote's channel-1 top/bottom buttons run the left/right motors
    forward/backward at ``speed`` while held.
    """
    def __init__(self, left_motor_port, right_motor_port, polarity='inversed', speed=400, channel=1):
        MoveTank.__init__(self, left_motor_port, right_motor_port)
        self.set_polarity(polarity)
        left_motor = self.motors[left_motor_port]
        right_motor = self.motors[right_motor_port]
        self.speed_sp = speed
        self.remote = InfraredSensor()
        # NOTE(review): handlers are always bound to the channel-1 callbacks
        # regardless of the ``channel`` argument, which is only stored --
        # confirm whether non-default channels are expected to work.
        self.remote.on_channel1_top_left = self.make_move(left_motor, self.speed_sp)
        self.remote.on_channel1_bottom_left = self.make_move(left_motor, (self.speed_sp * (- 1)))
        self.remote.on_channel1_top_right = self.make_move(right_motor, self.speed_sp)
        self.remote.on_channel1_bottom_right = self.make_move(right_motor, (self.speed_sp * (- 1)))
        self.channel = channel
    def make_move(self, motor, dc_sp):
        """Return a button-state callback that runs *motor* at *dc_sp* while pressed."""
        def move(state):
            if state:
                motor.run_forever(speed_sp=dc_sp)
            else:
                motor.stop()
        return move
    def main(self):
        """Poll the remote until interrupted, then switch the motors off."""
        try:
            while True:
                self.remote.process()
                sleep(0.01)
        except (KeyboardInterrupt, Exception) as e:
            # Catches KeyboardInterrupt too so the motors are stopped on Ctrl-C.
            log.exception(e)
            self.off()
class Bridge(object):
    """Spanning-tree state for one OpenFlow datapath.

    Owns one Port per (low-numbered) switch port, recomputes the spanning
    tree whenever topology information changes, and dispatches BPDU
    packet-ins to the per-port state machines.

    Fixes applied: the per-instance config ``values`` dict is now a copy of
    the class-level defaults (mutating ``_DEFAULT_VALUE`` in place leaked one
    bridge's configuration into every other instance), and
    ``topology_change_notify`` now *calls* ``is_root_bridge()`` instead of
    testing the bound method object, which is always truthy.
    """
    _DEFAULT_VALUE = {'priority': bpdu.DEFAULT_BRIDGE_PRIORITY, 'sys_ext_id': 0, 'max_age': bpdu.DEFAULT_MAX_AGE, 'hello_time': bpdu.DEFAULT_HELLO_TIME, 'fwd_delay': bpdu.DEFAULT_FORWARD_DELAY}

    def __init__(self, dp, logger, config, send_ev_func):
        super(Bridge, self).__init__()
        self.dp = dp
        self.logger = logger
        self.dpid_str = {'dpid': dpid_to_str(dp.id)}
        self.send_event = send_ev_func
        bridge_conf = config.get('bridge', {})
        # Copy the defaults: writing into the class-level dict would be
        # shared across all Bridge instances.
        values = self._DEFAULT_VALUE.copy()
        for (key, value) in bridge_conf.items():
            values[key] = value
        system_id = list(dp.ports.values())[0].hw_addr
        self.bridge_id = BridgeId(values['priority'], values['sys_ext_id'], system_id)
        self.bridge_times = Times(0, values['max_age'], values['hello_time'], values['fwd_delay'])
        # Until a superior BPDU arrives, this bridge assumes it is the root.
        self.root_priority = Priority(self.bridge_id, 0, None, None)
        self.root_times = self.bridge_times
        self.ports = {}
        self.ports_state = {}
        self.ports_conf = config.get('ports', {})
        for ofport in dp.ports.values():
            self.port_add(ofport)
        if ((dp.ofproto == ofproto_v1_2) or (dp.ofproto == ofproto_v1_3)):
            # OF1.2+ needs an explicit flow to punt BPDUs to the controller.
            ofctl = OfCtl_v1_2later(self.dp)
            ofctl.add_bpdu_pkt_in_flow()

    def is_root_bridge(self):
        """True when this bridge currently believes itself to be the root."""
        return bool((self.bridge_id.value == self.root_priority.root_id.value))

    def delete(self):
        """Tear down all port state machines."""
        for port in self.ports.values():
            port.delete()

    def port_add(self, ofport):
        """Start managing *ofport* (ignored above MAX_PORT_NO)."""
        if (ofport.port_no <= MAX_PORT_NO):
            port_conf = self.ports_conf.get(ofport.port_no, {})
            self.ports[ofport.port_no] = Port(self.dp, self.logger, port_conf, self.send_event, self.recalculate_spanning_tree, self.topology_change_notify, self.bridge_id, self.bridge_times, ofport)
            self.ports_state[ofport.port_no] = ofport.state

    def port_delete(self, ofp_port):
        """Stop managing a removed port (treated as a link-down first)."""
        self.link_down(ofp_port)
        self.ports[ofp_port.port_no].delete()
        del self.ports[ofp_port.port_no]
        del self.ports_state[ofp_port.port_no]

    def link_up(self, ofp_port):
        """Bring a port up as DESIGNATED with the current root information."""
        port = self.ports[ofp_port.port_no]
        port.up(DESIGNATED_PORT, self.root_priority, self.root_times)
        self.ports_state[ofp_port.port_no] = ofp_port.state

    def link_down(self, ofp_port):
        """Disable a port; losing the root port forces a recalculation."""
        port = self.ports[ofp_port.port_no]
        init_stp_flg = bool((port.role is ROOT_PORT))
        port.down(PORT_STATE_DISABLE, msg_init=True)
        self.ports_state[ofp_port.port_no] = ofp_port.state
        if init_stp_flg:
            self.recalculate_spanning_tree()

    def packet_in_handler(self, msg):
        """Dispatch a packet-in: BPDUs feed the STP machinery, the rest is re-raised as EventPacketIn."""
        dp = msg.datapath
        if (dp.ofproto == ofproto_v1_0):
            in_port_no = msg.in_port
        else:
            assert ((dp.ofproto == ofproto_v1_2) or (dp.ofproto == ofproto_v1_3))
            # OF1.2+: the ingress port is carried in the match fields.
            in_port_no = None
            for match_field in msg.match.fields:
                if (match_field.header == dp.ofproto.OXM_OF_IN_PORT):
                    in_port_no = match_field.value
                    break
        if (in_port_no not in self.ports):
            return
        in_port = self.ports[in_port_no]
        if (in_port.state == PORT_STATE_DISABLE):
            return
        pkt = packet.Packet(msg.data)
        if (bpdu.ConfigurationBPDUs in pkt):
            (bpdu_pkt,) = pkt.get_protocols(bpdu.ConfigurationBPDUs)
            if (bpdu_pkt.message_age > bpdu_pkt.max_age):
                log_msg = 'Drop BPDU packet which message_age exceeded.'
                self.logger.debug(log_msg, extra=self.dpid_str)
                return
            (rcv_info, rcv_tc) = in_port.rcv_config_bpdu(bpdu_pkt)
            if (rcv_info is SUPERIOR):
                self.logger.info('[port=%d] Receive superior BPDU.', in_port_no, extra=self.dpid_str)
                self.recalculate_spanning_tree(init=False)
            elif rcv_tc:
                self.send_event(EventTopologyChange(self.dp))
            if (in_port.role is ROOT_PORT):
                # Propagate the topology-change flag away from the root.
                self._forward_tc_bpdu(rcv_tc)
        elif (bpdu.TopologyChangeNotificationBPDUs in pkt):
            in_port.transmit_ack_bpdu()
            self.topology_change_notify(None)
        elif (bpdu.RstBPDUs in pkt):
            # Rapid-STP BPDUs are intentionally ignored.
            pass
        else:
            self.send_event(EventPacketIn(msg))

    def recalculate_spanning_tree(self, init=True):
        """Block all ports, recompute roles, then bring ports up in their new roles."""
        for port in self.ports.values():
            if (port.state is not PORT_STATE_DISABLE):
                port.down(PORT_STATE_BLOCK, msg_init=init)
        if init:
            self.send_event(EventTopologyChange(self.dp))
        port_roles = {}
        self.root_priority = Priority(self.bridge_id, 0, None, None)
        self.root_times = self.bridge_times
        if init:
            # Fresh start: assume root until BPDUs prove otherwise.
            self.logger.info('Root bridge.', extra=self.dpid_str)
            for port_no in self.ports:
                port_roles[port_no] = DESIGNATED_PORT
        else:
            (port_roles, self.root_priority, self.root_times) = self._spanning_tree_algorithm()
        for (port_no, role) in port_roles.items():
            if (self.ports[port_no].state is not PORT_STATE_DISABLE):
                self.ports[port_no].up(role, self.root_priority, self.root_times)

    def _spanning_tree_algorithm(self):
        """Return (port_roles, root_priority, root_times) from received BPDUs."""
        port_roles = {}
        root_port = self._select_root_port()
        if (root_port is None):
            # No superior information anywhere: this bridge is the root.
            self.logger.info('Root bridge.', extra=self.dpid_str)
            root_priority = self.root_priority
            root_times = self.root_times
            for port_no in self.ports:
                if (self.ports[port_no].state is not PORT_STATE_DISABLE):
                    port_roles[port_no] = DESIGNATED_PORT
        else:
            self.logger.info('Non root bridge.', extra=self.dpid_str)
            root_priority = root_port.designated_priority
            root_times = root_port.designated_times
            port_roles[root_port.ofport.port_no] = ROOT_PORT
            d_ports = self._select_designated_port(root_port)
            for port_no in d_ports:
                port_roles[port_no] = DESIGNATED_PORT
            for port in self.ports.values():
                if (port.state is not PORT_STATE_DISABLE):
                    port_roles.setdefault(port.ofport.port_no, NON_DESIGNATED_PORT)
        return (port_roles, root_priority, root_times)

    def _select_root_port(self):
        """Pick the port with the best received priority vector, or None."""
        root_port = None
        for port in self.ports.values():
            root_msg = (self.root_priority if (root_port is None) else root_port.designated_priority)
            port_msg = port.designated_priority
            if ((port.state is PORT_STATE_DISABLE) or (port_msg is None)):
                continue
            if (root_msg.root_id.value > port_msg.root_id.value):
                result = SUPERIOR
            elif (root_msg.root_id.value == port_msg.root_id.value):
                if (root_msg.designated_bridge_id is None):
                    result = INFERIOR
                else:
                    result = Stp.compare_root_path(port_msg.root_path_cost, root_msg.root_path_cost, port_msg.designated_bridge_id.value, root_msg.designated_bridge_id.value, port_msg.designated_port_id.value, root_msg.designated_port_id.value)
            else:
                result = INFERIOR
            if (result is SUPERIOR):
                root_port = port
        return root_port

    def _select_designated_port(self, root_port):
        """Return the port numbers on which this bridge should be designated."""
        d_ports = []
        root_msg = root_port.designated_priority
        for port in self.ports.values():
            port_msg = port.designated_priority
            if ((port.state is PORT_STATE_DISABLE) or (port.ofport.port_no == root_port.ofport.port_no)):
                continue
            if ((port_msg is None) or (port_msg.root_id.value != root_msg.root_id.value)):
                d_ports.append(port.ofport.port_no)
            else:
                result = Stp.compare_root_path(root_msg.root_path_cost, (port_msg.root_path_cost - port.path_cost), self.bridge_id.value, port_msg.designated_bridge_id.value, port.port_id.value, port_msg.designated_port_id.value)
                if (result is SUPERIOR):
                    d_ports.append(port.ofport.port_no)
        return d_ports

    def topology_change_notify(self, port_state):
        """React to a topology change: notify listeners and send TC/TCN BPDUs."""
        notice = False
        if (port_state is PORT_STATE_FORWARD):
            for port in self.ports.values():
                if (port.role is DESIGNATED_PORT):
                    notice = True
                    break
        else:
            notice = True
        if notice:
            self.send_event(EventTopologyChange(self.dp))
            # BUGFIX: is_root_bridge is a method -- the original tested the
            # bound-method object itself, which is always truthy.
            if self.is_root_bridge():
                self._transmit_tc_bpdu()
            else:
                self._transmit_tcn_bpdu()

    def _transmit_tc_bpdu(self):
        """Root bridge: send topology-change BPDUs out of every port."""
        for port in self.ports.values():
            port.transmit_tc_bpdu()

    def _transmit_tcn_bpdu(self):
        """Non-root bridge: send a TCN BPDU towards the root, if a root port exists."""
        root_port = None
        for port in self.ports.values():
            if (port.role is ROOT_PORT):
                root_port = port
                break
        if root_port:
            root_port.transmit_tcn_bpdu()

    def _forward_tc_bpdu(self, fwd_flg):
        """Propagate the topology-change flag to every port's transmitter."""
        for port in self.ports.values():
            port.send_tc_flg = fwd_flg
class EmovityStation(BikeShareStation):
    """A bike-share station parsed from an Emovity map marker popup."""

    def __init__(self, latitude, longitude, bikes, free, fuzzle):
        # The popup HTML (*fuzzle*) carries "<uid> - <name>" in its first div text node.
        popup = html.fromstring(fuzzle)
        texts = popup.xpath('//div/text()')
        station_name = texts[0]
        station_uid = next(iter(re.findall('(\\d+)\\s*-', station_name)))
        super(EmovityStation, self).__init__(name=station_name, latitude=float(latitude), longitude=float(longitude), bikes=int(bikes), free=int(free), extra={'uid': station_uid})
class TestSecureAggregationIntegration():
    """End-to-end checks that secure aggregation perturbs training results
    and that fixed-point overflow is reported through the metrics channels.
    """
    def _load_data(self, num_users: int=26):
        """Build a dummy-alphabet data provider with one user per shard."""
        shard_size = 1
        local_batch_size = 1
        dummy_dataset = DummyAlphabetDataset(num_rows=num_users)
        (data_provider, data_loader) = DummyAlphabetDataset.create_data_provider_and_loader(dummy_dataset, shard_size, local_batch_size, DummyAlphabetFLModel())
        assertEqual(data_loader.num_total_users, (num_users / shard_size))
        assertEqual(data_loader.num_total_users, data_provider.num_train_users())
        return (data_provider, data_loader.train_batch_size)
    def _train_fl_model(self, sec_agg_enable: bool=False, fixedpoint=None, num_users: int=26, users_per_round: int=26, epochs: int=1, metrics_reporter=None, report_train_metrics: bool=False, report_train_metrics_after_aggregation: bool=False, train_metrics_reported_per_epoch: int=1):
        """Train a dummy FL model, optionally behind a secure-aggregation server."""
        global_fl_model = DummyAlphabetFLModel()
        (data_provider, _) = self._load_data(num_users)
        world_size = 1
        # The server config switches between plain averaging and SecAgg averaging.
        sync_trainer = create_sync_trainer(model=global_fl_model, local_lr=0.1, users_per_round=users_per_round, epochs=epochs, user_epochs_per_round=1, do_eval=True, server_config=(SyncSecAggServerConfig(aggregation_type=AggregationType.AVERAGE, fixedpoint=fixedpoint) if sec_agg_enable else SyncServerConfig(aggregation_type=AggregationType.AVERAGE)))
        sync_trainer.cfg.train_metrics_reported_per_epoch = train_metrics_reported_per_epoch
        sync_trainer.cfg.report_train_metrics = report_train_metrics
        sync_trainer.cfg.report_train_metrics_after_aggregation = report_train_metrics_after_aggregation
        if (metrics_reporter is None):
            metrics_reporter = FakeMetricReporter()
        (global_fl_model, _eval_metric) = sync_trainer.train(data_provider, metrics_reporter, num_total_users=data_provider.num_train_users(), distributed_world_size=world_size)
        return global_fl_model
    def test_secagg_not_equivalent_no_secagg(self) -> None:
        """Lossy 1-byte fixed point must change the trained parameters."""
        fixedpoint = FixedPointConfig(num_bytes=1, scaling_factor=1000)
        # Identical seeds so only the aggregation path differs between runs.
        torch.manual_seed(1)
        fl_model_with_secure_trainer = self._train_fl_model(sec_agg_enable=True, fixedpoint=fixedpoint)
        torch.manual_seed(1)
        fl_model_with_trainer = self._train_fl_model()
        assertNotEqual(FLModelParamUtils.get_mismatched_param([fl_model_with_trainer.fl_get_module(), fl_model_with_secure_trainer.fl_get_module()], 1e-06), '')
    def test_secagg_not_equivalent_no_secagg_large_range(self) -> None:
        """Even a wide 7-byte fixed point must change the trained parameters."""
        fixedpoint = FixedPointConfig(num_bytes=7, scaling_factor=1)
        torch.manual_seed(1)
        fl_model_with_secure_trainer = self._train_fl_model(sec_agg_enable=True, fixedpoint=fixedpoint)
        torch.manual_seed(1)
        fl_model_with_trainer = self._train_fl_model()
        assertNotEqual(FLModelParamUtils.get_mismatched_param([fl_model_with_trainer.fl_get_module(), fl_model_with_secure_trainer.fl_get_module()], 1e-06), '')
    def test_overflow_reporting(self) -> None:
        """A tiny 1-byte range should produce overflow messages on stdout and tensorboard."""
        fixedpoint = FixedPointConfig(num_bytes=1, scaling_factor=100)
        metrics_reporter = MetricsReporterWithMockedChannels()
        self._train_fl_model(sec_agg_enable=True, fixedpoint=fixedpoint, users_per_round=2, epochs=3, metrics_reporter=metrics_reporter, report_train_metrics=True, report_train_metrics_after_aggregation=True, train_metrics_reported_per_epoch=26)
        def count_word(result, word):
            return str(result).count(word)
        # Expected counts: 39 reports on stdout, 78 (value + percentage) on tensorboard.
        assertEqual(count_word(metrics_reporter.stdout_results, 'overflow per round'), 39, metrics_reporter.stdout_results)
        assertEqual(count_word(metrics_reporter.tensorboard_results, 'overflow per round'), 78, metrics_reporter.tensorboard_results)
class OrderStatisticsTicketSchema(Schema):
    """JSON:API schema exposing per-ticket order, ticket and sales statistics.

    The three Method fields previously repeated the same query once per order
    status (21 near-identical expressions); they now share per-status helpers.
    The returned dicts are unchanged: a 'total' key plus one key per status.
    """

    class Meta():
        type_ = 'order-statistics-ticket'
        self_view = 'v1.order_statistics_ticket_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize

    id = fields.Str()
    identifier = fields.Str()
    tickets = fields.Method('tickets_count')
    orders = fields.Method('orders_count')
    sales = fields.Method('sales_count')

    def _order_statuses(self):
        # Closed set of order states reported on, in output order.
        return ('draft', 'cancelled', 'pending', 'expired', 'placed', 'completed')

    def tickets_count(self, obj):
        """Quantity of this ticket ordered, overall and per order status."""
        obj_id = obj.id

        def quantity_sum(status=None):
            # Sum of ordered quantities, optionally restricted to one status.
            query = db.session.query(func.sum(OrderTicket.quantity.label('sum'))).join(Order.order_tickets).filter((OrderTicket.ticket_id == obj_id))
            if status is not None:
                query = query.filter((Order.status == status))
            return query.scalar() or 0

        result = {'total': quantity_sum()}
        for status in self._order_statuses():
            result[status] = quantity_sum(status)
        return result

    def orders_count(self, obj):
        """Number of orders containing this ticket, overall and per order status."""
        obj_id = obj.id

        def order_count(status=None):
            query = db.session.query(Order).join(Order.order_tickets).filter((OrderTicket.ticket_id == obj_id))
            if status is not None:
                query = query.filter((Order.status == status))
            return get_count(query) or 0

        result = {'total': order_count()}
        for status in self._order_statuses():
            result[status] = order_count(status)
        return result

    def sales_count(self, obj):
        """Sales amount per order status; 'total' is the sum over statuses."""
        obj_id = obj.id
        per_status = {status: (calculated_sale_by_status(obj_id, status) or 0) for status in self._order_statuses()}
        total = sum(per_status.values())
        result = {'total': (total or 0)}
        result.update(per_status)
        return result
class SearchFilterAnnotatedFieldTests(TestCase):
    """Verify SearchFilter works against annotated (computed) queryset fields."""

    def setUpTestData(cls):
        # NOTE(review): Django expects this hook to be a @classmethod —
        # decorator presumably applied upstream; confirm.
        SearchFilterModel.objects.create(title='abc', text='def')
        SearchFilterModel.objects.create(title='ghi', text='jkl')

    def test_search_in_annotated_field(self):
        """A search term should be matched against the annotated value."""
        annotated_qs = SearchFilterModel.objects.annotate(
            title_text=Upper(Concat(models.F('title'), models.F('text')))
        ).all()

        class SearchListView(generics.ListAPIView):
            queryset = annotated_qs
            serializer_class = SearchFilterAnnotatedSerializer
            filter_backends = (filters.SearchFilter,)
            search_fields = ('title_text',)

        response = SearchListView.as_view()(factory.get('/', {'search': 'ABCDEF'}))
        assert len(response.data) == 1
        assert response.data[0]['title_text'] == 'ABCDEF'

    def test_must_call_distinct_subsequent_m2m_fields(self):
        """Distinct is required whenever any search field crosses an m2m relation."""
        search_filter = filters.SearchFilter()
        annotated_qs = SearchFilterModelM2M.objects.annotate(
            title_text=Upper(Concat(models.F('title'), models.F('text')))
        ).all()
        assert search_filter.must_call_distinct(annotated_qs, ['attributes'])
        assert search_filter.must_call_distinct(annotated_qs, ['title_text', 'attributes'])
def test_centered_product_same_mode_raises_exceptions_if_frame_1_frame_2_different_lengths():
    """CenteredProduct with mode='same' must reject missing, mismatched or scalar frames."""
    invalid_frame_kwargs = [
        {'frame_1': range(60)},
        {'frame_1': range(60), 'frame_2': range(10)},
        {'frame_1': 60, 'frame_2': range(10)},
        {'frame_1': range(60), 'frame_2': 10},
    ]
    for frames in invalid_frame_kwargs:
        with pytest.raises(scared.PreprocessError):
            scared.preprocesses.high_order.CenteredProduct(mode='same', **frames)
class CaseInsensitiveDict(MutableMapping):
    """A dict-like container whose string keys compare case-insensitively.

    Lookup, deletion and containment ignore key case, while iteration and
    ``copy`` reproduce the casing of the key as it was last set.  Equality
    against any Mapping is case-insensitive on keys.
    """

    def __init__(self, data: Optional[Iterable[Tuple[(str, Any)]]]=None, **kwargs: Any):
        # Maps lowercased key -> (original-cased key, value).
        self._store: Dict[(str, Tuple[(str, Any)])] = {}
        self.update({} if data is None else data, **kwargs)

    def __setitem__(self, key: str, value: Any) -> None:
        # Remember the caller's casing alongside the value.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key: str) -> Any:
        _cased, value = self._store[key.lower()]
        return value

    def __delitem__(self, key: str) -> None:
        self._store.pop(key.lower())

    def __iter__(self) -> Iterator[str]:
        for cased_key, _value in self._store.values():
            yield cased_key

    def __len__(self) -> int:
        return len(self._store)

    def lower_items(self) -> Iterator[Tuple[(str, Any)]]:
        """Iterate (lowercased_key, value) pairs."""
        for lower_key, cased_pair in self._store.items():
            yield (lower_key, cased_pair[1])

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Mapping):
            return NotImplemented
        # Compare on lowercased keys so casing differences don't matter.
        return dict(self.lower_items()) == dict(CaseInsensitiveDict(other).lower_items())

    def copy(self) -> CaseInsensitiveDict:
        """Return a shallow copy preserving original key casing."""
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}({dict(self.items())!r})'
def top_n_correlations(n, column, days=180):
    """Pair the n strongest positive and negative correlations for `column`.

    Builds the Oura correlation matrix over `days` of history and returns a
    DataFrame with side-by-side positive and negative correlation columns.
    """
    corr = generate_oura_correlations(lookback_days=days)

    strongest_positive = corr[column].nlargest(n).reset_index()
    strongest_positive.columns = ['Positive', 'Pos Corr Coef.']

    strongest_negative = corr[column].nsmallest(n).reset_index()
    strongest_negative.columns = ['Negative', 'Neg Corr Coef.']

    return pd.merge(strongest_positive, strongest_negative, left_index=True, right_index=True)
class ObjectIdentity(object):
    """Identification of a MIB variable.

    Accepts one of: a dotted OID string, an OID tuple, another
    ObjectIdentity, or a (module name, symbol name, *indices) triple.
    The object starts "dirty" and must be resolved against a MIB view
    controller via resolveWithMib() before OID/label/indices accessors
    and comparison operators can be used; unresolved access raises
    SmiError.
    """

    # Lifecycle states: DIRTY until resolveWithMib() completes, then CLEAN.
    (ST_DIRTY, ST_CLEAN) = (1, 2)

    def __init__(self, *args, **kwargs):
        self._args = args
        self._kwargs = kwargs
        self._mibSourcesToAdd = None
        self._modNamesToLoad = None
        self._asn1SourcesToAdd = None
        self._asn1SourcesOptions = None
        self._state = self.ST_DIRTY
        self._indices = ()
        self._oid = ()
        self._label = ()
        self._modName = self._symName = ''
        self._mibNode = None

    def getMibSymbol(self):
        """Return (module name, symbol name, indices); requires prior resolution."""
        if (self._state & self.ST_CLEAN):
            return (self._modName, self._symName, self._indices)
        else:
            raise SmiError(('%s object not fully initialized' % self.__class__.__name__))

    def getOid(self):
        """Return the resolved OID (rfc1902.ObjectName); requires prior resolution."""
        if (self._state & self.ST_CLEAN):
            return self._oid
        else:
            raise SmiError(('%s object not fully initialized' % self.__class__.__name__))

    def getLabel(self):
        """Return the human-readable label tuple; requires prior resolution."""
        if (self._state & self.ST_CLEAN):
            return self._label
        else:
            raise SmiError(('%s object not fully initialized' % self.__class__.__name__))

    def getMibNode(self):
        """Return the resolved MIB node object; requires prior resolution."""
        if (self._state & self.ST_CLEAN):
            return self._mibNode
        else:
            raise SmiError(('%s object not fully initialized' % self.__class__.__name__))

    def isFullyResolved(self):
        """Return truthy once resolveWithMib() has completed."""
        return (self._state & self.ST_CLEAN)

    def addAsn1MibSource(self, *asn1Sources, **kwargs):
        """Queue ASN.1 MIB source locations (and compiler options) for resolution; chainable."""
        if (self._asn1SourcesToAdd is None):
            self._asn1SourcesToAdd = asn1Sources
        else:
            self._asn1SourcesToAdd += asn1Sources
        if self._asn1SourcesOptions:
            self._asn1SourcesOptions.update(kwargs)
        else:
            self._asn1SourcesOptions = kwargs
        return self

    def addMibSource(self, *mibSources):
        """Queue compiled-MIB search paths for resolution; chainable."""
        if (self._mibSourcesToAdd is None):
            self._mibSourcesToAdd = mibSources
        else:
            self._mibSourcesToAdd += mibSources
        return self

    def loadMibs(self, *modNames):
        """Queue MIB modules to load during resolution; chainable."""
        if (self._modNamesToLoad is None):
            self._modNamesToLoad = modNames
        else:
            self._modNamesToLoad += modNames
        return self

    def resolveWithMib(self, mibViewController):
        """Resolve this identity into OID/label/indices using MIB information.

        Applies any queued MIB sources, ASN.1 compiler configuration and
        module loads, then interprets the constructor arguments.  Idempotent
        once CLEAN.  Raises SmiError on unresolvable input.
        """
        # Apply queued compiled-MIB search paths.
        if (self._mibSourcesToAdd is not None):
            ((debug.logger & debug.FLAG_MIB) and debug.logger(('adding MIB sources %s' % ', '.join(self._mibSourcesToAdd))))
            mibViewController.mibBuilder.addMibSources(*[ZipMibSource(x) for x in self._mibSourcesToAdd])
            self._mibSourcesToAdd = None
        # Attach a MIB compiler (best-effort by default, configured if sources were queued).
        if (self._asn1SourcesToAdd is None):
            addMibCompiler(mibViewController.mibBuilder, ifAvailable=True, ifNotAdded=True)
        else:
            ((debug.logger & debug.FLAG_MIB) and debug.logger(('adding MIB compiler with source paths %s' % ', '.join(self._asn1SourcesToAdd))))
            addMibCompiler(mibViewController.mibBuilder, sources=self._asn1SourcesToAdd, searchers=self._asn1SourcesOptions.get('searchers'), borrowers=self._asn1SourcesOptions.get('borrowers'), destination=self._asn1SourcesOptions.get('destination'), ifAvailable=self._asn1SourcesOptions.get('ifAvailable'), ifNotAdded=self._asn1SourcesOptions.get('ifNotAdded'))
            self._asn1SourcesToAdd = self._asn1SourcesOptions = None
        # Load any queued MIB modules.
        if (self._modNamesToLoad is not None):
            ((debug.logger & debug.FLAG_MIB) and debug.logger(('loading MIB modules %s' % ', '.join(self._modNamesToLoad))))
            mibViewController.mibBuilder.loadModules(*self._modNamesToLoad)
            self._modNamesToLoad = None
        if (self._state & self.ST_CLEAN):
            return self
        (MibScalar, MibTableColumn) = mibViewController.mibBuilder.importSymbols('SNMPv2-SMI', 'MibScalar', 'MibTableColumn')
        self._indices = ()
        if isinstance(self._args[0], ObjectIdentity):
            self._args[0].resolveWithMib(mibViewController)
        if (len(self._args) == 1):
            # Single argument: OID tuple/string, dotted label or module name.
            ((debug.logger & debug.FLAG_MIB) and debug.logger(('resolving %s as OID or label' % self._args)))
            try:
                self._oid = rfc1902.ObjectName(self._args[0])
            except PyAsn1Error:
                if isinstance(self._args[0], (list, tuple)):
                    (prefix, label, suffix) = mibViewController.getNodeName(self._args[0])
                elif ('.' in self._args[0]):
                    (prefix, label, suffix) = mibViewController.getNodeNameByOid(tuple(self._args[0].split('.')))
                else:
                    # Bare module name: pick its first (or last) node.
                    modName = self._args[0]
                    mibViewController.mibBuilder.loadModules(modName)
                    if self._kwargs.get('last'):
                        (prefix, label, suffix) = mibViewController.getLastNodeName(modName)
                    else:
                        (prefix, label, suffix) = mibViewController.getFirstNodeName(modName)
                if suffix:
                    try:
                        suffix = tuple((int(x) for x in suffix))
                    except ValueError:
                        raise SmiError(('Unknown object name component %r' % (suffix,)))
                self._oid = rfc1902.ObjectName((prefix + suffix))
            else:
                (prefix, label, suffix) = mibViewController.getNodeNameByOid(self._oid)
            ((debug.logger & debug.FLAG_MIB) and debug.logger(('resolved %r into prefix %r and suffix %r' % (self._args, prefix, suffix))))
            (modName, symName, _) = mibViewController.getNodeLocation(prefix)
            self._modName = modName
            self._symName = symName
            self._label = label
            (mibNode,) = mibViewController.mibBuilder.importSymbols(modName, symName)
            self._mibNode = mibNode
            ((debug.logger & debug.FLAG_MIB) and debug.logger(('resolved prefix %r into MIB node %r' % (prefix, mibNode))))
            # Derive instance indices from the OID suffix, per node kind.
            if isinstance(mibNode, MibTableColumn):
                if suffix:
                    (rowModName, rowSymName, _) = mibViewController.getNodeLocation(mibNode.name[:(- 1)])
                    (rowNode,) = mibViewController.mibBuilder.importSymbols(rowModName, rowSymName)
                    self._indices = rowNode.getIndicesFromInstId(suffix)
            elif isinstance(mibNode, MibScalar):
                if suffix:
                    self._indices = (rfc1902.ObjectName(suffix),)
            elif suffix:
                self._indices = (rfc1902.ObjectName(suffix),)
            self._state |= self.ST_CLEAN
            ((debug.logger & debug.FLAG_MIB) and debug.logger(('resolved indices are %r' % (self._indices,))))
            return self
        elif (len(self._args) > 1):
            # (module, symbol, *indices) form, with empty components allowed.
            if (self._args[0] and self._args[1]):
                self._modName = self._args[0]
                self._symName = self._args[1]
            elif self._args[0]:
                mibViewController.mibBuilder.loadModules(self._args[0])
                if self._kwargs.get('last'):
                    (prefix, label, suffix) = mibViewController.getLastNodeName(self._args[0])
                else:
                    (prefix, label, suffix) = mibViewController.getFirstNodeName(self._args[0])
                (self._modName, self._symName, _) = mibViewController.getNodeLocation(prefix)
            else:
                (prefix, label, suffix) = mibViewController.getNodeName(self._args[1:])
                (self._modName, self._symName, _) = mibViewController.getNodeLocation(prefix)
            (mibNode,) = mibViewController.mibBuilder.importSymbols(self._modName, self._symName)
            self._mibNode = mibNode
            self._oid = rfc1902.ObjectName(mibNode.getName())
            (prefix, label, suffix) = mibViewController.getNodeNameByOid(self._oid)
            self._label = label
            ((debug.logger & debug.FLAG_MIB) and debug.logger(('resolved %r into prefix %r and suffix %r' % (self._args, prefix, suffix))))
            if isinstance(mibNode, MibTableColumn):
                (rowModName, rowSymName, _) = mibViewController.getNodeLocation(mibNode.name[:(- 1)])
                (rowNode,) = mibViewController.mibBuilder.importSymbols(rowModName, rowSymName)
                if self._args[2:]:
                    try:
                        instIds = rowNode.getInstIdFromIndices(*self._args[2:])
                        self._oid += instIds
                        self._indices = rowNode.getIndicesFromInstId(instIds)
                    except PyAsn1Error as exc:
                        raise SmiError(('Instance index %r to OID conversion failure at object %r: %s' % (self._args[2:], mibNode.getLabel(), exc)))
            elif self._args[2:]:
                if self._args[2:]:
                    instId = rfc1902.ObjectName('.'.join((str(x) for x in self._args[2:])))
                    self._oid += instId
                    self._indices = (instId,)
            self._state |= self.ST_CLEAN
            ((debug.logger & debug.FLAG_MIB) and debug.logger(('resolved indices are %r' % (self._indices,))))
            return self
        else:
            raise SmiError('Non-OID, label or MIB symbol')

    def prettyPrint(self):
        """Return 'MODULE::symbol.index...' with string indices quoted; requires resolution."""
        if (self._state & self.ST_CLEAN):
            s = rfc1902.OctetString()
            return ('%s::%s%s%s' % (self._modName, self._symName, ((self._indices and '.') or ''), '.'.join((((x.isSuperTypeOf(s, matchConstraints=False) and ('"%s"' % x.prettyPrint())) or x.prettyPrint()) for x in self._indices))))
        else:
            raise SmiError(('%s object not fully initialized' % self.__class__.__name__))

    def __repr__(self):
        return ('%s(%s)' % (self.__class__.__name__, ', '.join((repr(x) for x in self._args))))

    # The remaining dunders delegate to the resolved OID and raise SmiError
    # when the object has not been resolved yet.

    def __str__(self):
        if (self._state & self.ST_CLEAN):
            return str(self._oid)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __eq__(self, other):
        if (self._state & self.ST_CLEAN):
            return (self._oid == other)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __ne__(self, other):
        if (self._state & self.ST_CLEAN):
            return (self._oid != other)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __lt__(self, other):
        if (self._state & self.ST_CLEAN):
            return (self._oid < other)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __le__(self, other):
        if (self._state & self.ST_CLEAN):
            return (self._oid <= other)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __gt__(self, other):
        if (self._state & self.ST_CLEAN):
            return (self._oid > other)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __ge__(self, other):
        if (self._state & self.ST_CLEAN):
            # BUG FIX: previously used `>` here, making `a >= a` evaluate False.
            return (self._oid >= other)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __nonzero__(self):
        # Python 2 truth protocol; kept alongside __bool__ for compatibility.
        if (self._state & self.ST_CLEAN):
            return (self._oid != 0)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __bool__(self):
        if (self._state & self.ST_CLEAN):
            return bool(self._oid)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __getitem__(self, i):
        if (self._state & self.ST_CLEAN):
            return self._oid[i]
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __len__(self):
        if (self._state & self.ST_CLEAN):
            return len(self._oid)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __add__(self, other):
        if (self._state & self.ST_CLEAN):
            return (self._oid + other)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __radd__(self, other):
        if (self._state & self.ST_CLEAN):
            return (other + self._oid)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __hash__(self):
        if (self._state & self.ST_CLEAN):
            return hash(self._oid)
        else:
            raise SmiError(('%s object not properly initialized' % self.__class__.__name__))

    def __getattr__(self, attr):
        # Pass selected ASN.1 object methods through to the resolved OID.
        if (self._state & self.ST_CLEAN):
            if (attr in ('asTuple', 'clone', 'subtype', 'isPrefixOf', 'isSameTypeWith', 'isSuperTypeOf', 'getTagSet', 'getEffectiveTagSet', 'getTagMap', 'tagSet', 'index')):
                return getattr(self._oid, attr)
            raise AttributeError(attr)
        else:
            raise SmiError(('%s object not properly initialized for accessing %s' % (self.__class__.__name__, attr)))
def single(wosclient, wos_query, xml_query=None, count=5, offset=1):
    """Run a Web of Science query; return pretty XML, or the texts matched by xml_query."""
    raw = _get_records(wosclient, wos_query, count, offset)
    # Drop the default XML namespace so plain tag names work with findall().
    cleaned = _re.sub(' xmlns="[^"]+"', '', raw, count=1).encode('utf-8')
    if xml_query:
        root = _ET.fromstring(cleaned)
        return [element.text for element in root.findall(xml_query)]
    return prettify(cleaned)
def get_point_of_reference(unit, count, epoch=None):
    """Return the epoch timestamp `count` `unit`s before `epoch`.

    :param unit: one of 'seconds', 'minutes', 'hours', 'days', 'weeks',
        'months' (approximated as 30 days) or 'years' (365 days).
    :param count: number of units to subtract.
    :param epoch: reference timestamp; falsy values default to time.time().
        The value is normalized via fix_epoch() before subtraction.
    :raises ValueError: if `unit` is not recognized.
    """
    seconds_per_unit = {
        'seconds': 1,
        'minutes': 60,
        'hours': 3600,
        'days': 3600 * 24,
        'weeks': 3600 * 24 * 7,
        'months': 3600 * 24 * 30,  # approximation
        'years': 3600 * 24 * 365,  # approximation (ignores leap years)
    }
    try:
        multiplier = seconds_per_unit[unit]
    except KeyError:
        raise ValueError(f'Invalid unit: {unit}.') from None
    if not epoch:
        epoch = time.time()
    epoch = fix_epoch(epoch)
    return epoch - (multiplier * count)
class ExampleValidatedSerializer(serializers.Serializer):
    """Serializer exercising a spread of DRF fields with explicit validators."""

    integer = serializers.IntegerField(validators=(MaxValueValidator(limit_value=99), MinValueValidator(limit_value=(- 11))))
    string = serializers.CharField(validators=(MaxLengthValidator(limit_value=10), MinLengthValidator(limit_value=2)))
    regex = serializers.CharField(validators=(RegexValidator(regex='[ABC]12{3}'),), help_text='must have an A, B, or C followed by 1222')
    lst = serializers.ListField(validators=(MaxLengthValidator(limit_value=10), MinLengthValidator(limit_value=2)))
    decimal1 = serializers.DecimalField(max_digits=6, decimal_places=2, coerce_to_string=False)
    decimal2 = serializers.DecimalField(max_digits=5, decimal_places=0, coerce_to_string=False, validators=(DecimalValidator(max_digits=17, decimal_places=4),))
    decimal3 = serializers.DecimalField(max_digits=8, decimal_places=2, coerce_to_string=True)
    decimal4 = serializers.DecimalField(max_digits=8, decimal_places=2, coerce_to_string=True, validators=(DecimalValidator(max_digits=17, decimal_places=4),))
    decimal5 = serializers.DecimalField(max_digits=6, decimal_places=2)
    email = serializers.EmailField(default='')
    # BUG FIX: the default URL string literal was truncated in the source,
    # leaving a syntax error. Placeholder restored — NOTE(review): confirm
    # the original default URL value.
    url = serializers.URLField(default='http://example.com', allow_null=True)
    uuid = serializers.UUIDField()
    ip4 = serializers.IPAddressField(protocol='ipv4')
    ip6 = serializers.IPAddressField(protocol='ipv6')
    ip = serializers.IPAddressField()
    duration = serializers.DurationField(validators=(MinValueValidator(timedelta(seconds=10)),))
def evaluate_type(value):
    """Classify `value` as 'array', 'integer', 'number' or 'string'.

    Lists are 'array'; dicts are reported as 'string' (preserved from the
    original behavior).  Anything else is probed numerically: values whose
    string form equals the string form of their integer conversion are
    'integer', other parseable floats are 'number', and unparseable values
    (including 'nan', whose int() conversion fails) are 'string'.
    """
    if isinstance(value, list):
        return 'array'
    if isinstance(value, dict):
        return 'string'
    try:
        numeric = float(value)
    except ValueError:
        return 'string'
    try:
        # int() of NaN raises ValueError, which also classifies as 'string'.
        if str(int(numeric)) == str(value):
            return 'integer'
    except ValueError:
        return 'string'
    return 'number'
def test_evaluate_base_rule():
    """Sub-path rules apply an all-combined meta rule under a common base path."""
    addr_hit = SingleRule(value_path=['address'], relation='equals', comparison='2')
    loc_hit = SingleRule(value_path=['location'], relation='equals', comparison=[22, 12])
    loc_miss = SingleRule(value_path=['location'], relation='equals', comparison=[22, 10])

    matching = SubPathRule(
        base_path=['ip_and_uri_finder.ip_v4'],
        meta_rule=MetaRule(rules=[addr_hit, loc_hit], relation=all),
    )
    failing = SubPathRule(
        base_path=['ip_and_uri_finder.ip_v4'],
        meta_rule=MetaRule(rules=[addr_hit, loc_miss], relation=all),
    )

    assert _evaluate_sub_path_rule(IPS, matching)
    assert not _evaluate_sub_path_rule(IPS, failing)
def _update_embedding_config():
    """Build the model-name -> parameter-class map from the reverse CSV config.

    Existing entries are kept: the first parameter class registered for a
    model name wins.
    """
    global EMBEDDING_NAME_TO_PARAMETER_CLASS_CONFIG
    for param_cls, model_csv in _EMBEDDING_PARAMETER_CLASS_TO_NAME_CONFIG.items():
        for model_name in (name.strip() for name in model_csv.split(',')):
            EMBEDDING_NAME_TO_PARAMETER_CLASS_CONFIG.setdefault(model_name, param_cls)
class QueryStub(object):
    """Client stub for the cosmos.feegrant.v1beta1 Query gRPC service.

    Generated-style wrapper: each attribute is a unary-unary callable bound
    to one RPC of the service on the supplied channel.
    """

    def __init__(self, channel):
        """Bind the service's RPC callables to `channel` (a grpc.Channel)."""
        # RPC /cosmos.feegrant.v1beta1.Query/Allowance
        self.Allowance = channel.unary_unary('/cosmos.feegrant.v1beta1.Query/Allowance', request_serializer=cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowanceRequest.SerializeToString, response_deserializer=cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowanceResponse.FromString)
        # RPC /cosmos.feegrant.v1beta1.Query/Allowances
        self.Allowances = channel.unary_unary('/cosmos.feegrant.v1beta1.Query/Allowances', request_serializer=cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowancesRequest.SerializeToString, response_deserializer=cosmos_dot_feegrant_dot_v1beta1_dot_query__pb2.QueryAllowancesResponse.FromString)
def test_flatten_dict_by_key():
    """flatten_dict_by_key hoists a nested dict's items and drops the key; unknown keys leave the dict untouched."""
    inner = {'top': 2, 'another_top': 2}
    outer = {'top': 1, 'nested': inner}

    hoisted = flatten_dict_by_key(outer, 'nested')
    assert hoisted.get('top') == 2
    assert hoisted.get('another_top') == 2
    assert hoisted.get('nested') is None

    untouched = flatten_dict_by_key(outer, 'nope')
    assert untouched.get('top') == 1
    assert untouched.get('another_top') is None
    assert json.dumps(untouched.get('nested'), sort_keys=True) == json.dumps(inner, sort_keys=True)
def fetch_transaction_obligated_amount_by_internal_award_id(internal_award_id: int) -> Optional[Decimal]:
    """Sum transaction_obligated_amount across all financial-account rows for one award.

    Returns None when the aggregate produces no result dict.
    """
    totals = FinancialAccountsByAwards.objects.filter(award_id=internal_award_id).aggregate(Sum('transaction_obligated_amount'))
    if not totals:
        return None
    return totals['transaction_obligated_amount__sum']
class Transactions(Model):
    """Skill model tracking the lifecycle of negotiation transactions.

    Keeps per-dialogue pending proposals/initial acceptances and "locked"
    transactions (terms agreed but not yet confirmed on-chain), expiring
    locked entries after `pending_transaction_timeout` seconds.
    """

    def __init__(self, **kwargs: Any) -> None:
        """Initialize the model.

        :param kwargs: may include `pending_transaction_timeout` (seconds,
            default 30); remaining keyword arguments go to the base Model.
        """
        self._pending_transaction_timeout = kwargs.pop('pending_transaction_timeout', 30)
        super().__init__(**kwargs)
        # dialogue label -> proposal/message id -> Terms
        self._pending_proposals = defaultdict((lambda : {}))
        self._pending_initial_acceptances = defaultdict((lambda : {}))
        # transaction id -> Terms, plus per-role views of the same locks
        self._locked_txs = {}
        self._locked_txs_as_buyer = {}
        self._locked_txs_as_seller = {}
        # FIFO of (registration time, transaction id) used for timeout cleanup
        self._last_update_for_transactions = deque()
        self._nonce = 0

    def pending_proposals(self) -> Dict[(DialogueLabel, Dict[(MessageId, Terms)])]:
        """Get pending proposals by dialogue label and proposal id.

        NOTE(review): reads like a @property accessor — confirm upstream decorator.
        """
        return self._pending_proposals

    def pending_initial_acceptances(self) -> Dict[(DialogueLabel, Dict[(MessageId, Terms)])]:
        """Get pending initial acceptances by dialogue label and proposal id.

        NOTE(review): reads like a @property accessor — confirm upstream decorator.
        """
        return self._pending_initial_acceptances

    def get_next_nonce(self) -> str:
        """Increment and return the transaction nonce as a string."""
        self._nonce += 1
        return str(self._nonce)

    def update_confirmed_transactions(self) -> None:
        """Drop locks for transaction ids confirmed via the shared state."""
        # Consumes (and clears) 'confirmed_tx_ids' from the shared state.
        confirmed_tx_ids = self.context.shared_state.pop('confirmed_tx_ids', [])
        for transaction_id in confirmed_tx_ids:
            self._locked_txs.pop(transaction_id, None)
            self._locked_txs_as_buyer.pop(transaction_id, None)
            self._locked_txs_as_seller.pop(transaction_id, None)

    def cleanup_pending_transactions(self) -> None:
        """Remove locked transactions older than the pending-transaction timeout."""
        queue = self._last_update_for_transactions
        timeout = datetime.timedelta(0, self._pending_transaction_timeout)
        if (len(queue) == 0):
            return
        (next_date, next_item) = queue[0]
        # Entries are appended in time order, so pop from the left until the
        # head entry is younger than the timeout.
        while ((datetime.datetime.now() - next_date) > timeout):
            queue.popleft()
            transaction_id = next_item
            self.context.logger.debug('removing transaction from pending list: {}'.format(transaction_id))
            self._locked_txs.pop(transaction_id, None)
            self._locked_txs_as_buyer.pop(transaction_id, None)
            self._locked_txs_as_seller.pop(transaction_id, None)
            if (len(queue) == 0):
                break
            (next_date, next_item) = queue[0]

    def add_pending_proposal(self, dialogue_label: DialogueLabel, proposal_id: int, terms: Terms) -> None:
        """Record a new pending proposal; raises if it is already pending."""
        enforce(((dialogue_label not in self._pending_proposals) and (proposal_id not in self._pending_proposals[dialogue_label])), 'Proposal is already in the list of pending proposals.')
        self._pending_proposals[dialogue_label][proposal_id] = terms

    def pop_pending_proposal(self, dialogue_label: DialogueLabel, proposal_id: int) -> Terms:
        """Remove and return a pending proposal's terms; raises if absent."""
        enforce(((dialogue_label in self._pending_proposals) and (proposal_id in self._pending_proposals[dialogue_label])), 'Cannot find the proposal in the list of pending proposals.')
        terms = self._pending_proposals[dialogue_label].pop(proposal_id)
        return terms

    def add_pending_initial_acceptance(self, dialogue_label: DialogueLabel, proposal_id: int, terms: Terms) -> None:
        """Record a new pending initial acceptance; raises if it is already pending."""
        enforce(((dialogue_label not in self._pending_initial_acceptances) and (proposal_id not in self._pending_initial_acceptances[dialogue_label])), 'Initial acceptance is already in the list of pending initial acceptances.')
        self._pending_initial_acceptances[dialogue_label][proposal_id] = terms

    def pop_pending_initial_acceptance(self, dialogue_label: DialogueLabel, proposal_id: int) -> Terms:
        """Remove and return a pending initial acceptance's terms; raises if absent."""
        enforce(((dialogue_label in self._pending_initial_acceptances) and (proposal_id in self._pending_initial_acceptances[dialogue_label])), 'Cannot find the initial acceptance in the list of pending initial acceptances.')
        terms = self._pending_initial_acceptances[dialogue_label].pop(proposal_id)
        return terms

    def _register_transaction_with_time(self, transaction_id: str) -> None:
        """Append (now, transaction_id) to the cleanup queue."""
        now = datetime.datetime.now()
        self._last_update_for_transactions.append((now, transaction_id))

    def add_locked_tx(self, terms: Terms, role: FipaDialogue.Role) -> None:
        """Lock a transaction's terms under the given dialogue role; raises if already locked."""
        as_seller = (role == FipaDialogue.Role.SELLER)
        transaction_id = terms.id
        enforce((transaction_id not in self._locked_txs), 'This transaction is already a locked transaction.')
        self._register_transaction_with_time(transaction_id)
        self._locked_txs[transaction_id] = terms
        if as_seller:
            self._locked_txs_as_seller[transaction_id] = terms
        else:
            self._locked_txs_as_buyer[transaction_id] = terms

    def pop_locked_tx(self, terms: Terms) -> Terms:
        """Unlock a transaction and return its stored terms; raises if not locked."""
        transaction_id = terms.id
        enforce((transaction_id in self._locked_txs), 'Cannot find this transaction in the list of locked transactions.')
        terms = self._locked_txs.pop(transaction_id)
        # Role-specific views may or may not contain the id; pop defensively.
        self._locked_txs_as_buyer.pop(transaction_id, None)
        self._locked_txs_as_seller.pop(transaction_id, None)
        return terms

    def ownership_state_after_locks(self, is_seller: bool) -> OwnershipState:
        """Project the ownership state as if all locked transactions (for one role) settled."""
        all_terms = (list(self._locked_txs_as_seller.values()) if is_seller else list(self._locked_txs_as_buyer.values()))
        ownership_state = cast(OwnershipState, self.context.decision_maker_handler_context.ownership_state)
        ownership_state_after_locks = ownership_state.apply_transactions(all_terms)
        return ownership_state_after_locks
class TestsAchromatic(util.ColorAsserts, unittest.TestCase):
    """Achromatic detection in the HCT color space."""

    def test_achromatic(self):
        """Near-zero chroma/tone combinations count as achromatic; real chroma does not."""
        cases = [
            (Color('#222222').convert('hct'), True),
            (Color('srgb', [1e-09] * 3).convert('hct').set('c', lambda chroma: chroma + 1e-08), True),
            (Color('hct', [270, 1e-05, NaN]), True),
            (Color('hct', [270, NaN, 0]), True),
            (Color('hct', [270, 50, 0]), True),
            (Color('hct', [270, 50, NaN]), True),
            (Color('hct', [270, NaN, 20]), False),
            (Color('hct', [270, NaN, NaN]), True),
            (Color('hct', [29.546, 0.60569, -9.0536]), True),
            (Color('hct', [270, -20, 50]), False),
        ]
        for color, expected in cases:
            self.assertEqual(color.is_achromatic(), expected)
def test_call_with_attributes():
    """Factory applies its configured attributes to every new instance."""
    factory = providers.Factory(Example)
    factory.add_attributes(attribute1='a1', attribute2='a2')

    first, second = factory(), factory()

    for obj in (first, second):
        assert isinstance(obj, Example)
        assert obj.attribute1 == 'a1'
        assert obj.attribute2 == 'a2'
    assert first is not second
class JavaFile(object):
    """Thin Python wrapper around the C-backed java_file_c.JavaFile reader/writer."""

    def __init__(self, read_file, write_file):
        self.read_file = read_file
        self.write_file = write_file
        self.java_file_c = java_file_c.JavaFile(read_file, write_file)

    def read(self, data_len, return_on_barrier=False):
        """Read up to data_len bytes; raise EOFError when nothing is returned.

        With return_on_barrier, the underlying reader stops at a barrier.
        """
        reader = (self.java_file_c.readBytesTillBarrier if return_on_barrier
                  else self.java_file_c.readBytes)
        chunk = reader(data_len)
        if len(chunk) == 0:
            raise EOFError('file reach end!')
        return chunk

    def write(self, data, data_len):
        """Delegate writing data_len bytes of data to the backing writer."""
        return self.java_file_c.writeBytes(data, data_len)
class VernaiApi(ProviderInterface, TextInterface):
    """Provider wrapper for the vern.ai emotion-detection API."""

    provider_name = 'vernai'

    def __init__(self, api_keys: Dict={}):
        # NOTE(review): mutable default argument; harmless here as it is only
        # read, but consider `Optional[Dict] = None` upstream.
        self.api_settings = load_provider(ProviderDataEnum.KEY, provider_name=self.provider_name, api_keys=api_keys)
        self.api_key = self.api_settings['api_key']
        # BUG FIX: the endpoint URL literal was truncated in the source,
        # leaving a syntax error. Placeholder restored — NOTE(review):
        # confirm the real vern.ai endpoint URL.
        self.url_emotion_detection = 'https://api.vern.ai/v1/emotion'

    def text__emotion_detection(self, text: str) -> ResponseType[EmotionDetectionDataClass]:
        """Call the emotion-detection endpoint and standardize its response.

        :param text: text to analyze.
        :raises ProviderException: on any non-200 HTTP status.
        """
        response = requests.post(url=self.url_emotion_detection, headers={'Authorization': f'{self.api_key}'}, data={'text': text})
        if (response.status_code != 200):
            raise ProviderException(message=response.text, code=response.status_code)
        original_response = response.json()
        items: Sequence[EmotionItem] = []
        # Map each provider score entry onto the standardized emotion items.
        for entity in original_response.get('scores', []):
            items.append(EmotionItem(emotion=EmotionEnum.from_str(entity.get('name', '')), emotion_score=entity.get('value', 0)))
        return ResponseType[EmotionDetectionDataClass](original_response=original_response, standardized_response=EmotionDetectionDataClass(items=items, text=text))
class TestShell(Nubia):
    """Nubia shell wired to a TestPlugin, for driving commands from tests."""

    def __init__(self, commands, name='test_shell'):
        super(TestShell, self).__init__(name, plugin=TestPlugin(commands), testing=True)

    async def run_cli_line(self, raw_line):
        """Parse `raw_line` as CLI arguments and run it non-interactively."""
        parsed_args = await self._pre_run(raw_line.split())
        return await self.run_cli(parsed_args)

    async def run_interactive_line(self, raw_line, cli_args=None):
        """Evaluate `raw_line` inside an interactive IO loop started with `cli_args`."""
        startup_args = (cli_args or 'test_shell connect').split()
        parsed_args = await self._pre_run(startup_args)
        io_loop = await self._create_interactive_io_loop(parsed_args)
        return await io_loop.parse_and_evaluate(raw_line)
def register_toy_coco_dataset(dataset_name, num_images=3, image_size=(5, 10), num_classes=(- 1), num_keypoints=0):
    """Generator that registers a tiny synthetic COCO dataset for the yielded scope.

    Creates a temporary image directory, registers the dataset under
    `dataset_name`, yields, then lets both context managers clean up.
    """
    width, height = image_size
    with make_temp_directory('detectron2go_tmp_dataset') as dataset_dir:
        image_dir = os.path.join(dataset_dir, 'images')
        os.makedirs(image_dir)
        generator = LocalImageGenerator(image_dir, width=width, height=height)
        with _register_toy_dataset(dataset_name, generator, num_images=num_images, num_classes=num_classes, num_keypoints=num_keypoints):
            yield
class Yubikey(IntervalModule):
    """Status-bar module that locks/unlocks a Yubikey via `xinput`.

    The key is disabled ("locked") by default; a left click enables it, and
    _check_lock() re-disables it once the lock file is older than `timeout`
    seconds.

    NOTE(review): `_device_id` is written as a plain method here; it may have
    been a @property upstream (decorator lost). As a method it must be
    called — the previous code used the bound method object directly in
    truthiness tests and shell-command interpolation, which is fixed below.
    """

    interval = 1
    format = 'Yubikey: '
    unlocked_format = 'Yubikey: '
    timeout = 5
    color = '#00FF00'
    unlock_color = '#FF0000'
    settings = (('format', 'Format string'), ('unlocked_format', 'Format string when the key is unlocked'), ('timeout', 'How long the Yubikey will be unlocked (default: 5)'), ('color', 'Standard color'), ('unlock_color', 'Set the color used when the Yubikey is unlocked'))
    on_leftclick = ['set_lock', True]
    find_regex = re.compile('.*yubikey.*id=(?P<yubid>\\d+).*$', re.IGNORECASE)
    status_regex = re.compile('.*device enabled.*(?P<status>\\d)$', re.IGNORECASE)
    lock_file = f'/var/tmp/Yubikey-{os.geteuid()}.lock'

    def __init__(self):
        super().__init__()

    def _device_id(self):
        """Return the xinput id of the first attached Yubikey, or '' if none."""
        command = run_through_shell('xinput list')
        rval = ''
        if (command.rc == 0):
            for line in command.out.splitlines():
                match = self.find_regex.match(line)
                if match:
                    rval = match.groupdict().get('yubid', '')
                    break
        return rval

    def device_status(self):
        """Return 'unlocked', 'locked' or 'notfound' for the attached Yubikey."""
        rval = 'notfound'
        # BUG FIX: previously tested `not self._device_id` (the bound method,
        # always truthy) and interpolated the method object into the shell
        # command; call it once and reuse the result.
        device_id = self._device_id()
        if (not device_id):
            return rval
        result = run_through_shell(f'xinput list-props {device_id}')
        if (result.rc == 0):
            match = self.status_regex.match(result.out.splitlines()[1])
            if (match and ('status' in match.groupdict())):
                status = int(match.groupdict()['status'])
                if status:
                    rval = 'unlocked'
                else:
                    rval = 'locked'
        return rval

    def _check_lock(self):
        """Re-lock the key when the lock file is older than `timeout` seconds (or missing)."""
        try:
            st = os.stat(self.lock_file)
            if (int((time.time() - st.st_ctime)) > self.timeout):
                self.set_lock()
        except IOError:
            self.set_lock()

    def set_lock(self, unlock=False):
        """Enable (unlock=True) or disable the Yubikey and refresh the lock file."""
        command = 'enable' if unlock else 'disable'
        # BUG FIX: call _device_id() instead of interpolating the bound method.
        run_through_shell(f'xinput {command} {self._device_id()}')
        open(self.lock_file, mode='w').close()

    def _clear_lock(self):
        """Remove the lock file if present."""
        try:
            os.unlink(self.lock_file)
        except FileNotFoundError:
            pass

    def run(self):
        """Periodic hook: refresh output text/color from the device status."""
        status = self.device_status()
        if (status == 'notfound'):
            self._clear_lock()
            self.output = {'full_text': ''}
        elif (status == 'unlocked'):
            self.output = {'full_text': self.unlocked_format, 'color': self.unlock_color}
            self._check_lock()
        elif (status == 'locked'):
            self.output = {'full_text': self.format, 'color': self.color}
        else:
            self.output = {'full_text': f'Error: {status}'}
def test_fetch_mixed_no_local_registry():
    """`aea fetch` should fall back to the remote registry when no local one exists."""
    with TemporaryDirectory() as workdir:
        with cd(workdir):
            agent_name = 'my_first_aea'
            result = CliRunner().invoke(cli, ['fetch', 'fetchai/my_first_aea'], catch_exceptions=False)
            assert result.exit_code == 0, result.stdout
            assert os.path.exists(agent_name)
            assert 'Trying remote registry (`--remote`).' in result.stdout
def test_absent_attribute_in_template(tmpdir, merge_files_oneLR, assert_log):
    """An ABSENT attribute in a set template is logged but the object stays readable."""
    path = os.path.join(str(tmpdir), 'absent-attribute-in-template.dlis')
    # Assemble one logical record from partial .dlis fixture files.
    content = ['data/chap3/start.dlis.part', 'data/chap3/template/absent.dlis.part', 'data/chap3/template/default.dlis.part', 'data/chap3/object/object.dlis.part']
    merge_files_oneLR(path, content)
    with dlis.load(path) as (f, *tail):
        obj = f.object('VERY_MUCH_TESTY_SET', 'OBJECT', 1, 1)
        # The non-absent attribute must still carry its value.
        assert obj.attic['DEFAULT_ATTRIBUTE'].value
        assert_log('Absent Attribute in object set template')
def scan_targets(access_bed, sample_bams, min_depth, min_gap, min_length, procs):
    """Scan the given BAMs over accessible regions and return candidate baits.

    Splits `access_bed` into chunks, runs `_scan_depth` on each chunk across
    `procs` workers, concatenates the per-chunk results and normalizes the
    'depth' column to a per-sample mean.
    """
    bait_chunks = []
    logging.info('Scanning for enriched regions in:\n %s', '\n '.join(sample_bams))
    with parallel.pick_pool(procs) as pool:
        # One argument tuple per BED chunk for the worker function.
        args_iter = ((bed_chunk, sample_bams, min_depth, min_gap, min_length) for bed_chunk in parallel.to_chunks(access_bed))
        for (bed_chunk_fname, bait_chunk) in pool.map(_scan_depth, args_iter):
            bait_chunks.append(bait_chunk)
            # Remove the temporary per-chunk BED file once consumed.
            parallel.rm(bed_chunk_fname)
    baits = GA(pd.concat(bait_chunks))
    # Depth was summed across samples; convert to a per-sample average.
    baits['depth'] /= len(sample_bams)
    return baits
class ExoType(Enum):
    """Scalar types exposed by Exo, with predicates for broad categories."""
    F32 = auto()
    F64 = auto()
    I8 = auto()
    I32 = auto()
    R = auto()
    Index = auto()
    Bool = auto()
    Size = auto()

    def is_indexable(self):
        """True for the integer-like types usable as loop/index values."""
        return self in (ExoType.Index, ExoType.Size)

    def is_numeric(self):
        """True for concrete numeric value types (including the abstract real R)."""
        return self in (ExoType.F32, ExoType.F64, ExoType.I8, ExoType.I32, ExoType.R)

    def is_bool(self):
        """True only for the boolean type."""
        return self is ExoType.Bool
class PageThreadOwner(AbstractObject):
    """Graph-API object wrapper representing the owner of a page thread."""
    def __init__(self, api=None):
        super(PageThreadOwner, self).__init__()
        self._isPageThreadOwner = True
        self._api = api
    class Field(AbstractObject.Field):
        # Field name constants used for (de)serialization.
        thread_owner = 'thread_owner'
    # Maps field name -> API type for request/response marshalling.
    _field_types = {'thread_owner': 'Object'}
    # NOTE(review): takes `cls` yet carries no @classmethod decorator —
    # decorators appear stripped in this file; confirm against the SDK source.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
class AirflowContainerTask(PythonAutoContainerTask[AirflowObj]):
    """Containerized task that runs an Airflow operator inside the task pod.

    Uses `airflow_task_resolver` so the Airflow operator configuration can be
    reconstructed at execution time inside the container.
    """
    def __init__(self, name: str, task_config: AirflowObj, inputs: Optional[Dict[(str, Type)]]=None, **kwargs):
        super().__init__(name=name, task_config=task_config, interface=Interface(inputs=(inputs or {})), **kwargs)
        self._task_resolver = airflow_task_resolver
    def execute(self, **kwargs) -> Any:
        """Instantiate the wrapped Airflow operator and execute it.

        NOTE(review): the operator's return value is discarded although the
        annotation says `-> Any` — confirm that is intentional.
        """
        logger.info('Executing Airflow task')
        _get_airflow_instance(self.task_config).execute(context=airflow_context.Context())
# NOTE(review): this class was garbled by extraction — decorator lines were
# reduced to ".asyncio", ".workspace_host" and ".authenticated_admin(...)",
# and the type annotation of `test_client_dashboard` was dropped, fusing two
# parameters into invalid syntax ("test_client_dashboard: test_data: TestData").
# Reconstructed below as pytest marks plus a string annotation; assumes
# `pytest` (and `httpx` for the annotation) are imported at the top of this
# file — confirm against the original source.
@pytest.mark.asyncio
@pytest.mark.workspace_host
class TestTenantEmailDomainVerify():
    """Dashboard endpoint tests for POST /tenants/{id}/email/verify."""

    async def test_unauthorized(self, unauthorized_dashboard_assertions: HTTPXResponseAssertion, test_client_dashboard: 'httpx.AsyncClient', test_data: TestData):
        """Unauthenticated requests are rejected."""
        response = (await test_client_dashboard.post(f"/tenants/{test_data['tenants']['default'].id}/email/verify"))
        unauthorized_dashboard_assertions(response)

    @pytest.mark.authenticated_admin(mode='session')
    async def test_not_existing(self, test_client_dashboard: 'httpx.AsyncClient', not_existing_uuid: uuid.UUID):
        """Unknown tenant id -> 404."""
        response = (await test_client_dashboard.post(f'/tenants/{not_existing_uuid}/email/verify'))
        assert (response.status_code == status.HTTP_404_NOT_FOUND)

    @pytest.mark.authenticated_admin(mode='session')
    async def test_no_email_domain(self, test_client_dashboard: 'httpx.AsyncClient', test_data: TestData):
        """A tenant without an email domain -> 404."""
        response = (await test_client_dashboard.post(f"/tenants/{test_data['tenants']['default'].id}/email/verify"))
        assert (response.status_code == status.HTTP_404_NOT_FOUND)

    @pytest.mark.authenticated_admin(mode='session')
    async def test_error(self, test_client_dashboard: 'httpx.AsyncClient', test_data: TestData, workspace_session: AsyncSession, tenant_email_domain_mock: MagicMock):
        """A verification failure surfaces as 400."""
        tenant = test_data['tenants']['default']
        email_domain = test_data['email_domains']['bretagne.duchy']
        tenant_repository = TenantRepository(workspace_session)
        tenant.email_domain = email_domain
        (await tenant_repository.update(tenant))
        tenant_email_domain_mock.verify_domain.side_effect = TenantEmailDomainError('Error')
        response = (await test_client_dashboard.post(f'/tenants/{tenant.id}/email/verify'))
        assert (response.status_code == status.HTTP_400_BAD_REQUEST)

    @pytest.mark.authenticated_admin(mode='session')
    async def test_valid(self, test_client_dashboard: 'httpx.AsyncClient', test_data: TestData, workspace_session: AsyncSession, tenant_email_domain_mock: MagicMock):
        """Successful verification redirects to the tenant's email-domain page."""
        tenant = test_data['tenants']['default']
        email_domain = test_data['email_domains']['bretagne.duchy']
        tenant_repository = TenantRepository(workspace_session)
        tenant.email_domain = email_domain
        (await tenant_repository.update(tenant))

        async def verify_domain_mock(*args, **kwargs):
            # Simulate the service re-reading and updating the tenant record.
            tenant_repository = TenantRepository(workspace_session)
            _tenant = (await tenant_repository.get_by_id(tenant.id))
            assert (_tenant is not None)
            _tenant.email_domain = email_domain
            (await tenant_repository.update(_tenant))
            return _tenant
        tenant_email_domain_mock.verify_domain.side_effect = verify_domain_mock
        response = (await test_client_dashboard.post(f'/tenants/{tenant.id}/email/verify'))
        is_htmx = ('HX-Request' in test_client_dashboard.headers)
        if is_htmx:
            # HTMX clients get 200 with an hx-redirect header instead of a 303.
            assert (response.status_code == status.HTTP_200_OK)
            assert response.headers['hx-redirect'].endswith(f'/{tenant.id}/email/domain')
        else:
            assert (response.status_code == status.HTTP_303_SEE_OTHER)
            assert response.headers['location'].endswith(f'/{tenant.id}/email/domain')
def test_call_address_reflector_single_name(address_reflector_contract, call):
    """An ENS name passed as an address argument resolves to its registered address."""
    # Register the name in the temporary ENS registry, then call reflect().
    with contract_ens_addresses(address_reflector_contract, [('dennisthepeasant.eth', '0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413')]):
        result = call(contract=address_reflector_contract, contract_function='reflect', func_args=['dennisthepeasant.eth'])
        assert (result == '0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413')
class OptionPlotoptionsDependencywheelEvents(Options):
    """Event-handler options for Highcharts dependency-wheel series.

    Each option appears as a same-named getter/setter pair; the @property and
    @name.setter decorators appear to have been stripped by extraction (the
    same pattern occurs elsewhere in this file) — confirm against the
    generated source. Getters return the configured value (default None);
    setters store a JS callback value with js_type=False.
    """
    def afterAnimate(self):
        return self._config_get(None)
    def afterAnimate(self, value: Any):
        self._config(value, js_type=False)
    def checkboxClick(self):
        return self._config_get(None)
    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)
    def click(self):
        return self._config_get(None)
    def click(self, value: Any):
        self._config(value, js_type=False)
    def hide(self):
        return self._config_get(None)
    def hide(self, value: Any):
        self._config(value, js_type=False)
    def legendItemClick(self):
        return self._config_get(None)
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    def show(self):
        return self._config_get(None)
    def show(self, value: Any):
        self._config(value, js_type=False)
(scope='module')
def datadir(tmpdir_factory, geth_zipfile_version):
    """Extract the versioned geth datadir fixture zip into a fresh temp dir.

    Returns the path of the extracted datadir. (The stray "(scope='module')"
    line above suggests this was a module-scoped pytest fixture.)
    """
    archive_path = absolute_datadir(geth_zipfile_version)
    extracted_dir = os.path.join(str(tmpdir_factory.mktemp('goethereum')), 'datadir')
    with zipfile.ZipFile(archive_path, 'r') as archive:
        archive.extractall(extracted_dir)
    return extracted_dir
def block_transaction_to_dict(transaction: SignedTransactionAPI, header: BlockHeaderAPI) -> RpcBlockTransactionResponse:
    """Serialize a transaction for RPC, annotated with its containing block."""
    response = transaction_to_dict(transaction)
    response['blockHash'] = encode_hex(header.hash)
    response['blockNumber'] = hex(header.block_number)
    # cast() is a typing no-op; the dict now satisfies the response shape.
    return cast(RpcBlockTransactionResponse, response)
class RecurseNetworks(models.Model):
    """Django model of subnets allowed to use the dnsdist/PowerDNS recursor.

    Changes are broadcast to the DNS layer via PostgreSQL pg_notify on the
    channel/payload constants below. NOTE(review): `add_entry`,
    `get_entries`, `delete_entries`, `update_entry`, `add_or_update_entry`,
    `send_pg_notify`, `post_save_entry` and `post_delete_entry` all take
    `cls` but carry no @classmethod decorator — decorators appear stripped
    from this file; confirm against the original.
    """
    pg_notify_channel = 'pdns_notify'
    pg_notify_payload = 'pdns_recurse_modified'
    log_object_name = 'dnsdist recurse networks'
    id = models.AutoField(primary_key=True, help_text='Unique handle for network entries')
    subnet = models.CharField(_('Subnet'), max_length=50, null=False, db_index=True, help_text='Network subnet to allow recursion from (format: x.x.x.x/yy)')
    net_name = models.CharField(_('Network name'), max_length=50, db_index=True, help_text='Network name in the danube networks list')
    change_date = models.IntegerField(_('Changed'), null=True, default=None, help_text='Timestamp of the last update.')
    class Meta():
        app_label = 'pdns'
        verbose_name = _('Recursion subnet')
        verbose_name_plural = _('Recursion subnets')
        db_table = 'recurse_networks'
    def __unicode__(self):
        # Python-2 legacy string representation.
        return ('(%s: %s)' % (self.net_name, self.subnet))
    def save(self, *args, **kwargs):
        # Stamp every save with the current epoch before delegating.
        logger.info('Saving allowed recurse network "%s" (id=%s) with content "%s"', self.net_name, self.id, self.subnet)
        self.change_date = epoch()
        return super(RecurseNetworks, self).save(*args, **kwargs)
    def delete(self, *args, **kwargs):
        logger.info('Deleting allowed recurse network "%s" (id=%s) with content "%s"', self.net_name, self.id, self.subnet)
        return super(RecurseNetworks, self).delete(*args, **kwargs)
    def add_entry(cls, subnet, net_name):
        logger.info('Adding allowed recurse network "%s" with content "%s"', net_name, subnet)
        return cls.objects.create(net_name=net_name, subnet=subnet, change_date=epoch())
    def get_entries(cls, id=None, subnet=None, net_name=None):
        # Build a filter from only the provided criteria.
        kwargs = {}
        if id:
            kwargs.update({'id': id})
        if subnet:
            kwargs.update({'subnet': subnet})
        if net_name:
            kwargs.update({'net_name': net_name})
        return cls.objects.filter(**kwargs)
    def delete_entries(cls, id=None, subnet=None, net_name=None):
        # Delete one-by-one so the per-instance delete() logging/notify runs.
        nets = cls.get_entries(id=id, subnet=subnet, net_name=net_name)
        for net in nets:
            net.delete()
        return True
    def update_entry(cls, id, new_subnet, new_net_name):
        net = cls.objects.get(id=id)
        logger.info('Updating %s entry with content subnet="%s", net_name="%s" (old content: "%s", "%s")', cls.log_object_name, new_subnet, new_net_name, net.subnet, net.net_name)
        net.subnet = new_subnet
        net.net_name = new_net_name
        return net.save()
    def add_or_update_entry(cls, subnet, net_name):
        # Upsert keyed on the (subnet, net_name) pair.
        try:
            net = cls.objects.get(subnet=subnet, net_name=net_name)
        except cls.DoesNotExist:
            return cls.add_entry(subnet, net_name)
        else:
            return cls.update_entry(net.id, subnet, net_name)
    def web_data(self):
        # Plain-dict projection for the web UI.
        return {'subnet': self.subnet, 'net_name': self.net_name, 'change_date': self.change_date}
    def my_name(self):
        return str(self.log_object_name)
    def desc(self):
        return text_type(self)
    def web_desc(self):
        return ('%s: %s' % (self.subnet, self.net_name))
    def changed(self):
        # Epoch timestamp -> naive UTC datetime, or None if never stamped.
        if self.change_date:
            return datetime.utcfromtimestamp(self.change_date)
        else:
            return None
    def send_pg_notify(cls):
        # Both values are class constants, not user input, so the string
        # interpolation into SQL here is not an injection vector.
        with connections['pdns'].cursor() as cursor:
            logger.info(('Sending pg_notify() with channel "%s" and payload "%s"' % (cls.pg_notify_channel, cls.pg_notify_payload)))
            cursor.execute(("select pg_notify('%s', '%s')" % (cls.pg_notify_channel, cls.pg_notify_payload)))
    def post_save_entry(cls, sender, instance, **kwargs):
        # Django post_save signal hook: broadcast the change.
        cls.send_pg_notify()
    def post_delete_entry(cls, sender, instance, **kwargs):
        # Django post_delete signal hook: broadcast the change.
        cls.send_pg_notify()
def check_does_not_have_ids(node, raise_error):
    """Recursively assert that no node in the doctree carries an 'fls_' id."""
    if (type(node) is nodes.section):
        # Sections are checked against their registered names directly.
        if any(name.startswith('fls_') for name in node['names']):
            raise_error('section should not have an id', location=node)
    else:
        should_not_have_id(node, type(node).__name__, raise_error)
    # Depth-first descent into the whole subtree.
    for child in node.children:
        check_does_not_have_ids(child, raise_error)
class group_add(group_mod):
    """OpenFlow 1.3 OFPGC_ADD group-mod message (loxigen-generated style).

    NOTE(review): `unpack` takes a bare `reader` argument — almost certainly
    a stripped @staticmethod decorator, as in loxigen-generated code. The
    pack() implementation joins struct.pack() results with a str ('' and
    '\\x00'), which is Python-2 string semantics; confirm the intended
    Python version before modernizing.
    """
    version = 4     # OpenFlow 1.3 wire version
    type = 15       # OFPT_GROUP_MOD
    command = 0     # OFPGC_ADD
    def __init__(self, xid=None, group_type=None, group_id=None, buckets=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (group_type != None):
            self.group_type = group_type
        else:
            self.group_type = 0
        if (group_id != None):
            self.group_id = group_id
        else:
            self.group_id = 0
        if (buckets != None):
            self.buckets = buckets
        else:
            self.buckets = []
        return
    def pack(self):
        """Serialize the message; the length field is backpatched at index 2."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for total length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.command))
        packed.append(struct.pack('!B', self.group_type))
        packed.append(('\x00' * 1))  # pad byte
        packed.append(struct.pack('!L', self.group_id))
        packed.append(loxi.generic_util.pack_list(self.buckets))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Deserialize a group_add from `reader`; asserts the fixed header fields."""
        obj = group_add()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 15)
        _length = reader.read('!H')[0]
        # Re-slice the reader to exactly this message's bytes (4 already read).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _command = reader.read('!H')[0]
        assert (_command == 0)
        obj.group_type = reader.read('!B')[0]
        reader.skip(1)  # pad byte
        obj.group_id = reader.read('!L')[0]
        obj.buckets = loxi.generic_util.unpack_list(reader, ofp.common.bucket.unpack)
        return obj
    def __eq__(self, other):
        # Field-by-field equality; type must match exactly.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.group_type != other.group_type):
            return False
        if (self.group_id != other.group_id):
            return False
        if (self.buckets != other.buckets):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable rendering of the message to pretty-printer `q`."""
        q.text('group_add {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('group_type = ')
                # Show the symbolic OFPGT_* name when the value is known.
                value_name_map = {0: 'OFPGT_ALL', 1: 'OFPGT_SELECT', 2: 'OFPGT_INDIRECT', 3: 'OFPGT_FF'}
                if (self.group_type in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.group_type], self.group_type)))
                else:
                    q.text(('%#x' % self.group_type))
                q.text(',')
                q.breakable()
                q.text('group_id = ')
                q.text(('%#x' % self.group_id))
                q.text(',')
                q.breakable()
                q.text('buckets = ')
                q.pp(self.buckets)
            q.breakable()
        q.text('}')
class Compose(Rule):
    """Rule combinator feeding the result of `first` into `second`."""
    first: Rule
    second: Rule

    def __init__(self, first: Rule, second: Rule, name: str='compose') -> None:
        Rule.__init__(self, name)
        self.first = first
        self.second = second

    def apply(self, test: Any) -> RuleResult:
        """Apply `first`; on Success chain into `second`, otherwise propagate the failure."""
        outcome = self.first.apply(test)
        if not isinstance(outcome, Success):
            return outcome
        return self.second.apply(outcome.result)

    def __str__(self) -> str:
        return f'compose( {self.first}, {self.second} )'

    def always_succeeds(self) -> bool:
        """The composition is total only if both component rules are."""
        return (self.first.always_succeeds() and self.second.always_succeeds())
def train(model, train_loader, val_loader, optimizer, init_lr=0.002, checkpoint_dir=None, checkpoint_interval=None, nepochs=None, clip_thresh=1.0):
    """Train `model` with cross-entropy until `global_epoch` reaches `nepochs`.

    Relies on module-level state: `use_cuda`, `global_step`, `global_epoch`,
    `logfile_name`, plus helpers `learning_rate_decay`, `save_checkpoint`,
    `log_value` and `validate_model`. Checkpoints every
    `checkpoint_interval` steps; logs per-step and per-epoch metrics and
    appends an epoch-loss line to `logfile_name`.
    """
    if use_cuda:
        model = model.cuda()
    criterion = nn.CrossEntropyLoss()
    global global_step, global_epoch
    while (global_epoch < nepochs):
        model.train()
        running_loss = 0.0
        for (step, (x, mel, fname)) in tqdm(enumerate(train_loader)):
            # Decay the learning rate every step and push it into the optimizer.
            current_lr = learning_rate_decay(init_lr, global_step)
            for param_group in optimizer.param_groups:
                param_group['lr'] = current_lr
            optimizer.zero_grad()
            (x, mel) = (Variable(x), Variable(mel))
            if use_cuda:
                (x, mel) = (x.cuda(), mel.cuda())
            val_outputs = model(mel)
            loss = criterion(val_outputs, x)
            loss.backward(retain_graph=False)
            grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), clip_thresh)
            optimizer.step()
            if ((global_step % checkpoint_interval) == 0):
                save_checkpoint(model, optimizer, global_step, checkpoint_dir, global_epoch)
            log_value('Training Loss', float(loss.item()), global_step)
            log_value('gradient norm', grad_norm, global_step)
            log_value('learning rate', current_lr, global_step)
            global_step += 1
            running_loss += loss.item()
        averaged_loss = (running_loss / len(train_loader))
        log_value('loss (per epoch)', averaged_loss, global_epoch)
        # FIX: open the epoch log just-in-time under `with` instead of holding
        # the handle open across the whole epoch (it leaked on exceptions).
        with open(logfile_name, 'a') as h:
            h.write(f'Loss after epoch {global_epoch}: {averaged_loss}\n')
        recall = validate_model(model, val_loader)
        log_value('Unweighted Recall per epoch', recall, global_epoch)
        global_epoch += 1
class CompWithInit2(event.Component):
    """Component with three int properties; init() can override their defaults."""
    foo1 = event.IntProp(1)
    foo2 = event.IntProp(2, settable=True)
    foo3 = event.IntProp(3)
    def init(self, set_foos):
        # When requested, override each property via a different mechanism:
        # direct mutation, the generated setter, and the custom setter below.
        if set_foos:
            self._mutate_foo1(11)
            self.set_foo2(12)
            self.set_foo3(13)
    def set_foo3(self, v):
        # Custom setter: stores v + 100 rather than v itself.
        self._mutate_foo3((v + 100))
class FitFromDictMixin():
    """Mixin providing shared fit-time validation for dict-driven transformers."""
    def _fit_from_dict(self, X: pd.DataFrame, user_dict_: Dict) -> pd.DataFrame:
        """Validate X against the user dict's keys and record fit-time metadata.

        The dict keys name the variables to operate on; they must be numeric
        columns of X free of NaN and inf. Sets `variables_`,
        `feature_names_in_` and `n_features_in_`, and returns the checked X.
        """
        X = check_X(X)
        variables = list(user_dict_.keys())
        self.variables_ = check_numerical_variables(X, variables)
        _check_contains_na(X, self.variables_)
        _check_contains_inf(X, self.variables_)
        self.feature_names_in_ = X.columns.tolist()
        self.n_features_in_ = X.shape[1]
        return X
class TestMisc(util.ColorAsserts, unittest.TestCase):
    """Miscellaneous Color API tests: serialization, indexing, parsing errors,
    get/set (including multi and chained 'space.channel' syntax), conversion,
    matching, and random color generation."""

    # --- serialization / string output ---
    def test_max_precision(self):
        self.assertEqual(Color('purple').convert('lab').to_string(precision=(- 1)), 'lab(29. 56. -36.)')
    def test_percent_bool_list(self):
        self.assertEqual(Color('purple').convert('lab').set('alpha', 0.5).to_string(percent=[True, False, False, True]), 'lab(29.692% 56.112 -36.293 / 50%)')
    def test_percent_bool_list_implied_false(self):
        # A short percent list implies False for unspecified channels.
        self.assertEqual(Color('purple').convert('lab').set('alpha', 0.5).to_string(percent=[True]), 'lab(29.692% 56.112 -36.293 / 0.5)')
    def test_repr(self):
        self.assertEqual(str(Color('red')), 'color(srgb 1 0 0 / 1)')
    def test_repr_percent(self):
        self.assertEqual(str(Color('white').convert('lab')), 'color(--lab 100 0 0 / 1)')

    # --- sequence protocol (index/slice access to channels) ---
    def test_color_indexing(self):
        c1 = Color('red')
        self.assertEqual(len(c1), 4)
        self.assertEqual(c1[0], 1)
        self.assertEqual(c1[:], [1, 0, 0, 1])
        c1[2] = 1
        self.assertEqual(c1[2], 1)
        c2 = Color('blue')
        c1[:] = c2[:]
        self.assertColorEqual(c1, c2)
    def test_none(self):
        c = Color('color(srgb 1 none 1)')
        self.assertEqual(c[:(- 1):2], [1, 1])
        self.assertTrue(c.is_nan('green'))
        self.assertTrue(Color('color(srgb 1 1 1 / none)').is_nan('alpha'))
    def test_percent_none(self):
        c = Color('color(--lch none 0 none)')
        self.assertEqual(c[1], 0)
        self.assertTrue((c.is_nan('l') and c.is_nan('h')))
        c = Color('hsl(30 none none)')
        self.assertEqual(c[0], 30)
        self.assertTrue((c.is_nan('s') and c.is_nan('l')))
    def test_normalize(self):
        self.assertColorEqual(Color('white').normalize(), Color('white'))
        self.assertColorNotEqual(Color('hsl(270 0% 50%)').normalize(), Color('hsl(270 0% 50%)'))

    # --- dict round-trip and constructor inputs ---
    def test_color_dict(self):
        c1 = Color('red')
        d = c1.to_dict()
        c2 = Color(d)
        self.assertEqual(c1, c2)
    def test_dict_input(self):
        c1 = Color('srgb', [1, 0, 1], 0.5)
        c2 = Color({'space': 'srgb', 'coords': [1, 0, 1], 'alpha': 0.5})
        c3 = Color({'space': 'srgb', 'coords': [1, 0, 1], 'alpha': 0.5})
        self.assertColorEqual(c1, c2)
        self.assertColorEqual(c1, c3)
    def test_white(self):
        self.assertEqual(Color('red').white(), [0., 1, 1.])

    # --- malformed input handling ---
    def test_less_input(self):
        with self.assertRaises(ValueError):
            Color('color(srgb 1)')
    def test_non_ident(self):
        with self.assertRaises(ValueError):
            Color('color(1 1 1 / 0.5)')
    def test_missing_alpha(self):
        with self.assertRaises(ValueError):
            Color('color(srgb 1 1 1 /)')
    def test_bad_color_channel_type(self):
        with self.assertRaises(ValueError):
            Color('color(srgb 1deg 1 1)')
    def test_missing_end(self):
        with self.assertRaises(ValueError):
            Color('color(srgb 1 1 1')
    def test_less_raw_input(self):
        # Missing raw channels are filled with NaN.
        self.assertEqual(Color('srgb', [1]), Color('srgb', [1, NaN, NaN]))
    def test_too_many_input(self):
        with self.assertRaises(ValueError):
            Color('color(srgb 1 0 0 0 / 1)')
    def test_too_many_raw_input(self):
        with self.assertRaises(AttributeError):
            Color('srgb', [1, 0, 0, 0])
    def test_bad_input(self):
        with self.assertRaises(TypeError):
            Color(3)
    def test_bad_sytnax_input(self):
        with self.assertRaises(ValueError):
            Color('nope')
        with self.assertRaises(ValueError):
            Color('nope', [0, 0, 0])
    def test_bad_class(self):
        # A color from a class with extra registered spaces is rejected.
        c = ColorAll('hunter-lab', [0, 0, 0])
        with self.assertRaises(ValueError):
            Color(c)
    def test_bad_data_input(self):
        with self.assertRaises(ValueError):
            Color('srgb')
    def test_missing_values(self):
        with self.assertRaises(ValueError):
            Color('color(srgb)')
    def test_missing_inputs(self):
        coords = Color('srgb', [])[:(- 1)]
        for c in coords:
            self.assertTrue(math.isnan(c))

    # --- copying and updating ---
    def test_new(self):
        c1 = Color('purple')
        c2 = c1.new('purple')
        self.assertEqual(c1, c2)
    def test_clone(self):
        c1 = Color('purple')
        c2 = c1.clone()
        self.assertEqual(c1, c2)
    def test_update(self):
        c1 = Color('orange')
        c2 = Color('purple')
        self.assertNotEqual(c1, c2)
        c2.update(c1)
        self.assertEqual(c1, c2)
    def test_update_different_space(self):
        # update() keeps the target's space, converting the source into it.
        c1 = Color('orange')
        c2 = c1.convert('hsl')
        c3 = Color('purple').convert('hsl')
        self.assertEqual(c2, c3.update(c1))
    def test_update_self(self):
        c1 = Color('orange')
        self.assertEqual(c1.update(c1), c1)
    def test_mutate(self):
        # mutate() adopts the source's space as well as its values.
        c1 = Color('orange').convert('lch')
        c2 = Color('orange')
        self.assertNotEqual(c1, c2)
        c2.mutate(c1)
        self.assertEqual(c1, c2)

    # --- conversion and gamut fitting ---
    def test_convert(self):
        c1 = Color('orange')
        c2 = c1.convert('hsl')
        self.assertColorEqual(c2, Color('hsl(39, 100%, 50%)'), precision=0)
    def test_convert_fit(self):
        c1 = Color('color(srgb 2 -1 0)')
        self.assertFalse(c1.in_gamut())
        c2 = c1.convert('hsl', fit=True)
        self.assertTrue(c2.in_gamut())
        c3 = c1.convert('hsl').clone().fit()
        self.assertColorEqual(c2, c3)
    def test_convert_fit_clip(self):
        c1 = Color('color(srgb 2 -1 0)')
        self.assertFalse(c1.in_gamut())
        c2 = c1.convert('hsl', fit='clip')
        self.assertTrue(c2.in_gamut())
        c3 = c1.convert('hsl').clone().fit(method='clip')
        self.assertColorEqual(c2, c3)
        c4 = c1.convert('hsl', fit=True)
        self.assertColorNotEqual(c2, c4)

    # --- get/set, including 'space.channel' chained syntax ---
    def test_property(self):
        c1 = Color('red')
        self.assertEqual(c1.get('green'), 0)
        c1.set('green', 0.5)
        self.assertEqual(c1.get('green'), 0.5)
    def test_get(self):
        c1 = Color('orange')
        self.assertEqual(c1.get('red'), 1.0)
    def test_get_num(self):
        # Channels can also be addressed by numeric index as a string.
        c1 = Color('orange')
        self.assertEqual(c1.get('0'), 1.0)
    def test_get_no_nans(self):
        self.assertEqual(Color('white').convert('lch').get('h', nans=False), 0.0)
        self.assertEqual(Color('white').convert('lch').get(['c', 'h'], nans=False), [0.0, 0.0])
    def test_space_get(self):
        c1 = Color('orange')
        self.assertEqual(c1.get('hsl.lightness'), 0.5)
    def test_space_get_num(self):
        c1 = Color('orange')
        self.assertEqual(c1.get('hsl.2'), 0.5)
    def test_get_bad(self):
        c1 = Color('orange')
        with self.assertRaises(ValueError):
            c1.get('bad')
    def test_get_bad_chain(self):
        c1 = Color('orange')
        with self.assertRaises(ValueError):
            c1.get('hsl.hue.wrong')
    def test_set(self):
        c1 = Color('orange')
        c1.set('red', 0.5)
        self.assertEqual(c1.get('red'), 0.5)
    def test_set_num(self):
        c1 = Color('orange')
        c1.set('red', 0.5)
        self.assertEqual(c1.get('0'), 0.5)
    def test_set_undefined(self):
        # With nans=False the setter callback must never see NaN.
        def set_test(value):
            assert (not math.isnan(value))
            return value
        c1 = Color('gray').convert('hsl')
        c1.set('hue', set_test, nans=False)
    def test_set_undefined_space(self):
        def set_test(value):
            assert (not math.isnan(value))
            return value
        Color('gray').set('hsl.hue', set_test, nans=False)
    def test_multi_set_undefined(self):
        def set_test(value):
            assert (not math.isnan(value))
            return value
        c1 = Color('gray').convert('hsl')
        c1.set({'lightness': 0.5, 'hue': set_test}, nans=False)
    def test_multi_set_undefined_space(self):
        def set_test(value):
            assert (not math.isnan(value))
            return value
        Color('gray').set({'hsl.saturation': 0.5, 'hsl.hue': set_test}, nans=False)
    def test_multi_set(self):
        # A dict-based multi-set must equal the equivalent chained single sets.
        color = Color('orange')
        color2 = color.clone()
        color2.convert('oklch', in_place=True)
        color2.set('hue', 270).set('lightness', (lambda l: (l - (l * 0.25))))
        color2.convert('srgb', in_place=True).set('alpha', 0.5)
        color.set({'oklch.lightness': (lambda l: (l - (l * 0.25))), 'alpha': 0.5, 'oklch.hue': 270})
        self.assertColorEqual(color, color2)
    def test_bad_multi_set_dict(self):
        with self.assertRaises(ValueError):
            Color('red').set({'red': 0}, 0)
    def test_bad_multi_set_string(self):
        with self.assertRaises(ValueError):
            Color('red').set('red')
    def test_multi_get(self):
        color = Color('orange')
        color.get(['oklch.lightness', 'alpha', 'oklch.hue'])
        oklch = color.convert('oklch')
        self.assertEqual(color.get(['oklch.lightness', 'alpha', 'oklch.hue']), [oklch['l'], color[(- 1)], oklch['h']])
    def test_space_set(self):
        c1 = Color('orange')
        c1.set('hsl.hue', 270)
        self.assertEqual(c1.get('hsl.hue'), 270)
    def test_space_set_num(self):
        c1 = Color('orange')
        c1.set('hsl.0', 270)
        self.assertEqual(c1.get('hsl.0'), 270)
    def test_function_set(self):
        c1 = Color('orange')
        c1.set('red', (lambda x: (x * 0.3)))
        self.assertEqual(c1.get('red'), 0.3)
    def test_set_bad(self):
        c1 = Color('orange')
        with self.assertRaises(ValueError):
            c1.set('bad', 0.5)
    def test_set_bad_chain(self):
        c1 = Color('orange')
        with self.assertRaises(ValueError):
            c1.set('hsl.hue.wrong', 0.5)
    def test_set_bad_input(self):
        c1 = Color('orange')
        with self.assertRaises(ValueError):
            c1.set('red', 'bad')

    # --- achromatic / NaN checks ---
    def test_is_achromatic(self):
        from coloraide.spaces.srgb.css import sRGB
        # A space whose is_achromatic() returns None forces the generic
        # conversion-based fallback check.
        class TempsRGB(sRGB):
            def is_achromatic(self, coords):
                return None
        # NOTE(review): the empty class body marker was reconstructed; the
        # extraction dropped the original `...` placeholder here.
        class TempColor(Color): ...
        TempColor.register(TempsRGB(), overwrite=True)
        self.assertFalse(TempColor('red').is_achromatic())
        self.assertTrue(TempColor('gray').is_achromatic())
    def test_is_nan_false(self):
        self.assertFalse(Color('red').convert('hsl').is_nan('hue'))
    def test_is_nan_true(self):
        self.assertTrue(Color('white').convert('hsl').is_nan('hue'))
    def test_is_nan_false_different_space(self):
        self.assertFalse(Color('red').is_nan('hsl.hue'))
    def test_is_nan_true_different_space(self):
        self.assertTrue(Color('white').is_nan('hsl.hue'))

    # --- string matching ---
    def test_match(self):
        obj = Color.match('red')
        self.assertEqual(obj.color, Color('red'))
        self.assertEqual(obj.start, 0)
        self.assertEqual(obj.end, 3)
    def test_match_no_fullmatch(self):
        obj = Color.match('red more text')
        self.assertEqual(obj.color, Color('red'))
        self.assertEqual(obj.start, 0)
        self.assertEqual(obj.end, 3)
    def test_match_fullmatch(self):
        self.assertIsNone(Color.match('red more text', fullmatch=True))
        self.assertIsNotNone(Color.match('red', fullmatch=True))
    def test_match_offset(self):
        obj = Color.match('yellow green #0000FF rgb(1, 0, 0)', start=7)
        self.assertEqual(obj.color, Color('green'))
        self.assertEqual(obj.start, 7)
        self.assertEqual(obj.end, 12)
        obj = Color.match('yellow green #0000FF rgb(1, 0, 0)', start=13)
        self.assertEqual(obj.color, Color('blue'))
        self.assertEqual(obj.start, 13)
        self.assertEqual(obj.end, 20)
        obj = Color.match('yellow green #0000FF rgb(255, 0, 0)', start=21)
        self.assertEqual(obj.color, Color('red'))
        self.assertEqual(obj.start, 21)
        self.assertEqual(obj.end, 35)
    def test_mask_in_place(self):
        c1 = Color('white')
        c2 = c1.mask('red')
        self.assertNotEqual(c1, c2)
        self.assertIsNot(c1, c2)
        c3 = c1.mask('red', in_place=True)
        self.assertEqual(c1, c3)
        self.assertIs(c1, c3)
    def test_parse_float(self):
        # Scientific notation with explicit signs must parse.
        self.assertColorEqual(Color('color(srgb 3.2e-2 0.1e+1 0.1e1 / 0.5)'), Color('color(srgb 0.032 1 1 / 0.5)'))
        self.assertColorEqual(Color('color(srgb +3.2e-2 +0.1e+1 +0.1e1 / 0.5)'), Color('color(srgb 0.032 1 1 / 0.5)'))

    # --- random color generation ---
    def test_random_space(self):
        c = Color.random('srgb')
        self.assertEqual(c.space(), 'srgb')
        c = Color.random('hsl')
        self.assertEqual(c.space(), 'hsl')
    def test_random_range(self):
        for _ in range(10):
            for c in Color.random('srgb'):
                self.assertTrue((0 <= c <= 1))
    def test_random_limits(self):
        for _ in range(10):
            for (i, c) in enumerate(Color.random('srgb', limits=[None, (0, 0.5)])):
                if (i == 1):
                    self.assertTrue((0 <= c <= 0.5))
                else:
                    self.assertTrue((0 <= c <= 1))
class UserPasswordUpdateView(MenuItemMixin, FormView):
    """Lets the logged-in user change their password via PasswordChangeForm."""
    form_class = PasswordChangeForm
    template_name = 'registration/password.html'
    menu_parameters = 'password'
    # NOTE(review): the next line looks like a mangled
    # `@method_decorator(login_required)` decorator for dispatch() —
    # decorators appear stripped in this file; confirm against the original.
    _decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        return super(UserPasswordUpdateView, self).dispatch(request, *args, **kwargs)
    def get_object(self):
        # The object being edited is always the current user.
        return self.request.user
    def get_form_kwargs(self):
        # PasswordChangeForm requires the user as a constructor argument.
        kwargs = super(UserPasswordUpdateView, self).get_form_kwargs()
        kwargs['user'] = self.request.user
        return kwargs
    def form_valid(self, form):
        form.save()
        messages.success(self.request, _('Your password has been successfully updated'))
        # Keep the current session authenticated after the password change.
        update_session_auth_hash(self.request, form.user)
        return super(UserPasswordUpdateView, self).form_valid(form)
    def get_success_url(self):
        return reverse('account-password')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.