code stringlengths 281 23.7M |
|---|
class OptionSeriesPolygonSonificationTracksActivewhen(Options):
    """Highcharts ``series.polygon.sonification.tracks.activeWhen`` options.

    Auto-generated option wrapper: each option is exposed as a getter/setter
    pair sharing one name.  Getters read the stored config via
    ``_config_get`` (argument is the documented default); setters store the
    value via ``_config`` with ``js_type=False``.

    NOTE(review): the ``@property`` / ``@<name>.setter`` decorators appear
    stripped from this dump — as written each second ``def`` simply shadows
    the first.  Confirm against the generator before relying on
    attribute-style access.
    """
    def crossingDown(self):
        return self._config_get(None)
    def crossingDown(self, num: float):
        self._config(num, js_type=False)
    def crossingUp(self):
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
def cached(func: Callable[(ArgsT, ReturnT)]) -> Callable[(ArgsT, ReturnT)]:
    """Decorator that memoizes *func* process-wide via an ``lru_cache``
    registered on the ``isolate`` module, keyed by a SHA-256 hash of the
    function's source code.

    Hashing the source makes the cache key stable across module reloads
    while still changing whenever the implementation changes.  When the
    source cannot be retrieved (e.g. the function was defined in a REPL),
    the function is returned undecorated, since no stable key exists.
    """
    import hashlib
    try:
        source_code = inspect.getsource(func).encode('utf-8')
    except OSError:
        # inspect.getsource raises OSError when no source file is available.
        print(f'[warning] Function {func.__name__} can not be cached...')
        return func
    cache_key = hashlib.sha256(source_code).hexdigest()
    (func)
    # NOTE(review): the bare ``(func)`` above is a no-op expression —
    # presumably a stripped ``@functools.wraps(func)`` decorator on
    # ``wrapper``; confirm against the original source.
    def wrapper(*args: ArgsT.args, **kwargs: ArgsT.kwargs) -> ReturnT:
        from functools import lru_cache
        import isolate
        # The registry of cached callables lives on the ``isolate`` module
        # so every importer shares one cache per source hash.
        if (not hasattr(isolate, '__cached_functions__')):
            isolate.__cached_functions__ = {}
        if (cache_key not in isolate.__cached_functions__):
            isolate.__cached_functions__[cache_key] = lru_cache(maxsize=None)(func)
        return isolate.__cached_functions__[cache_key](*args, **kwargs)
    return wrapper
def extractKoiTranslationsCom(item):
    """Map a Koi Translations feed item to a release message.

    Returns ``None`` for previews or items without a chapter/volume,
    a release message for recognised series, and ``False`` otherwise.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    if item['tags'] == ['Uncategorized']:
        # (title substring to match, canonical series name, translation type)
        titlemap = [
            ('Potatoes are the only thing thats needed in this world! Chapter ',
             'Potatoes are the only thing thats needed in this world!', 'translated'),
            ('Bewitching Demonic Beast Chapter ',
             'Bewitching Demonic Beast', 'translated'),
            ('Becoming A Global Superstar Starting As An Idol Trainee Chapter ',
             'Becoming A Global Superstar Starting As An Idol Trainee', 'translated'),
            ('Tensei Shoujo no Rirekisho',
             'Tensei Shoujo no Rirekisho', 'translated'),
            ('Master of Dungeon',
             'Master of Dungeon', 'oel'),
        ]
        lowered = title.lower()
        for needle, name, tl_type in titlemap:
            if needle.lower() in lowered:
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesSolidgaugeDataEvents(Options):
    """Highcharts ``series.solidgauge.data.events`` callback options.

    Auto-generated option wrapper: one getter/setter pair per point event.
    Getters read the stored config (default ``None``); setters store the
    callback via ``_config`` with ``js_type=False``.

    NOTE(review): the ``@property`` / ``@<name>.setter`` decorators appear
    stripped from this dump — as written each second ``def`` shadows the
    first.  Confirm against the generator.
    """
    def click(self):
        return self._config_get(None)
    def click(self, value: Any):
        self._config(value, js_type=False)
    def drag(self):
        return self._config_get(None)
    def drag(self, value: Any):
        self._config(value, js_type=False)
    def dragStart(self):
        return self._config_get(None)
    def dragStart(self, value: Any):
        self._config(value, js_type=False)
    def drop(self):
        return self._config_get(None)
    def drop(self, value: Any):
        self._config(value, js_type=False)
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    def remove(self):
        return self._config_get(None)
    def remove(self, value: Any):
        self._config(value, js_type=False)
    def select(self):
        return self._config_get(None)
    def select(self, value: Any):
        self._config(value, js_type=False)
    def unselect(self):
        return self._config_get(None)
    def unselect(self, value: Any):
        self._config(value, js_type=False)
    def update(self):
        return self._config_get(None)
    def update(self, value: Any):
        self._config(value, js_type=False)
class EnumType(type):
    """Metaclass giving classes a two-way name<->number lookup.

    A class using this metaclass declares a ``vals`` dict mapping symbolic
    names to integers.  ``Cls.name`` and ``Cls('name')`` return the mapped
    number; ``Cls(number)`` performs the reverse lookup and returns the
    first matching name.  Unknown names raise ``KeyError``; unmapped
    numbers raise ``KeyError`` with an explanatory message.
    """

    def _wrap(cls, attr=None):
        # The bare no-argument form is not a meaningful lookup.
        if attr is None:
            raise NotImplementedError
        if isinstance(attr, int):
            # Reverse lookup: first name whose value equals ``attr``.
            found = next((name for (name, number) in cls.vals.items() if number == attr), None)
            if found is None:
                raise KeyError('num {0} is not mapped'.format(attr))
            return found
        # Forward lookup: name -> number.
        return cls.vals[attr]

    def __call__(cls, attr):
        # ``Cls(x)`` delegates to the two-way lookup instead of constructing.
        return cls._wrap(attr)

    def __getattr__(cls, attr):
        # ``Cls.name`` (for names not found normally) resolves via ``vals``.
        return cls._wrap(attr)
class UsersLogic(object):
    """Query and mutation helpers for users and groups (Copr web app).

    NOTE(review): every method takes ``cls`` but carries no
    ``@classmethod`` decorator in this dump — the decorators appear
    stripped; as written these would be instance methods.  Confirm
    against the original source.
    """
    def get(cls, username):
        """Return a query for users matching *username* (a query, not a row)."""
        app.logger.info("Querying user '%s' by username", username)
        return User.query.filter((User.username == username))
    def get_by_api_login(cls, login):
        """Return a query for users matching the given API login."""
        return User.query.filter((User.api_login == login))
    def get_multiple_with_projects(cls):
        """Return a query for users owning at least one copr project."""
        return User.query.filter(User.coprs.any())
    def raise_if_cant_update_copr(cls, user, copr, message):
        """Raise InsufficientRightsException(*message*) unless *user* may edit *copr*."""
        if (not user.can_edit(copr)):
            raise exceptions.InsufficientRightsException(message)
        app.logger.info("User '%s' allowed to update project '%s'", user.name, copr.full_name)
    def raise_if_cant_build_in_copr(cls, user, copr, message):
        """Raise InsufficientRightsException(*message*) unless *user* may build in *copr*."""
        if (not user.can_build_in(copr)):
            raise exceptions.InsufficientRightsException(message)
        app.logger.info("User '%s' allowed to build in project '%s'", user.name, copr.full_name)
    def raise_if_not_in_group(cls, user, group):
        """Raise unless *user* is an admin or a member of *group* (by FAS name)."""
        if ((not user.admin) and (group.fas_name not in user.user_teams)):
            raise exceptions.InsufficientRightsException("User '{}' doesn't have access to the copr group '{}' (fas_name='{}')".format(user.username, group.name, group.fas_name))
        app.logger.info("User '%s' allowed to access group '%s' (fas_name='%s')", user.name, group.name, group.fas_name)
    def get_group_by_alias(cls, name):
        """Return a query for groups whose alias (``name``) matches."""
        return Group.query.filter((Group.name == name))
    def group_alias_exists(cls, name):
        """True when at least one group uses the given alias."""
        query = cls.get_group_by_alias(name)
        return (query.count() != 0)
    def get_group_by_fas_name(cls, fas_name):
        """Return a query for groups matching the given FAS name."""
        return Group.query.filter((Group.fas_name == fas_name))
    def get_groups_by_fas_names_list(cls, fas_name_list):
        """Return a query for groups whose FAS name is in *fas_name_list*."""
        return Group.query.filter(Group.fas_name.in_(fas_name_list))
    def get_groups_by_names_list(cls, name_list):
        """Return a query for groups whose alias is in *name_list*."""
        return Group.query.filter(Group.name.in_(name_list))
    def create_group_by_fas_name(cls, fas_name, alias=None):
        """Add a new Group (alias defaults to the FAS name) to the session."""
        if (alias is None):
            alias = fas_name
        group = Group(fas_name=fas_name, name=alias)
        db.session.add(group)
        return group
    def get_group_by_fas_name_or_create(cls, fas_name, alias=None):
        """Return the existing group for *fas_name*, creating (and flushing) it if absent."""
        mb_group = cls.get_group_by_fas_name(fas_name).first()
        if (mb_group is not None):
            return mb_group
        group = cls.create_group_by_fas_name(fas_name, alias)
        db.session.flush()
        return group
    def filter_denylisted_teams(cls, teams):
        """Return an iterator over *teams* minus those in config GROUP_DENYLIST."""
        denylist = set(app.config.get('GROUP_DENYLIST', []))
        return filter((lambda t: (t not in denylist)), teams)
    def is_denylisted_group(cls, fas_group):
        """True when *fas_group* appears in config GROUP_DENYLIST."""
        if ('GROUP_DENYLIST' in app.config):
            return (fas_group in app.config['GROUP_DENYLIST'])
        return False
    def delete_user_data(cls, user):
        """Blank out personal/privileged fields of *user* (GDPR-style wipe)."""
        null = {'timezone': None, 'proven': False, 'admin': False, 'api_login': '', 'api_token': '', 'api_token_expiration': datetime.date(1970, 1, 1), 'openid_groups': None}
        for (k, v) in null.items():
            setattr(user, k, v)
        app.logger.info("Deleting user '%s' data", user.name)
    def create_user_wrapper(cls, username, email=None, timezone=None):
        """Build (but do not persist) a new User with freshly generated API credentials.

        The API login is prefixed with base64('copr') + '##' and padded with
        random characters up to the configured token length.
        """
        expiration_date_token = (datetime.date.today() + datetime.timedelta(days=app.config['API_TOKEN_EXPIRATION']))
        copr64 = (base64.b64encode(b'copr') + b'##')
        user = User(username=username, mail=email, timezone=timezone, api_login=(copr64.decode('utf-8') + generate_api_token((app.config['API_TOKEN_LENGTH'] - len(copr64)))), api_token=generate_api_token(app.config['API_TOKEN_LENGTH']), api_token_expiration=expiration_date_token)
        app.logger.info("Creating user '%s <%s>'", user.name, user.mail)
        return user
class OptionSeriesSankeySonificationContexttracksMappingNoteduration(Options):
    """Highcharts ``series.sankey.sonification.contextTracks.mapping.noteDuration``
    options.

    Auto-generated option wrapper: one getter/setter pair per option
    (default ``None``); setters store via ``_config`` with ``js_type=False``.

    NOTE(review): ``@property`` / ``@<name>.setter`` decorators appear
    stripped from this dump — each second ``def`` shadows the first.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the leading ``.xfail(...)`` is a fragment — presumably a
# stripped ``@pytest.mark.xfail`` decorator; as written this line is a
# syntax error.  Restore the decorator form when re-integrating.
.xfail(reason='modification to initial allocation made the block fixture invalid')
def test_canonical_chain(valid_chain):
    """Persisting block 1's header advances the canonical head past genesis."""
    # Fresh chain: canonical head is the genesis header.
    genesis_header = valid_chain.chaindb.get_canonical_block_header_by_number(constants.GENESIS_BLOCK_NUMBER)
    assert (valid_chain.get_canonical_head() == genesis_header)
    # Persist block 1's header; it becomes both head and canonical block 1.
    block = rlp.decode(valid_block_rlp, sedes=FrontierBlock)
    valid_chain.chaindb.persist_header(block.header)
    assert (valid_chain.get_canonical_head() == block.header)
    canonical_block_1 = valid_chain.chaindb.get_canonical_block_header_by_number((constants.GENESIS_BLOCK_NUMBER + 1))
    assert (canonical_block_1 == block.header)
class TestTorrentTrackerIPLeakNet(LocalTestCase):
    """End-to-end VPN leak test: after connecting the VPN, the torrent IPs
    reported by ipleak.net must not include any pre-connect public IP
    (nor, for IPv6, any address inside a pre-connect /64 subnet).
    """
    def __init__(self, devices, parameters):
        super().__init__(devices, parameters)
        # Which torrent client to drive, and whether it should already be
        # open before the VPN connects.
        self.torrent_client = self.parameters['torrent_client']
        self.torrent_client_preopened = self.parameters['torrent_client_preopened']
        self._webdriver = None
    def test(self):
        L.describe('Get the public IP addresses before VPN connect')
        public_ips_before_connect = self.localhost['ip_tool'].all_public_ip_addresses()
        self.assertNotEmpty(public_ips_before_connect, "Couldn't get public IP addresses")
        L.info('Public IP addresses before VPN connect are {}'.format(public_ips_before_connect))
        self.localhost['torrent_client'].set_client(self.torrent_client)
        if self.torrent_client_preopened:
            self.localhost['torrent_client'].open()
        L.describe('Open and connect the VPN application')
        self.localhost['vpn_application'].open_and_connect()
        self._webdriver = self.localhost['webdriver'].driver(self.parameters['browser'])
        ip_leak_helper = IPLeakNetHelper(self._webdriver)
        L.describe('Open ipleak.net page')
        ip_leak_helper.load_page()
        L.describe('Get the torrent magnet link from ipleak.net')
        magnet_link = ip_leak_helper.get_magnet_link()
        L.info('Got magnet link {}'.format(magnet_link))
        L.describe('Add the torrent magnet to the torrent client')
        self.localhost['torrent_client'].add_torrent(magnet_link)
        L.describe("Check the reported torrent IPs aren't public")
        # IPv6 leak check is widened to the /64 networks of the pre-connect
        # addresses, since the interface identifier may change.
        ipv6_subnets = [ipaddress.ip_interface((ip, 64)).network for ip in public_ips_before_connect if (ip.version == 6)]
        # Poll the page for up to 20 seconds, accumulating every IP seen.
        timeup = TimeUp(20)
        reported_ips = set()
        while (not timeup):
            L.info('Checking webpage contents for leaked torrent IPs')
            reported_ips.update(ip_leak_helper.get_reported_torrent_ips())
            for ip in reported_ips:
                self.assertIsNotIn(ip, public_ips_before_connect, 'Torrent tracker found a public IP: {}'.format(ip))
                if (ip.version == 6):
                    for subnet in ipv6_subnets:
                        self.assertFalse((ip in subnet), 'IPv6 address {} is in {}'.format(ip, subnet))
            if reported_ips:
                L.info('Found IPs {} but none were public'.format(reported_ips))
            else:
                L.info('Found no IPs')
            time.sleep(1)
        # No reported IPs at all means the setup (not the VPN) failed.
        if (not reported_ips):
            raise XVEx("The torrent client didn't report any IP addresses at all")
    def teardown(self):
        # NOTE(review): if test() failed before the driver was created,
        # ``self._webdriver`` is still None and this raises — confirm the
        # framework guarantees test() ran first.
        self._webdriver.close()
        super().teardown()
def generate_samples(exp_dir: str='', output_directory: Optional[str]=None, render_size: Optional[Tuple[(int, int)]]=None, video_size: Optional[Tuple[(int, int)]]=(256, 256), camera_path: str='simple_360', n_eval_cameras: int=(25 * 3), num_samples: int=2, seed: int=3, trajectory_scale: float=1.3, up: Tuple[(float, float, float)]=CANONICAL_CO3D_UP_AXIS, camera_elevation: float=((- 30.0) * ((2 * math.pi) / 360)), progressive_sampling_steps_per_render: int=(- 1), save_voxel_features: bool=True) -> None:
    """Render ``num_samples`` random samples from a trained HoloDiffusion
    experiment as fly-around videos.

    Loads the experiment from *exp_dir*, verifies it is a trained
    HoloDiffusion model (3D net + diffusion enabled), then renders one
    video per synthetic sequence name into *output_directory* (defaults to
    ``<exp_dir>/generated_samples`` or ``.../generated_samples_denoising``
    when progressive sampling is enabled, i.e.
    ``progressive_sampling_steps_per_render != -1``).

    ``camera_elevation`` default is -30 degrees expressed in radians.
    """
    if (output_directory is None):
        folder_name = ('generated_samples' if (progressive_sampling_steps_per_render == (- 1)) else 'generated_samples_denoising')
        output_directory = os.path.join(exp_dir, folder_name)
    os.makedirs(output_directory, exist_ok=True)
    (_, model, data_source) = load_experiment(Experiment, exp_dir, None, render_size, seed, device)
    assert (model.net_3d_enabled and model.diffusion_enabled), 'Can generate random samples only from a trained HoloDiffusion model. '
    # Source views are whatever remains of the batch after target views.
    n_source_views = (data_source.data_loader_map_provider.batch_size - model.n_train_target_views)
    sequence_names = [f'sample_{i:05d}' for i in range(num_samples)]
    for sequence_name in sequence_names:
        with torch.no_grad():
            # Free GPU memory between samples; rendering is memory-heavy.
            torch.cuda.empty_cache()
            gc.collect()
            render_flyaround(dataset=None, sequence_name=sequence_name, model=model, output_video_path=os.path.join(output_directory, 'video'), output_video_name=sequence_name, n_source_views=n_source_views, n_flyaround_poses=n_eval_cameras, trajectory_type=camera_path, video_resize=video_size, device=device, up=up, trajectory_scale=trajectory_scale, camera_elevation=camera_elevation, sample_mode=True, progressive_sampling_steps_per_render=progressive_sampling_steps_per_render, visualize_preds_keys=('images_render', 'masks_render', 'depths_render', 'noise_render', 'images_prev_stage_render', 'features_prev_stage_render', '_shaded_depth_render', '_all_source_images'), save_voxel_features=save_voxel_features)
class OptionPlotoptionsBellcurveSonificationTracksMappingGapbetweennotes(Options):
    """Highcharts ``plotOptions.bellcurve.sonification.tracks.mapping.gapBetweenNotes``
    options.

    Auto-generated option wrapper: one getter/setter pair per option
    (default ``None``); setters store via ``_config`` with ``js_type=False``.

    NOTE(review): ``@property`` / ``@<name>.setter`` decorators appear
    stripped from this dump — each second ``def`` shadows the first.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class NXTSetFlowFormat(NiciraHeader):
    """Nicira extension message that selects the flow format."""

    def __init__(self, datapath, flow_format):
        # Tag the Nicira header with the set-flow-format subtype.
        super().__init__(datapath, ofproto.NXT_SET_FLOW_FORMAT)
        self.format = flow_format

    def _serialize_body(self):
        # Serialize the common header first, then pack the format field
        # immediately after it in the message buffer.
        self.serialize_header()
        msg_pack_into(ofproto.NX_SET_FLOW_FORMAT_PACK_STR,
                      self.buf, ofproto.NICIRA_HEADER_SIZE, self.format)
def match_vlan_pcp(self, of_ports, priority=None):
    """Install a flow matching on dl_type + dl_vlan + dl_vlan_pcp.

    Builds a VLAN-tagged TCP packet (vid=1, pcp=5), derives a flow match
    from it, unmasks only the three VLAN-related fields, sends the flow
    add to ``of_ports[1]`` and returns ``(packet, match)``.
    """
    vlan_pkt = simple_tcp_packet(dl_vlan_enable=True, vlan_vid=1, vlan_pcp=5)
    match = parse.packet_to_flow_match(vlan_pkt)
    self.assertTrue(match is not None, 'Could not generate flow match from pkt')
    # Start from all-wildcarded and clear the bits we want matched.
    wildcard_bits = ofp.OFPFW_ALL ^ ofp.OFPFW_DL_TYPE ^ ofp.OFPFW_DL_VLAN ^ ofp.OFPFW_DL_VLAN_PCP
    match.wildcards = wildcard_bits
    match_send_flowadd(self, match, priority, of_ports[1])
    return (vlan_pkt, match)
# NOTE(review): the line below is a fragment — presumably a stripped
# ``@router.get(...)`` (FastAPI) decorator; as written it is a syntax
# error.  Restore the decorator form when re-integrating.
(MESSAGING_STATUS, dependencies=[Security(verify_oauth_client, scopes=[MESSAGING_READ])], response_model=MessagingConfigStatusMessage, responses={HTTP_200_OK: {'content': {'application/json': {'example': {'config_status': 'configured', 'detail': 'Active default messaging service of type mailgun is fully configured'}}}}})
def get_messaging_status(*, db: Session=Depends(deps.get_db)) -> MessagingConfigStatusMessage:
    """Report whether the active default messaging config is fully usable.

    Returns ``configured`` only when a config exists, its details validate
    against the service-type schema, and its secrets exist and validate;
    every failure mode returns ``not_configured`` with a specific detail.
    """
    logger.info('Determining active default messaging config status')
    messaging_config = MessagingConfig.get_active_default(db)
    if (not messaging_config):
        return MessagingConfigStatusMessage(config_status=MessagingConfigStatus.not_configured, detail='No active default messaging configuration found')
    # 1) Validate the non-secret details against the per-service schema.
    try:
        details = messaging_config.details
        MessagingConfigRequestBase.validate_details_schema(messaging_config.service_type, details)
    except Exception as e:
        logger.error(f'Invalid or unpopulated details on {messaging_config.service_type.value} messaging configuration: {Pii(str(e))}')
        return MessagingConfigStatusMessage(config_status=MessagingConfigStatus.not_configured, detail=f'Invalid or unpopulated details on {messaging_config.service_type.value} messaging configuration')
    # 2) Secrets must exist and validate against the secrets schema.
    secrets = messaging_config.secrets
    if (not secrets):
        return MessagingConfigStatusMessage(config_status=MessagingConfigStatus.not_configured, detail=f'No secrets found for {messaging_config.service_type.value} messaging configuration')
    try:
        get_schema_for_secrets(service_type=messaging_config.service_type, secrets=secrets)
    except (ValueError, KeyError) as e:
        logger.error(f'Invalid secrets found on {messaging_config.service_type.value} messaging configuration: {Pii(str(e))}')
        return MessagingConfigStatusMessage(config_status=MessagingConfigStatus.not_configured, detail=f'Invalid secrets found on {messaging_config.service_type.value} messaging configuration')
    return MessagingConfigStatusMessage(config_status=MessagingConfigStatus.configured, detail=f'Active default messaging service of type {messaging_config.service_type.value} is fully configured')
class YandexMusicEntry(RB.RhythmDBEntryType):
    """Rhythmbox DB entry type backed by a Yandex.Music radio station.

    ``station`` arrives as ``'<kind>_<station id>'``; the kind before the
    first underscore names the entry type, and a kind starting with
    ``'feed'`` additionally reports listening feedback to the rotor API.
    """
    def __init__(self, shell, client, station):
        RB.RhythmDBEntryType.__init__(self, name=(('ym-' + station[:station.find('_')]) + '-entry'), save_to_disk=False)
        self.shell = shell
        self.db = shell.props.db
        self.client = client
        # Split '<kind>_<id>': id after the first '_', prefix including it.
        self.station = station[(station.find('_') + 1):]
        self.station_prefix = station[:(station.find('_') + 1)]
        self.is_feed = (station.find('feed') == 0)
        # Previously played track id / duration, used for rotor feedback.
        self.last_track = None
        self.last_duration = None
    def do_get_playback_uri(self, entry):
        """Resolve (and cache in MOUNTPOINT) the direct download URI of *entry*.

        Re-requests the link when no cached URI exists or a HEAD check on
        the cached one fails; returns None when download info can't be
        fetched from Yandex.Music.
        """
        new_track = entry.get_string(RB.RhythmDBPropType.LOCATION)[len(self.station_prefix):]
        if (self.is_feed and self.last_track and (self.last_track != new_track)):
            # Report the previous track as finished before switching tracks.
            Gdk.threads_add_idle(GLib.PRIORITY_LOW, self.feedback_track_finished, self.last_track, self.last_duration)
        uri = entry.get_string(RB.RhythmDBPropType.MOUNTPOINT)
        need_request = (uri is None)
        if (not need_request):
            # Cached direct links expire; verify before reuse.
            r = requests.head(uri)
            need_request = (r.status_code != 200)
        if need_request:
            try:
                downinfo = self.client.tracks_download_info(track_id=new_track, get_direct_links=True)
            except Exception:
                # Fix: was a bare ``except:``; narrowed so KeyboardInterrupt
                # and SystemExit are no longer swallowed.
                return None
            else:
                # NOTE(review): index 1 selects one particular entry of the
                # download-info list — confirm the ordering assumption.
                uri = downinfo[1].direct_link
                self.db.entry_set(entry, RB.RhythmDBPropType.MOUNTPOINT, uri)
                self.db.commit()
        if (self.is_feed and (self.last_track != new_track)):
            Gdk.threads_add_idle(GLib.PRIORITY_LOW, self.feedback_track_started, new_track)
        self.last_track = new_track
        # NOTE(review): DURATION * 1000 is later passed as
        # ``total_played_seconds`` — units (ms vs s) look inconsistent;
        # confirm against the rotor API.
        self.last_duration = (entry.get_ulong(RB.RhythmDBPropType.DURATION) * 1000)
        return uri
    def can_sync_metadata(self, entry):
        # Entries are remote; metadata is never written back.
        return False
    def do_sync_metadata(self, entry, changes):
        return
    def feedback_track_started(self, track):
        """Idle callback: tell the rotor API playback of *track* started."""
        self.client.rotor_station_feedback_track_started(station=self.station, track_id=track)
        return False
    def feedback_track_finished(self, track, duration):
        """Idle callback: tell the rotor API *track* finished after *duration*."""
        self.client.rotor_station_feedback_track_finished(station=self.station, track_id=track, total_played_seconds=duration)
        return False
class OptionSeriesTreemapDataDatalabels(Options):
    """Highcharts ``series.treemap.data.dataLabels`` options.

    Auto-generated option wrapper: one getter/setter pair per scalar
    option — the getter's ``_config_get`` argument is the documented
    default — plus sub-option accessors (``animation``, ``filter``,
    ``textPath``) that return nested option objects via
    ``_config_sub_data``.

    NOTE(review): the ``@property`` / ``@<name>.setter`` decorators appear
    stripped from this dump — as written each second ``def`` shadows the
    first.  Confirm against the generator before relying on
    attribute-style access.
    """
    def align(self):
        return self._config_get('center')
    def align(self, text: str):
        self._config(text, js_type=False)
    def allowOverlap(self):
        return self._config_get(False)
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self) -> 'OptionSeriesTreemapDataDatalabelsAnimation':
        # Nested sub-options object (not a scalar getter/setter pair).
        return self._config_sub_data('animation', OptionSeriesTreemapDataDatalabelsAnimation)
    def backgroundColor(self):
        return self._config_get(None)
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)
    def borderColor(self):
        return self._config_get(None)
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderRadius(self):
        return self._config_get(0)
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    def borderWidth(self):
        return self._config_get(0)
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def crop(self):
        return self._config_get(True)
    def crop(self, flag: bool):
        self._config(flag, js_type=False)
    def defer(self):
        return self._config_get(True)
    def defer(self, flag: bool):
        self._config(flag, js_type=False)
    def enabled(self):
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def filter(self) -> 'OptionSeriesTreemapDataDatalabelsFilter':
        # Nested sub-options object.
        return self._config_sub_data('filter', OptionSeriesTreemapDataDatalabelsFilter)
    def format(self):
        return self._config_get('point.value')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get(None)
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def inside(self):
        return self._config_get(None)
    def inside(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def overflow(self):
        return self._config_get('justify')
    def overflow(self, text: str):
        self._config(text, js_type=False)
    def padding(self):
        return self._config_get(5)
    def padding(self, num: float):
        self._config(num, js_type=False)
    def position(self):
        return self._config_get('center')
    def position(self, text: str):
        self._config(text, js_type=False)
    def rotation(self):
        return self._config_get(0)
    def rotation(self, num: float):
        self._config(num, js_type=False)
    def shadow(self):
        return self._config_get(False)
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    def shape(self):
        return self._config_get('square')
    def shape(self, text: str):
        self._config(text, js_type=False)
    def style(self):
        return self._config_get(None)
    def style(self, value: Any):
        self._config(value, js_type=False)
    def textPath(self) -> 'OptionSeriesTreemapDataDatalabelsTextpath':
        # Nested sub-options object.
        return self._config_sub_data('textPath', OptionSeriesTreemapDataDatalabelsTextpath)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
    def verticalAlign(self):
        return self._config_get('bottom')
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)
    def x(self):
        return self._config_get(0)
    def x(self, num: float):
        self._config(num, js_type=False)
    def y(self):
        return self._config_get(0)
    def y(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(6)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): the four lines below look like stripped decorators
# (two ``@coprs_ns.route(...)`` routes plus ``@login_required`` /
# ``@req_with_copr``-style decorators); as written they are fragments and
# a syntax/name error.  Restore the ``@`` forms when re-integrating.
_ns.route('/g/<group_name>/<coprname>/new_build_custom/', methods=['POST'])
_ns.route('/<username>/<coprname>/new_build_custom/', methods=['POST'])
_required
_with_copr
def copr_new_build_custom(copr):
    """Flask view: submit a new 'custom' build in *copr* from the posted form."""
    view = 'coprs_ns.copr_new_build_custom'
    url_on_success = helpers.copr_url('coprs_ns.copr_builds', copr)
    def factory(**build_options):
        # Deferred so process_new_build drives validation and error flow;
        # ``form`` is bound below, before process_new_build invokes this.
        BuildsLogic.create_new_from_custom(flask.g.user, copr, form.script.data, form.chroot.data, form.builddeps.data, form.resultdir.data, form.repos.data, chroot_names=form.selected_chroots, **build_options)
    form = forms.BuildFormCustomFactory(copr.active_chroots)()
    return process_new_build(copr, form, factory, render_add_build_custom, view, url_on_success)
class stat_trigger(instruction_id):
    """Auto-generated (loxi-style) codec for the ``stat_trigger``
    instruction id (wire type 7).

    NOTE(review): ``unpack`` takes ``reader`` with no ``self`` — presumably
    a stripped ``@staticmethod``.  Also ``pack`` joins ``struct.pack``
    results (bytes) with a ``str`` ``''.join`` — Python 2 era code; under
    Python 3 this raises TypeError.  Confirm before reuse.
    """
    type = 7
    def __init__(self):
        return
    def pack(self):
        """Serialize as uint16 type followed by uint16 total length (backfilled)."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, backfilled below
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a stat_trigger from *reader*; asserts the wire type is 7."""
        obj = stat_trigger()
        _type = reader.read('!H')[0]
        assert (_type == 7)
        _len = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this instruction's declared payload.
        reader = orig_reader.slice(_len, 4)
        return obj
    def __eq__(self, other):
        # Stateless message: any two instances of the same class are equal.
        if (type(self) != type(other)):
            return False
        return True
    def pretty_print(self, q):
        """Emit a human-readable rendering into pretty-printer *q*."""
        q.text('stat_trigger {')
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')
def test_config_inference_with_tuple_of_inference():
    """A tuple of inference methods for (foo, bar) yields a SequentialProposer.

    foo/bar share (ancestral MH, uniform MH); baz uses NUTS.  Expect two
    proposers total — one NUTS for baz plus one SequentialProposer whose
    four children include two ancestral proposers.
    """
    model = SampleModel()
    compositional = bm.CompositionalInference({(model.foo, model.bar): (bm.SingleSiteAncestralMetropolisHastings(), bm.SingleSiteUniformMetropolisHastings()), model.baz: bm.GlobalNoUTurnSampler()})
    compositional.infer([model.baz()], {}, num_chains=1, num_samples=10)
    world = World.initialize_world([model.baz()], {})
    proposers = compositional.get_proposers(world, target_rvs=world.latent_nodes, num_adaptive_sample=10)
    assert (len(proposers) == 2)
    # Proposer order is not guaranteed: pick whichever is NOT the NUTS one.
    sequential_proposer = proposers[int(isinstance(proposers[0], NUTSProposer))]
    assert isinstance(sequential_proposer, SequentialProposer)
    assert (len(sequential_proposer.proposers) == 4)
    proposer_count = Counter(map(type, sequential_proposer.proposers))
    assert (proposer_count[SingleSiteAncestralProposer] == 2)
def pytest_load_initial_conftests(early_config):
    """Load the active project into the main namespace before conftests run.

    Output capture is suspended while loading so any traceback is visible;
    a load failure is surfaced as a pytest usage error.
    """
    capman = early_config.pluginmanager.get_plugin('capturemanager')
    project_path = _get_project_path()
    if not project_path:
        return
    capman.suspend()
    try:
        loaded_project = project.load(project_path)
        loaded_project.load_config()
        loaded_project._add_to_main_namespace()
    except Exception as e:
        print(f'{color.format_tb(e)}\n')
        raise pytest.UsageError('Unable to load project')
    finally:
        capman.resume()
def test_getitem(fx_asset):
    """Frames of apple.ico are addressable by positive and negative index."""
    with Image(filename=str(fx_asset.joinpath('apple.ico'))) as img:
        original_size = img.size
        # The image-level size tracks the currently selected frame.
        assert original_size == img.sequence[img.sequence.current_index].size
        expected_sizes = [(32, 32), (16, 16), (32, 32), (16, 16)]
        for index, frame_size in enumerate(expected_sizes):
            assert img.sequence[index].size == frame_size
        with raises(IndexError):
            img.sequence[4]
        # Negative indices count from the end, mirroring list semantics.
        for offset, frame_size in enumerate(reversed(expected_sizes), start=1):
            assert img.sequence[-offset].size == frame_size
        with raises(IndexError):
            img.sequence[-5]
        # Indexing must not disturb the image-level size.
        assert img.size == original_size
def register(registry):
    """Wire text-editor widgets into the UI-test interaction registry."""
    def _editor_widget(wrapper):
        # The concrete toolkit widget behind an editable text editor.
        return wrapper._target.control

    def _readonly_text(wrapper, _):
        # Displayed text of a read-only editor.
        return wrapper._target.control.text()

    register_editable_textbox_handlers(
        registry=registry,
        target_class=TextEditor,
        widget_getter=_editor_widget,
    )
    registry.register_interaction(
        target_class=ReadonlyEditor,
        interaction_class=DisplayedText,
        handler=_readonly_text,
    )
def upgrade():
    """Alembic upgrade: create the ``tags`` table and link ticket holders.

    Adds a nullable ``ticket_holders.tag_id`` column with a CASCADE-on-delete
    foreign key into ``tags``; ``tags`` itself cascades on event deletion.
    """
    op.create_table('tags', sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('color', sa.String(), nullable=True), sa.Column('is_read_only', sa.Boolean(), nullable=False), sa.Column('event_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['event_id'], ['events.id'], ondelete='CASCADE'), sa.PrimaryKeyConstraint('id'))
    op.add_column('ticket_holders', sa.Column('tag_id', sa.Integer(), nullable=True))
    op.create_foreign_key(u'ticket_holders_tag_id_fkey', 'ticket_holders', 'tags', ['tag_id'], ['id'], ondelete='CASCADE')
class IISPHSolver(SPHBase):
def __init__(self, particle_system):
super().__init__(particle_system)
self.a_ii = ti.field(dtype=float, shape=self.ps.particle_max_num)
self.density_deviation = ti.field(dtype=float, shape=self.ps.particle_max_num)
self.last_pressure = ti.field(dtype=float, shape=self.ps.particle_max_num)
self.avg_density_error = ti.field(dtype=float, shape=())
self.ps.acceleration = ti.Vector.field(self.ps.dim, dtype=float)
self.pressure_accel = ti.Vector.field(self.ps.dim, dtype=float)
particle_node = ti.root.dense(ti.i, self.ps.particle_max_num)
particle_node.place(self.ps.acceleration, self.pressure_accel)
self.dt[None] = 0.0002
def predict_advection(self):
for p_i in range(self.ps.particle_num[None]):
x_i = self.ps.x[p_i]
sum_neighbor = 0.0
sum_neighbor_of_neighbor = 0.0
m_Vi = self.ps.m_V[p_i]
density_i = self.ps.density[p_i]
density_i2 = (density_i * density_i)
density_02 = (self.density_0 * self.density_0)
self.a_ii[p_i] = 0.0
for j in range(self.ps.fluid_neighbors_num[p_i]):
p_j = self.ps.fluid_neighbors[(p_i, j)]
x_j = self.ps.x[p_j]
sum_neighbor_inner = ti.Vector([0.0 for _ in range(self.ps.dim)])
for k in range(self.ps.fluid_neighbors_num[p_i]):
density_k = self.ps.density[k]
density_k2 = (density_k * density_k)
p_k = self.ps.fluid_neighbors[(p_i, j)]
x_k = self.ps.x[p_k]
sum_neighbor_inner += ((self.ps.m_V[p_k] * self.cubic_kernel_derivative((x_i - x_k))) / density_k2)
kernel_grad_ij = self.cubic_kernel_derivative((x_i - x_j))
sum_neighbor -= (self.ps.m_V[p_j] * sum_neighbor_inner).dot(kernel_grad_ij)
sum_neighbor_of_neighbor -= (self.ps.m_V[p_j] * kernel_grad_ij).dot(kernel_grad_ij)
sum_neighbor_of_neighbor *= (m_Vi / density_i2)
self.a_ii[p_i] += ((((sum_neighbor + sum_neighbor_of_neighbor) * self.dt[None]) * self.dt[None]) * density_02)
for j in range(self.ps.solid_neighbors_num[p_i]):
p_j = self.ps.solid_neighbors[(p_i, j)]
x_j = self.ps.x[p_j]
sum_neighbor_inner = ti.Vector([0.0 for _ in range(self.ps.dim)])
for k in range(self.ps.solid_neighbors_num[p_i]):
density_k = self.ps.density[k]
density_k2 = (density_k * density_k)
p_k = self.ps.solid_neighbors[(p_i, j)]
x_k = self.ps.x[p_k]
sum_neighbor_inner += ((self.ps.m_V[p_k] * self.cubic_kernel_derivative((x_i - x_k))) / density_k2)
kernel_grad_ij = self.cubic_kernel_derivative((x_i - x_j))
sum_neighbor -= (self.ps.m_V[p_j] * sum_neighbor_inner).dot(kernel_grad_ij)
sum_neighbor_of_neighbor -= (self.ps.m_V[p_j] * kernel_grad_ij).dot(kernel_grad_ij)
sum_neighbor_of_neighbor *= (m_Vi / density_i2)
self.a_ii[p_i] += ((((sum_neighbor + sum_neighbor_of_neighbor) * self.dt[None]) * self.dt[None]) * density_02)
for p_i in range(self.ps.particle_num[None]):
if (self.ps.material[p_i] == self.ps.material_fluid):
self.ps.v[p_i] += (self.dt[None] * self.ps.acceleration[p_i])
for p_i in range(self.ps.particle_num[None]):
x_i = self.ps.x[p_i]
density_i = self.ps.density[p_i]
divergence = 0.0
for j in range(self.ps.fluid_neighbors_num[p_i]):
p_j = self.ps.fluid_neighbors[(p_i, j)]
x_j = self.ps.x[p_j]
divergence += (self.ps.m_V[p_j] * (self.ps.v[p_i] - self.ps.v[p_j]).dot(self.cubic_kernel_derivative((x_i - x_j))))
for j in range(self.ps.solid_neighbors_num[p_i]):
p_j = self.ps.solid_neighbors[(p_i, j)]
x_j = self.ps.x[p_j]
divergence += (self.ps.m_V[p_j] * (self.ps.v[p_i] - self.ps.v[p_j]).dot(self.cubic_kernel_derivative((x_i - x_j))))
self.density_deviation[p_i] = ((self.density_0 - density_i) - ((self.dt[None] * divergence) * self.density_0))
for p_i in range(self.ps.particle_num[None]):
self.last_pressure[p_i] = (0.5 * self.ps.pressure[p_i])
def pressure_solve(self):
iteration = 0
while (iteration < 1000):
self.avg_density_error[None] = 0.0
self.pressure_solve_iteration()
iteration += 1
if ((iteration % 100) == 0):
print(f'iter {iteration}, density err {self.avg_density_error[None]}')
if (self.avg_density_error[None] < 0.001):
break
def pressure_solve_iteration(self):
omega = 0.5
for p_i in range(self.ps.particle_num[None]):
x_i = self.ps.x[p_i]
d_v = ti.Vector([0.0 for _ in range(self.ps.dim)])
dpi = (self.last_pressure[p_i] / (self.ps.density[p_i] ** 2))
for j in range(self.ps.fluid_neighbors_num[p_i]):
p_j = self.ps.fluid_neighbors[(p_i, j)]
x_j = self.ps.x[p_j]
dpj = (self.last_pressure[p_j] / (self.ps.density[p_j] ** 2))
d_v += ((((- self.density_0) * self.ps.m_V[p_j]) * (dpi + dpj)) * self.cubic_kernel_derivative((x_i - x_j)))
dpj = (self.last_pressure[p_i] / (self.density_0 ** 2))
for j in range(self.ps.solid_neighbors_num[p_i]):
p_j = self.ps.solid_neighbors[(p_i, j)]
x_j = self.ps.x[p_j]
d_v += ((((- self.density_0) * self.ps.m_V[p_j]) * (dpi + dpj)) * self.cubic_kernel_derivative((x_i - x_j)))
self.pressure_accel[p_i] += d_v
for p_i in range(self.ps.particle_num[None]):
x_i = self.ps.x[p_i]
Ap = 0.0
dt2 = (self.dt[None] * self.dt[None])
accel_p_i = self.pressure_accel[p_i]
for j in range(self.ps.fluid_neighbors_num[p_i]):
p_j = self.ps.fluid_neighbors[(p_i, j)]
x_j = self.ps.x[p_j]
Ap += (self.ps.m_V[p_j] * (accel_p_i - self.pressure_accel[p_j]).dot(self.cubic_kernel_derivative((x_i - x_j))))
for j in range(self.ps.solid_neighbors_num[p_i]):
p_j = self.ps.solid_neighbors[(p_i, j)]
x_j = self.ps.x[p_j]
Ap += (self.ps.m_V[p_j] * (accel_p_i - self.pressure_accel[p_j]).dot(self.cubic_kernel_derivative((x_i - x_j))))
Ap *= (dt2 * self.density_0)
if (abs(self.a_ii[p_i]) > 1e-06):
self.ps.pressure[p_i] = ti.max((self.last_pressure[p_i] + ((omega * (self.density_deviation[p_i] - Ap)) / self.a_ii[p_i])), 0.0)
else:
self.ps.pressure[p_i] = 0.0
if (self.ps.pressure[p_i] != 0.0):
self.avg_density_error[None] += (abs((Ap - self.density_deviation[p_i])) / self.density_0)
self.avg_density_error[None] /= self.ps.particle_num[None]
for p_i in range(self.ps.particle_num[None]):
self.last_pressure[p_i] = self.ps.pressure[p_i]
def compute_densities(self):
for p_i in range(self.ps.particle_num[None]):
if (self.ps.material[p_i] != self.ps.material_fluid):
continue
x_i = self.ps.x[p_i]
self.ps.density[p_i] = (self.ps.m_V[p_i] * self.cubic_kernel(0.0))
for j in range(self.ps.fluid_neighbors_num[p_i]):
p_j = self.ps.fluid_neighbors[(p_i, j)]
x_j = self.ps.x[p_j]
self.ps.density[p_i] += (self.ps.m_V[p_j] * self.cubic_kernel((x_i - x_j).norm()))
for j in range(self.ps.solid_neighbors_num[p_i]):
p_j = self.ps.solid_neighbors[(p_i, j)]
x_j = self.ps.x[p_j]
self.ps.density[p_i] += (self.ps.m_V[p_j] * self.cubic_kernel((x_i - x_j).norm()))
self.ps.density[p_i] *= self.density_0
def compute_pressure_forces(self):
    """Compute the symmetric SPH pressure acceleration for fluid particles.

    a_i = -rho0 * sum_j V_j * (p_i/rho_i^2 + p_j/rho_j^2) * gradW_ij,
    stored in self.pressure_accel.  Non-fluid particles get zero.
    """
    for p_i in range(self.ps.particle_num[None]):
        if (self.ps.material[p_i] != self.ps.material_fluid):
            self.pressure_accel[p_i].fill(0)
            continue
        self.pressure_accel[p_i].fill(0)
        x_i = self.ps.x[p_i]
        d_v = ti.Vector([0.0 for _ in range(self.ps.dim)])
        dpi = (self.ps.pressure[p_i] / (self.ps.density[p_i] ** 2))
        for j in range(self.ps.fluid_neighbors_num[p_i]):
            p_j = self.ps.fluid_neighbors[(p_i, j)]
            x_j = self.ps.x[p_j]
            dpj = (self.ps.pressure[p_j] / (self.ps.density[p_j] ** 2))
            d_v += ((((- self.density_0) * self.ps.m_V[p_j]) * (dpi + dpj)) * self.cubic_kernel_derivative((x_i - x_j)))
        # Boundary handling: mirror the particle's own pressure at rest
        # density for all solid neighbors (pressure mirroring).
        dpj = (self.ps.pressure[p_i] / (self.density_0 ** 2))
        for j in range(self.ps.solid_neighbors_num[p_i]):
            p_j = self.ps.solid_neighbors[(p_i, j)]
            x_j = self.ps.x[p_j]
            d_v += ((((- self.density_0) * self.ps.m_V[p_j]) * (dpi + dpj)) * self.cubic_kernel_derivative((x_i - x_j)))
        self.pressure_accel[p_i] = d_v
def compute_non_pressure_forces(self):
    """Accumulate gravity and fluid-fluid viscosity into ps.acceleration."""
    for p_i in range(self.ps.particle_num[None]):
        x_i = self.ps.x[p_i]
        d_v = ti.Vector([0.0 for _ in range(self.ps.dim)])
        # Gravity acts along the y axis only.
        d_v[1] = self.g
        for j in range(self.ps.fluid_neighbors_num[p_i]):
            p_j = self.ps.fluid_neighbors[(p_i, j)]
            x_j = self.ps.x[p_j]
            d_v += self.viscosity_force(p_i, p_j, (x_i - x_j))
        self.ps.acceleration[p_i] = d_v
def advect(self):
    """Symplectic Euler step: apply pressure acceleration, then move particles.

    Only fluid particles are integrated; boundary particles stay fixed.
    """
    for p_i in range(self.ps.particle_num[None]):
        if (self.ps.material[p_i] == self.ps.material_fluid):
            self.ps.v[p_i] += (self.dt[None] * self.pressure_accel[p_i])
            self.ps.x[p_i] += (self.dt[None] * self.ps.v[p_i])
def substep(self):
    """One full solver substep: densities -> non-pressure forces ->
    predicted advection -> iterative pressure solve -> pressure forces ->
    particle advection."""
    self.compute_densities()
    self.compute_non_pressure_forces()
    self.predict_advection()
    self.pressure_solve()
    self.compute_pressure_forces()
    self.advect()
# BUG FIX: the decorator line had lost its `@pytest.mark` prefix (a bare
# `.parametrize(...)` is a syntax error); restored.  `np.NaN` was replaced
# with `np.nan` — the upper-case alias was removed in NumPy 2.0.
@pytest.mark.parametrize(
    'dataset, expected_missed',
    (
        (pd.DataFrame(), 0),
        (pd.DataFrame({'feature': []}), 0),
        (pd.DataFrame({'feature': [1, 2, 3]}), 0),
        # None, pd.NA, np.nan and pd.NaT must all be counted as missing.
        (pd.DataFrame({'feature1': [1, None, pd.NA], 'feature2': [np.nan, None, pd.NaT]}), 5),
    ),
)
def test_get_number_of_all_pandas_missed_values(dataset: pd.DataFrame, expected_missed: int) -> None:
    """The counter must see every missing marker across all columns."""
    assert (get_number_of_all_pandas_missed_values(dataset) == expected_missed)
class TestTCPServerConnection():
    """Integration tests for the TCP server connection implementation."""

    # BUG FIX: the decorator had been reduced to a bare `.asyncio` expression
    # (syntax error); restored so pytest-asyncio actually runs the coroutine.
    @pytest.mark.asyncio
    async def test_receive_raises_exception(self):
        """receive() must swallow unexpected errors, log them, and return None."""
        port = get_unused_tcp_port()
        tcp_server = _make_tcp_server_connection('address_server', 'public_key_server', '127.0.0.1', port)
        tcp_client = _make_tcp_client_connection('address_client', 'public_key_client', '127.0.0.1', port)
        await tcp_server.connect()
        await tcp_client.connect()
        # Give the event loop a moment to complete the connection handshake.
        await asyncio.sleep(0.1)
        with unittest.mock.patch.object(tcp_server.logger, 'error') as mock_logger:
            with unittest.mock.patch('asyncio.wait', side_effect=Exception('generic exception')):
                result = await tcp_server.receive()
                assert result is None
                mock_logger.assert_any_call('Error in the receiving loop: generic exception')
        await tcp_client.disconnect()
        await tcp_server.disconnect()
def normalize_fixture(fixture):
    """Decode a trie test fixture: hex-decode 0x-prefixed strings, otherwise
    coerce text to bytes; None values are preserved as None.

    Returns {'in': tuple of (key, value) byte pairs, 'root': decoded root hash}.
    """
    def _decode(raw):
        # 0x-prefixed entries are hex blobs; everything else is plain text.
        if is_0x_prefixed(raw):
            return decode_hex(raw)
        return text_if_str(to_bytes, raw)

    raw_in = fixture['in']
    pairs = raw_in.items() if isinstance(raw_in, dict) else raw_in
    normalized_in = tuple(
        (_decode(key), _decode(value) if value is not None else None)
        for key, value in pairs
    )
    return {'in': normalized_in, 'root': decode_hex(fixture['root'])}
class OptionPlotoptionsSunburstSonificationTracksMapping(Options):
    """Option container for Highcharts
    `plotOptions.sunburst.sonification.tracks.mapping`.

    Each accessor returns a lazily-created sub-option object via
    `_config_sub_data`; the paired `text` definitions are a getter/setter
    pair.  NOTE(review): the `@property`/`@text.setter` decorators appear to
    have been stripped from this copy (the second `def text` shadows the
    first at runtime) — confirm against the sibling Option classes.
    """

    def frequency(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsSunburstSonificationTracksMappingFrequency)

    def gapBetweenNotes(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsSunburstSonificationTracksMappingGapbetweennotes)

    def highpass(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsSunburstSonificationTracksMappingHighpass)

    def lowpass(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsSunburstSonificationTracksMappingLowpass)

    def noteDuration(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsSunburstSonificationTracksMappingNoteduration)

    def pan(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsSunburstSonificationTracksMappingPan)

    def pitch(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsSunburstSonificationTracksMappingPitch)

    def playDelay(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsSunburstSonificationTracksMappingPlaydelay)

    def rate(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsSunburstSonificationTracksMappingRate)

    def text(self):
        # Getter: no explicit default.
        return self._config_get(None)

    def text(self, text: str):
        # Setter counterpart for `text`.
        self._config(text, js_type=False)

    def time(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsSunburstSonificationTracksMappingTime)

    def tremolo(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsSunburstSonificationTracksMappingTremolo)

    def volume(self) -> 'OptionPlotoptionsSunburstSonificationTracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsSunburstSonificationTracksMappingVolume)
def rename_policy(ctx, policy_id, policy_type, original_name, new_name):
    """Rename an Okta policy by PUTting the new name to /policies/{policy_id}.

    Logs and indexes the attempt and its outcome; prints colored status via
    click.  Returns None in all cases.
    """
    headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Authorization': f'SSWS {ctx.obj.api_token}'}
    params = {}
    payload = {'type': policy_type, 'name': new_name}
    url = f'{ctx.obj.base_url}/policies/{policy_id}'
    try:
        msg = f'Attempting to rename policy "{original_name}" ({policy_id}) to "{new_name}"'
        LOGGER.info(msg)
        index_event(ctx.obj.es, module=__name__, event_type='INFO', event=msg)
        click.echo(f'[*] {msg}')
        response = ctx.obj.session.put(url, headers=headers, params=params, json=payload, timeout=7)
    except Exception as e:
        LOGGER.error(e, exc_info=True)
        index_event(ctx.obj.es, module=__name__, event_type='ERROR', event=e)
        click.secho(f'[!] {URL_OR_API_TOKEN_ERROR}', fg='red')
        # BUG FIX: the original set `response = None` here and then fell
        # through to `response.ok`, crashing with AttributeError on any
        # request failure.  Bail out instead.
        return
    if response.ok:
        msg = f'Policy "{original_name}" ({policy_id}) changed to "{new_name}"'
        LOGGER.info(msg)
        index_event(ctx.obj.es, module=__name__, event_type='INFO', event=msg)
        click.secho(f'[*] {msg}', fg='green')
        # Brief pause to avoid hammering the API on batch renames.
        time.sleep(1)
    else:
        # Parse the error body once (the original called response.json() twice).
        error_body = response.json()
        msg = f'''Error modifying policy {policy_id}
Response Code: {response.status_code} | Response Reason: {response.reason}
Error Code: {error_body.get('errorCode')} | Error Summary: {error_body.get('errorSummary')}'''
        LOGGER.error(msg)
        index_event(ctx.obj.es, module=__name__, event_type='ERROR', event=msg)
        click.secho(f'[!] {msg}', fg='red')
_toolkit([ToolkitName.qt])  # NOTE(review): looks like a stripped decorator (e.g. @requires_toolkit) — confirm
(no_gui_test_assistant, 'No GuiTestAssistant')  # NOTE(review): likely the arguments of a stripped skip decorator — confirm
class TestTextEditorQt(BaseTestMixin, GuiTestAssistant, UnittestTools, unittest.TestCase):
    """Qt-specific tests for TextEditor: placeholder text and cancel button."""

    def setUp(self):
        # Both mixins need explicit setup; order mirrors tearDown in reverse.
        BaseTestMixin.setUp(self)
        GuiTestAssistant.setUp(self)

    def tearDown(self):
        GuiTestAssistant.tearDown(self)
        BaseTestMixin.tearDown(self)

    def test_text_editor_placeholder_text(self):
        """A simple-style editor exposes its `placeholder` as Qt placeholderText."""
        foo = Foo()
        editor = TextEditor(placeholder='Enter name')
        view = View(Item(name='name', editor=editor))
        tester = UITester()
        with tester.create_ui(foo, dict(view=view)) as ui:
            (name_editor,) = ui.get_editors('name')
            self.assertEqual(name_editor.control.placeholderText(), 'Enter name')

    def test_text_editor_placeholder_text_and_readonly(self):
        """Placeholder text must survive read_only=True."""
        foo = Foo()
        editor = TextEditor(placeholder='Enter name', read_only=True)
        view = View(Item(name='name', editor=editor))
        tester = UITester()
        with tester.create_ui(foo, dict(view=view)) as ui:
            (name_editor,) = ui.get_editors('name')
            self.assertEqual(name_editor.control.placeholderText(), 'Enter name')

    def test_text_editor_default_view(self):
        """Without an explicit editor the placeholder defaults to empty."""
        foo = Foo()
        tester = UITester()
        with tester.create_ui(foo) as ui:
            (name_editor,) = ui.get_editors('name')
            self.assertEqual(name_editor.control.placeholderText(), '')

    def test_text_editor_custom_style_placeholder(self):
        """Custom-style control may be a widget without placeholderText; tolerate that."""
        foo = Foo()
        view = View(Item(name='name', style='custom', editor=TextEditor(placeholder='Enter name')))
        tester = UITester()
        with tester.create_ui(foo, dict(view=view)) as ui:
            (name_editor,) = ui.get_editors('name')
            try:
                placeholder = name_editor.control.placeholderText()
            except AttributeError:
                # Control type without placeholder support — nothing to check.
                pass
            else:
                self.assertEqual(placeholder, 'Enter name')

    def test_cancel_button(self):
        """cancel_button=True maps to Qt's clear-button when available."""
        foo = Foo()
        view = View(Item(name='name', style='simple', editor=TextEditor(cancel_button=True)))
        tester = UITester()
        with tester.create_ui(foo, dict(view=view)) as ui:
            (name_editor,) = ui.get_editors('name')
            if hasattr(name_editor.control, 'isClearButtonEnabled'):
                self.assertTrue(name_editor.control.isClearButtonEnabled())
def test_grow_node(leaf_node, left_rule, right_rule, X):
    """Growing a leaf must produce a SplitNode with both children populated
    and the left rule recorded as the most recent rule.

    NOTE(review): the `X` fixture is unused in the body — confirm whether it
    is required for fixture side effects or can be dropped.
    """
    grown_leaf = LeafNode.grow_node(leaf_node, left_rule=left_rule, right_rule=right_rule)
    assert isinstance(grown_leaf, SplitNode)
    assert (grown_leaf.left_child is not None)
    assert (grown_leaf.right_child is not None)
    assert (grown_leaf.most_recent_rule() == left_rule)
def dispatch_wp(process_m_fn, process_s_fn, wp):
    """Dispatch a work package to the MATLAB or Simulink processing function.

    Parameters: process_m_fn / process_s_fn process a MATLAB or Simulink work
    package respectively; wp is the package to dispatch.  Returns a list of
    work_package.Result objects.  Disabled packages are registered as
    exclusions; Simulink files additionally spawn one embedded-MATLAB work
    package per Matlab_Function block found in the model.
    """
    results = []
    try:
        if (not wp.cfg.enabled):
            # Config disabled this file: record the exclusion, report failure.
            wp.mh.register_exclusion(wp.filename)
            results.append(work_package.Result(wp, False))
        elif isinstance(wp, work_package.SIMULINK_File_WP):
            wp.register_file()
            try:
                wp.parse_simulink()
            except errors.Error:
                # Unparseable model: report failure and stop here.
                results.append(work_package.Result(wp, False))
                return results
            if wp.n_content:
                # Process each embedded MATLAB function before the model itself.
                for block in wp.n_content.iter_all_blocks():
                    if isinstance(block, s_ast.Matlab_Function):
                        block_wp = work_package.Embedded_MATLAB_WP(wp, block)
                        block_wp.register_file()
                        results.append(process_m_fn(block_wp))
            results.append(process_s_fn(wp))
            if wp.modified:
                wp.save_and_close()
        elif isinstance(wp, work_package.MATLAB_File_WP):
            wp.register_file()
            results.append(process_m_fn(wp))
        else:
            raise errors.ICE(('unknown work package kind %s' % wp.__class__.__name__))
    except errors.Error as err:
        # Any Error escaping the handlers above is an internal bug.
        raise errors.ICE('uncaught Error in process_generic_wp') from err
    return results
class AttendeeSearchForm(forms.Form):
    """Search form for picking an attendee of a given event via autocomplete.

    The event slug is carried in a hidden field and forwarded to the
    autocomplete view so the attendee choices can be filtered per event.
    """

    def __init__(self, event_slug, *args, **kwargs):
        # Seed the hidden field's initial value before Form.__init__ binds it.
        kwargs.update(initial={'event_slug': event_slug})
        super().__init__(*args, **kwargs)
        self.fields['event_slug'].widget = forms.HiddenInput()

    # Field declarations execute at class-creation time, so placing them
    # after __init__ is harmless (if unconventional).
    event_slug = forms.CharField()
    attendee = forms.ModelChoiceField(queryset=Attendee.objects.all(), widget=autocomplete.ModelSelect2(url='attendee-autocomplete', forward=['event_slug']), required=False, label=_('Attendee'))
_event  # NOTE(review): bare name — almost certainly a stripped decorator (e.g. @_event) — confirm
class TitleSet(ThreadEvent):
    """Thread event recording that the thread's title was changed."""

    # attrs-declared fields; the @attr.s class decorator appears to have been
    # stripped from this copy of the file.
    thread = attr.ib(type='_threads.Group')
    title = attr.ib(type=Optional[str])
    at = attr.ib(type=datetime.datetime)

    def _parse(cls, session, data):
        """Build a TitleSet from a raw event payload.

        NOTE(review): takes `cls` — evidently a @classmethod whose decorator
        was lost; an empty title string is normalized to None.
        """
        (author, thread, at) = cls._parse_metadata(session, data)
        return cls(author=author, thread=thread, title=(data['name'] or None), at=at)
def superlu_sparse_2_dense(sparse_matrix, output=False):
    """Convert a superlu-style sparse matrix wrapper to a dense matrix.

    Parameters: sparse_matrix must expose getCSRrepresentation() (returning
    the row-pointer, column-index and data arrays) and a 2-D .shape; output
    is forwarded to the underlying converter.

    Improvement: the original called getCSRrepresentation() three times;
    fetch the CSR triple once and unpack it.
    """
    csr = sparse_matrix.getCSRrepresentation()
    rowptr = csr[0]
    colptr = csr[1]
    data = csr[2]
    nr = sparse_matrix.shape[0]
    nc = sparse_matrix.shape[1]
    return _pythonCSR_2_dense(rowptr, colptr, data, nr, nc, output)
class RunWatch(IntervalModule):
    """Status-bar module reporting whether the process named in a pidfile is alive.

    Settings: `path` is a glob matching the pidfile, `name` labels the output;
    format/color pairs select the up/down presentation.
    """

    format_up = '{name}'
    format_down = '{name}'
    color_up = '#00FF00'
    color_down = '#FF0000'
    settings = ('format_up', 'format_down', 'color_up', 'color_down', 'path', 'name')
    required = ('path', 'name')

    # BUG FIX: declared without `self` yet called as self.is_process_alive(pid),
    # which raised TypeError (two args to a one-arg function).  It must be a
    # staticmethod.
    @staticmethod
    def is_process_alive(pid):
        """Return True when /proc has an entry for *pid* (Linux-only probe)."""
        return os.path.exists('/proc/{pid}/'.format(pid=pid))

    def run(self):
        """Read the pidfile and publish the up/down status block."""
        alive = False
        pid = 0
        try:
            # self.path may be a glob pattern; use the first match.
            with open(glob.glob(self.path)[0], 'r') as f:
                pid = int(f.read().strip())
            alive = self.is_process_alive(pid)
        except Exception:
            # Best effort: a missing or garbled pidfile simply reports "down".
            pass
        if alive:
            fmt = self.format_up
            color = self.color_up
        else:
            fmt = self.format_down
            color = self.color_down
        self.output = {'full_text': fmt.format(name=self.name, pid=pid), 'color': color, 'instance': self.name}
class DatasetConfig(Base):
    """Associates a ConnectionConfig with a CtlDataset for DSR graph traversal.

    NOTE(review): upsert_with_ctl_dataset and create_or_update take `cls` —
    they are evidently @classmethods whose decorators were stripped from this
    copy; confirm against the original model definitions.
    """

    # FK to the owning connection; one connection can own many dataset configs.
    connection_config_id = Column(String, ForeignKey(ConnectionConfig.id_field_path), nullable=False)
    fides_key = Column(String, index=True, unique=True, nullable=False)
    # FK to the underlying CTL dataset whose contents drive the graph.
    ctl_dataset_id = Column(String, ForeignKey(CtlDataset.id), index=True, nullable=False)
    connection_config = relationship('ConnectionConfig', back_populates='datasets')
    ctl_dataset = relationship(CtlDataset, backref='dataset_configs')

    def upsert_with_ctl_dataset(cls, db: Session, *, data: Dict[(str, Any)]) -> 'DatasetConfig':
        """Create or update a DatasetConfig together with its CtlDataset.

        `data` must contain connection_config_id, fides_key and a 'dataset'
        payload that is validated through the Dataset schema before being
        written onto the CtlDataset row.
        """
        def upsert_ctl_dataset(ctl_dataset_obj: Optional[CtlDataset]) -> CtlDataset:
            # Validate the raw payload, then either mutate the existing row
            # in place or insert a brand-new CtlDataset.
            ctl_dataset_data = data.copy()
            validated_data = Dataset(**ctl_dataset_data.get('dataset', {}))
            if ctl_dataset_obj:
                for (key, val) in validated_data.dict().items():
                    setattr(ctl_dataset_obj, key, val)
            else:
                ctl_dataset_obj = CtlDataset(**validated_data.dict())
            db.add(ctl_dataset_obj)
            db.commit()
            db.refresh(ctl_dataset_obj)
            return ctl_dataset_obj
        dataset = DatasetConfig.filter(db=db, conditions=((DatasetConfig.connection_config_id == data['connection_config_id']) & (DatasetConfig.fides_key == data['fides_key']))).first()
        if dataset:
            upsert_ctl_dataset(dataset.ctl_dataset)
            # The raw 'dataset' payload is not a DatasetConfig column; drop it
            # before updating the config row itself.
            data.pop('dataset', None)
            dataset.update(db=db, data=data)
        else:
            # Reuse an existing CtlDataset with the same fides_key if present.
            fetched_ctl_dataset = db.query(CtlDataset).filter((CtlDataset.fides_key == data.get('dataset', {}).get('fides_key'))).first()
            ctl_dataset = upsert_ctl_dataset(fetched_ctl_dataset)
            data['ctl_dataset_id'] = ctl_dataset.id
            data.pop('dataset', None)
            dataset = cls.create(db=db, data=data)
        return dataset

    def create_or_update(cls, db: Session, *, data: Dict[(str, Any)]) -> 'DatasetConfig':
        """Create or update only the DatasetConfig row (no CtlDataset handling)."""
        dataset = DatasetConfig.filter(db=db, conditions=((DatasetConfig.connection_config_id == data['connection_config_id']) & (DatasetConfig.fides_key == data['fides_key']))).first()
        if dataset:
            dataset.update(db=db, data=data)
        else:
            dataset = cls.create(db=db, data=data)
        return dataset

    def get_graph(self) -> GraphDataset:
        """Return the traversal graph for this dataset, merged with the SaaS
        config's graph when this config backs a SaaS connector."""
        dataset_graph = convert_dataset_to_graph(Dataset.from_orm(self.ctl_dataset), self.connection_config.key)
        if ((self.connection_config.connection_type == ConnectionType.saas) and (self.connection_config.saas_config is not None) and (self.connection_config.saas_config['fides_key'] == self.fides_key)):
            dataset_graph = merge_datasets(dataset_graph, self.connection_config.get_saas_config().get_graph(self.connection_config.secrets))
        else:
            logger.debug('Connection config with key {} is not a saas config, skipping merge dataset', self.connection_config.key)
        return dataset_graph

    def get_dataset_with_stubbed_collection(self) -> GraphDataset:
        """Return the graph with its collections replaced by a single empty
        stub — useful for representing the dataset as a bare node."""
        dataset_graph = self.get_graph()
        stubbed_collection = Collection(name=dataset_graph.name, fields=[], after=set())
        dataset_graph.collections = [stubbed_collection]
        return dataset_graph
def make_ua():
rrange = (lambda a, b, c=1: (((c == 1) and random.randrange(a, b)) or int(((1.0 * random.randrange((a * c), (b * c))) / c))))
ua = ('Mozilla/%d.0 (Windows NT %d.%d) AppleWebKit/%d (KHTML, like Gecko) Chrome/%d.%d Safari/%d' % (rrange(4, 7, 10), rrange(5, 7), rrange(0, 3), rrange(535, 538, 10), rrange(21, 27, 10), rrange(0, 9999, 10), rrange(535, 538, 10))) |
class ObjectClassCount(AgencyBase):
    """API view: count of object classes with obligation or outlay activity
    for an agency in a given fiscal year."""

    endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/agency/toptier_code/object_class/count.md'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # AgencyBase validates these request params for us.
        self.params_to_validate = ['fiscal_year']

    _response()  # NOTE(review): remains of a stripped decorator (e.g. @cache_response()) — confirm
    def get(self, request: Request, *args: Any, **kwargs: Any) -> Response:
        """Return the count payload for the validated toptier_code/fiscal_year."""
        return Response({'toptier_code': self.toptier_code, 'fiscal_year': self.fiscal_year, 'object_class_count': self.get_object_class_count(), 'messages': self.standard_response_messages})

    def get_object_class_count(self):
        """Count object classes that have any nonzero obligation or outlay in
        the fiscal year's latest submissions for this agency."""
        submission_ids = get_latest_submission_ids_for_fiscal_year(self.fiscal_year)
        # Nonzero test is expressed as (> 0 OR < 0) on each measure.
        filters = [Q(object_class_id=OuterRef('pk')), Q(submission_id__in=submission_ids), Q(treasury_account__funding_toptier_agency=self.toptier_agency), Q((((Q(obligations_incurred_by_program_object_class_cpe__gt=0) | Q(obligations_incurred_by_program_object_class_cpe__lt=0)) | Q(gross_outlay_amount_by_program_object_class_cpe__gt=0)) | Q(gross_outlay_amount_by_program_object_class_cpe__lt=0)))]
        return ObjectClass.objects.filter(Exists(FinancialAccountsByProgramActivityObjectClass.objects.filter(*filters))).count()
def compile_clip(pt_mod, batch_size=(1, 8), seqlen=64, dim=768, num_heads=12, depth=12, use_fp16_acc=False, convert_conv_to_gemm=False, act_layer='gelu', constants=True):
    """Compile a PyTorch CLIP text transformer into an AITemplate module.

    pt_mod is the source PyTorch model; batch_size is a (min, max) range that
    becomes a dynamic IntVar; when `constants` is True the mapped weights are
    baked into the compiled module, otherwise they must be supplied at
    runtime.  The artifact is written under ./tmp as 'CLIPTextModel'.
    """
    mask_seq = 0
    causal = True  # text encoder uses a causal attention mask
    ait_mod = ait_CLIPTextTransformer(num_hidden_layers=depth, hidden_size=dim, num_attention_heads=num_heads, batch_size=batch_size, seq_len=seqlen, causal=causal, mask_seq=mask_seq, act_layer=act_layer)
    ait_mod.name_parameter_tensor()
    # Inference only: freeze the PyTorch module before extracting weights.
    pt_mod = pt_mod.eval()
    params_ait = map_clip_params(pt_mod, batch_size, seqlen, depth)
    # Rebind batch_size as a dynamic dimension for the AIT graph inputs.
    batch_size = IntVar(values=list(batch_size), name='batch_size')
    input_ids_ait = Tensor([batch_size, seqlen], name='input0', dtype='int64', is_input=True)
    position_ids_ait = Tensor([batch_size, seqlen], name='input1', dtype='int64', is_input=True)
    Y = ait_mod(input_ids=input_ids_ait, position_ids=position_ids_ait)
    mark_output(Y)
    target = detect_target(use_fp16_acc=use_fp16_acc, convert_conv_to_gemm=convert_conv_to_gemm)
    compile_model(Y, target, './tmp', 'CLIPTextModel', constants=(params_ait if constants else None))
def create_test_engine_with_file(enforce_fks=True):
    """Create a SQLite engine backed by a fresh temporary file.

    Returns (engine, tmpfile_path); the caller owns the file and must delete
    it when done.  BUG FIX: if create_engine raised, the original leaked the
    temp file — it is now removed on failure.
    """
    (fd, tmpfile) = tempfile.mkstemp('.db', 'forseti-test-')
    # The engine opens the database by path; the mkstemp handle is not needed.
    os.close(fd)
    try:
        LOGGER.info('Creating database at %s', tmpfile)
        engine = create_engine('sqlite:///{}'.format(tmpfile), sqlite_enforce_fks=enforce_fks, pool_size=5, connect_args={'check_same_thread': False}, poolclass=SingletonThreadPool)
        return (engine, tmpfile)
    except Exception:
        # Don't leave orphaned temp databases behind when setup fails.
        os.unlink(tmpfile)
        raise
def video_categories_list(key, part, id=None, regionCode=None, hl=None):
    """Build a YouTube Data API v3 videoCategories.list request.

    Exactly one of `id` or `regionCode` must be provided; `key`, `part` and
    `hl` are forwarded as query parameters via _combine_requests.
    """
    # Must stay the first statement: snapshot of exactly the call arguments.
    args = locals()
    if (sum([bool(p) for p in [id, regionCode]]) != 1):
        raise ValueError("make sure you specify exactly one of ['id', 'regionCode']")
    # BUG FIX: the URL literal was truncated in this copy of the file
    # (`base_url = '` — a syntax error); restored to the documented
    # videoCategories endpoint of the YouTube Data API v3.
    base_url = 'https://www.googleapis.com/youtube/v3/videoCategories'
    return _combine_requests(args, base_url, count=None, max_allowed=None)
class EvaluatorConnectionInfo():
    """Connection details for an evaluator server and its derived endpoints.

    NOTE(review): annotated fields with defaults and no __init__ strongly
    suggest a stripped @dataclass decorator, and the *_uri methods read like
    @property accessors — confirm against the original source.
    """

    host: str
    port: int
    url: str
    # Optional TLS certificate (PEM text or raw bytes) and auth token.
    cert: Optional[Union[(str, bytes)]] = None
    token: Optional[str] = None

    def dispatch_uri(self) -> str:
        # Endpoint receiving dispatched work.
        return f'{self.url}/dispatch'

    def client_uri(self) -> str:
        # Endpoint clients connect to.
        return f'{self.url}/client'

    def result_uri(self) -> str:
        # Endpoint results are posted to.
        return f'{self.url}/result'
class RouteMethodInfo(_Traversable):
    """Metadata about a single route responder method.

    Captures the HTTP method, where the responder was defined, its function
    name, whether it is internal, and — for `on_<verb>[_<suffix>]`-style
    names — the optional suffix part.
    """

    __visit_name__ = 'route_method'

    def __init__(self, method: str, source_info: str, function_name: str, internal: bool):
        self.method = method
        self.source_info = source_info
        self.function_name = function_name
        self.internal = internal
        # Responder names follow the "on_<verb>[_<suffix>]" convention; keep
        # everything after the verb as the suffix, empty when absent.
        name_parts = function_name.split('_')
        self.suffix = '_'.join(name_parts[2:]) if function_name.startswith('on') else ''
class PaytmPaymentsManager():
    """Helpers for talking to the Paytm payment gateway."""

    def paytm_endpoint(self):
        """Return the Paytm gateway base URL for the configured mode.

        NOTE(review): both URL literals were truncated in this copy of the
        file (`url = '` — syntax errors); restored to Paytm's documented
        staging/production gateways — verify the exact paths against the
        original project.
        """
        if (get_settings()['paytm_mode'] == 'test'):
            url = 'https://securegw-stage.paytm.in/theia/api/v1'
        else:
            url = 'https://securegw.paytm.in/theia/api/v1'
        return url

    # BUG FIX: declared without `self`; restored the @staticmethod decorator
    # that was evidently stripped from this copy.
    @staticmethod
    def generate_checksum(paytm_params):
        """Sign paytm_params['body'] with the merchant key for the active mode."""
        if (get_settings()['paytm_mode'] == 'test'):
            merchant_key = get_settings()['paytm_sandbox_secret']
        else:
            merchant_key = get_settings()['paytm_live_secret']
        return checksum.generate_checksum_by_str(json.dumps(paytm_params['body']), merchant_key)

    # BUG FIX: same stripped-@staticmethod situation as above.
    @staticmethod
    def hit_paytm_endpoint(url, head, body=None):
        """POST {'body': body, 'head': head} as JSON to *url*; return the parsed response."""
        paytm_params = {}
        paytm_params['body'] = body
        paytm_params['head'] = head
        post_data = json.dumps(paytm_params)
        response = requests.post(url, data=post_data, headers={'Content-type': 'application/json'}).json()
        return response
class pygaze_init(item):
    """OpenSesame plugin item that initializes and calibrates an eye tracker."""

    description = u'Initialize and calibrate eye tracker'

    def __init__(self, name, experiment, string=None):
        """Construct the item and point PyGaze's settings at this experiment."""
        item.__init__(self, name, experiment, string)
        self.reload_pygaze()

    def reset(self):
        """Set the default values for all item variables."""
        self.var.tracker_type = u'Simple dummy'
        self.var.calibrate = u'yes'
        self.var.calbeep = u'yes'
        # Saccade detection thresholds (velocity / acceleration).
        self.var.sacc_vel_thr = 35
        self.var.sacc_acc_thr = 9500
        self.var._logfile = u'automatic'
        self.var.alea_api_key = u'Contact Alea for an API key'
        self.var.alea_animated_calibration = u'no'
        self.var.eyelink_force_drift_correct = u'yes'
        self.var.eyelink_pupil_size_mode = u'area'
        self.var.smi_ip = u'127.0.0.1'
        self.var.smi_send_port = 4444
        self.var.smi_recv_port = 5555
        self.var.tobiiglasses_address = u'192.168.71.50'
        self.var.tobiiglasses_udpport = 49152

    def close(self):
        """Deinitialize the tracker at experiment cleanup.

        The sleeps give the tracker time to flush before/after closing.
        """
        debug.msg(u'Starting PyGaze deinitialisation')
        self.clock.sleep(1000)
        self.experiment.pygaze_eyetracker.close()
        self.experiment.pygaze_eyetracker = None
        debug.msg(u'Finished PyGaze deinitialisation')
        self.clock.sleep(1000)

    def draw_calibration_canvas(self, x, y):
        """Draw a calibration/drift-correction target at (x, y), optionally beeping."""
        dc_canvas = canvas(self.experiment)
        if (self.var.uniform_coordinates == u'yes'):
            # Uniform coordinates are center-origin; shift accordingly.
            x -= dc_canvas._xcenter
            y -= dc_canvas._ycenter
        dc_canvas.fixdot(x, y, style=u'large-open')
        if (self.var.calbeep == 'yes'):
            self.beep.play()
        dc_canvas.show()

    def reload_pygaze(self):
        """Push the experiment's display/color/calibration settings into PyGaze."""
        from pygaze import settings
        settings.osexperiment = self.experiment
        settings.DISPTYPE = u'opensesame'
        settings.DISPSIZE = self.resolution()
        settings.BGC = self.var.background
        settings.FGC = self.var.foreground
        settings.ALEAKEY = self.var.alea_api_key
        if (self.var.calbeep == u'yes'):
            settings.EYELINKCALBEEP = True
        else:
            settings.EYELINKCALBEEP = False
        if (self.var.alea_animated_calibration == u'yes'):
            settings.ALEAANIMATEDCALIBRATION = True
        else:
            settings.ALEAANIMATEDCALIBRATION = False

    def run(self):
        """Instantiate the selected tracker, wire calibration callbacks, calibrate.

        Maps the human-readable tracker_type variable onto PyGaze's tracker
        identifier plus tracker-specific keyword arguments, derives the log
        file name (EyeLink needs short DOS-style names), and registers
        close() as a cleanup function.
        """
        if hasattr(self.experiment, u'pygaze_eyetracker'):
            raise osexception(u'You should have only one instance of `pygaze_init` in your experiment')
        self.set_item_onset()
        kwdict = {}
        if (self.var.tracker_type == u'Simple dummy'):
            tracker_type = u'dumbdummy'
        elif (self.var.tracker_type == u'Advanced dummy (mouse simulation)'):
            tracker_type = u'dummy'
        elif (self.var.tracker_type == u'EyeLink'):
            tracker_type = u'eyelink'
            kwdict[u'eyelink_force_drift_correct'] = (self.var.eyelink_force_drift_correct == u'yes')
            kwdict[u'pupil_size_mode'] = self.var.eyelink_pupil_size_mode
        elif (self.var.tracker_type == u'EyeLogic'):
            tracker_type = u'eyelogic'
        elif (self.var.tracker_type == u'SMI'):
            tracker_type = u'smi'
            kwdict[u'ip'] = self.var.smi_ip
            kwdict[u'sendport'] = self.var.smi_send_port
            kwdict[u'receiveport'] = self.var.smi_recv_port
        elif (self.var.tracker_type == u'EyeTribe'):
            tracker_type = u'eyetribe'
        elif (self.var.tracker_type == u'OpenGaze'):
            tracker_type = u'opengaze'
        elif (self.var.tracker_type == u'Alea'):
            tracker_type = u'alea'
            kwdict[u'alea_key'] = self.var.alea_api_key
            kwdict[u'animated_calibration'] = (self.var.alea_animated_calibration == u'yes')
        elif (self.var.tracker_type == u'Tobii'):
            tracker_type = u'tobii'
        elif (self.var.tracker_type == u'Tobii-legacy'):
            tracker_type = u'tobii-legacy'
        elif (self.var.tracker_type == u'Tobii Pro Glasses 2'):
            tracker_type = u'tobiiglasses'
            kwdict[u'address'] = self.var.tobiiglasses_address
            kwdict[u'udpport'] = self.var.tobiiglasses_udpport
        else:
            raise osexception((u'Unknown tracker type: %s' % self.var.tracker_type))
        if (self.var._logfile == u'automatic'):
            logfile = os.path.splitext(self.var.logfile)[0]
            if (tracker_type == u'eyelink'):
                # EyeLink restricts EDF file names; shorten typical defaults.
                basename = os.path.basename(logfile)
                dirname = os.path.dirname(logfile)
                if ((len(basename) > 8) and basename.startswith(u'subject-')):
                    basename = (u'sub_' + basename[8:])
                    logfile = os.path.join(dirname, basename)
                    print((u'Attention: EyeLink logfile renamed to %s.edf' % logfile))
                elif (basename == u'defaultlog'):
                    logfile = u'default'
                    print((u'Attention: EyeLink logfile renamed to %s.edf' % logfile))
                logfile = (logfile + u'.edf')
                kwdict[u'data_file'] = logfile
        else:
            logfile = self.var._logfile
        self.experiment.data_files.append(logfile)
        if (tracker_type == u'eyelink'):
            # EyeLink detects saccades/fixations on the host machine.
            event_detection = u'native'
        else:
            event_detection = u'pygaze'
        self.experiment.pygaze_display = Display(u'opensesame')
        self.experiment.pygaze_eyetracker = EyeTracker(self.experiment.pygaze_display, trackertype=tracker_type, eventdetection=event_detection, saccade_velocity_threshold=self.var.sacc_vel_thr, saccade_acceleration_threshold=self.var.sacc_acc_thr, logfile=logfile, **kwdict)
        if (self.var.calbeep == u'yes'):
            from openexp.synth import synth
            self.beep = synth(self.experiment)
        self.experiment.pygaze_eyetracker.set_draw_calibration_target_func(self.draw_calibration_canvas)
        self.experiment.pygaze_eyetracker.set_draw_drift_correction_target_func(self.draw_calibration_canvas)
        self.experiment.cleanup_functions.append(self.close)
        if (self.var.calibrate == u'yes'):
            self.experiment.pygaze_eyetracker.calibrate()
def insert_activity_data(df_activity_summary, df_activity_samples, days_back=7):
    """Refresh the most recent *days_back* days of Oura activity data.

    Deletes rows on/after (max stored summary_date - days_back) from both the
    summary and samples tables, then appends the supplied dataframes.  All
    failures are logged and otherwise ignored (best effort refresh).
    """
    # Latest stored summary date; None when the table is empty, in which case
    # everything from an epoch-ish sentinel date is (re)loaded.
    start = app.session.query(func.max(ouraActivitySummary.summary_date))[0][0]
    start = ('1999-01-01' if (start is None) else datetime.strftime((start - timedelta(days=days_back)), '%Y-%m-%d'))
    try:
        app.server.logger.debug('Deleting >= {} records from oura_activity_summary'.format(start))
        app.session.execute(delete(ouraActivitySummary).where((ouraActivitySummary.summary_date >= start)))
        app.server.logger.debug('Deleting >= {} records from oura_activity_samples'.format(start))
        app.session.execute(delete(ouraActivitySamples).where((ouraActivitySamples.timestamp_local >= start)))
        app.session.commit()
    except BaseException as e:
        # NOTE(review): BaseException also swallows KeyboardInterrupt and
        # SystemExit; Exception was probably intended — confirm before
        # narrowing.  The scoped session is discarded so a failed transaction
        # does not poison later work.
        app.server.logger.error(e)
        app.session.remove()
    app.server.logger.debug('Inserting oura activity summary')
    try:
        df_activity_summary.to_sql('oura_activity_summary', engine, if_exists='append', index=True)
    except BaseException as e:
        app.server.logger.error(e)
    app.server.logger.debug('Inserting oura activity samples')
    try:
        df_activity_samples.to_sql('oura_activity_samples', engine, if_exists='append', index=True)
    except BaseException as e:
        app.server.logger.error(e)
class TestSamToSoap(unittest.TestCase):
    """Round-trip check of SAM-to-SOAP alignment record conversion."""

    def test_sam_to_soap(self):
        """A reverse-strand SAM line with one mismatch converts to the expected SOAP record."""
        # Fixed real-world alignment; fields are tab-separated inside the literal.
        sam = SAMLine("SRR189243_1-SRR189243.3751\t81\tgi||gb|AE017196.1|\t60083\t30\t76M\t*\t0\t0\tTATAGTTATATAAAAGACCTGAGTAGTACGTTTTATATAATCTGATTTTATGGCTATACTTTTTTTGACATGTAGC\tAAAA7AAAA2AA7AAAAAAA1,:0/57:8855)))),''(03388*',''))))#\tNM:i:1\tMD:Z:75T0")
        self.assertEqual(str(sam_to_soap(sam)), "SRR189243_1-SRR189243.3751\tTATAGTTATATAAAAGACCTGAGTAGTACGTTTTATATAATCTGATTTTATGGCTATACTTTTTTTGACATGTAGC\tAAAA7AAAA2AA7AAAAAAA1,:0/57:8855)))),''(03388*',''))))#\t1\ta\t76\t-\tgi||gb|AE017196.1|\t60083\t1\tT->75C2\t76M\t75T")
def _get_original_init(original_class: type, instance: object, owner: type) -> Any:
    """Return the pre-patch __init__ for *original_class*, bound to
    *instance*/*owner* via the descriptor protocol.

    Prefers the saved original from the restore registry; falls back to the
    class's current __init__ when nothing was saved.
    """
    target_class_id = _target_class_id_by_original_class_id[id(original_class)]
    saved_attrs = _restore_dict[target_class_id]
    init_func = saved_attrs['__init__'] if '__init__' in saved_attrs else original_class.__init__
    return init_func.__get__(instance, owner)
# BUG FIX: the decorator line had been reduced to the bare call expression
# `(scope='function')` (a syntax error); restored as @pytest.fixture, which
# matches the fixture-factory shape and the scope argument.
@pytest.fixture(scope='function')
def hydra_task_runner() -> Callable[([Optional[str], Optional[str], Optional[str], Optional[str], Optional[List[str]], bool], TaskTestFunction)]:
    """Fixture returning a factory that builds configured TaskTestFunction objects."""
    def _(calling_file: Optional[str], calling_module: Optional[str], config_path: Optional[str], config_name: Optional[str], overrides: Optional[List[str]]=None, configure_logging: bool=False) -> TaskTestFunction:
        task = TaskTestFunction()
        task.overrides = (overrides or [])
        task.calling_file = calling_file
        task.config_name = config_name
        task.calling_module = calling_module
        task.config_path = config_path
        task.configure_logging = configure_logging
        return task
    return _
class RDepConstraints(Digraph.Node):
    """Requirement-dependency rewriter node that resolves CE3 constraints.

    NOTE(review): get_type_set and rewrite take no `self` — most likely
    @staticmethod decorators were stripped from this copy; confirm against
    the sibling RDep* classes.
    """

    # Rewriters this node must run after.
    depends_on = ['RDepDependsOn', 'RDepSolvedBy']

    def __init__(self, config):
        Digraph.Node.__init__(self, 'RDepConstraints')
        self.config = config

    def get_type_set():
        # This rewriter applies to requirement-dependency inputs only.
        return set([InputModuleTypes.reqdeps])

    def rewrite(reqset):
        """Resolve CE3 constraint expressions on *reqset*; always reports success."""
        tracer.debug('Called.')
        reqset.resolve_ce3()
        tracer.debug('Finished.')
        return True
def test_normalize_availability_on_func_2():
    """Stacking normalize('param') on an availability(C2)-decorated function
    must coerce argument types and reject unavailable combinations."""
    func2 = availability(C2)(level_param_step_no_default)
    func2 = normalize('param', ['a', 'b'])(func2)
    # Both int and string levels come back as int in the result tuple.
    assert (func2(level=1000, param='a', step=24) == (1000, 'a', 24))
    assert (func2(level='1000', param='a', step=24) == (1000, 'a', 24))
    # These level values must be rejected with ValueError.
    with pytest.raises(ValueError, match='invalid .*'):
        func2(level='850', param='a', step='24')
    with pytest.raises(ValueError):
        func2(level='1032100', param='a', step='24')
class S7LPDDR4PHY(DoubleRateLPDDR4PHY, S7Common):
    """LPDDR4 PHY for Xilinx 7-Series FPGAs.

    Serializes command/address/data paths through OSERDESE2/ISERDESE2 with
    optional ODELAYE2 output delays (with_odelay), exposing CSRs for runtime
    read/write delay calibration.
    """

    def __init__(self, pads, *, iodelay_clk_freq, with_odelay, **kwargs):
        self.iodelay_clk_freq = iodelay_clk_freq
        super().__init__(pads, ser_latency=Latency(sys2x=1), des_latency=Latency(sys2x=2), phytype=self.__class__.__name__, serdes_reset_cnt=(- 1), **kwargs)
        # ODELAYE2/IDELAYE2 have 32 delay taps.
        self.settings.delays = 32
        self.settings.write_leveling = True
        self.settings.write_latency_calibration = True
        self.settings.write_dq_dqs_training = True
        self.settings.read_leveling = True
        # NOTE(review): `[.0, .0, .0]` parses as [0.0, 0.0, 0.0] and would
        # reject every real IDELAYCTRL frequency — the numeric literals were
        # evidently garbled in this copy (the original presumably listed the
        # supported reference-clock frequencies, e.g. 200e6/300e6/400e6).
        # Confirm against the original source before relying on this assert.
        assert (iodelay_clk_freq in [.0, .0, .0])
        # One tap is 1/(2*32*Fref); compute taps for a quarter tck (half sys8x).
        iodelay_tap_average = (1 / ((2 * 32) * iodelay_clk_freq))
        half_sys8x_taps = math.floor((self.tck / (4 * iodelay_tap_average)))
        assert (half_sys8x_taps < 32), 'Exceeded ODELAYE2 max value: {} >= 32'.format(half_sys8x_taps)
        # CSRs driving read (rdly) and, with ODELAY, write/command delays.
        self._half_sys8x_taps = CSRStorage(5, reset=half_sys8x_taps)
        self._rdly_dq_rst = CSR()
        self._rdly_dq_inc = CSR()
        self._rdly_dqs_rst = CSR()
        self._rdly_dqs_inc = CSR()
        if with_odelay:
            self._cdly_rst = CSR()
            self._cdly_inc = CSR()
            self._wdly_dq_rst = CSR()
            self._wdly_dq_inc = CSR()
            self._wdly_dqs_rst = CSR()
            self._wdly_dqs_inc = CSR()

        def cdc(i):
            # Cross a single-cycle pulse from the sys to the sys2x domain.
            o = Signal()
            psync = PulseSynchronizer('sys', 'sys2x')
            self.submodules += psync
            self.comb += [psync.i.eq(i), o.eq(psync.o)]
            return o
        rdly_dq_rst = cdc(self._rdly_dq_rst.re)
        rdly_dq_inc = cdc(self._rdly_dq_inc.re)
        rdly_dqs_rst = cdc(self._rdly_dqs_rst.re)
        rdly_dqs_inc = cdc(self._rdly_dqs_inc.re)
        if with_odelay:
            cdly_rst = (cdc(self._cdly_rst.re) | self._rst.storage)
            cdly_inc = cdc(self._cdly_inc.re)
            wdly_dq_rst = cdc(self._wdly_dq_rst.re)
            wdly_dq_inc = cdc(self._wdly_dq_inc.re)
            wdly_dqs_rst = cdc(self._wdly_dqs_rst.re)
            wdly_dqs_inc = cdc(self._wdly_dqs_inc.re)

        def oe_delay_data(oe):
            # Delay the data output-enable by 3 sys2x taps, OR-ed together to
            # stretch the enable window.
            oe_d = Signal()
            delay = TappedDelayLine(oe, 3)
            self.submodules += ClockDomainsRenamer('sys2x')(delay)
            self.comb += oe_d.eq(reduce(or_, delay.taps))
            return oe_d

        def oe_delay_dqs(oe):
            # DQS enable uses a plain 2-tap delay (no stretching).
            delay = TappedDelayLine(oe, 2)
            self.submodules += ClockDomainsRenamer('sys2x')(delay)
            return delay.output
        # Clock path: serialize, optionally delay, then differential buffer.
        clk_dly = Signal()
        clk_ser = Signal()
        self.oserdese2_ddr(din=(~ self.out.clk), dout=(clk_ser if with_odelay else clk_dly), clk='sys8x')
        if with_odelay:
            self.odelaye2(din=clk_ser, dout=clk_dly, rst=cdly_rst, inc=cdly_inc)
        self.obufds(din=clk_dly, dout=self.pads.clk_p, dout_b=self.pads.clk_n)
        # Single-ended control signals share the command delay (when present).
        for cmd in ['cke', 'odt', 'reset_n']:
            cmd_i = getattr(self.out, cmd)
            cmd_o = getattr(self.pads, cmd)
            cmd_ser = Signal()
            self.oserdese2_sdr(din=cmd_i, dout=(cmd_ser if with_odelay else cmd_o), clk='sys8x')
            if with_odelay:
                self.odelaye2(din=cmd_ser, dout=cmd_o, rst=cdly_rst, inc=cdly_inc)
        cs_ser = Signal()
        if with_odelay:
            self.oserdese2_sdr(din=self.out.cs, dout=cs_ser, clk='sys8x')
            self.odelaye2(din=cs_ser, dout=self.pads.cs, rst=cdly_rst, inc=cdly_inc)
        else:
            self.oserdese2_sdr(din=self.out.cs, dout=self.pads.cs, clk='sys8x')
        # Command/address bus (6 bits on LPDDR4).
        for bit in range(6):
            ca_ser = Signal()
            if with_odelay:
                self.oserdese2_sdr(din=self.out.ca[bit], dout=ca_ser, clk='sys8x')
                self.odelaye2(din=ca_ser, dout=self.pads.ca[bit], rst=cdly_rst, inc=cdly_inc)
            else:
                self.oserdese2_sdr(din=self.out.ca[bit], dout=self.pads.ca[bit], clk='sys8x')
        # Bidirectional DQS per byte: serialize out, delay in, deserialize.
        for byte in range((self.databits // 8)):
            dqs_t = Signal()
            dqs_ser = Signal()
            dqs_dly = Signal()
            dqs_i = Signal()
            dqs_i_dly = Signal()
            dqs_din = self.out.dqs_o[byte]
            if (not with_odelay):
                # Without ODELAY, add one sys2x register of latency instead.
                dqs_din_d = Signal.like(dqs_din)
                self.sync.sys2x += dqs_din_d.eq(dqs_din)
                dqs_din = dqs_din_d
            self.oserdese2_ddr(din=dqs_din, **(dict(dout_fb=dqs_ser) if with_odelay else dict(dout=dqs_dly)), tin=(~ oe_delay_dqs(self.out.dqs_oe)), tout=dqs_t, clk=('sys8x' if with_odelay else 'sys8x_90'))
            if with_odelay:
                # DQS write delay starts at a 90-degree offset (half_sys8x_taps).
                self.odelaye2(din=dqs_ser, dout=dqs_dly, rst=self.get_rst(byte, wdly_dqs_rst), inc=self.get_inc(byte, wdly_dqs_inc), init=half_sys8x_taps)
            self.iobufds(din=dqs_dly, dout=dqs_i, tin=dqs_t, dinout=self.pads.dqs_p[byte], dinout_b=self.pads.dqs_n[byte])
            self.idelaye2(din=dqs_i, dout=dqs_i_dly, rst=self.get_rst(byte, rdly_dqs_rst), inc=self.get_inc(byte, rdly_dqs_inc))
            self.iserdese2_ddr(din=dqs_i_dly, dout=self.out.dqs_i[byte], clk='sys8x')
        # DMI per byte (output only; the input side is discarded).
        for byte in range((self.databits // 8)):
            dmi_t = Signal()
            dmi_ser = Signal()
            dmi_dly = Signal()
            self.oserdese2_ddr(din=self.out.dmi_o[byte], **(dict(dout_fb=dmi_ser) if with_odelay else dict(dout=dmi_dly)), tin=(~ oe_delay_data(self.out.dmi_oe)), tout=dmi_t, clk='sys8x')
            if with_odelay:
                self.odelaye2(din=dmi_ser, dout=dmi_dly, rst=self.get_rst(byte, wdly_dq_rst), inc=self.get_inc(byte, wdly_dq_inc))
            self.iobuf(din=dmi_dly, dout=Signal(), tin=dmi_t, dinout=self.pads.dmi[byte])
        # Bidirectional DQ per bit.
        for bit in range(self.databits):
            dq_t = Signal()
            dq_ser = Signal()
            dq_dly = Signal()
            dq_i = Signal()
            dq_i_dly = Signal()
            # NOTE(review): the DQ output-enable reuses self.out.dmi_oe —
            # presumably DQ and DMI share OE timing; confirm this is not a typo
            # for a dedicated dq_oe signal.
            self.oserdese2_ddr(din=self.out.dq_o[bit], **(dict(dout_fb=dq_ser) if with_odelay else dict(dout=dq_dly)), tin=(~ oe_delay_data(self.out.dmi_oe)), tout=dq_t, clk='sys8x')
            if with_odelay:
                self.odelaye2(din=dq_ser, dout=dq_dly, rst=self.get_rst((bit // 8), wdly_dq_rst), inc=self.get_inc((bit // 8), wdly_dq_inc))
            self.iobuf(din=dq_dly, dout=dq_i, dinout=self.pads.dq[bit], tin=dq_t)
            self.idelaye2(din=dq_i, dout=dq_i_dly, rst=self.get_rst((bit // 8), rdly_dq_rst), inc=self.get_inc((bit // 8), rdly_dq_inc))
            self.iserdese2_ddr(din=dq_i_dly, dout=self.out.dq_i[bit], clk='sys8x')
# NOTE(review): the decorator line below appears truncated in this copy —
# most likely '@pytest.mark.parametrize(...)'; confirm against upstream.
.parametrize('literal_value_pair', _parameterizers.LIST_OF_SCALAR_LITERALS_AND_PYTHON_VALUE)
def test_execution_spec(literal_value_pair):
    """Round-trip ExecutionSpec through its flyte IDL form and verify fields.

    Covers two shapes: a spec with notifications, raw-output config and
    max_parallelism, and a spec with disable_all=True (no notifications).
    """
    # literal_value is unpacked but never used; the parametrization only
    # repeats the test body over the literal/value fixtures.
    (literal_value, _) = literal_value_pair
    obj = _execution.ExecutionSpec(_identifier.Identifier(_identifier.ResourceType.LAUNCH_PLAN, 'project', 'domain', 'name', 'version'), _execution.ExecutionMetadata(_execution.ExecutionMetadata.ExecutionMode.MANUAL, 'tester', 1), notifications=_execution.NotificationList([_common_models.Notification([_core_exec.WorkflowExecutionPhase.ABORTED], pager_duty=_common_models.PagerDutyNotification(recipients_email=['a', 'b', 'c']))]), raw_output_data_config=_common_models.RawOutputDataConfig(output_location_prefix='raw_output'), max_parallelism=100)
    # Field checks on the freshly constructed object.
    assert (obj.launch_plan.resource_type == _identifier.ResourceType.LAUNCH_PLAN)
    assert (obj.launch_plan.domain == 'domain')
    assert (obj.launch_plan.project == 'project')
    assert (obj.launch_plan.name == 'name')
    assert (obj.launch_plan.version == 'version')
    assert (obj.metadata.mode == _execution.ExecutionMetadata.ExecutionMode.MANUAL)
    assert (obj.metadata.nesting == 1)
    assert (obj.metadata.principal == 'tester')
    assert (obj.notifications.notifications[0].phases == [_core_exec.WorkflowExecutionPhase.ABORTED])
    assert (obj.notifications.notifications[0].pager_duty.recipients_email == ['a', 'b', 'c'])
    assert (obj.disable_all is None)
    assert (obj.max_parallelism == 100)
    assert (obj.raw_output_data_config.output_location_prefix == 'raw_output')
    # Serialize to IDL and back; the result must be equal field-by-field.
    obj2 = _execution.ExecutionSpec.from_flyte_idl(obj.to_flyte_idl())
    assert (obj == obj2)
    assert (obj2.launch_plan.resource_type == _identifier.ResourceType.LAUNCH_PLAN)
    assert (obj2.launch_plan.domain == 'domain')
    assert (obj2.launch_plan.project == 'project')
    assert (obj2.launch_plan.name == 'name')
    assert (obj2.launch_plan.version == 'version')
    assert (obj2.metadata.mode == _execution.ExecutionMetadata.ExecutionMode.MANUAL)
    assert (obj2.metadata.nesting == 1)
    assert (obj2.metadata.principal == 'tester')
    assert (obj2.notifications.notifications[0].phases == [_core_exec.WorkflowExecutionPhase.ABORTED])
    assert (obj2.notifications.notifications[0].pager_duty.recipients_email == ['a', 'b', 'c'])
    assert (obj2.disable_all is None)
    assert (obj2.max_parallelism == 100)
    assert (obj2.raw_output_data_config.output_location_prefix == 'raw_output')
    # Second shape: disable_all=True, no notifications attached.
    obj = _execution.ExecutionSpec(_identifier.Identifier(_identifier.ResourceType.LAUNCH_PLAN, 'project', 'domain', 'name', 'version'), _execution.ExecutionMetadata(_execution.ExecutionMetadata.ExecutionMode.MANUAL, 'tester', 1), disable_all=True)
    assert (obj.launch_plan.resource_type == _identifier.ResourceType.LAUNCH_PLAN)
    assert (obj.launch_plan.domain == 'domain')
    assert (obj.launch_plan.project == 'project')
    assert (obj.launch_plan.name == 'name')
    assert (obj.launch_plan.version == 'version')
    assert (obj.metadata.mode == _execution.ExecutionMetadata.ExecutionMode.MANUAL)
    assert (obj.metadata.nesting == 1)
    assert (obj.metadata.principal == 'tester')
    assert (obj.notifications is None)
    assert (obj.disable_all is True)
    obj2 = _execution.ExecutionSpec.from_flyte_idl(obj.to_flyte_idl())
    assert (obj == obj2)
    assert (obj2.launch_plan.resource_type == _identifier.ResourceType.LAUNCH_PLAN)
    assert (obj2.launch_plan.domain == 'domain')
    assert (obj2.launch_plan.project == 'project')
    assert (obj2.launch_plan.name == 'name')
    assert (obj2.launch_plan.version == 'version')
    assert (obj2.metadata.mode == _execution.ExecutionMetadata.ExecutionMode.MANUAL)
    assert (obj2.metadata.nesting == 1)
    assert (obj2.metadata.principal == 'tester')
    assert (obj2.notifications is None)
    assert (obj2.disable_all is True)
def generate_config_docs(config: FidesConfig, outfile_path: str='.fides/fides.toml') -> None:
    """Render TOML-style documentation for *config* and write it to *outfile_path*.

    Object-typed schema entries and nested settings sections are documented
    separately, validated, then concatenated under the standard header.
    """
    properties = config.schema()['properties']
    # Docs for the plain object-typed schema entries.
    object_docs = [
        convert_object_to_toml_docs(field_name, field_info)
        for field_name, field_info in properties.items()
        if field_info.get('type') == 'object'
    ]
    # Docs for nested settings sections, emitted in alphabetical order.
    nested = get_nested_settings(config)
    nested_settings_docs = [
        convert_settings_to_toml_docs(section_name, nested[section_name])
        for section_name in sorted(nested)
    ]
    docs = build_config_header() + '\n'.join(nested_settings_docs + object_docs)
    # Fail loudly before touching disk if the generated TOML is invalid.
    validate_generated_config(docs)
    with open(outfile_path, 'w', encoding='utf-8') as output_file:
        output_file.write(docs)
    print(f'Exported configuration file to: {outfile_path}')
class SpiritingAwayAction(UserAction):
    """Move one of the target's cards into the target's private
    'yukari_dimension' card list, creating that list on first use."""

    def apply_action(self):
        game = self.game
        source = self.source
        target = self.target
        categories = ('cards', 'showncards', 'equips', 'fatetell')
        # Snapshot the candidate lists before asking for input, mirroring the
        # order in which the original evaluated them.
        candidate_lists = [getattr(target, c) for c in categories]
        chosen = game.user_input([source], ChoosePeerCardInputlet(self, target, categories))
        chosen = chosen or random_choose_card(game, candidate_lists)
        if not chosen:
            return False
        self.card = chosen
        source.reveal(chosen)
        source.tags['spirit_away_tag'] += 1
        # Lazily create the per-target dimension list.
        dimension = target._.get('yukari_dimension')
        if dimension is None:
            dimension = CardList(target, 'yukari_dimension')
            target._['yukari_dimension'] = dimension
            target.lists.append(dimension)
        migrate_cards([chosen], dimension)
        return True

    def is_valid(self):
        target = self.target
        # The target must have at least one card somewhere to take from.
        has_any_card = any(
            getattr(target, c) for c in ('cards', 'showncards', 'equips', 'fatetell')
        )
        if not has_any_card:
            return False
        # At most two uses per source (tag counts prior uses).
        return self.source.tags['spirit_away_tag'] < 2
class PluginTestCase(unittest.TestCase):
    """Exercise Plugin behaviour: id/name policies, custom activators,
    service publication, extension-point contributions, plugin home
    directories, and dynamic plugin addition/removal."""

    def setUp(self):
        # Isolate ETSConfig (application home etc.) so tests do not touch
        # the user's real configuration directory.
        ets_config_patcher = ETSConfigPatcher()
        ets_config_patcher.start()
        self.addCleanup(ets_config_patcher.stop)

    def test_id_policy(self):
        """Plugin id defaults to the module path and can be overridden."""
        p = Plugin()
        self.assertEqual('envisage.plugin.Plugin', p.id)
        p = Plugin(id='wilma')
        self.assertEqual('wilma', p.id)
        p = Plugin(name='fred', id='wilma')
        self.assertEqual('wilma', p.id)
        self.assertEqual('fred', p.name)

    def test_name_policy(self):
        """Plugin name defaults to the camel-case class name split on words."""
        p = Plugin()
        self.assertEqual('Plugin', p.name)
        p = Plugin(name='wilma')
        self.assertEqual('wilma', p.name)

        class ThisIsMyPlugin(Plugin):
            pass

        p = ThisIsMyPlugin()
        self.assertEqual('This Is My Plugin', p.name)

    def test_plugin_activator(self):
        """A plugin's custom activator is used to start/stop that plugin."""
        # NOTE(review): the bare expression below looks like a stripped
        # class decorator (e.g. '@provides(IPluginActivator)') — confirm
        # against the original source.
        (IPluginActivator)

        class NullPluginActivator(HasTraits):
            # Records which plugin it was asked to start/stop.
            def start_plugin(self, plugin):
                self.started = plugin

            def stop_plugin(self, plugin):
                self.stopped = plugin

        class PluginA(Plugin):
            id = 'A'

        class PluginB(Plugin):
            id = 'B'

        plugin_activator = NullPluginActivator()
        a = PluginA(activator=plugin_activator)
        b = PluginB()
        application = SimpleApplication(plugins=[a, b])
        application.start()
        # Only plugin A used the custom activator.
        self.assertEqual(a, plugin_activator.started)
        application.stop()
        self.assertEqual(a, plugin_activator.stopped)

    def test_service(self):
        """Traits marked service=True are registered on start, gone on stop."""
        class Foo(HasTraits):
            pass

        class Bar(HasTraits):
            pass

        class Baz(HasTraits):
            pass

        class PluginA(Plugin):
            id = 'A'
            foo = Instance(Foo, (), service=True)
            bar = Instance(Bar, (), service=True)
            baz = Instance(Baz, (), service=True)

        a = PluginA()
        application = SimpleApplication(plugins=[a])
        application.start()
        self.assertNotEqual(None, application.get_service(Foo))
        self.assertEqual(a.foo, application.get_service(Foo))
        self.assertNotEqual(None, application.get_service(Bar))
        self.assertEqual(a.bar, application.get_service(Bar))
        self.assertNotEqual(None, application.get_service(Baz))
        self.assertEqual(a.baz, application.get_service(Baz))
        application.stop()
        # Services are unregistered when the application stops.
        self.assertEqual(None, application.get_service(Foo))
        self.assertEqual(None, application.get_service(Bar))
        self.assertEqual(None, application.get_service(Baz))

    def test_service_protocol(self):
        """service_protocol registers the service under a different protocol."""
        class IFoo(Interface):
            pass

        class IBar(Interface):
            pass

        # NOTE(review): the bare tuple below looks like a stripped class
        # decorator (e.g. '@provides(IFoo, IBar)') — confirm upstream.
        (IFoo, IBar)

        class Foo(HasTraits):
            pass

        class PluginA(Plugin):
            id = 'A'
            foo = Instance(Foo, (), service=True, service_protocol=IBar)

        a = PluginA()
        application = SimpleApplication(plugins=[a])
        application.start()
        self.assertNotEqual(None, application.get_service(IBar))
        self.assertEqual(a.foo, application.get_service(IBar))
        application.stop()
        self.assertEqual(None, application.get_service(IBar))

    def test_multiple_trait_contributions(self):
        """Two traits contributing to one extension point is an error."""
        class PluginA(Plugin):
            id = 'A'
            x = ExtensionPoint(List, id='x')

        class PluginB(Plugin):
            id = 'B'
            x = List([1, 2, 3], contributes_to='x')
            y = List([4, 5, 6], contributes_to='x')

        a = PluginA()
        b = PluginB()
        application = SimpleApplication(plugins=[a, b])
        with self.assertRaises(ValueError):
            application.get_extensions('x')

    def test_exception_in_trait_contribution(self):
        """Exceptions raised by contribution defaults propagate to the caller."""
        class PluginA(Plugin):
            id = 'A'
            x = ExtensionPoint(List, id='x')

        class PluginB(Plugin):
            id = 'B'
            x = List(contributes_to='x')

            def _x_default(self):
                raise (1 / 0)

        a = PluginA()
        b = PluginB()
        application = SimpleApplication(plugins=[a, b])
        with self.assertRaises(ZeroDivisionError):
            application.get_extensions('x')

    def test_contributes_to(self):
        """A contributes_to trait feeds the matching extension point."""
        class PluginA(Plugin):
            id = 'A'
            x = ExtensionPoint(List, id='x')

        class PluginB(Plugin):
            id = 'B'
            x = List([1, 2, 3], contributes_to='x')

        a = PluginA()
        b = PluginB()
        application = SimpleApplication(plugins=[a, b])
        self.assertEqual([1, 2, 3], application.get_extensions('x'))

    def test_add_plugins_to_empty_application(self):
        """Adding/removing plugins updates extension points incrementally."""
        class PluginA(Plugin):
            id = 'A'
            x = ExtensionPoint(List(Int), id='x')

            # Record change events so the test can verify notifications.
            def _x_items_changed(self, event):
                self.added = event.added
                self.removed = event.removed

        class PluginB(Plugin):
            id = 'B'
            x = List(Int, [1, 2, 3], contributes_to='x')

        class PluginC(Plugin):
            id = 'C'
            x = List(Int, [4, 5, 6], contributes_to='x')

        a = PluginA()
        b = PluginB()
        c = PluginC()
        application = SimpleApplication()
        application.start()
        application.add_plugin(a)
        application.start_plugin(a)
        self.assertEqual([], a.x)
        application.add_plugin(b)
        self.assertEqual([1, 2, 3], a.x)
        self.assertEqual([1, 2, 3], a.added)
        application.add_plugin(c)
        self.assertEqual([1, 2, 3, 4, 5, 6], a.x)
        self.assertEqual([4, 5, 6], a.added)
        application.remove_plugin(b)
        self.assertEqual([4, 5, 6], a.x)
        self.assertEqual([1, 2, 3], a.removed)
        application.remove_plugin(c)
        self.assertEqual([], a.x)
        self.assertEqual([4, 5, 6], a.removed)

    def test_home(self):
        """Each plugin gets a per-id home directory under the app home."""
        class PluginA(Plugin):
            id = 'A'

        class PluginB(Plugin):
            id = 'B'

        a = PluginA()
        b = PluginB()
        application = SimpleApplication(plugins=[a, b])
        self.assertEqual(join(application.home, 'plugins', a.id), a.home)
        self.assertEqual(join(application.home, 'plugins', b.id), b.home)
        self.assertTrue(exists(a.home))
        self.assertTrue(exists(b.home))
        # A second application instance reproduces the same layout.
        a = PluginA()
        b = PluginB()
        application = SimpleApplication(plugins=[a, b])
        self.assertEqual(join(application.home, 'plugins', a.id), a.home)
        self.assertEqual(join(application.home, 'plugins', b.id), b.home)
        self.assertTrue(exists(a.home))
        self.assertTrue(exists(b.home))

    def test_no_recursion(self):
        """Querying an unsatisfied extension point must not recurse forever."""
        class PluginA(Plugin):
            id = 'A'
            x = ExtensionPoint(List, id='bob')

        application = Application(plugins=[PluginA()])
        application.get_extensions('bob')

    def test_plugin_str_representation(self):
        """str() and repr() both render id and name."""
        plugin_repr = 'TestPlugin(id={!r}, name={!r})'
        plugin = TestPlugin(id='Fred', name='Wilma')
        self.assertEqual(str(plugin), plugin_repr.format('Fred', 'Wilma'))
        self.assertEqual(repr(plugin), plugin_repr.format('Fred', 'Wilma'))
def test_error_when_regression_is_true_and_target_is_binary(df_enc):
    """Fitting with regression=True against a binary target must raise ValueError."""
    expected_msg = 'Trying to fit a regression to a binary target is not allowed by this transformer. Check the target values or set regression to False.'
    transformer = DecisionTreeEncoder(regression=True)
    with pytest.raises(ValueError) as record:
        transformer.fit(df_enc[['var_A', 'var_B']], df_enc['target'])
    assert str(record.value) == expected_msg
class PrepareMixin():
    """Mixin with helpers to prepare model instances for bulk insertion:
    assigning primary keys and de-duplicating against existing DB rows.

    NOTE(review): every method here takes ``cls`` as its first parameter —
    these look like '@classmethod's whose decorators were lost in this copy;
    confirm against the original source.
    """

    def prepare(cls, database: DB, pkgen: PrimaryKeyGeneratorBase, items: Iterable[PrepareMixin]) -> Iterator[PrepareMixin]:
        """Merge *items* against the database, then assign fresh primary
        keys to the items that still need one, yielding each item."""
        for item in cls.merge(database, items):
            if hasattr(item, 'id'):
                # Only newly-merged items reach here unresolved; give them
                # the next generated key.
                item.id.resolve(id=pkgen.get(cls), is_new=True)
            (yield item)

    def merge(cls, database: DB, items: Iterable[PrepareMixin]) -> Iterable[PrepareMixin]:
        """Default merge: no de-duplication; subclasses override."""
        return items

    def _merge_by_keys(cls, database: DB, items: Iterable[PrepareMixin], *key_attributes: Column) -> Iterator[PrepareMixin]:
        """De-duplicate *items* by the given key columns.

        Items matching an existing DB row get that row's id (is_new=False);
        duplicates within the batch share the first occurrence's id; only
        the first occurrence of a new key is left unresolved for `prepare`.
        """
        items = list(items)

        def key_for_item(item: PrepareMixin) -> Tuple[...]:
            return tuple((getattr(item, attr.key) for attr in key_attributes))

        keys = {key_for_item(i) for i in items}
        existing_ids = {}
        cls_attrs = [getattr(cls, attr.key) for attr in key_attributes]
        # Fetch existing rows in batches to keep the IN(...) clause bounded.
        for fetch_keys in split_every(BATCH_SIZE, keys):
            with database.make_session() as session:
                existing_items = session.query(cls.id, *cls_attrs).filter(tuple_(*cls_attrs).in_(fetch_keys)).all()
                for existing_item in existing_items:
                    existing_ids[key_for_item(existing_item)] = existing_item.id
        new_items = {}
        for i in items:
            key = key_for_item(i)
            if (key in existing_ids):
                i.id.resolve(existing_ids[key], is_new=False)
            elif (key in new_items):
                # In-batch duplicate: alias to the first item with this key.
                i.id.resolve(new_items[key].id, is_new=False)
            else:
                new_items[key] = i
            (yield i)

    def _merge_assocs(cls, database: DB, items, id1, id2):
        """Yield association rows, tracking the first row seen for each
        resolved (id1, id2) pair (duplicates are still yielded)."""
        new_items = {}
        for i in items:
            r1 = getattr(i, id1.key)
            r2 = getattr(i, id2.key)
            key = (r1.resolved(), r2.resolved())
            if (key not in new_items):
                new_items[key] = i
            (yield i)
def test_more_entries2_extend_phi_function():
    """PhiFunctionFixer must record origin blocks for the phi-function in node 0."""
    node, task = construct_graph(5)
    PhiFunctionFixer().run(task)
    var_v1 = expressions.Variable('v', Integer.int32_t(), 1)
    var_v2 = expressions.Variable('v', Integer.int32_t(), 2)
    expected_origins = {node[1]: var_v1, node[4]: var_v1, node[2]: var_v2}
    assert node[0].instructions[0].origin_block == expected_origins
def test_fake_receive_messages():
    """receive_messages must honour MaxNumberOfMessages and preserve message
    bodies and attributes, across a purge/refill cycle.

    The four-message fixture was duplicated verbatim in the original; it is
    factored into a local helper so both phases send identical data.
    """

    def _send_fixture(queue):
        # Two messages with attributes, two without.
        queue.send_message('1235', {'attr1': 'val1', 'attr2': 111})
        queue.send_message('2222')
        queue.send_message('3333')
        queue.send_message('4444', {'attr1': 'v1', 'attr2': 'v2'})

    q = get_sqs_queue()
    _send_fixture(q)
    # Large enough limit: all four messages come back in send order.
    msgs = q.receive_messages(10, MaxNumberOfMessages=10)
    assert (len(msgs) == 4)
    assert (msgs[0].body == '1235')
    assert (msgs[0].message_attributes.get('attr2') == 111)
    assert (msgs[3].body == '4444')
    assert (msgs[3].message_attributes.get('attr2') == 'v2')
    # After purge + refill, a smaller limit caps the batch size at 2.
    q.purge()
    _send_fixture(q)
    msgs = q.receive_messages(10, MaxNumberOfMessages=2)
    assert (len(msgs) == 2)
    assert (msgs[0].body == '1235')
    assert (msgs[0].message_attributes.get('attr2') == 111)
    assert (msgs[1].body == '2222')
def test_slate_hybridization_nested_schur():
    """Nested-Schur hybridized solve must match the plain hybridized solve."""
    (a, L, W) = setup_poisson()
    # Hybridization with a nested fieldsplit/Schur local solve.
    localsolve_opts = {'ksp_type': 'preonly', 'pc_type': 'fieldsplit', 'pc_fieldsplit_type': 'schur'}
    params = {
        'mat_type': 'matfree',
        'ksp_type': 'preonly',
        'pc_type': 'python',
        'pc_python_type': 'firedrake.HybridizationPC',
        'hybridization': {'ksp_type': 'preonly', 'pc_type': 'lu', 'localsolve': localsolve_opts},
    }
    w = Function(W)
    eq = (a == L)
    problem = LinearVariationalProblem(eq.lhs, eq.rhs, w)
    solver = LinearVariationalSolver(problem, solver_parameters=params)
    solver.solve()
    # Confirm the requested options actually reached the Schur builder.
    expected = {
        'nested': True,
        'preonly_A00': False,
        'jacobi_A00': False,
        'schur_approx': False,
        'preonly_Shat': False,
        'jacobi_Shat': False,
    }
    builder = solver.snes.ksp.pc.getPythonContext().getSchurComplementBuilder()
    assert options_check(builder, expected), 'Some solver options have not ended up in the PC as wanted.'
    (sigma_h, u_h) = w.subfunctions
    # Reference: plain hybridization without the nested local solve.
    w2 = Function(W)
    reference_params = {
        'ksp_type': 'preonly',
        'pc_type': 'python',
        'mat_type': 'matfree',
        'pc_python_type': 'firedrake.HybridizationPC',
        'hybridization': {'ksp_type': 'preonly', 'pc_type': 'lu'},
    }
    solve((a == L), w2, solver_parameters=reference_params)
    (nh_sigma, nh_u) = w2.subfunctions
    assert errornorm(sigma_h, nh_sigma) < 1e-11
    assert errornorm(u_h, nh_u) < 1e-11
class MemoryAssistant(object):
    """Mixin providing memory-usage assertions backed by psutil.

    Fixes vs. the previous version: ``Process.get_memory_info()`` was removed
    in psutil 3.0 (renamed to ``memory_info()``), and the report variables are
    now defined even when ``iterations == 0``.
    """

    def assertMemoryUsage(self, process, usage, slack=0, msg=None):
        """Fail if *process* currently uses more than ``usage * (1 + slack)`` MB.

        process: a psutil.Process (or anything with ``memory_info().rss``).
        usage:   expected resident-set size in MB.
        slack:   fractional tolerance above *usage* before failing.
        msg:     optional custom failure message.
        """
        current_usage = self._memory_usage(process)
        hard_limit = usage * (1 + slack)
        if hard_limit < current_usage:
            if msg is None:
                difference = (current_usage - usage) / usage
                msg = 'Memory leak of {:.2%}'.format(difference)
            raise AssertionError(msg)

    def assertReturnsMemory(self, function, args=None, iterations=100, slack=0.0, msg=None):
        """Call *function* repeatedly and fail if process RSS grows past the
        baseline (plus *slack*), which indicates a leak.

        Prints a short progress/report to stdout either way.
        """
        try:
            import psutil
        except ImportError:
            # psutil is an optional dependency of this assertion.
            raise ImportError('Please install psutil to check memory usage')
        process = psutil.Process(os.getpid())

        def test_function():
            # One sample run, forwarding positional args when provided.
            if args is None:
                function()
            else:
                function(*args)

        gc.collect()
        baseline = self._memory_usage(process)
        samples_msg = 'Samples : {}'
        mem_usage_msg = 'Memory growth (MB): {:5.1f} to {:5.1f}'
        mem_leak_msg = 'Memory leak (%) : {:5.1f}'
        index = -1  # keeps the reporting below defined even for iterations == 0
        try:
            print('Profiling', end=' ')
            sys.stdout.flush()
            for index in range(iterations):
                test_function()
                print('.', end=' ')
                sys.stdout.flush()
                gc.collect()
                self.assertMemoryUsage(process, baseline, slack=slack)
            final = self._memory_usage(process)
            leak = (final - baseline) / baseline
            print()
            print(samples_msg.format(index + 1))
            print(mem_usage_msg.format(baseline, final))
            print(mem_leak_msg.format(leak * 100.0, index + 1))
        except AssertionError:
            final = self._memory_usage(process)
            leak = (final - baseline) / baseline
            if msg is None:
                msg = 'Memory Leak!!!\n'
                msg += samples_msg.format(index + 1)
                msg += '\n'
                msg += mem_usage_msg.format(baseline, final)
                msg += '\n'
                msg += mem_leak_msg.format(leak * 100.0, index + 1)
            raise AssertionError(msg)

    def _memory_usage(self, process):
        """Return *process* resident-set size in MB as a float."""
        # psutil >= 3.0: memory_info() replaced get_memory_info().
        return float(process.memory_info().rss) / (1024 ** 2)
def main(argv):
    """Parse AFM font-metric lines (via fileinput: stdin or sys.argv files)
    and print a Python module defining FONT_METRICS.

    Returns 0 on success.

    Fix: ``fonts.iteritems()`` is Python 2 only and crashes under Python 3
    (which this block otherwise targets — it uses print() calls); replaced
    with ``.items()``.
    """
    fonts = {}
    for line in fileinput.input():
        f = line.strip().split(' ')
        if not f:
            continue
        k = f[0]
        if k == 'FontName':
            # Start a new font record; later keys attach to props/chars.
            # NOTE(review): a 'C' line before any 'FontName' would NameError
            # on `chars` — assumes well-formed AFM input.
            fontname = f[1]
            props = {'FontName': fontname, 'Flags': 0}
            chars = {}
            fonts[fontname] = (props, chars)
        elif k == 'C':
            cid = int(f[1])
            # Only Latin-1 range character codes are kept.
            if (0 <= cid) and (cid <= 255):
                width = int(f[4])
                chars[cid] = width
        elif k in ('CapHeight', 'XHeight', 'ItalicAngle', 'Ascender', 'Descender'):
            # Normalize AFM names to PDF font-descriptor names.
            k = {'Ascender': 'Ascent', 'Descender': 'Descent'}.get(k, k)
            props[k] = float(f[1])
        elif k in ('FontName', 'FamilyName', 'Weight'):
            k = {'FamilyName': 'FontFamily', 'Weight': 'FontWeight'}.get(k, k)
            props[k] = f[1]
        elif k == 'IsFixedPitch':
            if f[1].lower() == 'true':
                props['Flags'] = 64  # PDF FixedPitch flag
        elif k == 'FontBBox':
            props[k] = tuple(map(float, f[1:5]))
    print('# -*- python -*-')
    print('FONT_METRICS = {')
    for (fontname, (props, chars)) in fonts.items():
        print((' %r: %r,' % (fontname, (props, chars))))
    print('}')
    return 0
class TestResourceMixinCreation():
    """Tests for ResourceMixin construction, sub-resource wrapping, and
    gating of the update/delete methods."""

    # NOTE(review): the three lines below are corrupted in this copy —
    # '(autouse=True)' looks like a stripped '@pytest.fixture(autouse=True)'
    # and the def line is truncated; recover the fixture from version control.
    (autouse=True)
    def patch_ patch_
    pass

    def setup_method(self):
        # Shared Client + handler callbacks for every test in this class.
        # NOTE(review): the base_url literal below is truncated in this copy
        # (unterminated string) — recover from version control.
        self.base_url = '
        self.api_key = 'super_secret_api_key'
        self.user_agent = 'fintoc-python/test'
        self.params = {'first_param': 'first_value', 'second_param': 'second_value'}
        self.client = Client(self.base_url, self.api_key, self.user_agent, params=self.params)
        self.path = '/resources'
        self.handlers = {'update': (lambda object_, identifier: (print('Calling update...') or object_)), 'delete': (lambda identifier: (print('Calling delete...') or identifier))}

    def test_empty_mock_resource(self):
        """Sub-resources default to GenericFintocResource wrappers."""
        methods = []
        data = {'id': 'id0', 'identifier': 'identifier0', 'resources': [{'id': 'id1', 'identifier': 'identifier1'}, {'id': 'id2', 'identifier': 'identifier2'}], 'resource': {'id': 'id3', 'identifier': 'identifier3'}}
        resource = EmptyMockResource(self.client, self.handlers, methods, self.path, **data)
        assert isinstance(resource, ResourceMixin)
        assert isinstance(resource.resource, GenericFintocResource)
        assert (resource.resource.id == data['resource']['id'])
        assert isinstance(resource.resources, list)
        for sub_resource in resource.resources:
            assert isinstance(sub_resource, GenericFintocResource)

    def test_complex_mock_resource(self):
        """A resource with declared mappings wraps 'resource' as a Link."""
        methods = []
        data = {'id': 'id0', 'identifier': 'identifier0', 'resources': [{'id': 'id1', 'identifier': 'identifier1'}, {'id': 'id2', 'identifier': 'identifier2'}], 'resource': {'id': 'id3', 'identifier': 'identifier3'}}
        resource = ComplexMockResource(self.client, self.handlers, methods, self.path, **data)
        assert isinstance(resource, ResourceMixin)
        assert isinstance(resource.resource, Link)
        assert (resource.resource.id == data['resource']['id'])
        assert isinstance(resource.resources, list)
        for sub_resource in resource.resources:
            assert isinstance(sub_resource, GenericFintocResource)

    def test_update_delete_methods_access(self):
        """Only methods listed in `methods` are callable; others raise."""
        methods = ['delete']
        data = {'id': 'id0', 'identifier': 'identifier0', 'resources': [{'id': 'id1', 'identifier': 'identifier1'}, {'id': 'id2', 'identifier': 'identifier2'}], 'resource': {'id': 'id3', 'identifier': 'identifier3'}}
        resource = EmptyMockResource(self.client, self.handlers, methods, self.path, **data)
        assert isinstance(resource, ResourceMixin)
        with pytest.raises(AttributeError):
            resource.update()
        resource.delete()
def test_spark_dataframe_return():
    """Build a one-row Spark DataFrame inside a task, return it through a
    FlyteSchema, and check the round-tripped pandas frame.

    NOTE(review): '(task_config=Spark())' below looks like a stripped
    '@task(...)' decorator, and my_wf is presumably '@workflow'-decorated —
    confirm against the original source.
    """
    my_schema = FlyteSchema[kwtypes(name=str, age=int)]
    (task_config=Spark())
    def my_spark(a: int) -> my_schema:
        session = flytekit.current_context().spark_session
        df = session.createDataFrame([('Alice', a)], my_schema.column_names())
        return df

    def my_wf(a: int) -> my_schema:
        return my_spark(a=a)

    x = my_wf(a=5)
    # Read the schema output back as pandas and compare cell-by-cell.
    reader = x.open(pd.DataFrame)
    df2 = reader.all()
    result_df = (df2.reset_index(drop=True) == pd.DataFrame(data={'name': ['Alice'], 'age': [5]}).reset_index(drop=True))
    assert result_df.all().all()
class MyAdminIndexView(admin.AdminIndexView):
    """Flask-Admin index view with login, registration and logout handling.

    NOTE(review): the bare tuples before each method look like stripped
    '@expose(...)' route decorators — confirm against the original source.
    """

    ('/')
    def index(self):
        # Require an authenticated user before showing the admin index.
        if (not login.current_user.is_authenticated):
            return redirect(url_for('.login_view'))
        return super(MyAdminIndexView, self).index()

    ('/login/', methods=('GET', 'POST'))
    def login_view(self):
        """Render/process the login form; redirect to index on success."""
        form = LoginForm(request.form)
        if helpers.validate_form_on_submit(form):
            user = form.get_user()
            login.login_user(user)
        if login.current_user.is_authenticated:
            return redirect(url_for('.index'))
        link = (('<p>Don\'t have an account? <a href="' + url_for('.register_view')) + '">Click here to register.</a></p>')
        self._template_args['form'] = form
        self._template_args['link'] = link
        return super(MyAdminIndexView, self).index()

    ('/register/', methods=('GET', 'POST'))
    def register_view(self):
        """Create a new user (password hashed), log them in, redirect home."""
        form = RegistrationForm(request.form)
        if helpers.validate_form_on_submit(form):
            user = User()
            form.populate_obj(user)
            # Never store the plaintext password.
            user.password = generate_password_hash(form.password.data)
            db.session.add(user)
            db.session.commit()
            login.login_user(user)
            return redirect(url_for('.index'))
        link = (('<p>Already have an account? <a href="' + url_for('.login_view')) + '">Click here to log in.</a></p>')
        self._template_args['form'] = form
        self._template_args['link'] = link
        return super(MyAdminIndexView, self).index()

    ('/logout/')
    def logout_view(self):
        login.logout_user()
        return redirect(url_for('.index'))
def test_entity_storage_remove_entity_asset(create_test_db, create_project, prepare_entity_storage):
    """Removing a parent asset must also evict it from the entity storage."""
    from stalker import Asset, Task, Version
    project = create_project
    # Locate the 'Model' task under the 'Char1' asset of the fixture project.
    char1 = Asset.query.filter(Asset.project == project).filter(Asset.name == 'Char1').first()
    model_task = Task.query.filter(Task.parent == char1).filter(Task.name == 'Model').first()
    assert model_task is not None
    first_version = model_task.versions[0]
    assert first_version is not None
    assert isinstance(first_version, Version)
    # Add a descendant version, then remove the ancestor asset.
    storage = EntityStorage()
    storage.add_entity(first_version)
    storage.remove_entity(char1)
    assert char1 not in storage.storage
class tunnel_capability(bsn_tlv):
    """BSN TLV carrying a bitmap of supported tunnel types.

    Wire format: type(2 bytes) | length(2 bytes) | value(8 bytes).
    Generated-style OpenFlow (pyloxi) code.
    """
    # TLV type code for tunnel_capability.
    type = 142

    def __init__(self, value=None):
        if (value != None):
            self.value = value
        else:
            self.value = 0
        return

    def pack(self):
        """Serialize to the TLV wire format, back-patching the length field."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!Q', self.value))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        # NOTE(review): joining struct.pack results with a str is Python 2 era
        # code; under Python 3 this would need b''.join — confirm the target
        # runtime before changing.
        return ''.join(packed)

    # NOTE(review): 'unpack' takes a reader rather than self — this looks
    # like a '@staticmethod' whose decorator was lost; confirm upstream.
    def unpack(reader):
        """Deserialize a tunnel_capability TLV from *reader*."""
        obj = tunnel_capability()
        _type = reader.read('!H')[0]
        assert (_type == 142)
        _length = reader.read('!H')[0]
        # Re-slice so reads are bounded by the TLV's declared length
        # (4 bytes of header already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.value = reader.read('!Q')[0]
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.value != other.value):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer *q*."""
        q.text('tunnel_capability {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('value = ')
                # Known capability bits rendered as flag names.
                value_name_map = {1: 'OFP_BSN_TUNNEL_L2GRE', 2: 'OFP_BSN_TUNNEL_VXLAN'}
                q.text(util.pretty_flags(self.value, value_name_map.values()))
            q.breakable()
        q.text('}')
def chain(layer1: Model[(InT, MidT)], layer2: Model[(MidT, Any)], *layers: Model[(Any, Any)]) -> Model[(InT, XY_YZ_OutT)]:
    """Compose two or more layers so each one feeds the next, returning a
    combined model named with '>>' between the layer names."""
    pipeline: List[Model[(Any, Any)]] = [layer1, layer2, *layers]
    first = pipeline[0]
    last = pipeline[(- 1)]
    dims: Dict[(str, Optional[int])] = {'nO': None}
    # has_dim is tri-state: True (dim set), False (dim unsupported),
    # None (supported but not yet set).
    nI_state = first.has_dim('nI')
    if nI_state is True:
        dims['nI'] = first.get_dim('nI')
    elif nI_state is None:
        dims['nI'] = None
    if last.has_dim('nO') is True:
        dims['nO'] = last.get_dim('nO')
    combined_name = '>>'.join(layer.name for layer in pipeline)
    return Model(combined_name, forward, init=init, dims=dims, layers=pipeline)
def explode(self):
    """Spread all sets apart from the scene's combined center, keyed over
    the animation range [0, frame_range]."""
    sets = settings.sets
    bounds = {}
    volumes = {}
    side_total = 0
    for bset in sets:
        bb = get_bbox_set(bset)
        bounds[bset] = bb
        size = bb['size']
        volumes[bset] = size.x * size.y * size.z
        # Accumulate per-axis extents one at a time (same float-add order
        # as before) to derive an average side length.
        side_total += size.x
        side_total += size.y
        side_total += size.z
    avg_side = side_total / (len(sets) * 3)
    # Order sets largest-volume first (ascending sort, then reversed, so
    # ties keep the same relative order as the original).
    ordered = [name for name, _ in sorted(volumes.items(), key=lambda kv: kv[1])]
    ordered.reverse()
    bbox_all = merge_bounds(list(bounds.values()))
    bbox_max = bounds[ordered[0]]
    # Per-direction tracking of the last placed bbox; all six directions
    # start from the largest set's bounds.
    dir_offset_last_bbox = {}
    for direction in range(0, 6):
        dir_offset_last_bbox[direction] = bbox_max
    scene = bpy.context.scene
    scene.frame_start = 0
    scene.frame_end = frame_range
    scene.frame_current = 0
    # The largest set stays put; every other set is pushed outward along
    # its offset from the global center.
    for bset in ordered[1:]:
        delta = bounds[bset]['center'] - bbox_all['center']
        offset_set(bset, delta, avg_side * 0.35, dir_offset_last_bbox)
    scene.frame_current = frame_range
def get_widget_content_cast(handle, params):
    """Populate a Kodi directory listing with the cast (actors) of the item
    identified by params['id'], redirecting episodes/seasons to their series."""
    log.debug('getWigetContentCast Called: {0}', params)
    server = downloadUtils.get_server()
    item_id = params['id']
    data_manager = DataManager()
    url = '{server}/emby/Users/{userid}/Items/' + item_id + '?format=json'
    result = data_manager.get_content(url)
    log.debug('ItemInfo: {0}', result)
    if not result:
        return
    # For episodes/seasons, show the parent series cast unless auto is off.
    if result.get('Type', '') in ['Episode', 'Season'] and params.get('auto', 'true') == 'true':
        series_id = result.get('SeriesId')
        if series_id:
            params['id'] = series_id
            return get_widget_content_cast(handle, params)
    list_items = []
    people = result.get('People', []) if result is not None else []
    for person in people:
        if person.get('Type') != 'Actor':
            continue
        actor_name = person.get('Name')
        actor_role = person.get('Role')
        actor_id = person.get('Id')
        image_tag = person.get('PrimaryImageTag')
        thumbnail = None
        if image_tag:
            thumbnail = downloadUtils.image_url(actor_id, 'Primary', 0, 400, 400, image_tag, server=server)
        # offscreen=True is only supported on Kodi 18+.
        if kodi_version > 17:
            list_item = xbmcgui.ListItem(label=actor_name, offscreen=True)
        else:
            list_item = xbmcgui.ListItem(label=actor_name)
        list_item.setProperty('id', actor_id)
        if thumbnail:
            list_item.setArt({'thumb': thumbnail, 'poster': thumbnail})
        list_item.setInfo(type='music', infoLabels={'mediatype': 'artist'})
        if actor_role:
            list_item.setLabel2(actor_role)
        list_items.append(('', list_item, False))
    xbmcplugin.setContent(handle, 'artists')
    xbmcplugin.addDirectoryItems(handle, list_items)
    xbmcplugin.endOfDirectory(handle, cacheToDisc=False)
def extractBluebunnytranslationsWordpressCom(item):
    """Map a feed item to a release message by tag, or reject it.

    Returns None for previews/untagged chapters, False when no known tag
    matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = (
        ('IHM', 'I Have Medicine', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag_label, series_name, tl_type in known_tags:
        if tag_label in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestJzazbz(util.ColorAssertsPyTest):
    """Parametrized conversions of named/function colors into Jzazbz."""

    # (input color, expected jzazbz serialization) pairs.
    COLORS = [('red', 'color(--jzazbz 0.13438 0.11789 0.11188)'), ('orange', 'color(--jzazbz 0.16937 0.0312 0.12308)'), ('yellow', 'color(--jzazbz 0.2096 -0.02864 0.13479)'), ('green', 'color(--jzazbz 0.09203 -0.07454 0.07996)'), ('blue', 'color(--jzazbz 0.09577 -0.04085 -0.18585)'), ('indigo', 'color(--jzazbz 0.06146 0.03051 -0.09951)'), ('violet', 'color(--jzazbz 0.16771 0.06427 -0.05514)'), ('white', 'color(--jzazbz 0.22207 -0.00016 -0.00012)'), ('gray', 'color(--jzazbz 0.11827 -0.00012 -0.00008)'), ('black', 'color(--jzazbz 0 0 0)'), ('color(--jzazbz 0.5 0.1 -0.1)', 'color(--jzazbz 0.5 0.1 -0.1)'), ('color(--jzazbz 0.5 0.1 -0.1 / 0.5)', 'color(--jzazbz 0.5 0.1 -0.1 / 0.5)'), ('color(--jzazbz 50% 50% -50% / 50%)', 'color(--jzazbz 0.5 0.25 -0.25 / 0.5)'), ('color(--jzazbz none none none / none)', 'color(--jzazbz none none none / none)'), ('color(--jzazbz 0% 0% 0%)', 'color(--jzazbz 0 0 0)'), ('color(--jzazbz 100% 100% 100%)', 'color(--jzazbz 1 0.5 0.5)'), ('color(--jzazbz -100% -100% -100%)', 'color(--jzazbz -1 -0.5 -0.5)')]

    # NOTE(review): the line below looks like a stripped
    # '@pytest.mark.parametrize(...)' decorator — confirm upstream.
    .parametrize('color1,color2', COLORS)
    def test_colors(self, color1, color2):
        """Converting color1 to jzazbz must equal the parsed color2."""
        self.assertColorEqual(Color(color1).convert('jzazbz'), Color(color2))
def create_new_fl_config_from_old_json(old_config_json_path, new_config_json_path=None, flsim_example=False):
    """Convert an old-format FL config JSON file into the new format.

    old_config_json_path: path of the old-format JSON file to read.
    new_config_json_path: optional destination path; when None the converted
        config is printed to stdout instead of written.
    flsim_example: forwarded to get_new_fl_config.

    Returns the converted config dict.

    Fix: dropped the dead `new_config = {}` initializer — it was always
    overwritten by get_new_fl_config before first use.
    """
    with open(old_config_json_path) as f:
        old_config = json.load(f)
    new_config = get_new_fl_config(old_config, flsim_example)
    if new_config_json_path is None:
        # No destination given: show the result instead of writing it.
        print(new_config)
    else:
        with open(new_config_json_path, 'w') as fo:
            json.dump(new_config, fo, indent=4)
    return new_config
class Spew():
    """A sys.settrace hook that prints each executed source line.

    Install with ``sys.settrace(Spew(...))``. *trace_names* optionally limits
    output to the listed module names; *show_values* additionally dumps the
    value of every identifier appearing on the traced line.

    Fix: the fallback source lookup indexed the ``(lines, start_lineno)``
    tuple returned by inspect.getsourcelines with the raw line number
    (wrong element / IndexError); it now indexes the line list relative to
    the starting line, and IndexError is caught alongside OSError.
    """

    def __init__(self, trace_names=None, show_values=True):
        self.trace_names = trace_names
        self.show_values = show_values

    def __call__(self, frame, event, arg):
        """Trace callback; prints 'line' events and returns itself so it
        keeps tracing nested scopes."""
        if (event == 'line'):
            lineno = frame.f_lineno
            if ('__file__' in frame.f_globals):
                filename = frame.f_globals['__file__']
                if (filename.endswith('.pyc') or filename.endswith('.pyo')):
                    # Map compiled filenames back to the .py source file.
                    filename = filename[:(- 1)]
                name = frame.f_globals['__name__']
                line = linecache.getline(filename, lineno)
            else:
                name = '[unknown]'
                try:
                    (source_lines, start_lineno) = inspect.getsourcelines(frame)
                    # getsourcelines returns (lines, first_lineno): index the
                    # line list relative to where the source starts.
                    line = source_lines[(lineno - start_lineno)]
                except (OSError, IndexError):
                    line = ('Unknown code named [%s]. VM instruction #%d' % (frame.f_code.co_name, frame.f_lasti))
            if ((self.trace_names is None) or (name in self.trace_names)):
                print(('%s:%s: %s' % (name, lineno, line.rstrip())))
                if (not self.show_values):
                    return self
                # Dump the current value of each identifier on the line.
                details = []
                tokens = _token_splitter.split(line)
                for tok in tokens:
                    if (tok in frame.f_globals):
                        details.append(('%s=%r' % (tok, frame.f_globals[tok])))
                    if (tok in frame.f_locals):
                        details.append(('%s=%r' % (tok, frame.f_locals[tok])))
                if details:
                    print(('\t%s' % ' '.join(details)))
        return self
# NOTE(review): the registration line below looks like a decorator whose
# prefix was lost in this copy (e.g. '@bot.message_handler(...)') — confirm
# against the original source.
_handler(content_types=['successful_payment'])
def got_payment(message):
    # Acknowledge a successful Telegram payment. total_amount is expressed
    # in the smallest currency unit, hence the division by 100.
    bot.send_message(message.chat.id, 'Hoooooray! Thanks for payment! We will proceed your order for `{} {}` as fast as possible! Stay in touch.\n\nUse /buy again to get a Time Machine for your friend!'.format((message.successful_payment.total_amount / 100), message.successful_payment.currency), parse_mode='Markdown')
class RequirementsFixer():
    """Graph-rewriting pass that makes every edge in a BMG graph meet its
    type requirement.

    For each input edge of each ancestor node, the required lattice type is
    obtained from EdgeRequirements; when the input node does not already
    satisfy it, a conversion node is inserted (or, for constants, the
    constant is re-created at the required type). Unsatisfiable
    requirements are recorded as Violations in ``self.errors``.
    """
    errors: ErrorReport
    bmg: BMGraphBuilder
    _typer: LatticeTyper
    _reqs: EdgeRequirements

    def __init__(self, bmg: BMGraphBuilder, typer: LatticeTyper) -> None:
        self.errors = ErrorReport()
        self.bmg = bmg
        self._typer = typer
        self._reqs = EdgeRequirements(typer)

    def _type_meets_requirement(self, t: bt.BMGLatticeType, r: bt.Requirement) -> bool:
        """Return True when lattice type *t* satisfies requirement *r*
        (ignoring whether the node is physically a matrix)."""
        assert (t != bt.Untypable)
        if (r is bt.any_requirement):
            return True
        if (r is bt.any_real_matrix):
            return _is_real_matrix(t)
        if (r is bt.any_pos_real_matrix):
            return _is_pos_real_matrix(t)
        if isinstance(r, bt.UpperBound):
            # t meets an upper bound when joining with the bound is a no-op.
            return (bt.supremum(t, r.bound) == r.bound)
        if isinstance(r, bt.AlwaysMatrix):
            return (t == r.bound)
        if (r == bt.BooleanMatrix):
            return isinstance(t, bt.BooleanMatrix)
        if (r == bt.ProbabilityMatrix):
            return isinstance(t, bt.ProbabilityMatrix)
        if (r == bt.SimplexMatrix):
            return isinstance(t, bt.SimplexMatrix)
        return (t == r)

    def _node_meets_requirement(self, node: bn.BMGNode, r: bt.Requirement) -> bool:
        """Return True when *node* (type AND matrix-ness) satisfies *r*."""
        lattice_type = self._typer[node]
        assert (lattice_type is not bt.Untypable)
        if isinstance(r, bt.AlwaysMatrix):
            return (self._typer.is_matrix(node) and self._type_meets_requirement(lattice_type, r.bound))
        if ((r is bt.any_real_matrix) or (r is bt.any_pos_real_matrix)):
            return (self._typer.is_matrix(node) and self._type_meets_requirement(lattice_type, r))
        return self._type_meets_requirement(lattice_type, r)

    def _try_to_meet_constant_requirement(self, node: bn.ConstantNode, requirement: bt.Requirement) -> Optional[bn.BMGNode]:
        """Re-create constant *node* at a type meeting *requirement*, or
        return None when that is impossible."""
        it = self._typer[node]
        result = self._try_to_broadcast(node, requirement)
        if (result is not None):
            return result
        if (requirement is bt.any_real_matrix):
            if _is_real_matrix(it):
                assert isinstance(it, bt.BMGMatrixType)
                return self.bmg.add_constant_of_matrix_type(node.value, it)
            else:
                return self.bmg.add_real_matrix(node.value)
        if (requirement is bt.any_pos_real_matrix):
            if _is_pos_real_matrix(it):
                assert isinstance(it, bt.BMGMatrixType)
                return self.bmg.add_constant_of_matrix_type(node.value, it)
            else:
                return self.bmg.add_pos_real_matrix(node.value)
        if self._type_meets_requirement(it, bt.upper_bound(requirement)):
            # Constant is convertible; pick the exact BMG type to emit it as.
            if (requirement is bt.any_requirement):
                required_type = bt.lattice_to_bmg(it)
            else:
                required_type = bt.requirement_to_type(requirement)
            if bt.must_be_matrix(requirement):
                assert isinstance(required_type, bt.BMGMatrixType)
                result = self.bmg.add_constant_of_matrix_type(node.value, required_type)
            else:
                result = self.bmg.add_constant_of_type(node.value, required_type)
            assert self._node_meets_requirement(result, requirement)
            return result
        return None

    def _meet_constant_requirement(self, node: bn.ConstantNode, requirement: bt.Requirement, consumer: bn.BMGNode, edge: str) -> bn.BMGNode:
        """Like _try_to_meet_constant_requirement, but records a Violation
        and returns the original node when the requirement cannot be met."""
        result = self._try_to_meet_constant_requirement(node, requirement)
        if (result is not None):
            return result
        self.errors.add_error(Violation(node, self._typer[node], requirement, consumer, edge, self.bmg.execution_context.node_locations(consumer)))
        return node

    def _convert_operator_to_atomic_type(self, node: bn.OperatorNode, requirement: bt.BMGLatticeType) -> bn.BMGNode:
        """Insert a conversion node turning *node* into the atomic
        (non-matrix) type *requirement*; caller guarantees convertibility."""
        node_type = self._typer[node]
        assert (node_type != requirement)
        assert bt.is_convertible_to(node_type, requirement)
        if (requirement == bt.Real):
            return self.bmg.add_to_real(node)
        if (requirement == bt.PositiveReal):
            return self.bmg.add_to_positive_real(node)
        assert ((requirement == bt.Natural) or (requirement == bt.Probability))
        assert (node_type == bt.Boolean)
        # Boolean -> Natural/Probability has no direct converter; emit
        # if-then-else selecting between typed constants 1 and 0.
        zero = self.bmg.add_constant_of_type(0.0, requirement)
        one = self.bmg.add_constant_of_type(1.0, requirement)
        return self.bmg.add_if_then_else(node, one, zero)

    def _convert_operator_to_matrix_type(self, node: bn.OperatorNode, requirement: bt.Requirement) -> bn.BMGNode:
        """Insert a matrix conversion node turning *node* into the matrix
        type named by *requirement*."""
        if isinstance(requirement, bt.AlwaysMatrix):
            requirement = requirement.bound
        assert isinstance(requirement, bt.BMGMatrixType)
        node_type = self._typer[node]
        assert (node_type != requirement)
        assert bt.is_convertible_to(node_type, requirement)
        assert self._typer.is_matrix(node)
        if isinstance(requirement, bt.RealMatrix):
            return self.bmg.add_to_real_matrix(node)
        if isinstance(requirement, bt.NegativeRealMatrix):
            return self.bmg.add_to_negative_real_matrix(node)
        assert isinstance(requirement, bt.PositiveRealMatrix)
        return self.bmg.add_to_positive_real_matrix(node)

    def _can_force_to_prob(self, inf_type: bt.BMGLatticeType, requirement: bt.Requirement) -> bool:
        """True when a real/pos-real node may be clamped to Probability."""
        return (((requirement == bt.Probability) or (requirement == bt.upper_bound(bt.Probability))) and ((inf_type == bt.Real) or (inf_type == bt.PositiveReal)))

    def _can_force_to_neg_real(self, node_type: bt.BMGLatticeType, requirement: bt.Requirement) -> bool:
        """True when a real node may be clamped to NegativeReal."""
        return (((requirement == bt.NegativeReal) or (requirement == bt.upper_bound(bt.NegativeReal))) and (node_type == bt.Real))

    def _try_to_meet_any_real_matrix_requirement(self, node: bn.OperatorNode, requirement: bt.Requirement) -> Optional[bn.BMGNode]:
        """Meet an any-real-matrix requirement via ToRealMatrix, or None."""
        assert (not self._node_meets_requirement(node, requirement))
        if (requirement is not bt.any_real_matrix):
            return None
        result = self.bmg.add_to_real_matrix(node)
        assert self._node_meets_requirement(result, requirement)
        return result

    def _try_to_meet_any_pos_real_matrix_requirement(self, node: bn.OperatorNode, requirement: bt.Requirement) -> Optional[bn.BMGNode]:
        """Meet an any-positive-real-matrix requirement, or None."""
        assert (not self._node_meets_requirement(node, requirement))
        if (requirement is not bt.any_pos_real_matrix):
            return None
        node_type = self._typer[node]
        # Negative reals cannot be converted to positive reals.
        if isinstance(node_type, bt.NegativeRealMatrix):
            return None
        result = self.bmg.add_to_positive_real_matrix(node)
        assert self._node_meets_requirement(result, requirement)
        return result

    def _try_to_meet_upper_bound_requirement(self, node: bn.OperatorNode, requirement: bt.Requirement) -> Optional[bn.BMGNode]:
        """Insert a widening conversion when *node* is below *requirement*
        in the lattice, or return None when it is not."""
        assert (not self._node_meets_requirement(node, requirement))
        node_type = self._typer[node]
        if (not self._type_meets_requirement(node_type, bt.upper_bound(requirement))):
            return None
        assert (not isinstance(requirement, bt.UpperBound))
        if bt.must_be_matrix(requirement):
            result = self._convert_operator_to_matrix_type(node, requirement)
        else:
            assert isinstance(requirement, bt.BMGLatticeType)
            result = self._convert_operator_to_atomic_type(node, requirement)
        assert self._node_meets_requirement(result, requirement)
        return result

    def _try_to_force_to_prob(self, node, requirement) -> Optional[bn.BMGNode]:
        """Clamp a real/pos-real node to Probability when permitted."""
        node_type = self._typer[node]
        if (not self._can_force_to_prob(node_type, requirement)):
            return None
        assert ((node_type == bt.Real) or (node_type == bt.PositiveReal))
        assert self._node_meets_requirement(node, node_type)
        return self.bmg.add_to_probability(node)

    def _try_to_force_to_neg_real(self, node, requirement) -> Optional[bn.BMGNode]:
        """Clamp a real node to NegativeReal when permitted."""
        node_type = self._typer[node]
        if (not self._can_force_to_neg_real(node_type, requirement)):
            return None
        return self.bmg.add_to_negative_real(node)

    def _try_to_matrix_fill(self, node: bn.BMGNode, requirement: bt.Requirement) -> Optional[bn.BMGNode]:
        """Meet a matrix requirement from a 1x1 node by filling, or None."""
        if ((not isinstance(requirement, bt.BMGMatrixType)) or requirement.is_singleton()):
            return None
        node_type = self._typer[node]
        if ((not isinstance(node_type, bt.BMGMatrixType)) or (not node_type.is_singleton())):
            return None
        # First coerce the scalar to the required element type, then fill.
        scalar_req = requirement.with_dimensions(1, 1)
        converted_node = self._try_to_meet_requirement(node, scalar_req)
        if (converted_node is None):
            return None
        r = self.bmg.add_natural(requirement.rows)
        c = self.bmg.add_natural(requirement.columns)
        result = self.bmg.add_fill_matrix(converted_node, r, c)
        assert self._node_meets_requirement(result, requirement)
        return result

    def _try_to_broadcast(self, node: bn.BMGNode, requirement: bt.Requirement) -> Optional[bn.BMGNode]:
        """Meet a matrix requirement by fill or broadcast, or return None.

        Broadcasting is only legal when each mismatched dimension of the
        node is exactly 1 (standard broadcast rule)."""
        result = self._try_to_matrix_fill(node, requirement)
        if (result is not None):
            return result
        if ((not isinstance(requirement, bt.BMGMatrixType)) or requirement.is_singleton()):
            return None
        node_type = self._typer[node]
        if ((not isinstance(node_type, bt.BMGMatrixType)) or node_type.is_singleton()):
            return None
        if ((node_type.columns == requirement.columns) and (node_type.rows == requirement.rows)):
            return None
        if ((node_type.columns != 1) and (node_type.columns != requirement.columns)):
            return None
        if ((node_type.rows != 1) and (node_type.rows != requirement.rows)):
            return None
        # Coerce element type at the current (smaller) size, then broadcast.
        wrong_size_req = requirement.with_dimensions(node_type.rows, node_type.columns)
        converted_node = self._try_to_meet_requirement(node, wrong_size_req)
        if (converted_node is None):
            return None
        r = self.bmg.add_natural(requirement.rows)
        c = self.bmg.add_natural(requirement.columns)
        result = self.bmg.add_broadcast(converted_node, r, c)
        assert self._node_meets_requirement(result, requirement)
        return result

    def _try_to_meet_operator_requirement(self, node: bn.OperatorNode, requirement: bt.Requirement) -> Optional[bn.BMGNode]:
        """Try every conversion strategy in priority order; None on failure."""
        assert (not self._node_meets_requirement(node, requirement))
        result = self._try_to_broadcast(node, requirement)
        if (result is not None):
            return result
        result = self._try_to_meet_any_real_matrix_requirement(node, requirement)
        if (result is not None):
            return result
        result = self._try_to_meet_any_pos_real_matrix_requirement(node, requirement)
        if (result is not None):
            return result
        result = self._try_to_meet_upper_bound_requirement(node, requirement)
        if (result is not None):
            return result
        result = self._try_to_force_to_prob(node, requirement)
        if (result is not None):
            return result
        result = self._try_to_force_to_neg_real(node, requirement)
        if (result is not None):
            return result
        return None

    def _meet_operator_requirement(self, node: bn.OperatorNode, requirement: bt.Requirement, consumer: bn.BMGNode, edge: str) -> bn.BMGNode:
        """Convert an operator node or record a Violation and return it."""
        assert (not self._node_meets_requirement(node, requirement))
        result = self._try_to_meet_operator_requirement(node, requirement)
        if (result is not None):
            return result
        node_type = self._typer[node]
        self.errors.add_error(Violation(node, node_type, requirement, consumer, edge, self.bmg.execution_context.node_locations(consumer)))
        return node

    def _check_requirement_validity(self, node: bn.BMGNode, requirement: bt.Requirement, consumer: bn.BMGNode, edge: str) -> None:
        """Sanity-check invariants; raise InternalError on compiler bugs."""
        ice = 'Internal compiler error in edge requirements checking:\n'
        if (requirement in {bt.Tensor, bt.One, bt.Zero, bt.Untypable}):
            raise InternalError(f'{ice} Requirement {requirement} is an invalid requirement.')
        node_type = type(node)
        if (node_type in [bn.Observation, bn.Query, bn.FactorNode]):
            # BUG FIX: the second string previously lacked the f prefix, so
            # the literal text "{edge}" was emitted instead of the edge name.
            raise InternalError((f'{ice} Node of type {node_type.__name__} is being checked for requirements but ' + f"should never have an outgoing edge '{edge}'."))
        if isinstance(node, bn.ConstantNode):
            return
        if (not is_supported_by_bmg(node)):
            raise InternalError(((f'{ice} Node of type {node_type.__name__} is being checked for requirements but ' + 'is not supported by BMG; the unsupported node checker should already ') + 'have either replaced the node or produced an error.'))
        lattice_type = self._typer[node]
        if (lattice_type is bt.Untypable):
            raise InternalError((((f'{ice} Node of type {node_type.__name__} is being checked for requirements but ' + 'the lattice typer is unable to assign it a type. Requirements checking always ') + 'needs to know the lattice type of a node when checking requirements on its ') + 'outgoing edges.'))

    def _try_to_meet_requirement(self, node: bn.BMGNode, requirement: bt.Requirement) -> Optional[bn.BMGNode]:
        """Dispatch to the constant or operator strategy; None on failure."""
        if self._node_meets_requirement(node, requirement):
            return node
        if isinstance(node, bn.ConstantNode):
            return self._try_to_meet_constant_requirement(node, requirement)
        assert isinstance(node, bn.OperatorNode)
        return self._try_to_meet_operator_requirement(node, requirement)

    def meet_requirement(self, node: bn.BMGNode, requirement: bt.Requirement, consumer: bn.BMGNode, edge: str) -> bn.BMGNode:
        """Return a node meeting *requirement* for the given edge, recording
        a Violation (and returning the original node) when impossible."""
        self._check_requirement_validity(node, requirement, consumer, edge)
        # Untyped constants are always re-emitted at a concrete type, even
        # when they would nominally meet the requirement as-is.
        if isinstance(node, bn.UntypedConstantNode):
            return self._meet_constant_requirement(node, requirement, consumer, edge)
        if self._node_meets_requirement(node, requirement):
            return node
        if isinstance(node, bn.ConstantNode):
            return self._meet_constant_requirement(node, requirement, consumer, edge)
        assert isinstance(node, bn.OperatorNode)
        return self._meet_operator_requirement(node, requirement, consumer, edge)

    def fix_problems(self) -> bool:
        """Run one pass over all ancestor nodes, rewriting inputs that do not
        meet their edge requirements. Returns True when anything changed."""
        made_progress = False
        nodes = self.bmg.all_ancestor_nodes()
        for node in nodes:
            requirements = self._reqs.requirements(node)
            edges = get_edge_labels(node)
            node_was_updated = False
            for i in range(len(requirements)):
                new_input = self.meet_requirement(node.inputs[i], requirements[i], node, edges[i])
                if (node.inputs[i] is not new_input):
                    node.inputs[i] = new_input
                    node_was_updated = True
            if node_was_updated:
                # Input types changed, so this node's own type may change too.
                self._typer.update_type(node)
                made_progress = True
        return made_progress
def test_retrieving_hidden_posts(topic, user):
    """Hidden posts are invisible to default queries but reachable via with_hidden()."""
    post = Post(content='stuff')
    post.save(user, topic)
    post.hide(user)
    # Default query API must not find the hidden post...
    assert Post.query.get(post.id) is None
    # ...but with_hidden() exposes it.
    assert Post.query.with_hidden().get(post.id) == post
    # Same contract via filter().first().
    assert Post.query.filter(Post.id == post.id).first() is None
    found = Post.query.with_hidden().filter(Post.id == post.id).first()
    assert found == post
class pygaze_log(item):
    """OpenSesame item that writes messages — and optionally every experiment
    variable — to the eye-tracker logfile."""
    description = u'Writes information to the eye-tracker logfile'

    def reset(self):
        """Restore the item's default variables."""
        self.var.msg = u''
        self.var.auto_log = u'no'
        self.var.throttle = 2

    def run(self):
        """Log each line of the message, then (when auto_log is enabled) every
        experiment variable, pausing `throttle` between writes to avoid
        flooding the tracker."""
        self.set_item_onset()
        tracker = self.experiment.pygaze_eyetracker
        for line in self.var.msg.split(u'\n'):
            tracker.log(self.syntax.eval_text(line))
            self.clock.sleep(self.var.throttle)
        if self.var.auto_log != u'yes':
            return
        for name, info in self.experiment.var.inspect().items():
            tracker.log_var(name, info[u'value'])
            self.clock.sleep(self.var.throttle)
class Command(DanubeCloudCommand):
    help = 'Check the existence of SECRET_KEY in local_settings.py and generate one if needed.'

    def handle(self, *args, **options):
        """Ensure local_settings.py defines a non-empty SECRET_KEY.

        When the setting is missing, a new random key is generated, appended
        to local_settings.py, and reported. An empty key raises CommandError.
        """
        try:
            from core import local_settings
        except ImportError:
            local_settings = None
            fn = self._path(self.PROJECT_DIR, 'core', 'local_settings.py')
        else:
            # __file__ may point at the compiled .pyc; normalize to the .py source.
            fn = local_settings.__file__.replace('local_settings.pyc', 'local_settings.py')
        try:
            # When local_settings is None (import failed) this raises
            # AttributeError too, funneling both cases into key generation.
            key = local_settings.SECRET_KEY
        except AttributeError:
            self.display('Missing SECRET_KEY in local_settings.py', color='yellow')
            # BUG FIX: bytes.encode('base64') is Python 2 only; use the base64
            # module so this works on Python 3 as well.
            import base64
            key = base64.b64encode(os.urandom(128)).decode('ascii')[:76]
            with open(fn, 'a') as f:
                f.write(('\nSECRET_KEY="""' + key) + '"""\n')
            self.display('New SECRET_KEY was saved in %s' % fn, color='green')
        if key:
            self.display('SECRET_KEY is OK', color='green')
        else:
            raise CommandError('SECRET_KEY is empty!')
class MapTest(SeleniumTestCase):
    # Selenium test of the analyse-page map and its date slider.
    # NOTE(review): the XPath predicates below, e.g. "//*[='leaflet-zoom-animated' ...]",
    # are missing the attribute name on the left of '=' (likely @class for the
    # class names and @fill for the color) and appear garbled by whatever
    # produced this source — confirm against the original test before relying
    # on them; as written these XPaths are invalid.
    def test_map_slider(self):
        # Open the analyse view with the map tab preselected.
        self.browser.get((self.live_server_url + '/analyse/#org=CCG&numIds=0212000AA&denomIds=2.12&selectedTab=map'))
        self.find_by_xpath("//*[='leaflet-zoom-animated' and name()='svg']")
        # Expect exactly one darkest-red path initially, labelled Sep '16.
        self.assertEqual(len(self.browser.find_elements(By.XPATH, "//*[='#67001f' and name()='path']")), 1)
        self.assertEqual(self.find_by_xpath("//p[='chart-sub-title']").text, "in Sep '16")
        # Drag the date slider to the first month via JS and re-check the map.
        js = "\n        var slider = $('#chart-date-slider');\n        slider.val(0);\n        slider.trigger('change');\n        "
        self.browser.execute_script(js)
        # After moving the slider, two darkest-red paths and an Apr '13 label.
        self.assertEqual(len(self.browser.find_elements(By.XPATH, "//*[='#67001f' and name()='path']")), 2)
        self.assertEqual(self.find_by_xpath("//p[='chart-sub-title']").text, "in Apr '13")
def _assert_format(instances, namespace=None):
    """Check formatting of *instances* via _check_format.

    *instances* may be a single list, or a dict of lists; for a dict, each
    list is checked against a namespace accumulated from the names of the
    lists checked before it. Returns True when every check passes.
    """
    if not namespace:
        namespace = []
    if not isinstance(instances, dict):
        return _check_format(instances, namespace=namespace)
    for batch in instances.values():
        if not _check_format(batch, namespace=namespace):
            return False
        # Names seen in this batch join the namespace for later batches.
        namespace.extend(name for name, _entry in batch)
    return True
def run_window_function(options):
    """Run emlearn's window_function CLI tool with *options* as --key=value flags.

    Returns the tool's stdout decoded as UTF-8; raises
    subprocess.CalledProcessError on a nonzero exit status.
    """
    module = 'emlearn.tools.window_function'
    args = ['python3', '-m', module]
    for (key, value) in options.items():
        args.append('--{}={}'.format(key, value))
    # BUG FIX: pass the argument list directly instead of shell=True with a
    # joined string — avoids shell injection and breakage when an option
    # value contains spaces or shell metacharacters.
    stdout = subprocess.check_output(args)
    return stdout.decode('utf-8')
class pad_last_dim(Operator):
    """Operator that pads the last dimension of a tensor out to a fixed size.

    Args:
        ndim: expected rank of the input tensor (must match; at most 4).
        out_dim: static size of the last dimension after padding; must be
            strictly larger than the input's current last-dimension size.
    """

    def __init__(self, ndim: int, out_dim: int):
        super().__init__()
        self._attrs['op'] = 'pad_last_dim'
        self._attrs['ndim'] = ndim
        self._attrs['out_dim'] = out_dim
        self.shape_eval_template = SHAPE_FUNC_TEMPLATE
        self.shape_save_template = SHAPE_ASSIGNMENT_TEMPLATE

    def _infer_shapes(self, x: Tensor):
        """Validate x's rank and last-dim size; return the padded shape."""
        x_shape = x._attrs['shape']
        ndim = len(x_shape)
        # The pad target must exceed every possible value of the last dim.
        if (self._attrs['out_dim'] <= max(x_shape[(- 1)]._attrs['values'])):
            raise RuntimeError('Output of padded dim must be larger than original dim')
        if (ndim != self._attrs['ndim']):
            raise RuntimeError('Data/Op dims mismatch')
        if (ndim > 4):
            raise NotImplementedError
        output_shape = list(x_shape)
        output_shape[(- 1)] = IntImm(self._attrs['out_dim'])
        return output_shape

    def __call__(self, x: Tensor) -> Tensor:
        """Apply the op to *x*, returning the padded output tensor.

        BUG FIX: the return annotation was List[Tensor], but a single Tensor
        is (and was) returned; the annotation now matches the behavior.
        """
        self._attrs['inputs'] = [x]
        self._set_depth()
        output_shape = self._infer_shapes(x)
        output = Tensor(output_shape, src_ops={self}, dtype=x._attrs['dtype'])
        self._attrs['outputs'] = [output]
        return output

    def _get_op_attributes(self):
        """Return the constructor kwargs needed to re-create this op."""
        return {'ndim': self._attrs['ndim'], 'out_dim': self._attrs['out_dim']}

    def gen_function(self) -> str:
        """Render this op's backend-specific source via the target registry."""
        target = backend.target.Target.current()
        template_path = target.template_path()
        func_key = '{target}.{op}.gen_function'.format(target=target.name(), op=self._attrs['op'])
        func = registry.get(func_key)
        return func(self._attrs, template_path, self.shape_eval_template, self.shape_save_template)
def get_deck_template() -> 'Template':
    """Return the Jinja2 'template.html' deck template shipped in the
    package's html/ directory (HTML autoescaping enabled)."""
    from jinja2 import Environment, FileSystemLoader, select_autoescape
    here = os.path.dirname(os.path.abspath(__file__))
    loader = FileSystemLoader(os.path.join(here, 'html'))
    environment = Environment(loader=loader, autoescape=select_autoescape(enabled_extensions=('html',)))
    return environment.get_template('template.html')
def run():
    """Generate a Verilog top module exercising a STARTUPE2 USRCCLKO
    connection and record the per-site choices in params.json
    (prjxray-style fuzzer specimen generator)."""
    params = {'tiles': []}
    for (tile, sites) in gen_sites():
        for (site_type, site) in sites.items():
            p = {}
            p['tile'] = tile
            p['site'] = site
            p['CONNECTION'] = random.choice(('HARD_ZERO', 'CLOCK'))
            params['tiles'].append(p)
    # NOTE(review): everything below runs AFTER the loops and uses the leaked
    # loop variable `p`, so only the LAST site's CONNECTION drives the emitted
    # Verilog. If one instance per site was intended, this block probably
    # belongs inside the loop — possible indentation loss in this source;
    # confirm against the original.
    print("\nmodule top (input wire clk);\n    (* KEEP, DONT_TOUCH *)\n    STARTUPE2 STARTUPE2 (\n        .CLK(1'b0),\n        .GSR(1'b0),\n        .GTS(1'b0),\n        .KEYCLEARB(1'b1),\n        .PACK(1'b0),\n        .PREQ(),\n        \n        // Drive clock.")
    connection = p['CONNECTION']
    # NOTE(review): 'HARD_ONE' is handled here but can never be produced by
    # the random.choice(('HARD_ZERO', 'CLOCK')) above — verify intent.
    if (connection == 'HARD_ZERO'):
        print("        .USRCCLKO (1'b0),")
    elif (connection == 'HARD_ONE'):
        print("        .USRCCLKO (1'b1),")
    else:
        print('        .USRCCLKO (clk),')
    print("\n        .USRCCLKTS(1'b0), \n        .USRDONEO (1'b0),\n        .USRDONETS(1'b1),\n        .CFGCLK(),\n        .CFGMCLK(),\n        .EOS()\n    );\n\nendmodule\n")
    with open('params.json', 'w') as f:
        json.dump(params, f, indent=2)
def holeCol(func):
    """Decorator for hole-drawing methods: flush any pending path, then run
    *func* inside a saved context with the stroke color set to the ``color``
    keyword argument (default Color.INNER_CUT), stroking again afterwards.

    BUG FIX: the original had a bare ``(func)`` line where a stripped
    ``@wraps(func)`` decorator used to be; functools.wraps is restored so
    the wrapped function keeps its name and docstring.
    """
    import functools

    @functools.wraps(func)
    def f(self, *args, **kw):
        # Pop the color so it is not forwarded to func.
        if ('color' in kw):
            color = kw.pop('color')
        else:
            color = Color.INNER_CUT
        self.ctx.stroke()
        with self.saved_context():
            self.set_source_color(color)
            func(self, *args, **kw)
            self.ctx.stroke()
    return f
class StalkerShotAddPrevisOutputOperator(bpy.types.Operator):
    """Blender operator that adds a previs output to a Stalker Shot entity."""
    bl_label = 'Add Previs Output'
    bl_idname = 'stalker.shot_add_previs_output_op'
    stalker_entity_id = bpy.props.IntProperty(name='stalker_entity_id')
    stalker_entity_name = bpy.props.StringProperty(name='stalker_entity_name')

    def execute(self, context):
        """Look up the Shot by its Stalker id and report completion."""
        logger.debug('inside %s.execute()' % self.__class__.__name__)
        shot = Shot.query.get(self.stalker_entity_id)
        logger.debug('shot: %s' % shot)
        # Blender expects a set of status strings.
        return {'FINISHED'}
def test_div_param(some_thr):
    """A div_param transformation attached to both ends of a computation
    divides the data by p1 on input and by p2 on output."""
    dtype = numpy.float32
    # Keep the get_test_array call order stable (it may consume RNG state).
    data = get_test_array((1000,), dtype)
    p1 = get_test_array((1,), dtype)[0]
    p2 = get_test_array((1,), dtype)[0]
    data_dev = some_thr.to_device(data)
    result_dev = some_thr.empty_like(data_dev)
    computation = get_test_computation(data_dev)
    scale = tr.div_param(data_dev, dtype)
    computation.parameter.input.connect(scale, scale.output, input_prime=scale.input, p1=scale.param)
    computation.parameter.output.connect(scale, scale.input, output_prime=scale.output, p2=scale.param)
    compiled = computation.compile(some_thr)
    compiled(result_dev, p1, data_dev, p2)
    assert diff_is_negligible(result_dev.get(), (data / p1) / p2)
def extcodehash_eip2929(computation: ComputationAPI) -> None:
    """EXTCODEHASH opcode with EIP-2929 warm/cold account-access gas rules."""
    address = force_bytes_to_address(computation.stack_pop1_bytes())
    state = computation.state
    # Charge the warm/cold access cost before touching the account.
    _consume_gas_for_account_load(computation, address, mnemonics.EXTCODEHASH)
    if not state.account_is_empty(address):
        computation.stack_push_bytes(state.get_code_hash(address))
    else:
        # Empty accounts push a zero byte per the EXTCODEHASH specification.
        computation.stack_push_bytes(constants.NULL_BYTE)
def infer(model, device, data_type, input_size, output_size, batch_size, args):
    """Dispatch an inference benchmark run to the device-specific helper.

    Returns:
        The elapsed time reported by infer_cpu / infer_gpu / infer_nnpi.

    Raises:
        ValueError: for an unrecognized *device* name. (Previously an
        unknown device fell through and crashed with UnboundLocalError.)
    """
    if (device == 'cpu'):
        elap = infer_cpu(model, device, data_type, input_size, output_size, batch_size, args)
    elif (device == 'gpu'):
        elap = infer_gpu(model, device, data_type, input_size, output_size, batch_size, args)
    elif (device == 'nnpi'):
        elap = infer_nnpi(model, device, data_type, input_size, output_size, batch_size, args)
    else:
        raise ValueError('Unknown device: {}'.format(device))
    return elap
def is_boolean(value):
    """Coerce *value* to a bool, raising VdtTypeError when it cannot be.

    Strings are looked up case-insensitively in the module's bool_dict;
    other values are compared by equality (not identity), so 1/1.0 count
    as True and 0/0.0 as False.
    """
    if isinstance(value, str):
        try:
            return bool_dict[value.lower()]
        except KeyError:
            raise VdtTypeError(value)
    # Deliberate == comparisons (not `is`): preserves 0/1 coercion.
    if value == False:
        return False
    if value == True:
        return True
    raise VdtTypeError(value)
class OptionSeriesVennSonificationTracksMapping(Options):
    # Highcharts sonification track-mapping options for Venn series.
    # NOTE(review): `text` appears twice below (getter-style then
    # setter-style). As written the second def shadows the first; this
    # pattern elsewhere in the codebase is produced by stripped
    # @property / @x.setter decorators — confirm against the original source.
    def frequency(self) -> 'OptionSeriesVennSonificationTracksMappingFrequency':
        # Sub-options object for the 'frequency' mapping.
        return self._config_sub_data('frequency', OptionSeriesVennSonificationTracksMappingFrequency)
    def gapBetweenNotes(self) -> 'OptionSeriesVennSonificationTracksMappingGapbetweennotes':
        # Sub-options object for the 'gapBetweenNotes' mapping.
        return self._config_sub_data('gapBetweenNotes', OptionSeriesVennSonificationTracksMappingGapbetweennotes)
    def highpass(self) -> 'OptionSeriesVennSonificationTracksMappingHighpass':
        # Sub-options object for the 'highpass' filter mapping.
        return self._config_sub_data('highpass', OptionSeriesVennSonificationTracksMappingHighpass)
    def lowpass(self) -> 'OptionSeriesVennSonificationTracksMappingLowpass':
        # Sub-options object for the 'lowpass' filter mapping.
        return self._config_sub_data('lowpass', OptionSeriesVennSonificationTracksMappingLowpass)
    def noteDuration(self) -> 'OptionSeriesVennSonificationTracksMappingNoteduration':
        # Sub-options object for the 'noteDuration' mapping.
        return self._config_sub_data('noteDuration', OptionSeriesVennSonificationTracksMappingNoteduration)
    def pan(self) -> 'OptionSeriesVennSonificationTracksMappingPan':
        # Sub-options object for the stereo 'pan' mapping.
        return self._config_sub_data('pan', OptionSeriesVennSonificationTracksMappingPan)
    def pitch(self) -> 'OptionSeriesVennSonificationTracksMappingPitch':
        # Sub-options object for the 'pitch' mapping.
        return self._config_sub_data('pitch', OptionSeriesVennSonificationTracksMappingPitch)
    def playDelay(self) -> 'OptionSeriesVennSonificationTracksMappingPlaydelay':
        # Sub-options object for the 'playDelay' mapping.
        return self._config_sub_data('playDelay', OptionSeriesVennSonificationTracksMappingPlaydelay)
    def rate(self) -> 'OptionSeriesVennSonificationTracksMappingRate':
        # Sub-options object for the 'rate' mapping.
        return self._config_sub_data('rate', OptionSeriesVennSonificationTracksMappingRate)
    def text(self):
        # Getter for the 'text' mapping (default None).
        return self._config_get(None)
    def text(self, text: str):
        # Setter for the 'text' mapping.
        self._config(text, js_type=False)
    def time(self) -> 'OptionSeriesVennSonificationTracksMappingTime':
        # Sub-options object for the 'time' mapping.
        return self._config_sub_data('time', OptionSeriesVennSonificationTracksMappingTime)
    def tremolo(self) -> 'OptionSeriesVennSonificationTracksMappingTremolo':
        # Sub-options object for the 'tremolo' mapping.
        return self._config_sub_data('tremolo', OptionSeriesVennSonificationTracksMappingTremolo)
    def volume(self) -> 'OptionSeriesVennSonificationTracksMappingVolume':
        # Sub-options object for the 'volume' mapping.
        return self._config_sub_data('volume', OptionSeriesVennSonificationTracksMappingVolume)
# NOTE(review): the two bare parenthesized lines below are almost certainly
# stripped hypothesis decorators — `@given(x_input=INTEGER_ST)` and
# `@settings(deadline=DEADLINE, max_examples=5)`. As written, the first line
# is not valid Python syntax; restore the decorators from the original source.
(x_input=INTEGER_ST)
(deadline=DEADLINE, max_examples=5)
def test_workflow_within_eager_workflow(x_input: int):
    # Eager workflow: run subworkflow, then double its result.
    async def eager_wf(x: int) -> int:
        out = (await subworkflow(x=x))
        return (await double(x=out))
    result = asyncio.run(eager_wf(x=x_input))
    # Expected end-to-end result: subworkflow adds 1, double multiplies by 2.
    assert (result == ((x_input + 1) * 2))
class OptionPlotoptionsDependencywheelSonificationContexttracksMappingFrequency(Options):
    # Highcharts 'frequency' mapping options for dependency-wheel
    # sonification context tracks.
    # NOTE(review): each name below is defined twice (getter-style then
    # setter-style); the second def shadows the first as written. This
    # pattern is typical of stripped @property / @x.setter decorators —
    # confirm against the original source.
    def mapFunction(self):
        # Getter for 'mapFunction' (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter for 'mapFunction'.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter for 'mapTo' (default None).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter for 'mapTo'.
        self._config(text, js_type=False)
    def max(self):
        # Getter for 'max' (default None).
        return self._config_get(None)
    def max(self, num: float):
        # Setter for 'max'.
        self._config(num, js_type=False)
    def min(self):
        # Getter for 'min' (default None).
        return self._config_get(None)
    def min(self, num: float):
        # Setter for 'min'.
        self._config(num, js_type=False)
    def within(self):
        # Getter for 'within' (default None).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter for 'within'.
        self._config(value, js_type=False)
def get_specialized_executioners(project) -> dict[(str, SpecializedExecutioner)]:
    """Instantiate every specialized minion declared in
    specialized_minions.yaml, keyed by its declared name."""
    with open('clippinator/minions/specialized_minions.yaml') as config:
        entries = yaml.load(config, Loader=yaml.FullLoader)
    executioners = {}
    for entry in entries:
        # YAML keys use dashes; Python keyword arguments need underscores.
        kwargs = {key.replace('-', '_'): value for key, value in entry.items()}
        executioners[entry['name']] = specialized_executioner(**kwargs)(project)
    return executioners
class BackupDefineView(APIView):
    """API view for listing, creating, updating and deleting VM backup
    definitions."""
    # Default ordering and the fields clients may order by.
    order_by_default = ('vm__hostname', '-id')
    order_by_fields = ('name', 'disk_id')
    # Maps client-facing order keys to ORM lookup paths.
    order_by_field_map = {'hostname': 'vm__hostname', 'created': 'id'}
    def get(self, vm, define, many=False, extended=False):
        """Return one backup definition, or a list of them.

        With many=True and neither full nor extended mode, only the
        definition names are returned.
        """
        if extended:
            ser_class = ExtendedBackupDefineSerializer
        else:
            ser_class = BackupDefineSerializer
        if many:
            if (self.full or self.extended):
                if define:
                    res = ser_class(self.request, define, many=True).data
                else:
                    res = []
            else:
                res = list(define.values_list('name', flat=True))
        else:
            res = ser_class(self.request, define).data
        return SuccessTaskResponse(self.request, res, vm=vm)
    # NOTE(review): the bare `_vm_operational` lines before post/put/delete
    # look like stripped `@_vm_operational` decorators (VM-state guard) —
    # as written they are no-op expressions; confirm against the original.
    _vm_operational
    def post(self, vm, define, vm_template=False, **kwargs):
        """Create a backup definition, filling schedule defaults first."""
        data2 = define_schedule_defaults(define.name)
        data2.update(self.data)
        ser = BackupDefineSerializer(self.request, define, data=data2, vm_template=vm_template)
        if (not ser.is_valid()):
            return FailureTaskResponse(self.request, ser.errors, vm=vm)
        ser.object.save()
        return SuccessTaskResponse(self.request, ser.data, status=scode.HTTP_201_CREATED, vm=vm, detail_dict=detail_dict('bkpdef', ser), msg=LOG_BKPDEF_CREATE)
    _vm_operational
    def put(self, vm, define, **kwargs):
        """Partially update an existing backup definition."""
        ser = BackupDefineSerializer(self.request, define, data=self.data, partial=True)
        if (not ser.is_valid()):
            return FailureTaskResponse(self.request, ser.errors, vm=vm)
        ser.object.save()
        return SuccessTaskResponse(self.request, ser.data, vm=vm, detail_dict=detail_dict('bkpdef', ser), msg=LOG_BKPDEF_UPDATE)
    _vm_operational
    def delete(self, vm, define, **kwargs):
        """Delete a backup definition."""
        ser = BackupDefineSerializer(self.request, define)
        ser.object.delete()
        return SuccessTaskResponse(self.request, None, vm=vm, detail_dict=detail_dict('bkpdef', ser, data={}), msg=LOG_BKPDEF_DELETE)
    # NOTE(review): takes `cls` as first argument but has no @classmethod
    # decorator here — likely stripped; confirm against the original.
    def create_from_template(cls, request, vm, vm_define_backup, log=logger):
        """Create backup definitions for *vm* from its template's
        vm_define_backup list; failures are logged, not raised."""
        if (vm_define_backup and isinstance(vm_define_backup, list)):
            # The per-definition creation path expects POST semantics.
            request = set_request_method(request, 'POST')
            for (i, data) in enumerate(vm_define_backup):
                try:
                    # Template entries may name the definition either way.
                    try:
                        bkpdef = data['bkpdef']
                    except KeyError:
                        bkpdef = data['name']
                    (disk_id, real_disk_id, zfs_filesystem) = get_disk_id(request, vm, data)
                    log.info('Creating backup definition [%d] "%s" for vm=%s, disk_id=%d defined by template %s', i, bkpdef, vm, disk_id, vm.template)
                    define = get_object(request, BackupDefine, {'name': bkpdef, 'vm': vm, 'disk_id': real_disk_id})
                    res = cls(request, data=data).post(vm, define, vm_template=True)
                    if (res.status_code != scode.HTTP_201_CREATED):
                        raise APIError(('vm_define_backup error [%s]: %s' % (res.status_code, res.data)))
                except Exception as ex:
                    # Best-effort: one bad template entry must not block the rest.
                    log.warn('Failed to create backup definition [%d] for vm=%s defined by template %s with data="%s". Error: %s', i, vm, vm.template, data, ex)
class RevisionMiddleware():
    """Django middleware wrapping each request/response cycle in a
    django-reversion revision block."""
    manage_manually = False  # create the revision automatically
    using = None             # default database alias
    atomic = True            # wrap the revision in a transaction

    def __init__(self, get_response):
        wrap = create_revision(manage_manually=self.manage_manually, using=self.using, atomic=self.atomic, request_creates_revision=self.request_creates_revision)
        self.get_response = wrap(get_response)

    def request_creates_revision(self, request):
        """Hook deciding whether this request should open a revision."""
        return _request_creates_revision(request)

    def __call__(self, request):
        return self.get_response(request)
def generate_edges(graph, root, graph_nodes):
    """Walk from *root* through degree-<=2 nodes, appending the traversed
    path to graph['edges'] and recording junction fan-outs (degree > 2)
    in graph['joins'].

    NOTE: graph_nodes is mutated in place — the edge back to the previous
    node is removed from each visited node's adjacency set.
    """
    path = [root]
    previous = None
    while True:
        neighbours = graph_nodes[root]
        neighbours -= {previous}  # in-place: drop the edge we arrived on
        if len(neighbours) > 1:
            # Junction: close the current edge and register the join fan-out
            # in both directions.
            graph['edges'].append(path)
            graph['joins'].setdefault(root, set())
            graph['joins'][root] |= neighbours
            for other in graph_nodes[root]:
                graph['joins'].setdefault(other, set()).add(root)
            break
        if not neighbours:
            # Dead end: close the current edge.
            graph['edges'].append(path)
            break
        # Exactly one way forward; follow it.
        (next_node,) = neighbours
        path.append(next_node)
        (previous, root) = (root, next_node)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.