@click.command(short_help='Install openapi module.')
@click.option('-t', '--target', metavar='TARGET', type=click.Path(), default='C:\\', help='Target directory for installation. Will create "OpenAPI" folder under this directory for install. (default: "C:\\")')
@click.option('--cleanup/--no-cleanup', default=True, help='Clean up temporary directory after install. (default: true)')
@_option(default=5, show_default=True)
def openapi(target, cleanup):
with contextlib.ExitStack() as stack:
tempdir = tempfile.mkdtemp()
logger.info('Created temporary directory: %s', tempdir)
if cleanup:
stack.callback(shutil.rmtree, tempdir)
logger.info('Registered to remove the temporary directory after install.')
installer_filename = 'OpenAPISetup.exe'
installer_filepath = os.path.join(tempdir, installer_filename)
logger.info('Downloading installer: %s', installer_filepath)
download_openapi_installer(installer_filepath)
iss_filename = 'setup.iss'
iss_filepath = os.path.join(tempdir, iss_filename)
logger.info('Preparing .iss file: %s', iss_filepath)
prepare_issfile_for_install(iss_filepath, target)
log_filename = 'setup.log'
log_filepath = os.path.join(tempdir, log_filename)
try:
_return_code = run_installer_with_issfile(installer_filepath, iss_filepath, log_filepath, cwd=tempdir)
except subprocess.CalledProcessError as e:
logger.exception('Failed to install openapi with return code: %d', e.returncode)
raise RuntimeError(('Failed to install openapi with return code: %d' % e.returncode)) from e
else:
logger.info('Successfully installed openapi.')

class HypreADS(PCBase):
def initialize(self, obj):
(A, P) = obj.getOperators()
appctx = self.get_appctx(obj)
prefix = obj.getOptionsPrefix()
V = get_function_space(obj.getDM())
mesh = V.mesh()
family = str(V.ufl_element().family())
formdegree = V.finat_element.formdegree
degree = V.ufl_element().degree()
try:
degree = max(degree)
except TypeError:
pass
if ((formdegree != 2) or (degree != 1)):
raise ValueError(('Hypre ADS requires lowest order RT elements! (not %s of degree %d)' % (family, degree)))
P1 = FunctionSpace(mesh, 'Lagrange', 1)
NC1 = FunctionSpace(mesh, ('N1curl' if mesh.ufl_cell().is_simplex() else 'NCE'), 1)
G_callback = appctx.get('get_gradient', None)
if (G_callback is None):
G = chop(Interpolator(grad(TestFunction(P1)), NC1).callable().handle)
else:
G = G_callback(P1, NC1)
C_callback = appctx.get('get_curl', None)
if (C_callback is None):
C = chop(Interpolator(curl(TestFunction(NC1)), V).callable().handle)
else:
C = C_callback(NC1, V)
pc = PETSc.PC().create(comm=obj.comm)
pc.incrementTabLevel(1, parent=obj)
pc.setOptionsPrefix((prefix + 'hypre_ads_'))
pc.setOperators(A, P)
pc.setType('hypre')
pc.setHYPREType('ads')
pc.setHYPREDiscreteGradient(G)
pc.setHYPREDiscreteCurl(C)
W = VectorFunctionSpace(mesh, 'Lagrange', 1)
linear_coordinates = interpolate(SpatialCoordinate(mesh), W).dat.data_ro.copy()
pc.setCoordinates(linear_coordinates)
pc.setFromOptions()
self.pc = pc
def apply(self, pc, x, y):
self.pc.apply(x, y)
def applyTranspose(self, pc, x, y):
self.pc.applyTranspose(x, y)
def view(self, pc, viewer=None):
super(HypreADS, self).view(pc, viewer)
if hasattr(self, 'pc'):
viewer.printfASCII('PC to apply inverse\n')
self.pc.view(viewer)
def update(self, pc):
self.pc.setUp()

def generate_ancestry_path(full_name):
ancestry_path = ''
full_name_items = full_name.split('/')
for i in range(len(full_name_items) - 1):
if (full_name_items[i] in SUPPORTED_ANCESTORS):
ancestry_path += full_name_items[i] + '/' + full_name_items[i + 1] + '/'
return ancestry_path

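# Usage sketch for generate_ancestry_path, assuming SUPPORTED_ANCESTORS contains
# 'organizations' and 'folders' (hypothetical values for illustration):
#   generate_ancestry_path('organizations/123/folders/456/projects/789')
#   -> 'organizations/123/folders/456/'
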
class RLAB(Lab):
BASE = 'xyz-d65'
NAME = 'rlab'
SERIALIZE = ('--rlab',)
WHITE = WHITES['2deg']['D65']
CHANNELS = (Channel('l', 0.0, 100.0), Channel('a', (- 125.0), 125.0, flags=FLG_MIRROR_PERCENT), Channel('b', (- 125.0), 125.0, flags=FLG_MIRROR_PERCENT))
def to_base(self, coords: Vector) -> Vector:
return rlab_to_xyz(coords)
def from_base(self, coords: Vector) -> Vector:
return xyz_to_rlab(coords)

class OptionPlotoptionsVectorSonificationContexttracksMappingTremoloDepth(Options):
@property
def mapFunction(self):
return self._config_get(None)
@mapFunction.setter
def mapFunction(self, value: Any):
self._config(value, js_type=False)
@property
def mapTo(self):
return self._config_get(None)
@mapTo.setter
def mapTo(self, text: str):
self._config(text, js_type=False)
@property
def max(self):
return self._config_get(None)
@max.setter
def max(self, num: float):
self._config(num, js_type=False)
@property
def min(self):
return self._config_get(None)
@min.setter
def min(self, num: float):
self._config(num, js_type=False)
@property
def within(self):
return self._config_get(None)
@within.setter
def within(self, value: Any):
self._config(value, js_type=False)

def extractBunnylovereaderCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False

@pytest.mark.django_db
def test_recipient_endpoint(client, award_spending_data):
resp = client.get('/api/v2/award_spending/recipient/?fiscal_year=2017&awarding_agency_id=111')
assert (resp.status_code == status.HTTP_200_OK)
assert (len(resp.data['results']) == 3)
assert (resp.data['results'][0]['award_category'] == 'contracts')
resp = client.get('/api/v2/award_spending/recipient/?award_category=contracts&fiscal_year=2017&awarding_agency_id=111')
assert (resp.status_code == status.HTTP_200_OK)
assert (len(resp.data['results']) == 1)
assert (resp.data['results'][0]['award_category'] == 'contracts')
assert (float(resp.data['results'][0]['obligated_amount']) == 40)
resp = client.get('/api/v2/award_spending/recipient/')
assert (resp.status_code == status.HTTP_400_BAD_REQUEST)

def import_local_lib(lib):
path_script = os.path.dirname(__file__)
path_ext = os.path.join(path_script, '../../Externals')
if ((sys.platform == 'win32') and (platform.architecture()[0] == '64bit') and (sys.version[:3] == '3.7')):
sys.path.append(os.path.join(path_ext, 'gams_api/win64/'))
else:
logging.error('Pre-compiled GAMS libraries are only available for python 3.7 64 bits under windows. You are using platform ' + sys.platform + ' and architecture ' + platform.architecture()[0] + '. Please install the gams API using: "pip install gamsxcc gdxcc optcc"')
sys.exit(1)
if (lib == 'gams'):
try:
import gams
return True
except ImportError:
logging.error('Could not load the gams high-level api. The gams library is required to run the GAMS versions of DispaSET. Please install the gams API using: "python setup.py install" in the gams api folder')
sys.exit(1)
elif (lib == 'lowlevel'):
try:
import gdxcc, gamsxcc, optcc
return True
except ImportError:
logging.error('Could not load the gams low-level api. The gams library is required to run the GAMS versions of DispaSET. Please install the gams API using: "pip install gamsxcc gdxcc optcc"')
sys.exit(1)
elif (lib == 'gdxcc'):
try:
import gdxcc
return True
except ImportError:
logging.critical('gdxcc module could not be imported from Externals. GDX cannot be produced or read. Please install the gams API using: "pip install gamsxcc gdxcc optcc"')
sys.exit(1)
else:
logging.error('Unknown library "%s". Only "gams", "lowlevel" and "gdxcc" are supported' % lib)

def _check_staged_request(reqid: RequestID, reqfs: _common.JobFS) -> None:
try:
status = Result.read_status(str(reqfs.result.metadata))
except _utils.MissingMetadataError:
raise StagedRequestMissingStatusError(reqid, None)
except _utils.InvalidMetadataError:
raise StagedRequestInvalidMetadataError(reqid, None)
else:
if ((status in Result.ACTIVE) and (status != 'pending')):
if (not _utils.PIDFile(str(reqfs.pidfile)).read(orphaned='ignore')):
raise StagedRequestNotRunningError(reqid, status)
elif (status in Result.FINISHED):
raise StagedRequestAlreadyFinishedError(reqid, status)
else:
raise StagedRequestUnexpectedStatusError(reqid, status)

def extractTitle(inStr):
p = TitleParser(inStr)
vol = p.getVolume()
chp = p.getChapter()
frag = p.getFragment()
post = p.getPostfix()
if (chp and (not frag)) or (chp and (float(int(float(chp))) != float(chp)) and ((frag == 0) or (frag is None))):
chp = float(chp)
frag = int(chp * 100) % 100
chp = int(chp)
return (vol, chp, frag, post)

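# Worked example (a sketch): for a title parsed to chapter 12.5, the fractional
# part becomes the fragment: chp = 12.5 -> frag = int(12.5 * 100) % 100 = 50, chp = 12.
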
@register_latency_scorer('RTF')
class RTFScorer(LatencyScorer):
def compute(self, ins: Instance):
(delays, source_length, _) = self.get_delays_lengths(ins)
if isinstance(ins, SpeechOutputInstance):
delays = [(start + duration) for (start, duration) in ins.intervals]
return delays[-1] / source_length

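# RTF (real-time factor) here is the final emission delay divided by the source
# length; values below 1 mean output keeps pace with the input stream.
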
def test_queued_call(app):
app.handle_caller(caller2)
app.handle_caller(caller3)
global ignore_call_called
global answer_call_called
ignore_call_called = False
answer_call_called = False
thread = threading.Thread(target=app.run)
thread.start()
time.sleep(15)
assert ignore_call_called
assert answer_call_called
app._stop_event.set()

class BaseOperatorMeta(ABCMeta):
def _apply_defaults(cls, func: F) -> F:
sig_cache = signature(func)
@functools.wraps(func)
def apply_defaults(self: 'BaseOperator', *args: Any, **kwargs: Any) -> Any:
dag: Optional[DAG] = (kwargs.get('dag') or DAGVar.get_current_dag())
task_id: Optional[str] = kwargs.get('task_id')
system_app: Optional[SystemApp] = (kwargs.get('system_app') or DAGVar.get_current_system_app())
executor = (kwargs.get('executor') or DAGVar.get_executor())
if (not executor):
if system_app:
executor = system_app.get_component(ComponentType.EXECUTOR_DEFAULT, ExecutorFactory).create()
else:
executor = DefaultExecutorFactory().create()
DAGVar.set_executor(executor)
if ((not task_id) and dag):
task_id = dag._new_node_id()
runner: Optional[WorkflowRunner] = (kwargs.get('runner') or default_runner)
if (not kwargs.get('dag')):
kwargs['dag'] = dag
if (not kwargs.get('task_id')):
kwargs['task_id'] = task_id
if (not kwargs.get('runner')):
kwargs['runner'] = runner
if (not kwargs.get('system_app')):
kwargs['system_app'] = system_app
if (not kwargs.get('executor')):
kwargs['executor'] = executor
real_obj = func(self, *args, **kwargs)
return real_obj
return cast(F, apply_defaults)
def __new__(cls, name, bases, namespace, **kwargs):
new_cls = super().__new__(cls, name, bases, namespace, **kwargs)
new_cls.__init__ = cls._apply_defaults(new_cls.__init__)
return new_cls

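# Effect sketch: any class built with BaseOperatorMeta has its __init__ wrapped,
# so dag / task_id / executor / runner / system_app fall back to the ambient
# DAGVar context when a caller omits them (all names come from this module).
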
class USlotEdge(Edge):
def __call__(self, length, bedBolts=None, bedBoltSettings=None, **kw):
l = length
o = self.settings
d = ((length * (1 - (o / 100))) / 2)
r = min((3 * self.thickness), ((l - (2 * d)) / 2))
self.edges['f'](d)
self.polyline(0, 90, 0, ((- 90), r), ((l - (2 * d)) - (2 * r)), ((- 90), r), 0, 90)
self.edges['f'](d)
def margin(self) -> float:
return self.edges['f'].margin()

@pytest.mark.parallel
def test_snespatch(mesh, CG1, solver_params):
u = Function(CG1)
v = TestFunction(CG1)
f = Constant(1, domain=mesh)
F = (((inner(grad(u), grad(v)) * dx) - (inner(f, v) * dx)) + (inner(((u ** 3) - u), v) * dx))
z = zero(CG1.ufl_element().value_shape)
bcs = DirichletBC(CG1, z, 'on_boundary')
nvproblem = NonlinearVariationalProblem(F, u, bcs=bcs)
solver = NonlinearVariationalSolver(nvproblem, solver_parameters=solver_params)
solver.solve()
assert (solver.snes.reason > 0)

def train(model, train_dataloader, eval_dataloader, tokenizer, optimizer, lr_scheduler, gradient_accumulation_steps, train_config, fsdp_config=None, local_rank=None, rank=None):
if (train_config.use_fp16 and train_config.enable_fsdp):
scaler = ShardedGradScaler()
elif (train_config.use_fp16 and (not train_config.enable_fsdp)):
scaler = torch.cuda.amp.GradScaler()
if train_config.enable_fsdp:
world_size = int(os.environ['WORLD_SIZE'])
autocast = (torch.cuda.amp.autocast if train_config.use_fp16 else nullcontext)
train_prep = []
train_loss = []
val_prep = []
val_loss = []
if train_config.save_metrics:
metrics_filename = f"{train_config.output_dir}/metrics_data_{local_rank}-{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.json"
train_step_perplexity = []
train_step_loss = []
val_step_loss = []
val_step_perplexity = []
epoch_times = []
checkpoint_times = []
results = {}
best_val_loss = float('inf')
for epoch in range(train_config.num_epochs):
epoch_start_time = time.perf_counter()
with MemoryTrace() as memtrace:
model.train()
total_loss = 0.0
total_length = (len(train_dataloader) // gradient_accumulation_steps)
pbar = tqdm(colour='blue', desc=f'Training Epoch: {(epoch + 1)}', total=total_length, dynamic_ncols=True)
for (step, batch) in enumerate(train_dataloader):
for key in batch.keys():
if train_config.enable_fsdp:
batch[key] = batch[key].to(local_rank)
else:
batch[key] = batch[key].to('cuda:0')
with autocast():
loss = model(**batch).loss
loss = (loss / gradient_accumulation_steps)
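# Dividing by gradient_accumulation_steps keeps the accumulated gradient equal
# to the average loss gradient over the micro-batches.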
if train_config.save_metrics:
train_step_loss.append(loss.detach().float().item())
train_step_perplexity.append(float(torch.exp(loss.detach().float())))
total_loss += loss.detach().float()
if train_config.use_fp16:
scaler.scale(loss).backward()
if ((((step + 1) % gradient_accumulation_steps) == 0) or (step == (len(train_dataloader) - 1))):
if (train_config.gradient_clipping and (train_config.gradient_clipping_threshold > 0.0)):
scaler.unscale_(optimizer)
if train_config.enable_fsdp:
model.clip_grad_norm_(train_config.gradient_clipping_threshold)
else:
torch.nn.utils.clip_grad_norm_(model.parameters(), train_config.gradient_clipping_threshold)
scaler.step(optimizer)
scaler.update()
optimizer.zero_grad()
pbar.update(1)
else:
loss.backward()
if ((((step + 1) % gradient_accumulation_steps) == 0) or (step == (len(train_dataloader) - 1))):
if (train_config.gradient_clipping and (train_config.gradient_clipping_threshold > 0.0)):
if train_config.enable_fsdp:
model.clip_grad_norm_(train_config.gradient_clipping_threshold)
else:
torch.nn.utils.clip_grad_norm_(model.parameters(), train_config.gradient_clipping_threshold)
optimizer.step()
optimizer.zero_grad()
pbar.update(1)
pbar.set_description(f'Training Epoch: {(epoch + 1)}/{train_config.num_epochs}, step {step}/{len(train_dataloader)} completed (loss: {loss.detach().float()})')
if train_config.save_metrics:
save_to_json(metrics_filename, train_step_loss, train_loss, train_step_perplexity, train_prep, val_step_loss, val_loss, val_step_perplexity, val_prep)
pbar.close()
epoch_end_time = (time.perf_counter() - epoch_start_time)
epoch_times.append(epoch_end_time)
if ((torch.cuda.device_count() > 1) and train_config.enable_fsdp):
dist.all_reduce(total_loss, op=dist.ReduceOp.SUM)
train_epoch_loss = (total_loss / len(train_dataloader))
if train_config.enable_fsdp:
train_epoch_loss = (train_epoch_loss / world_size)
train_perplexity = torch.exp(train_epoch_loss)
train_prep.append(float(train_perplexity))
train_loss.append(float(train_epoch_loss))
if train_config.enable_fsdp:
if (rank == 0):
print(f'Max CUDA memory allocated was {memtrace.peak} GB')
print(f'Max CUDA memory reserved was {memtrace.max_reserved} GB')
print(f'Peak active CUDA memory was {memtrace.peak_active_gb} GB')
print(f'Cuda Malloc retires : {memtrace.cuda_malloc_retires}')
print(f'CPU Total Peak Memory consumed during the train (max): {(memtrace.cpu_peaked + memtrace.cpu_begin)} GB')
else:
print(f'Max CUDA memory allocated was {memtrace.peak} GB')
print(f'Max CUDA memory reserved was {memtrace.max_reserved} GB')
print(f'Peak active CUDA memory was {memtrace.peak_active_gb} GB')
print(f'Cuda Malloc retires : {memtrace.cuda_malloc_retires}')
print(f'CPU Total Peak Memory consumed during the train (max): {(memtrace.cpu_peaked + memtrace.cpu_begin)} GB')
lr_scheduler.step()
if train_config.run_validation:
(eval_ppl, eval_epoch_loss, temp_val_loss, temp_step_perplexity) = evaluation(model, train_config, eval_dataloader, local_rank, tokenizer)
if train_config.save_metrics:
val_step_loss.extend(temp_val_loss)
val_step_perplexity.extend(temp_step_perplexity)
checkpoint_start_time = time.perf_counter()
if (train_config.save_model and (eval_epoch_loss < best_val_loss)):
if train_config.enable_fsdp:
dist.barrier()
if train_config.use_peft:
if train_config.enable_fsdp:
if (rank == 0):
print(f'we are about to save the PEFT modules')
else:
print(f'we are about to save the PEFT modules')
model.save_pretrained(train_config.output_dir)
if train_config.enable_fsdp:
if (rank == 0):
print(f'PEFT modules are saved in {train_config.output_dir} directory')
else:
print(f'PEFT modules are saved in {train_config.output_dir} directory')
else:
if ((not train_config.use_peft) and (fsdp_config.checkpoint_type == StateDictType.FULL_STATE_DICT)):
save_model_checkpoint(model, optimizer, rank, train_config, epoch=epoch)
elif ((not train_config.use_peft) and (fsdp_config.checkpoint_type == StateDictType.SHARDED_STATE_DICT)):
print(' Saving the FSDP model checkpoints using SHARDED_STATE_DICT')
print('')
save_model_and_optimizer_sharded(model, rank, train_config)
if train_config.save_optimizer:
save_model_and_optimizer_sharded(model, rank, train_config, optim=optimizer)
print(' Saving the FSDP model checkpoints and optimizer using SHARDED_STATE_DICT')
print('')
if ((not train_config.use_peft) and train_config.save_optimizer):
save_optimizer_checkpoint(model, optimizer, rank, train_config, epoch=epoch)
print(' Saving the FSDP model checkpoints and optimizer using FULL_STATE_DICT')
print('')
if train_config.enable_fsdp:
dist.barrier()
checkpoint_end_time = (time.perf_counter() - checkpoint_start_time)
checkpoint_times.append(checkpoint_end_time)
if (eval_epoch_loss < best_val_loss):
best_val_loss = eval_epoch_loss
if train_config.enable_fsdp:
if (rank == 0):
print(f'best eval loss on epoch {(epoch + 1)} is {best_val_loss}')
else:
print(f'best eval loss on epoch {(epoch + 1)} is {best_val_loss}')
val_loss.append(float(best_val_loss))
val_prep.append(float(eval_ppl))
if train_config.enable_fsdp:
if (rank == 0):
print(f'Epoch {(epoch + 1)}: train_perplexity={train_perplexity:.4f}, train_epoch_loss={train_epoch_loss:.4f}, epoch time {epoch_end_time}s')
else:
print(f'Epoch {(epoch + 1)}: train_perplexity={train_perplexity:.4f}, train_epoch_loss={train_epoch_loss:.4f}, epoch time {epoch_end_time}s')
if train_config.save_metrics:
save_to_json(metrics_filename, train_step_loss, train_loss, train_step_perplexity, train_prep, val_step_loss, val_loss, val_step_perplexity, val_prep)
avg_epoch_time = (sum(epoch_times) / len(epoch_times))
avg_checkpoint_time = ((sum(checkpoint_times) / len(checkpoint_times)) if (len(checkpoint_times) > 0) else 0)
avg_train_prep = (sum(train_prep) / len(train_prep))
avg_train_loss = (sum(train_loss) / len(train_loss))
if train_config.run_validation:
avg_eval_prep = (sum(val_prep) / len(val_prep))
avg_eval_loss = (sum(val_loss) / len(val_loss))
results['avg_train_prep'] = avg_train_prep
results['avg_train_loss'] = avg_train_loss
if train_config.run_validation:
results['avg_eval_prep'] = avg_eval_prep
results['avg_eval_loss'] = avg_eval_loss
results['avg_epoch_time'] = avg_epoch_time
results['avg_checkpoint_time'] = avg_checkpoint_time
if train_config.save_metrics:
results['metrics_filename'] = metrics_filename
if (train_config.enable_fsdp and (not train_config.use_peft)):
save_train_params(train_config, fsdp_config, rank)
return results

def upgrade():
op.create_table('faq',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('question', sa.String(), nullable=False),
sa.Column('answer', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['event_id'], ['events.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'))

def create_video(input_dir, pattern, output_filepath):
trans_to_white = "format=yuva444p, geq= 'if(lte(alpha(X,Y),16),255,p(X,Y))': 'if(lte(alpha(X,Y),16),128,p(X,Y))': 'if(lte(alpha(X,Y),16),128,p(X,Y))'"
import glob
for fi in glob.glob(f'{input_dir}/output_*.png'):
subprocess.run(['convert', '-flatten', fi, fi])
subprocess.run(['ffmpeg', '-i', f'{input_dir}/{pattern}', '-vcodec', 'libx264', '-vf', 'format=yuv420p', '-y', output_filepath])

@pytest.mark.skipif(os.environ.get('TEST_SCOPE') == 'CICD-OPENSOURCE', reason="Don't run on opensource cicd workflow")
class TestLoadKey():
@pytest.mark.parametrize('provider', sorted(global_providers()))
def test_load_key_of_valid_provider(self, provider: str):
if (provider == 'faker'):
pytest.skip('unsupported provider')
data = load_key(provider, False)
assert (isinstance(data, dict) or isinstance(data, list)), f'No settings.json file found for {provider}'

class CO2eqParametersDirect(BaseClasses.CO2eqParametersDirectAndLifecycleBase):
parameters = CO2EQ_PARAMETERS_DIRECT
ranges_by_mode: dict[str, tuple[int | float, int | float]] = {'coal': (500, 1600), 'gas': (200, 700), 'oil': (300, 1400), 'geothermal': (0, 199), 'hydro': (0, 0), 'nuclear': (0, 0), 'solar': (0, 0), 'wind': (0, 0), 'biomass': (0, 0), 'hydro charge': (0, 0), 'battery charge': (0, 0), 'battery discharge': (0, 1000), 'hydro discharge': (0, 1000), 'unknown': (0, 1000)}

class SequenceSlicesTestCase(unittest.TestCase):
def test_single(self):
actual = common.sequence_slices(':10')
self.assertEqual([slice(None, 10)], actual)
def test_multiple(self):
actual = common.sequence_slices('1:10,3:20')
self.assertEqual([slice(0, 10), slice(2, 20)], actual)

def _fetch_file(url):
info(('Downloading %s' % url))
for tries in range(4):
try:
return urlopen(url, timeout=5.0).read()
except Exception as e:
warning('Error while fetching file: %s' % str(e))
raise IOError('Unable to download %r. Perhaps there is no internet connection? If there is, please report this problem.' % url)

class UserMessage(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='user', verbose_name='')
to_user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='to_user', verbose_name='', blank=True, null=True)
message = models.TextField(verbose_name='')
ids = models.UUIDField(blank=True, null=True, verbose_name='id')
url = models.CharField(max_length=200, verbose_name='', blank=True, null=True)
is_supper = models.BooleanField(default=False, verbose_name='')
has_read = models.BooleanField(default=False, verbose_name='')
add_time = models.DateTimeField(default=datetime.now, verbose_name='')
def __str__(self):
return self.message
class Meta():
verbose_name = ''
verbose_name_plural = verbose_name
ordering = ('-add_time',)

class OptionSeriesPieDataDatalabelsTextpath(Options):
@property
def attributes(self):
return self._config_get(None)
@attributes.setter
def attributes(self, value: Any):
self._config(value, js_type=False)
@property
def enabled(self):
return self._config_get(False)
@enabled.setter
def enabled(self, flag: bool):
self._config(flag, js_type=False)

@refetch_frequency(timedelta(days=1))
def fetch_exchange(zone_key1: ZoneKey, zone_key2: ZoneKey, session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list[dict]:
sorted_zone_keys = ZoneKey('->'.join(sorted([zone_key1, zone_key2])))
check_valid_parameters(sorted_zone_keys, session, target_datetime)
ses = (session or Session())
data = fetch_and_preprocess_data(EXCHANGE_FUNCTION_MAP[sorted_zone_keys], ses, logger, target_datetime)
exchangeList = ExchangeList(logger)
for event in data:
exchanges = {}
for key in event:
if (key in EXCHANGE_PARSE_MAPPING.keys()):
exchanges[EXCHANGE_PARSE_MAPPING[key]] = event[key]
net_flow: float
if (sorted_zone_keys == 'ES-IB-MA->ES-IB-ME'):
net_flow = ((- 1) * exchanges['ma_me'])
elif (sorted_zone_keys == 'ES-IB-IZ->ES-IB-MA'):
net_flow = exchanges['ma_ib']
elif (sorted_zone_keys == 'ES-IB-FO->ES-IB-IZ'):
net_flow = ((- 1) * exchanges['ib_fo'])
else:
net_flow = exchanges['pe_ma']
exchangeList.append(zoneKey=sorted_zone_keys, datetime=event['ts'], netFlow=net_flow, source='demanda.ree.es')
return exchangeList.to_list()

def incompatibleBrowser(browsers: List[str]):
def decorator(func):
@functools.wraps(func)
def decorated(*args, **kwargs):
print(('Warning: This function - %s - is not compatible with %s' % (func.__name__, ', '.join(browsers))))
return func(*args, **kwargs)
return decorated
return decorator

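# Usage sketch:
#   @incompatibleBrowser(['Safari'])
#   def open_devtools(): ...
#   open_devtools()  # prints the warning, then runs the function
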
class Solution():
def insert(self, intervals: List[List[int]], newInterval: List[int]) -> List[List[int]]:
for (i, interval) in enumerate(intervals):
if (interval[0] >= newInterval[0]):
intervals.insert(i, newInterval)
break
else:
intervals.append(newInterval)
ret = []
for interval in intervals:
if ((not ret) or (ret[(- 1)][1] < interval[0])):
ret.append(interval)
else:
ret[(- 1)] = [ret[(- 1)][0], max(ret[(- 1)][1], interval[1])]
return ret

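# Worked example: Solution().insert([[1, 3], [6, 9]], [2, 5]) returns
# [[1, 5], [6, 9]] -- [2, 5] is placed before [6, 9], then merged with [1, 3].
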
@pytest.fixture(scope='function')
def system_provide_service(db: Session) -> System:
system_provide_service = System.create(db=db, data={'fides_key': f'system_key-f{uuid4()}', 'name': f'system-{uuid4()}', 'description': 'fixture-made-system', 'organization_fides_key': 'default_organization', 'system_type': 'Service'})
PrivacyDeclaration.create(db=db, data={'name': 'The source service, system, or product being provided to the user', 'system_id': system_provide_service.id, 'data_categories': ['user.device.cookie_id'], 'data_use': 'essential.service', 'data_subjects': ['customer'], 'dataset_references': None, 'egress': None, 'ingress': None})
db.refresh(system_provide_service)
return system_provide_service

class DnDWrapper():
_subst_format_dnd = ('%A', '%a', '%b', '%C', '%c', '{%CST}', '{%CTT}', '%D', '%e', '{%L}', '{%m}', '{%ST}', '%T', '{%t}', '{%TT}', '%W', '%X', '%Y')
_subst_format_str_dnd = ' '.join(_subst_format_dnd)
tkinter.BaseWidget._subst_format_dnd = _subst_format_dnd
tkinter.BaseWidget._subst_format_str_dnd = _subst_format_str_dnd
def _substitute_dnd(self, *args):
if (len(args) != len(self._subst_format_dnd)):
return args
def getint_event(s):
try:
return int(s)
except ValueError:
return s
def splitlist_event(s):
try:
return self.tk.splitlist(s)
except ValueError:
return s
(A, a, b, C, c, CST, CTT, D, e, L, m, ST, T, t, TT, W, X, Y) = args
ev = DnDEvent()
ev.action = A
ev.actions = splitlist_event(a)
ev.button = getint_event(b)
ev.code = C
ev.codes = splitlist_event(c)
ev.commonsourcetypes = splitlist_event(CST)
ev.commontargettypes = splitlist_event(CTT)
ev.data = D
ev.name = e
ev.types = splitlist_event(L)
ev.modifiers = splitlist_event(m)
ev.supportedsourcetypes = splitlist_event(ST)
ev.sourcetypes = splitlist_event(t)
ev.type = T
ev.supportedtargettypes = splitlist_event(TT)
try:
ev.widget = self.nametowidget(W)
except KeyError:
ev.widget = W
ev.x_root = getint_event(X)
ev.y_root = getint_event(Y)
return (ev,)
tkinter.BaseWidget._substitute_dnd = _substitute_dnd
def _dnd_bind(self, what, sequence, func, add, needcleanup=True):
if isinstance(func, str):
self.tk.call((what + (sequence, func)))
elif func:
funcid = self._register(func, self._substitute_dnd, needcleanup)
cmd = ('%s%s %s' % (((add and '+') or ''), funcid, self._subst_format_str_dnd))
self.tk.call((what + (sequence, cmd)))
return funcid
elif sequence:
return self.tk.call((what + (sequence,)))
else:
return self.tk.splitlist(self.tk.call(what))
tkinter.BaseWidget._dnd_bind = _dnd_bind
def dnd_bind(self, sequence=None, func=None, add=None):
return self._dnd_bind(('bind', self._w), sequence, func, add)
tkinter.BaseWidget.dnd_bind = dnd_bind
def drag_source_register(self, button=None, *dndtypes):
if (button is None):
button = 1
else:
try:
button = int(button)
except ValueError:
dndtypes = ((button,) + dndtypes)
button = 1
self.tk.call('tkdnd::drag_source', 'register', self._w, dndtypes, button)
tkinter.BaseWidget.drag_source_register = drag_source_register
def drag_source_unregister(self):
self.tk.call('tkdnd::drag_source', 'unregister', self._w)
tkinter.BaseWidget.drag_source_unregister = drag_source_unregister
def drop_target_register(self, *dndtypes):
self.tk.call('tkdnd::drop_target', 'register', self._w, dndtypes)
tkinter.BaseWidget.drop_target_register = drop_target_register
def drop_target_unregister(self):
self.tk.call('tkdnd::drop_target', 'unregister', self._w)
tkinter.BaseWidget.drop_target_unregister = drop_target_unregister
def platform_independent_types(self, *dndtypes):
return self.tk.split(self.tk.call('tkdnd::platform_independent_types', dndtypes))
tkinter.BaseWidget.platform_independent_types = platform_independent_types
def platform_specific_types(self, *dndtypes):
return self.tk.split(self.tk.call('tkdnd::platform_specific_types', dndtypes))
tkinter.BaseWidget.platform_specific_types = platform_specific_types
def get_dropfile_tempdir(self):
return self.tk.call('tkdnd::GetDropFileTempDirectory')
tkinter.BaseWidget.get_dropfile_tempdir = get_dropfile_tempdir
def set_dropfile_tempdir(self, tempdir):
self.tk.call('tkdnd::SetDropFileTempDirectory', tempdir)
tkinter.BaseWidget.set_dropfile_tempdir = set_dropfile_tempdir

def extractAsterintheskyWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False

class OperAccumulations(Source):
def __init__(self, *args, **kwargs):
request = {}
for a in args:
request.update(self.requests(**a))
request.update(self.requests(**kwargs))
param = request['param']
if (not isinstance(param, (list, tuple))):
param = [param]
for p in param:
assert (p in ['cp', 'lsp', 'tp']), p
user_dates = request['date']
user_times = request['time']
requests = {'oper': {'dates': set(), 'times': set()}, 'scda': {'dates': set(), 'times': set()}}
user_step = 6
requested = set()
for (user_date, user_time) in itertools.product(user_dates, user_times):
assert isinstance(user_date, datetime.datetime), (type(user_date), user_dates, user_times)
assert isinstance(user_time, int), (type(user_time), user_dates, user_times)
assert (user_time in [0, 6, 12, 18]), user_time
when = (user_date + datetime.timedelta(hours=user_time))
requested.add(when)
when -= datetime.timedelta(hours=user_step)
date = datetime.datetime(when.year, when.month, when.day)
time = when.hour
stream = {0: 'oper', 6: 'scda', 12: 'oper', 18: 'scda'}[time]
requests[stream]['dates'].add(date)
requests[stream]['times'].add(time)
print(requests)
dataset = dict(oper=cml.load_source('empty'), scda=cml.load_source('empty'))
for stream in ['oper', 'scda']:
dates = sorted(requests[stream]['dates'])
times = sorted(requests[stream]['times'])
if ((not dates) and (not times)):
continue
assert dates, (stream, dates, times)
oper_request = dict(**request)
oper_request.update({'class': 'od', 'type': 'fc', 'levtype': 'sfc', 'stream': stream, 'date': [d.strftime('%Y-%m-%d') for d in dates], 'time': sorted(times), 'step': user_step})
ds = cml.load_source('mars', **oper_request)
index = [(d.valid_datetime() in requested) for d in ds]
dataset[stream] = ds[index]
self.ds = (dataset['oper'] + dataset['scda'])
def mutate(self):
return self.ds
@normalize('date', 'date-list(datetime.datetime)')
@normalize('time', 'int-list')
@normalize('area', 'bounding-box(list)')
def requests(self, **kwargs):
result = dict(**kwargs)
return result

class Batches():
batches: List[BatchObject]
def __iter__(self):
return iter(self.batches)
def __getitem__(self, i):
return self.batches[i]
def add(self, batch: BatchObject):
if (not isinstance(batch, BatchObject)):
raise ValueError('Batch must be BatchObject')
self.batches += [batch]
def set(self, batches: List[BatchObject]):
self.batches = batches

def write_rows_to_csv(csv_path, rows, header):
directory_path = Path(csv_path).parent.resolve()
Path(directory_path).mkdir(parents=True, exist_ok=True)
with open(csv_path, 'w', newline='') as csv_file:
writer = csv.DictWriter(csv_file, fieldnames=header)
writer.writeheader()
writer.writerows(rows)

class EpisodeList():
def __init__(self, eps=()):
self.list = []
self.title_set = set()
self.url_set = set()
for ep in eps:
self.add(ep)
def add(self, ep):
if (ep in self):
return False
self.list.append(ep)
self.url_set.add(ep.url)
self.title_set.add(ep.title)
return True
def __contains__(self, ep):
if (ep.url in self.url_set):
return True
if (ep.title in self.title_set):
return True
return False
def __iter__(self):
return iter(self.list)
def __len__(self):
return len(self.list)
def __reversed__(self):
return reversed(self.list)

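# Usage sketch; Episode here is a hypothetical stand-in for the real episode
# type, which only needs .url and .title attributes:
#   from collections import namedtuple
#   Episode = namedtuple('Episode', 'url title')
#   eps = EpisodeList([Episode('http://a', 'Ep 1')])
#   eps.add(Episode('http://a', 'Ep 1'))  # -> False (duplicate URL)
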
def run(*argv, expect_exit_code=0):
global tmp_dir
try:
prev_dir = os.getcwd()
if (tmp_dir is None):
os.chdir(base_dir)
else:
os.chdir(tmp_dir)
cmd = ' '.join((shlex.quote(x) for x in argv))
print(('$ ' + cmd))
exit_code = os.system(cmd)
if (exit_code != expect_exit_code):
print(('Command exited incorrectly: should have been %d was %d' % (expect_exit_code, exit_code)))
exit((exit_code or 1))
finally:
os.chdir(prev_dir)

class TestVersionFilter(unittest.TestCase):
def test_filter(self):
self.assertEqual(filter_versions(['1.1'], []), ['1.1'])
self.assertEqual(filter_versions(['1.1', '1.2', '1.3'], [('>', '1.0'), ('<=', '1.4')]), ['1.1', '1.2', '1.3'])
self.assertEqual(filter_versions(['1.1', '1.2', '1.3'], [('<', '1.0')]), [])
self.assertEqual(filter_versions(['1.1', '1.2', '1.3'], [('<', '1.0'), ('==', '1.2')]), [])
self.assertEqual(filter_versions(['1.1', '1.2', '1.3'], [('==', '1.2')]), ['1.2'])
self.assertEqual(filter_versions(['1.1', '1.2', '1.3'], [('!=', '1.2')]), ['1.1', '1.3'])
self.assertEqual(filter_versions(['1.a', '1.b', '1.c'], [('>=', '1.b')]), ['1.b', '1.c'])
self.assertEqual(filter_versions(['3.1.0', '2.2.0', '1.12.0', 'start'], [('<', '2.0.0')]), ['1.12.0'])
def test_sort(self):
self.assertEqual(filter_versions(['1.1', '1.2', '1.3'], [], sort=True), ['1.1', '1.2', '1.3'])
self.assertEqual(filter_versions(['1.3', '1.2', '1.1'], [], sort=True), ['1.1', '1.2', '1.3'])
self.assertEqual(filter_versions(['1.c', '1.a', '1.b'], [], sort=True), ['1.a', '1.b', '1.c'])
def test_objects(self):
self.assertEqual(filter_versioned_items([('c', '1.1'), ('a', '1.3'), ('b', '1.2'), ('d', '1.0')], [('!=', FallbackVersion('1.2'))], to_version=(lambda o: FallbackVersion(o[1])), sort=True), [('d', '1.0'), ('c', '1.1'), ('a', '1.3')])

def query(__fct__=None, input_variables=None, is_async=True, calling_frame=None, **extra_args):
fct = __fct__
if (fct is None):
def wrapper(fct):
import inspect
calling_frame = inspect.stack()[1]
return query(fct, input_variables=input_variables, is_async=is_async, calling_frame=calling_frame, **extra_args)
return wrapper
import inspect
if (type(fct) is LMQLQueryFunction):
return fct
if (type(fct) is str):
return query_from_string(fct, input_variables, is_async=is_async, **extra_args)
else:
assert (input_variables is None), 'input_variables must be None when using as a decorator.'
calling_frame = (calling_frame or inspect.stack()[1])
scope = LMQLInputVariableScope(fct, calling_frame)
code = get_decorated_function_code(fct)
temp_lmql_file = tempfile.mktemp(suffix='.lmql')
with open(temp_lmql_file, 'w') as f:
f.write(code)
module = load(temp_lmql_file, output_writer=silent)
is_async = inspect.iscoroutinefunction(fct)
decorated_fct_signature = inspect.signature(fct)
compiled_query_fct_args = inspect.getfullargspec(module.query.fct).args
module.query.function_context = FunctionContext(decorated_fct_signature, compiled_query_fct_args, scope)
module.query.is_async = is_async
module.query.extra_args = extra_args
module.query.name = fct.__name__
@functools.wraps(fct)
def lmql_query_wrapper(*args, **kwargs):
return module.query(*args, **kwargs)
for attr in ['aschain', 'lmql_code', 'is_async', 'output_variables']:
setattr(lmql_query_wrapper, attr, getattr(module.query, attr))
setattr(lmql_query_wrapper, '__lmql_query_function__', module.query)
return lmql_query_wrapper

class SimpleEditor(Editor):
names = Property()
mapping = Property()
inverse_mapping = Property()
scrollable = True
def init(self, parent):
factory = self.factory
if (factory.name != ''):
(self._object, self._name, self._value) = self.parse_extended_name(factory.name)
self.values_changed()
self._object.on_trait_change(self._values_changed, self._name, dispatch='ui')
else:
self._value = (lambda : self.factory.values)
self.values_changed()
factory.on_trait_change(self._values_changed, 'values', dispatch='ui')
self.control = panel = TraitsUIPanel(parent, (- 1))
hsizer = wx.BoxSizer(wx.HORIZONTAL)
vsizer = wx.BoxSizer(wx.VERTICAL)
self._unused = self._create_listbox(panel, hsizer, self._on_unused, self._on_use, factory.left_column_title)
self._use_all = self._unuse_all = self._up = self._down = None
if factory.can_move_all:
self._use_all = self._create_button('>>', panel, vsizer, 15, self._on_use_all)
self._use = self._create_button('>', panel, vsizer, 15, self._on_use)
self._unuse = self._create_button('<', panel, vsizer, 0, self._on_unuse)
if factory.can_move_all:
self._unuse_all = self._create_button('<<', panel, vsizer, 15, self._on_unuse_all)
if factory.ordered:
self._up = self._create_button('Move Up', panel, vsizer, 30, self._on_up)
self._down = self._create_button('Move Down', panel, vsizer, 0, self._on_down)
hsizer.Add(vsizer, 0, (wx.LEFT | wx.RIGHT), 8)
self._used = self._create_listbox(panel, hsizer, self._on_value, self._on_unuse, factory.right_column_title)
panel.SetSizer(hsizer)
self.context_object.on_trait_change(self.update_editor, (self.extended_name + '_items?'), dispatch='ui')
self.set_tooltip()
def _get_names(self):
return self._names
def _get_mapping(self):
return self._mapping
def _get_inverse_mapping(self):
return self._inverse_mapping
def _create_listbox(self, parent, sizer, handler1, handler2, title):
column_sizer = wx.BoxSizer(wx.VERTICAL)
title_widget = wx.StaticText(parent, (- 1), title)
font = title_widget.GetFont()
emphasis_font = wx.Font((font.GetPointSize() + 1), font.GetFamily(), font.GetStyle(), wx.BOLD)
title_widget.SetFont(emphasis_font)
column_sizer.Add(title_widget, 0, 0)
list = wx.ListBox(parent, (- 1), style=(wx.LB_EXTENDED | wx.LB_NEEDED_SB))
column_sizer.Add(list, 1, wx.EXPAND)
sizer.Add(column_sizer, 1, wx.EXPAND)
parent.Bind(wx.EVT_LISTBOX, handler1, id=list.GetId())
parent.Bind(wx.EVT_LISTBOX_DCLICK, handler2, id=list.GetId())
return list
def _create_button(self, label, parent, sizer, space_before, handler):
button = wx.Button(parent, (- 1), label, style=wx.BU_EXACTFIT)
sizer.AddSpacer(space_before)
sizer.Add(button, 0, (wx.EXPAND | wx.BOTTOM), 8)
parent.Bind(wx.EVT_BUTTON, handler, id=button.GetId())
return button
def values_changed(self):
(self._names, self._mapping, self._inverse_mapping) = enum_values_changed(self._value(), self.string_value)
def _values_changed(self):
self.values_changed()
self.update_editor()
def update_editor(self):
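# Rebuild both list boxes from self.value, keeping prior selections when the
# corresponding labels still exist, and pruning values that lost their mapping.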
mapping = self.inverse_mapping.copy()
values = [v for v in self.value if (v in mapping)]
if (len(values) < len(self.value)):
self.value = values
return
used = self._used
used_labels = self._get_selected_strings(used)
unused = self._unused
unused_labels = self._get_selected_strings(unused)
used.Clear()
unused.Clear()
if (not self.factory.ordered):
values = sorted(values[:])
used_selections = []
for (i, value) in enumerate(values):
label = mapping[value]
used.Append(label)
del mapping[value]
if (label in used_labels):
used_selections.append(i)
unused_selections = []
unused_items = sorted(mapping.values())
mapping = self.mapping
self._unused_items = [mapping[ui] for ui in unused_items]
for (i, unused_item) in enumerate(unused_items):
unused.Append(unused_item)
if (unused_item in unused_labels):
unused_selections.append(i)
if ((len(used_selections) == 0) and (len(unused_selections) == 0)):
if (unused.GetCount() == 0):
used_selections.append(0)
else:
unused_selections.append(0)
used_count = used.GetCount()
for i in used_selections:
if (i < used_count):
used.SetSelection(i)
unused_count = unused.GetCount()
for i in unused_selections:
if (i < unused_count):
unused.SetSelection(i)
self._check_up_down()
self._check_left_right()
def dispose(self):
if (self._object is not None):
self._object.on_trait_change(self._values_changed, self._name, remove=True)
else:
self.factory.on_trait_change(self._values_changed, 'values', remove=True)
self.context_object.on_trait_change(self.update_editor, (self.extended_name + '_items?'), remove=True)
super().dispose()
def get_error_control(self):
return [self._unused, self._used]
def _on_value(self, event):
if (not self.factory.ordered):
self._clear_selection(self._unused)
self._check_left_right()
self._check_up_down()
def _on_unused(self, event):
if (not self.factory.ordered):
self._clear_selection(self._used)
self._check_left_right()
self._check_up_down()
def _on_use(self, event):
(self._unused_items, self.value) = self._transfer_items(self._unused, self._used, self._unused_items, self.value)
def _on_unuse(self, event):
(self.value, self._unused_items) = self._transfer_items(self._used, self._unused, self.value, self._unused_items)
def _on_use_all(self, event):
(self._unused_items, self.value) = self._transfer_all(self._unused, self._used, self._unused_items, self.value)
def _on_unuse_all(self, event):
(self.value, self._unused_items) = self._transfer_all(self._used, self._unused, self.value, self._unused_items)
def _on_up(self, event):
self._move_item((- 1))
def _on_down(self, event):
self._move_item(1)
def _clear_selection(self, box):
for i in box.GetSelections():
box.Deselect(i)
def _transfer_all(self, list_from, list_to, values_from, values_to):
values_from = values_from[:]
values_to = values_to[:]
self._clear_selection(list_from)
while (list_from.GetCount() > 0):
index_to = list_to.GetCount()
list_from.SetSelection(0)
list_to.InsertItems(self._get_selected_strings(list_from), index_to)
list_from.Delete(0)
values_to.append(values_from[0])
del values_from[0]
list_to.SetSelection(0)
self._check_left_right()
self._check_up_down()
return (values_from, values_to)
def _transfer_items(self, list_from, list_to, values_from, values_to):
values_from = values_from[:]
values_to = values_to[:]
indices_from = list_from.GetSelections()
index_from = max(self._get_first_selection(list_from), 0)
index_to = max(self._get_first_selection(list_to), 0)
self._clear_selection(list_to)
selected_list = self._get_selected_strings(list_from)
selected_list.reverse()
list_to.InsertItems(selected_list, index_to)
for i in range((len(indices_from) - 1), (- 1), (- 1)):
list_from.Delete(indices_from[i])
for item_label in selected_list:
val_index_from = values_from.index(self.mapping[item_label])
values_to.insert(index_to, values_from[val_index_from])
del values_from[val_index_from]
if self.factory.ordered:
list_to.SetSelection(list_to.FindString(item_label))
count = list_from.GetCount()
if (count > 0):
if (index_from >= count):
index_from -= 1
list_from.SetSelection(index_from)
self._check_left_right()
self._check_up_down()
return (values_from, values_to)
def _move_item(self, direction):
listbox = self._used
index_from = self._get_first_selection(listbox)
index_to = (index_from + direction)
label = listbox.GetString(index_from)
listbox.Deselect(index_from)
listbox.Delete(index_from)
listbox.Insert(label, index_to)
listbox.SetSelection(index_to)
self._check_up_down()
value = self.value
if (direction < 0):
index = index_to
values = [value[index_from], value[index_to]]
else:
index = index_from
values = [value[index_to], value[index_from]]
self.value = ((value[:index] + values) + value[(index + 2):])
def _check_up_down(self):
if self.factory.ordered:
index_selected = self._used.GetSelections()
self._up.Enable(((len(index_selected) == 1) and (index_selected[0] > 0)))
self._down.Enable(((len(index_selected) == 1) and (index_selected[0] < (self._used.GetCount() - 1))))
def _check_left_right(self):
self._use.Enable(((self._unused.GetCount() > 0) and (self._get_first_selection(self._unused) >= 0)))
self._unuse.Enable(((self._used.GetCount() > 0) and (self._get_first_selection(self._used) >= 0)))
if self.factory.can_move_all:
self._use_all.Enable(((self._unused.GetCount() > 0) and (self._get_first_selection(self._unused) >= 0)))
self._unuse_all.Enable(((self._used.GetCount() > 0) and (self._get_first_selection(self._used) >= 0)))
def _get_selected_strings(self, listbox):
stringlist = []
for label_index in listbox.GetSelections():
stringlist.append(listbox.GetString(label_index))
return stringlist
def _get_first_selection(self, listbox):
select_list = listbox.GetSelections()
if (len(select_list) == 0):
return (- 1)
return select_list[0]

@pytest.mark.parametrize('test_input, include, exclude, expected', [([''], {}, {}, set([''])), ([''], {'status': ['suspended']}, {}, set()), (['', ''], {}, {}, set(['', ''])), (['', ''], {}, {'status': ['suspended']}, set([''])), (['', ''], {'env': ['dev']}, {}, set([''])), ([], {}, {}, set(['', '', ''])), ([], {'env': ['prod', 'nonprod']}, {}, set(['', ''])), ([], {'status': ['active', 'no_such_value']}, {}, set(['', ''])), ([], {'status': ['active']}, {'env': ['nonprod', 'dev']}, set([''])), ([], {'status': ['active']}, {'env': ['nonprod', 'dev', 'prod']}, set()), ([], {'status': ['active', 'suspended']}, {'status': ['active', 'no_such_value']}, set(['']))])
def test_meta_account_loader_accounts(many_acct_list, test_input, include, exclude, expected):
mal = acctload.MetaAccountLoader(many_acct_list)
accts = mal.accounts(acct_ids=test_input, include=include, exclude=exclude)
acct_ids = {a.id for a in accts}
assert (acct_ids == expected)

class OptionSeriesLineSonificationContexttracksPointgrouping(Options):
@property
def algorithm(self):
return self._config_get('minmax')
@algorithm.setter
def algorithm(self, text: str):
self._config(text, js_type=False)
@property
def enabled(self):
return self._config_get(True)
@enabled.setter
def enabled(self, flag: bool):
self._config(flag, js_type=False)
@property
def groupTimespan(self):
return self._config_get(15)
@groupTimespan.setter
def groupTimespan(self, num: float):
self._config(num, js_type=False)
@property
def prop(self):
return self._config_get('y')
@prop.setter
def prop(self, text: str):
self._config(text, js_type=False)

def d2q_b(m0, m1, m2, n0, n1, n2):
x0 = (m0 - n0)
x1 = (x0 ** 2)
x2 = (m1 - n1)
x3 = (x2 ** 2)
x4 = (m2 - n2)
x5 = (x4 ** 2)
x6 = ((x1 + x3) + x5)
x7 = (1 / math.sqrt(x6))
x8 = (x6 ** ((- 3) / 2))
x9 = (- x0)
x10 = (x8 * x9)
x11 = ((x0 * x10) + x7)
x12 = (x10 * x2)
x13 = (x10 * x4)
x14 = (- x7)
x15 = (- x2)
x16 = (x10 * x15)
x17 = (- x4)
x18 = (x10 * x17)
x19 = (x15 * x8)
x20 = (x0 * x19)
x21 = ((x19 * x2) + x7)
x22 = (x19 * x4)
x23 = (x17 * x19)
x24 = (x17 * x8)
x25 = (x0 * x24)
x26 = (x2 * x24)
x27 = ((x24 * x4) + x7)
x28 = (x0 * x8)
x29 = (x2 * x28)
x30 = (x28 * x4)
x31 = ((x2 * x4) * x8)
return np.array([x11, x12, x13, (x14 + (x8 * (x9 ** 2))), x16, x18, x20, x21, x22, x16, (x14 + ((x15 ** 2) * x8)), x23, x25, x26, x27, x18, x23, (x14 + ((x17 ** 2) * x8)), ((x1 * x8) + x14), x29, x30, x11, x20, x25, x29, (x14 + (x3 * x8)), x31, x12, x21, x26, x30, x31, (x14 + (x5 * x8)), x13, x22, x27], dtype=np.float64)

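# A hedged reading of this machine-generated code: the 36 entries appear to pack
# the 6x6 matrix of second derivatives of the distance |m - n| with respect to
# (m0, m1, m2, n0, n1, n2); the x* temporaries are shared subexpressions.
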
def test_compute_background_functional_truncate_zeros():
outfile = NamedTemporaryFile(suffix='.txt', delete=False)
outfile.close()
args = '--matrices {} {} --referencePoints {} -o {} -t {} --truncateZeros'.format((ROOT + 'FL-E13-5_chr1.cool'), (ROOT + 'MB-E10-5_chr1.cool'), (ROOT + 'referencePoints.bed'), outfile.name, 1).split()
compute(chicViewpointBackgroundModel.main, args, 5)
assert are_files_equal((ROOT + 'background_truncateZeros.txt'), outfile.name, delta=1000, skip=1)

def parse_transform_binary(program: bytes, build: str='metadata') -> List[Tuple[str, Union[str, bytes, bool]]]:
ENABLE_STEPS = [TransformStep.BASE64, TransformStep.BASE64URL, TransformStep.NETBIOS, TransformStep.NETBIOSU, TransformStep.URI_APPEND, TransformStep.PRINT, TransformStep.MASK]
ARGUMENT_STEPS = [TransformStep._HEADER, TransformStep.HEADER, TransformStep.PARAMETER, TransformStep._PARAMETER, TransformStep._HOSTHEADER, TransformStep.APPEND, TransformStep.PREPEND]
BUILD_MAP = {0: build, 1: 'output'}
tsteps: List[Tuple[str, Union[str, bytes, bool]]] = []
p = io.BytesIO(program)
while True:
d = p.read(4)
value = u32be(d)
if ((len(d) != 4) or (value == 0)):
break
name = TransformStep.reverse.get(value, '')
step = getattr(TransformStep, name, None)
if (step is None):
raise IndexError('Unknown transform step for value: {}'.format(value))
elif (step == TransformStep.BUILD):
btype = u32be(p.read(4))
bvalue = BUILD_MAP.get(btype, 'UNKNOWN BUILD ARG')
tsteps.append((name, bvalue))
elif (step in ENABLE_STEPS):
tsteps.append((name, True))
elif (step in ARGUMENT_STEPS):
length = u32be(p.read(4))
arg = p.read(length)
tsteps.append((name, arg))
return tsteps

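# Wire-format sketch implied by the loop above: the program is a sequence of
# big-endian u32 opcodes terminated by 0; BUILD carries one u32 argument,
# ENABLE_STEPS carry none, and ARGUMENT_STEPS carry a u32 length plus raw bytes.
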
def _create_data_sources(data, index_sort='none'):
if ((type(data) == ndarray) or (len(data) == 2)):
(index, value) = data
if (type(index) in (list, tuple, ndarray)):
index = ArrayDataSource(array(index), sort_order=index_sort)
elif (not isinstance(index, AbstractDataSource)):
raise RuntimeError(('Need an array or list of values or a DataSource, got %s instead.' % type(index)))
if (type(value) in (list, tuple, ndarray)):
value = ArrayDataSource(array(value))
elif (not isinstance(value, AbstractDataSource)):
raise RuntimeError(('Need an array or list of values or a DataSource, got %s instead.' % type(value)))
return (index, value)
else:
raise RuntimeError('Unable to create datasources.')

def generate_c_inlined(forest, name, dtype='float', classifier=True):
(nodes, roots) = forest
cgen.assert_valid_identifier(name)
def is_leaf(n):
return (n[0] < 0)
def class_value(n):
assert is_leaf(n)
return n[1]
if classifier:
class_values = set(map(class_value, filter(is_leaf, nodes)))
assert (min(class_values) == 0)
n_classes = (max(class_values) + 1)
else:
n_classes = numpy.shape(forest[1])[0]
tree_names = [(name + '_tree_{}'.format(i)) for (i, _) in enumerate(roots)]
ctype = dtype
indent = 2
def c_leaf(n, depth):
return (((depth * indent) * ' ') + 'return {};'.format(n[1]))
def c_internal(n, depth):
f = '{indent}if (features[{feature}] < {value}) {{\n {left}\n {indent}}} else {{\n {right}\n {indent}}}'.format(**{'feature': n[0], 'value': cgen.constant(n[1], dtype=dtype), 'left': c_node(n[2], (depth + 1)), 'right': c_node(n[3], (depth + 1)), 'indent': ((depth * indent) * ' ')})
return f
def c_node(nid, depth):
n = nodes[nid]
if (n[0] < 0):
return c_leaf(n, (depth + 1))
return c_internal(n, (depth + 1))
def tree_func(name, root, return_type='int32_t'):
return 'static inline int32_t {function_name}(const {ctype} *features, int32_t features_length) {{\n {code}\n }}\n '.format(**{'function_name': name, 'code': c_node(root, 0), 'ctype': ctype, 'return_type': return_type})
def tree_vote_classifier(name):
return '_class = {}(features, features_length); votes[_class] += 1;'.format(name)
def tree_vote_regressor(name):
return 'avg += {}(features, features_length); '.format(name)
forest_regressor_func = 'float {function_name}(const {ctype} *features, int32_t features_length) {{\n\n float avg = 0;\n\n {tree_predictions}\n \n return avg/{n_classes};\n }}\n '.format(**{'function_name': (name + '_predict'), 'n_classes': n_classes, 'tree_predictions': '\n '.join([tree_vote_regressor(n) for n in tree_names]), 'ctype': ctype})
forest_classifier_func = 'int32_t {function_name}(const {ctype} *features, int32_t features_length) {{\n\n int32_t votes[{n_classes}] = {{0,}};\n int32_t _class = -1;\n\n {tree_predictions}\n \n int32_t most_voted_class = -1;\n int32_t most_voted_votes = 0;\n for (int32_t i=0; i<{n_classes}; i++) {{\n\n if (votes[i] > most_voted_votes) {{\n most_voted_class = i;\n most_voted_votes = votes[i];\n }}\n }}\n return most_voted_class;\n }}\n '.format(**{'function_name': (name + '_predict'), 'n_classes': n_classes, 'tree_predictions': '\n '.join([tree_vote_classifier(n) for n in tree_names]), 'ctype': ctype})
return_type = 'int32_t'
forest_func = forest_classifier_func
if (not classifier):
return_type = 'float'
forest_func = forest_regressor_func
tree_funcs = [tree_func(n, r, return_type=return_type) for (n, r) in zip(tree_names, roots)]
return '\n\n'.join((tree_funcs + [forest_func]))

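# Input-shape sketch (hypothetical toy forest): nodes are (feature, value, left,
# right) tuples, feature < 0 marks a leaf whose value is the class, and each
# entry of roots indexes into nodes:
#   nodes = [(-1, 0, -1, -1), (-1, 1, -1, -1), (0, 0.5, 0, 1)]
#   print(generate_c_inlined((nodes, [2]), 'demo'))
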
class OptionPlotoptionsScatterDatalabelsTextpath(Options):
@property
def attributes(self):
return self._config_get(None)
@attributes.setter
def attributes(self, value: Any):
self._config(value, js_type=False)
@property
def enabled(self):
return self._config_get(False)
@enabled.setter
def enabled(self, flag: bool):
self._config(flag, js_type=False)

def test_nested_sorting(db, client, admin_jwt):
OrderSubFactory(event__name='Shah', identifier='zxcv')
OrderSubFactory(event__name='Abu', identifier='abcde')
OrderSubFactory(event__name='Xerxes', identifier='fghj')
db.session.commit()
response = client.get('/v1/orders?sort=event.name', content_type='application/vnd.api+json', headers=admin_jwt)
orders = json.loads(response.data)['data']
assert (orders[0]['attributes']['identifier'] == 'abcde')
assert (orders[1]['attributes']['identifier'] == 'zxcv')
assert (orders[2]['attributes']['identifier'] == 'fghj')
response = client.get('/v1/orders?sort=-event.name', content_type='application/vnd.api+json', headers=admin_jwt)
orders = json.loads(response.data)['data']
assert (orders[0]['attributes']['identifier'] == 'fghj')
assert (orders[1]['attributes']['identifier'] == 'zxcv')
assert (orders[2]['attributes']['identifier'] == 'abcde')
response = client.get('/v1/orders?sort=identifier', content_type='application/vnd.api+json', headers=admin_jwt)
orders = json.loads(response.data)['data']
assert (orders[0]['attributes']['identifier'] == 'abcde')
assert (orders[1]['attributes']['identifier'] == 'fghj')
assert (orders[2]['attributes']['identifier'] == 'zxcv')

def extractMakiTranslates(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
if ('WATTT' in item['tags']):
return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
return False

def bulk_delete_users():
from firebase_admin import auth
result = auth.delete_users(['uid1', 'uid2', 'uid3'])
print('Successfully deleted {0} users'.format(result.success_count))
print('Failed to delete {0} users'.format(result.failure_count))
for err in result.errors:
print('error #{0}, reason: {1}'.format(err.index, err.reason))

class BZReader(Reader):
def __init__(self, source, path):
super().__init__(source, path)
def uncompress(target, _):
with open(target, 'wb') as g:
with bz2.open(path, 'rb') as f:
while True:
chunk = f.read(1024 * 1024)
if (not chunk):
break
g.write(chunk)
self.unzipped = self.cache_file(uncompress, dict(path=path))
def mutate(self):
print('mutate', self.source, self.unzipped)
return find_reader(self.source, self.unzipped)

class QueryStub(object):
def __init__(self, channel):
self.Validators = channel.unary_unary('/cosmos.staking.v1beta1.Query/Validators', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryValidatorsRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryValidatorsResponse.FromString)
self.Validator = channel.unary_unary('/cosmos.staking.v1beta1.Query/Validator', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryValidatorRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryValidatorResponse.FromString)
self.ValidatorDelegations = channel.unary_unary('/cosmos.staking.v1beta1.Query/ValidatorDelegations', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryValidatorDelegationsRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryValidatorDelegationsResponse.FromString)
self.ValidatorUnbondingDelegations = channel.unary_unary('/cosmos.staking.v1beta1.Query/ValidatorUnbondingDelegations', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryValidatorUnbondingDelegationsRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryValidatorUnbondingDelegationsResponse.FromString)
self.Delegation = channel.unary_unary('/cosmos.staking.v1beta1.Query/Delegation', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryDelegationRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryDelegationResponse.FromString)
self.UnbondingDelegation = channel.unary_unary('/cosmos.staking.v1beta1.Query/UnbondingDelegation', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryUnbondingDelegationRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryUnbondingDelegationResponse.FromString)
self.DelegatorDelegations = channel.unary_unary('/cosmos.staking.v1beta1.Query/DelegatorDelegations', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryDelegatorDelegationsRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryDelegatorDelegationsResponse.FromString)
self.DelegatorUnbondingDelegations = channel.unary_unary('/cosmos.staking.v1beta1.Query/DelegatorUnbondingDelegations', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryDelegatorUnbondingDelegationsRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryDelegatorUnbondingDelegationsResponse.FromString)
self.Redelegations = channel.unary_unary('/cosmos.staking.v1beta1.Query/Redelegations', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryRedelegationsRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryRedelegationsResponse.FromString)
self.DelegatorValidators = channel.unary_unary('/cosmos.staking.v1beta1.Query/DelegatorValidators', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryDelegatorValidatorsRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryDelegatorValidatorsResponse.FromString)
self.DelegatorValidator = channel.unary_unary('/cosmos.staking.v1beta1.Query/DelegatorValidator', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryDelegatorValidatorRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryDelegatorValidatorResponse.FromString)
self.HistoricalInfo = channel.unary_unary('/cosmos.staking.v1beta1.Query/HistoricalInfo', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryHistoricalInfoRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryHistoricalInfoResponse.FromString)
self.Pool = channel.unary_unary('/cosmos.staking.v1beta1.Query/Pool', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryPoolRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryPoolResponse.FromString)
self.Params = channel.unary_unary('/cosmos.staking.v1beta1.Query/Params', request_serializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryParamsRequest.SerializeToString, response_deserializer=cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryParamsResponse.FromString) |
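# Usage sketch (assumption): QueryStub wraps a grpc.Channel, so any of the unary
# endpoints above can be called with the matching generated request message. The
# endpoint address is hypothetical; the pb2 module alias is the one referenced in
# the stub itself and must be importable in the same context.
import grpc
channel = grpc.insecure_channel('localhost:9090')  # hypothetical gRPC endpoint
stub = QueryStub(channel)
response = stub.Params(cosmos_dot_staking_dot_v1beta1_dot_query__pb2.QueryParamsRequest())
print(response) |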
def extractRuiruitranslatesBlogspotCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
(max_runs=5, min_passes=1)
.limit_memory('130 MB')
.integration_test
def test_memory_smoothing(poly_template):
ert_config = ErtConfig.from_file('poly.ert')
fill_storage_with_data(poly_template, ert_config)
with open_storage((poly_template / 'ensembles'), mode='w') as storage:
prior_ens = storage.get_ensemble_by_name('prior')
posterior_ens = storage.create_ensemble(prior_ens.experiment_id, ensemble_size=prior_ens.ensemble_size, iteration=1, name='posterior', prior_ensemble=prior_ens)
smoother_update(prior_ens, posterior_ens, str(uuid.uuid4()), UpdateConfiguration.global_update_step(list(ert_config.observations.keys()), list(ert_config.ensemble_config.parameters))) |
('Custom Placeholders > Custom Placeholder Details of Event Sub-topic > Custom Placeholder Details of Event Sub-topic')
def event_sub_topic_custom_placeholder_get_detail(transaction):
with stash['app'].app_context():
event_sub_topic = EventSubTopicFactory()
db.session.add(event_sub_topic)
custom_placeholder = CustomPlaceholderFactory(event_sub_topic_id=1)
db.session.add(custom_placeholder)
db.session.commit() |
class FirewallClientZoneSettings():
_exceptions
def __init__(self, settings=None):
self.settings = ['', '', '', False, DEFAULT_ZONE_TARGET, [], [], [], False, [], [], [], [], [], [], False, False, DEFAULT_ZONE_PRIORITY, DEFAULT_ZONE_PRIORITY]
self.settings_name = ['version', 'short', 'description', 'UNUSED', 'target', 'services', 'ports', 'icmp_blocks', 'masquerade', 'forward_ports', 'interfaces', 'sources', 'rules_str', 'protocols', 'source_ports', 'icmp_block_inversion', 'forward', 'ingress_priority', 'egress_priority']
self.settings_dbus_type = ['s', 's', 's', 'b', 's', 's', '(ss)', 's', 'b', '(ssss)', 's', 's', 's', 's', '(ss)', 'b', 'b', 'i', 'i']
if settings:
if isinstance(settings, list):
for (i, v) in enumerate(settings):
self.settings[i] = settings[i]
if isinstance(settings, dict):
self.setSettingsDict(settings)
_exceptions
def __repr__(self):
return ('%s(%r)' % (self.__class__, self.settings))
_exceptions
def getSettingsDict(self):
settings = {}
for (key, value) in zip(self.settings_name, self.settings):
if (key == 'UNUSED'):
continue
settings[key] = value
return settings
_exceptions
def setSettingsDict(self, settings):
for key in settings:
self.settings[self.settings_name.index(key)] = settings[key]
_exceptions
def getSettingsDbusDict(self):
settings = {}
for (key, value, sig) in zip(self.settings_name, self.settings, self.settings_dbus_type):
if (key == 'UNUSED'):
continue
if (type(value) is list):
settings[key] = dbus.Array(value, signature=sig)
elif (type(value) is dict):
settings[key] = dbus.Dictionary(value, signature=sig)
else:
settings[key] = value
return settings
_exceptions
def getRuntimeSettingsDict(self):
settings = self.getSettingsDict()
del settings['version']
del settings['short']
del settings['description']
del settings['target']
return settings
_exceptions
def getRuntimeSettingsDbusDict(self):
settings = self.getSettingsDbusDict()
del settings['version']
del settings['short']
del settings['description']
del settings['target']
return settings
_exceptions
def getVersion(self):
return self.settings[0]
_exceptions
def setVersion(self, version):
self.settings[0] = version
_exceptions
def getShort(self):
return self.settings[1]
_exceptions
def setShort(self, short):
self.settings[1] = short
_exceptions
def getDescription(self):
return self.settings[2]
_exceptions
def setDescription(self, description):
self.settings[2] = description
_exceptions
def getTarget(self):
return (self.settings[4] if (self.settings[4] != DEFAULT_ZONE_TARGET) else 'default')
_exceptions
def setTarget(self, target):
self.settings[4] = (target if (target != 'default') else DEFAULT_ZONE_TARGET)
_exceptions
def getServices(self):
return self.settings[5]
_exceptions
def setServices(self, services):
self.settings[5] = services
_exceptions
def addService(self, service):
if (service not in self.settings[5]):
self.settings[5].append(service)
else:
raise FirewallError(errors.ALREADY_ENABLED, service)
_exceptions
def removeService(self, service):
if (service in self.settings[5]):
self.settings[5].remove(service)
else:
raise FirewallError(errors.NOT_ENABLED, service)
_exceptions
def queryService(self, service):
return (service in self.settings[5])
_exceptions
def getPorts(self):
return self.settings[6]
_exceptions
def setPorts(self, ports):
self.settings[6] = ports
_exceptions
def addPort(self, port, protocol):
if ((port, protocol) not in self.settings[6]):
self.settings[6].append((port, protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED, ("'%s:%s'" % (port, protocol)))
_exceptions
def removePort(self, port, protocol):
if ((port, protocol) in self.settings[6]):
self.settings[6].remove((port, protocol))
else:
raise FirewallError(errors.NOT_ENABLED, ("'%s:%s'" % (port, protocol)))
_exceptions
def queryPort(self, port, protocol):
return ((port, protocol) in self.settings[6])
_exceptions
def getProtocols(self):
return self.settings[13]
_exceptions
def setProtocols(self, protocols):
self.settings[13] = protocols
_exceptions
def addProtocol(self, protocol):
if (protocol not in self.settings[13]):
self.settings[13].append(protocol)
else:
raise FirewallError(errors.ALREADY_ENABLED, protocol)
_exceptions
def removeProtocol(self, protocol):
if (protocol in self.settings[13]):
self.settings[13].remove(protocol)
else:
raise FirewallError(errors.NOT_ENABLED, protocol)
_exceptions
def queryProtocol(self, protocol):
return (protocol in self.settings[13])
_exceptions
def getSourcePorts(self):
return self.settings[14]
_exceptions
def setSourcePorts(self, ports):
self.settings[14] = ports
_exceptions
def addSourcePort(self, port, protocol):
if ((port, protocol) not in self.settings[14]):
self.settings[14].append((port, protocol))
else:
raise FirewallError(errors.ALREADY_ENABLED, ("'%s:%s'" % (port, protocol)))
_exceptions
def removeSourcePort(self, port, protocol):
if ((port, protocol) in self.settings[14]):
self.settings[14].remove((port, protocol))
else:
raise FirewallError(errors.NOT_ENABLED, ("'%s:%s'" % (port, protocol)))
_exceptions
def querySourcePort(self, port, protocol):
return ((port, protocol) in self.settings[14])
_exceptions
def getIcmpBlocks(self):
return self.settings[7]
_exceptions
def setIcmpBlocks(self, icmpblocks):
self.settings[7] = icmpblocks
_exceptions
def addIcmpBlock(self, icmptype):
if (icmptype not in self.settings[7]):
self.settings[7].append(icmptype)
else:
raise FirewallError(errors.ALREADY_ENABLED, icmptype)
_exceptions
def removeIcmpBlock(self, icmptype):
if (icmptype in self.settings[7]):
self.settings[7].remove(icmptype)
else:
raise FirewallError(errors.NOT_ENABLED, icmptype)
_exceptions
def queryIcmpBlock(self, icmptype):
return (icmptype in self.settings[7])
_exceptions
def getIcmpBlockInversion(self):
return self.settings[15]
_exceptions
def setIcmpBlockInversion(self, flag):
self.settings[15] = flag
_exceptions
def addIcmpBlockInversion(self):
if (not self.settings[15]):
self.settings[15] = True
else:
raise FirewallError(errors.ALREADY_ENABLED, 'icmp-block-inversion')
_exceptions
def removeIcmpBlockInversion(self):
if self.settings[15]:
self.settings[15] = False
else:
raise FirewallError(errors.NOT_ENABLED, 'icmp-block-inversion')
_exceptions
def queryIcmpBlockInversion(self):
return self.settings[15]
_exceptions
def getForward(self):
return self.settings[16]
_exceptions
def setForward(self, forward):
self.settings[16] = forward
_exceptions
def addForward(self):
if (not self.settings[16]):
self.settings[16] = True
else:
raise FirewallError(errors.ALREADY_ENABLED, 'forward')
_exceptions
def removeForward(self):
if self.settings[16]:
self.settings[16] = False
else:
raise FirewallError(errors.NOT_ENABLED, 'forward')
_exceptions
def queryForward(self):
return self.settings[16]
_exceptions
def getMasquerade(self):
return self.settings[8]
_exceptions
def setMasquerade(self, masquerade):
self.settings[8] = masquerade
_exceptions
def addMasquerade(self):
if (not self.settings[8]):
self.settings[8] = True
else:
raise FirewallError(errors.ALREADY_ENABLED, 'masquerade')
_exceptions
def removeMasquerade(self):
if self.settings[8]:
self.settings[8] = False
else:
raise FirewallError(errors.NOT_ENABLED, 'masquerade')
_exceptions
def queryMasquerade(self):
return self.settings[8]
_exceptions
def getForwardPorts(self):
return self.settings[9]
_exceptions
def setForwardPorts(self, ports):
self.settings[9] = ports
_exceptions
def addForwardPort(self, port, protocol, to_port, to_addr):
if (to_port is None):
to_port = ''
if (to_addr is None):
to_addr = ''
if ((port, protocol, to_port, to_addr) not in self.settings[9]):
self.settings[9].append((port, protocol, to_port, to_addr))
else:
raise FirewallError(errors.ALREADY_ENABLED, ("'%s:%s:%s:%s'" % (port, protocol, to_port, to_addr)))
_exceptions
def removeForwardPort(self, port, protocol, to_port, to_addr):
if (to_port is None):
to_port = ''
if (to_addr is None):
to_addr = ''
if ((port, protocol, to_port, to_addr) in self.settings[9]):
self.settings[9].remove((port, protocol, to_port, to_addr))
else:
raise FirewallError(errors.NOT_ENABLED, ("'%s:%s:%s:%s'" % (port, protocol, to_port, to_addr)))
_exceptions
def queryForwardPort(self, port, protocol, to_port, to_addr):
if (to_port is None):
to_port = ''
if (to_addr is None):
to_addr = ''
return ((port, protocol, to_port, to_addr) in self.settings[9])
_exceptions
def getInterfaces(self):
return self.settings[10]
_exceptions
def setInterfaces(self, interfaces):
self.settings[10] = interfaces
_exceptions
def addInterface(self, interface):
if (interface not in self.settings[10]):
self.settings[10].append(interface)
else:
raise FirewallError(errors.ALREADY_ENABLED, interface)
_exceptions
def removeInterface(self, interface):
if (interface in self.settings[10]):
self.settings[10].remove(interface)
else:
raise FirewallError(errors.NOT_ENABLED, interface)
_exceptions
def queryInterface(self, interface):
return (interface in self.settings[10])
_exceptions
def getSources(self):
return self.settings[11]
_exceptions
def setSources(self, sources):
self.settings[11] = sources
_exceptions
def addSource(self, source):
if (source not in self.settings[11]):
self.settings[11].append(source)
else:
raise FirewallError(errors.ALREADY_ENABLED, source)
_exceptions
def removeSource(self, source):
if (source in self.settings[11]):
self.settings[11].remove(source)
else:
raise FirewallError(errors.NOT_ENABLED, source)
_exceptions
def querySource(self, source):
return (source in self.settings[11])
_exceptions
def getRichRules(self):
return self.settings[12]
_exceptions
def setRichRules(self, rules):
rules = [str(Rich_Rule(rule_str=r)) for r in rules]
self.settings[12] = rules
_exceptions
def addRichRule(self, rule):
rule = str(Rich_Rule(rule_str=rule))
if (rule not in self.settings[12]):
self.settings[12].append(rule)
else:
raise FirewallError(errors.ALREADY_ENABLED, rule)
_exceptions
def removeRichRule(self, rule):
rule = str(Rich_Rule(rule_str=rule))
if (rule in self.settings[12]):
self.settings[12].remove(rule)
else:
raise FirewallError(errors.NOT_ENABLED, rule)
_exceptions
def queryRichRule(self, rule):
rule = str(Rich_Rule(rule_str=rule))
return (rule in self.settings[12])
_exceptions
def getPriority(self):
if (self.getIngressPriority() != self.getEgressPriority()):
raise FirewallError(errors.INVALID_PRIORITY, 'Ingress and Egress priority mismatch. Use --get-ingress-priority and --get-egress-priority.')
return self.getIngressPriority()
_exceptions
def setPriority(self, priority):
self.setIngressPriority(priority)
self.setEgressPriority(priority)
_exceptions
def getIngressPriority(self):
return self.getSettingsDict()['ingress_priority']
_exceptions
def setIngressPriority(self, priority):
self.setSettingsDict({'ingress_priority': int(priority)})
_exceptions
def getEgressPriority(self):
return self.getSettingsDict()['egress_priority']
_exceptions
def setEgressPriority(self, priority):
self.setSettingsDict({'egress_priority': int(priority)}) |
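# Round-trip sketch grounded in the class above (assumes the same module context,
# where DEFAULT_ZONE_TARGET and DEFAULT_ZONE_PRIORITY are defined): settings can be
# mutated through the typed helpers and read back as a plain dict.
zone = FirewallClientZoneSettings()
zone.setShort('demo')
zone.addService('ssh')
zone.addPort('8080', 'tcp')
assert zone.queryService('ssh')
assert zone.getSettingsDict()['ports'] == [('8080', 'tcp')] |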
class SQLAssistantAgent(ConversableAgent):
DEFAULT_SYSTEM_MESSAGE = "You are a SQL expert and answer user questions by writing SQL using the following data structures.\n Use the following data structure to write the best mysql SQL for the user's problem. \n Data Structure information:\n {data_structure}\n \n - Please ensure that the SQL is correct and high-performance.\n - Please be careful not to use tables or fields that are not mentioned.\n - Make sure to only return SQL.\n "
    DEFAULT_DESCRIBE = 'You can analyze data with a known structure through SQL and generate a single analysis chart for a given target. Please note that you do not have the ability to obtain and process data and can only perform data analysis based on a given structure. If the task goal cannot or does not need to be solved by SQL analysis, please do not use this agent.'
NAME = 'SqlEngineer'
def __init__(self, memory: GptsMemory, agent_context: AgentContext, describe: Optional[str]=DEFAULT_DESCRIBE, is_termination_msg: Optional[Callable[([Dict], bool)]]=None, max_consecutive_auto_reply: Optional[int]=None, human_input_mode: Optional[str]='NEVER', **kwargs):
super().__init__(name=self.NAME, memory=memory, describe=describe, system_message=self.DEFAULT_SYSTEM_MESSAGE, is_termination_msg=is_termination_msg, max_consecutive_auto_reply=max_consecutive_auto_reply, human_input_mode=human_input_mode, agent_context=agent_context, **kwargs)
self.register_reply(Agent, SQLAssistantAgent.generate_analysis_chart_reply)
self.agent_context = agent_context
self.db_connect = CFG.LOCAL_DB_MANAGE.get_connect(self.agent_context.resource_db.get('name', None))
async def a_system_fill_param(self):
params = {'data_structure': self.db_connect.get_table_info(), 'disply_type': ApiCall.default_chart_type_promot(), 'dialect': self.db_connect.db_type}
self.update_system_message(self.DEFAULT_SYSTEM_MESSAGE.format(**params))
async def generate_analysis_chart_reply(self, message: Optional[str]=None, sender: Optional[Agent]=None, reviewer: Optional[Agent]=None, config: Optional[Union[(Dict, Literal[False])]]=None):
self.api_call = ApiCall(display_registry=[])
if self.api_call.check_have_plugin_call(message):
exit_success = True
try:
chart_vis = self.api_call.display_sql_llmvis(message, self.db_connect.run_to_df)
except Exception as e:
err_info = f'{str(e)}'
exit_success = False
output = (chart_vis if exit_success else err_info)
else:
exit_success = False
output = message
return (True, {'is_exe_success': exit_success, 'content': f'{output}'}) |
def delete_unknown_nodes(progress_controller=None):
if (progress_controller is None):
progress_controller = ProgressControllerBase()
progress_controller.minimum = 0
progress_controller.value = 0
unknown_nodes = mc.ls(type='unknown')
progress_controller.maximum = len(unknown_nodes)
    for node in unknown_nodes:
try:
mc.lockNode(node, lock=False)
except TypeError:
pass
progress_controller.increment()
if unknown_nodes:
mc.delete(unknown_nodes)
progress_controller.complete() |
def _detect_space(repo, branch=None, yes=False):
space = _resolve_rule(repo, branch)
if (space is None):
print('No space detected')
return None
print('Detected space {space}'.format(**locals()))
if (not yes):
run = input('Deploy to space {space} (enter "yes" to deploy)? > '.format(**locals()))
if (run.lower() not in ['y', 'yes']):
return None
return space |
class OptionPlotoptionsWordcloudSonificationDefaultspeechoptionsMappingRate(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._config(text, js_type=False)
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
def within(self):
return self._config_get(None)
def within(self, value: Any):
self._config(value, js_type=False) |
def test_historical_mean_return():
orig = [13., 3.]
l1 = np.array(range(1, 101)).astype(np.float64)
l2 = [((10 * 0.2) + (i * 0.25)) for i in range(21, 121)]
d = {'1': l1, '2': l2}
df = pd.DataFrame(d)
ret = historical_mean_return(df, freq=252)
assert (abs((ret['1'] - orig[0])) <= 1e-15)
assert (abs((ret['2'] - orig[1])) <= 1e-15) |
def main():
cap = EasyPySpin.VideoCapture(0)
if (not cap.isOpened()):
print("Camera can't open\nexit")
return (- 1)
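    # Note (assumption based on EasyPySpin's OpenCV-style API): passing -1 to
    # CAP_PROP_EXPOSURE / CAP_PROP_GAIN switches the camera back to automatic control.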
cap.set(cv2.CAP_PROP_EXPOSURE, (- 1))
cap.set(cv2.CAP_PROP_GAIN, (- 1))
while True:
(ret, frame) = cap.read()
img_show = cv2.resize(frame, None, fx=0.25, fy=0.25)
cv2.imshow('press q to quit', img_show)
key = cv2.waitKey(30)
if (key == ord('q')):
break
cap.release()
cv2.destroyAllWindows() |
def test_matchable_resource():
cluster_resource_attrs = matchable_resource.ClusterResourceAttributes({'cpu': 'one million', 'gpu': 'just one'})
obj = matchable_resource.MatchingAttributes(cluster_resource_attributes=cluster_resource_attrs)
assert (obj.cluster_resource_attributes == cluster_resource_attrs)
assert (obj == matchable_resource.MatchingAttributes.from_flyte_idl(obj.to_flyte_idl()))
execution_queue_attributes = matchable_resource.ExecutionQueueAttributes(['foo', 'bar', 'baz'])
obj2 = matchable_resource.MatchingAttributes(execution_queue_attributes=execution_queue_attributes)
assert (obj2.execution_queue_attributes == execution_queue_attributes)
assert (obj2 == matchable_resource.MatchingAttributes.from_flyte_idl(obj2.to_flyte_idl()))
execution_cluster_label = matchable_resource.ExecutionClusterLabel('my_cluster')
obj2 = matchable_resource.MatchingAttributes(execution_cluster_label=execution_cluster_label)
assert (obj2.execution_cluster_label == execution_cluster_label)
assert (obj2 == matchable_resource.MatchingAttributes.from_flyte_idl(obj2.to_flyte_idl()))
plugin_overrides_obj = matchable_resource.PluginOverrides(overrides=[matchable_resource.PluginOverride('task_type', ['acceptable', 'override'], matchable_resource.PluginOverride.USE_DEFAULT)])
plugin_override_attributes = matchable_resource.MatchingAttributes(plugin_overrides=plugin_overrides_obj)
assert (plugin_override_attributes.plugin_overrides == plugin_overrides_obj)
assert (plugin_override_attributes == matchable_resource.MatchingAttributes.from_flyte_idl(plugin_override_attributes.to_flyte_idl())) |
def assign_color(index):
material = get_material(index)
if material:
rgb = get_color(index)
rgba = (rgb[0], rgb[1], rgb[2], 1)
        if (material.use_nodes and (bpy.context.scene.render.engine in ('CYCLES', 'BLENDER_EEVEE'))):
for n in material.node_tree.nodes:
if (n.bl_idname == 'ShaderNodeBsdfPrincipled'):
n.inputs[0].default_value = rgba
material.diffuse_color = rgba
elif ((bpy.context.scene.render.engine == 'BLENDER_EEVEE') and (not material.use_nodes)):
material.diffuse_color = rgba |
class OptionSeriesTreemapSonificationContexttracksMappingTime(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._config(text, js_type=False)
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
def within(self):
return self._config_get(None)
def within(self, value: Any):
self._config(value, js_type=False) |
class FIS_DATA(FIS):
def __init__(self, packet=[0], direction='H2D'):
FIS.__init__(self, packet, fis_data_header.fields, direction)
self.type = fis_types['DATA']
def __repr__(self):
r = 'FIS_DATA\n'
r += FIS.__repr__(self)
for data in self.packet[1:]:
r += '{:08x}\n'.format(data)
return r |
class OptionSeriesDependencywheelDataEvents(Options):
def click(self):
return self._config_get(None)
def click(self, value: Any):
self._config(value, js_type=False)
def drag(self):
return self._config_get(None)
def drag(self, value: Any):
self._config(value, js_type=False)
def dragStart(self):
return self._config_get(None)
def dragStart(self, value: Any):
self._config(value, js_type=False)
def drop(self):
return self._config_get(None)
def drop(self, value: Any):
self._config(value, js_type=False)
def mouseOut(self):
return self._config_get(None)
def mouseOut(self, value: Any):
self._config(value, js_type=False)
def mouseOver(self):
return self._config_get(None)
def mouseOver(self, value: Any):
self._config(value, js_type=False)
def remove(self):
return self._config_get(None)
def remove(self, value: Any):
self._config(value, js_type=False)
def select(self):
return self._config_get(None)
def select(self, value: Any):
self._config(value, js_type=False)
def unselect(self):
return self._config_get(None)
def unselect(self, value: Any):
self._config(value, js_type=False)
def update(self):
return self._config_get(None)
def update(self, value: Any):
self._config(value, js_type=False) |
def batch_hip_to_root(source_dir, dest_dir, use_x=True, use_y=True, use_z=True, on_ground=True, use_rotation=True, scale=1.0, restoffset=(0, 0, 0), hipname='', fixbind=True, apply_rotation=True, apply_scale=False, b_remove_namespace=True, b_unreal_bones=False, add_leaf_bones=False, knee_offset=(0, 0, 0), ignore_leaf_bones=True, automatic_bone_orientation=True, quaternion_clean_pre=True, quaternion_clean_post=True, foot_bone_workaround=False):
source_dir = Path(source_dir)
dest_dir = Path(dest_dir)
bpy.context.scene.unit_settings.system = 'METRIC'
bpy.context.scene.unit_settings.scale_length = 1
numfiles = 0
for file in source_dir.iterdir():
if (not file.is_file()):
continue
file_ext = file.suffix
file_loader = {'.fbx': (lambda filename: bpy.ops.import_scene.fbx(filepath=str(filename), axis_forward='-Z', axis_up='Y', directory='', filter_glob='*.fbx', ui_tab='MAIN', use_manual_orientation=False, global_scale=1.0, bake_space_transform=False, use_custom_normals=True, use_image_search=True, use_alpha_decals=False, decal_offset=0.0, use_anim=True, anim_offset=1.0, use_custom_props=True, use_custom_props_enum_as_string=True, ignore_leaf_bones=ignore_leaf_bones, force_connect_children=False, automatic_bone_orientation=automatic_bone_orientation, primary_bone_axis='Y', secondary_bone_axis='X', use_prepost_rot=True)), '.dae': (lambda filename: bpy.ops.wm.collada_import(filepath=str(filename), filter_blender=False, filter_backup=False, filter_image=False, filter_movie=False, filter_python=False, filter_font=False, filter_sound=False, filter_text=False, filter_btx=False, filter_collada=True, filter_alembic=False, filter_folder=True, filter_blenlib=False, filemode=8, display_type='DEFAULT', sort_method='FILE_SORT_ALPHA', import_units=False, fix_orientation=True, find_chains=True, auto_connect=True, min_chain_length=0))}
if (not (file_ext in file_loader)):
continue
numfiles += 1
bpy.ops.object.select_all(action='SELECT')
bpy.ops.object.delete(use_global=True)
for mesh in bpy.data.meshes:
bpy.data.meshes.remove(mesh, do_unlink=True)
for material in bpy.data.materials:
bpy.data.materials.remove(material, do_unlink=True)
for action in bpy.data.actions:
bpy.data.actions.remove(action, do_unlink=True)
file_loader[file_ext](file)
if b_remove_namespace:
for obj in bpy.context.selected_objects:
remove_namespace(obj)
elif b_unreal_bones:
for obj in bpy.context.selected_objects:
rename_bones(obj, 'unreal')
def getArmature(objects):
for a in objects:
if (a.type == 'ARMATURE'):
return a
raise TypeError('No Armature found')
armature = getArmature(bpy.context.selected_objects)
try:
for step in hip_to_root(armature, use_x=use_x, use_y=use_y, use_z=use_z, on_ground=on_ground, use_rotation=use_rotation, scale=scale, restoffset=restoffset, hipname=hipname, fixbind=fixbind, apply_rotation=apply_rotation, apply_scale=apply_scale, quaternion_clean_pre=quaternion_clean_pre, quaternion_clean_post=quaternion_clean_post, foot_bone_workaround=foot_bone_workaround):
pass
except Exception as e:
log.error(('ERROR hip_to_root raised %s when processing %s' % (str(e), file.name)))
return (- 1)
if (Vector(knee_offset).length > 0.0):
apply_kneefix(armature, knee_offset, bonenames=bpy.context.scene.mixamo.knee_bones.split(','))
for action in bpy.data.actions:
if (action != armature.animation_data.action):
bpy.data.actions.remove(action, do_unlink=True)
output_file = dest_dir.joinpath((file.stem + '.fbx'))
bpy.ops.export_scene.fbx(filepath=str(output_file), use_selection=False, apply_unit_scale=False, add_leaf_bones=add_leaf_bones, axis_forward='-Z', axis_up='Y', mesh_smooth_type='FACE')
bpy.ops.object.select_all(action='SELECT')
bpy.ops.object.delete(use_global=False)
return numfiles |
def decode_auth_plain(ciphertext: bytes, privkey: datatypes.PrivateKey) -> Tuple[(datatypes.Signature, datatypes.PublicKey, bytes, int)]:
message = ecies.decrypt(ciphertext, privkey)
if (len(message) != AUTH_MSG_LEN):
raise BadAckMessage(f'Unexpected size for auth message: {len(message)}')
signature = keys.Signature(signature_bytes=message[:SIGNATURE_LEN])
pubkey_start = (SIGNATURE_LEN + HASH_LEN)
pubkey = keys.PublicKey(message[pubkey_start:(pubkey_start + PUBKEY_LEN)])
nonce_start = (pubkey_start + PUBKEY_LEN)
nonce = message[nonce_start:(nonce_start + HASH_LEN)]
return (signature, pubkey, nonce, DEVP2P_V4) |
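# Layout recovered from the slicing above (informational sketch, not source text):
#   message = signature   (SIGNATURE_LEN bytes)
#           | keccak hash (HASH_LEN bytes, skipped here)
#           | public key  (PUBKEY_LEN bytes)
#           | nonce       (HASH_LEN bytes)
# AUTH_MSG_LEN is the authoritative total; in common devp2p v4 descriptions the
# plain auth message also carries a trailing one-byte token flag. |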
class PerformanceAPI(ABC):
total_msgs: int
total_items: int
total_timeouts: int
total_response_time: float
response_quality_ema: EMA
round_trip_ema: EMA
round_trip_99th: Percentile
round_trip_stddev: StandardDeviation
items_per_second_ema: EMA
def get_stats(self) -> str:
... |
class ColumnRange(Iterator):
def __init__(self, i, j=None, include_end=True, reverse=False):
self.incr = 1
if (j is None):
self.start = column_index_to_integer('A')
self.end = column_index_to_integer(i)
else:
self.start = column_index_to_integer(i)
self.end = column_index_to_integer(j)
if reverse:
(self.end, self.start) = (self.start, self.end)
self.incr = (- 1)
self.column = (self.start - self.incr)
if include_end:
self.end += self.incr
def next(self):
return self.__next__()
def __next__(self):
self.column = (self.column + self.incr)
if (self.column == self.end):
raise StopIteration
return column_integer_to_index(self.column) |
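# Behavior sketch derived from the iterator above (assuming the usual A=1 column
# helpers and that the Iterator base class supplies __iter__):
# list(ColumnRange('C'))                         -> ['A', 'B', 'C']
# list(ColumnRange('B', 'D'))                    -> ['B', 'C', 'D']
# list(ColumnRange('B', 'D', reverse=True))      -> ['D', 'C', 'B']
# list(ColumnRange('B', 'D', include_end=False)) -> ['B', 'C'] |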
class RequestManager():
logger = logging.getLogger('web3.RequestManager')
middleware_onion: Union[(MiddlewareOnion, AsyncMiddlewareOnion, NamedElementOnion[(None, None)])]
def __init__(self, w3: Union[('AsyncWeb3', 'Web3')], provider: Optional[Union[('BaseProvider', 'AsyncBaseProvider')]]=None, middlewares: Optional[Union[(Sequence[Tuple[(Middleware, str)]], Sequence[Tuple[(AsyncMiddleware, str)]])]]=None) -> None:
self.w3 = w3
if (provider is None):
self.provider = AutoProvider()
else:
self.provider = provider
if (middlewares is None):
middlewares = (self.async_default_middlewares() if self.provider.is_async else self.default_middlewares(cast('Web3', w3)))
self.middleware_onion = NamedElementOnion(middlewares)
if isinstance(provider, PersistentConnectionProvider):
provider = cast(PersistentConnectionProvider, self.provider)
self._request_processor: RequestProcessor = provider._request_processor
w3: Union[('AsyncWeb3', 'Web3')] = None
_provider = None
def provider(self) -> Union[('BaseProvider', 'AsyncBaseProvider')]:
return self._provider
def provider(self, provider: Union[('BaseProvider', 'AsyncBaseProvider')]) -> None:
self._provider = provider
def default_middlewares(w3: 'Web3') -> List[Tuple[(Middleware, str)]]:
return [(gas_price_strategy_middleware, 'gas_price_strategy'), (name_to_address_middleware(w3), 'name_to_address'), (attrdict_middleware, 'attrdict'), (validation_middleware, 'validation'), (abi_middleware, 'abi'), (buffered_gas_estimate_middleware, 'gas_estimate')]
def async_default_middlewares() -> List[Tuple[(AsyncMiddleware, str)]]:
return [(async_gas_price_strategy_middleware, 'gas_price_strategy'), (async_name_to_address_middleware, 'name_to_address'), (async_attrdict_middleware, 'attrdict'), (async_validation_middleware, 'validation'), (async_buffered_gas_estimate_middleware, 'gas_estimate')]
def _make_request(self, method: Union[(RPCEndpoint, Callable[(..., RPCEndpoint)])], params: Any) -> RPCResponse:
provider = cast('BaseProvider', self.provider)
request_func = provider.request_func(cast('Web3', self.w3), cast(MiddlewareOnion, self.middleware_onion))
self.logger.debug(f'Making request. Method: {method}')
return request_func(method, params)
async def _coro_make_request(self, method: Union[(RPCEndpoint, Callable[(..., RPCEndpoint)])], params: Any) -> RPCResponse:
provider = cast('AsyncBaseProvider', self.provider)
request_func = (await provider.request_func(cast('AsyncWeb3', self.w3), cast(AsyncMiddlewareOnion, self.middleware_onion)))
self.logger.debug(f'Making request. Method: {method}')
return (await request_func(method, params))
def formatted_response(response: RPCResponse, params: Any, error_formatters: Optional[Callable[(..., Any)]]=None, null_result_formatters: Optional[Callable[(..., Any)]]=None) -> Any:
if (('jsonrpc' in response) and (response['jsonrpc'] != '2.0')):
_raise_bad_response_format(response, 'The "jsonrpc" field must be present with a value of "2.0"')
if ('id' in response):
response_id = response['id']
if ((response_id is None) and ('error' not in response)):
_raise_bad_response_format(response, '"id" must be None when an error is present')
elif (not isinstance(response_id, (str, int, type(None)))):
_raise_bad_response_format(response, '"id" must be a string or integer')
if (('error' in response) and ('result' in response)):
_raise_bad_response_format(response, 'Response cannot include both "error" and "result"')
elif ('error' in response):
error = response.get('error')
if ((error is None) or isinstance(error, str)):
raise ValueError(error)
code = error.get('code')
if (not isinstance(code, int)):
_raise_bad_response_format(response, "error['code'] must be an integer")
elif (code == METHOD_NOT_FOUND):
raise MethodUnavailable(error)
if (not isinstance(error.get('message'), str)):
_raise_bad_response_format(response, "error['message'] must be a string")
apply_error_formatters(error_formatters, response)
raise ValueError(error)
elif ('result' in response):
if (response.get('result', False) in NULL_RESPONSES):
apply_null_result_formatters(null_result_formatters, response, params)
return response.get('result')
elif ((response.get('method') == 'eth_subscription') and (response.get('params') is not None) and (response['params'].get('subscription') is not None) and (response['params'].get('result') is not None)):
return {'subscription': response['params']['subscription'], 'result': response['params']['result']}
else:
_raise_bad_response_format(response)
def request_blocking(self, method: Union[(RPCEndpoint, Callable[(..., RPCEndpoint)])], params: Any, error_formatters: Optional[Callable[(..., Any)]]=None, null_result_formatters: Optional[Callable[(..., Any)]]=None) -> Any:
response = self._make_request(method, params)
return self.formatted_response(response, params, error_formatters, null_result_formatters)
async def coro_request(self, method: Union[(RPCEndpoint, Callable[(..., RPCEndpoint)])], params: Any, error_formatters: Optional[Callable[(..., Any)]]=None, null_result_formatters: Optional[Callable[(..., Any)]]=None) -> Any:
response = (await self._coro_make_request(method, params))
return self.formatted_response(response, params, error_formatters, null_result_formatters)
async def ws_send(self, method: RPCEndpoint, params: Any) -> RPCResponse:
provider = cast(PersistentConnectionProvider, self._provider)
request_func = (await provider.request_func(cast('AsyncWeb3', self.w3), cast(AsyncMiddlewareOnion, self.middleware_onion)))
self.logger.debug(f'Making request to open websocket connection - uri: {provider.endpoint_uri}, method: {method}')
response = (await request_func(method, params))
return (await self._process_ws_response(response))
async def ws_recv(self) -> Any:
return (await self._ws_recv_stream().__anext__())
def _persistent_recv_stream(self) -> '_AsyncPersistentRecvStream':
return _AsyncPersistentRecvStream(self)
async def _ws_recv_stream(self) -> AsyncGenerator[(RPCResponse, None)]:
if (not isinstance(self._provider, PersistentConnectionProvider)):
raise TypeError('Only websocket providers that maintain an open, persistent connection can listen to websocket recv streams.')
while True:
(await asyncio.sleep(0))
response = self._request_processor.pop_raw_response(subscription=True)
if (response is not None):
break
elif (not self._provider._ws_lock.locked()):
async with self._provider._ws_lock:
try:
response = (await self._provider._ws_recv(timeout=0.5))
except asyncio.TimeoutError:
continue
if (response.get('method') == 'eth_subscription'):
break
else:
self._provider._request_processor.cache_raw_response(response)
(yield (await self._process_ws_response(response)))
async def _process_ws_response(self, response: RPCResponse) -> RPCResponse:
provider = cast(PersistentConnectionProvider, self._provider)
request_info = self._request_processor.get_request_information_for_response(response)
if (request_info is None):
self.logger.debug('No cache key found for response, returning raw response')
return response
else:
if ((request_info.method == 'eth_subscribe') and ('result' in response.keys())):
subscription_id = response['result']
cache_key = generate_cache_key(subscription_id)
if (cache_key not in self._request_processor._request_information_cache):
request_info.subscription_id = subscription_id
provider.logger.debug(f'''Caching eth_subscription info:
cache_key={cache_key},
request_info={request_info.__dict__}''')
self._request_processor._request_information_cache.cache(cache_key, request_info)
if (len(request_info.middleware_response_processors) > 0):
response = pipe(response, *request_info.middleware_response_processors)
(result_formatters, error_formatters, null_formatters) = request_info.response_formatters
partly_formatted_response = self.formatted_response(response, request_info.params, error_formatters, null_formatters)
return apply_result_formatters(result_formatters, partly_formatted_response) |
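# Standalone sketch of the JSON-RPC 2.0 response shape enforced by
# formatted_response() above (illustrative only; the function name is hypothetical):
def is_well_formed_jsonrpc(response: dict) -> bool:
    # "jsonrpc", when present, must be exactly "2.0"
    if response.get('jsonrpc', '2.0') != '2.0':
        return False
    # a response carries either "error" or "result", never both
    if 'error' in response and 'result' in response:
        return False
    if 'error' in response:
        error = response['error']
        return (isinstance(error, dict)
                and isinstance(error.get('code'), int)
                and isinstance(error.get('message'), str))
    return 'result' in response |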
class PHYSource(Module):
def __init__(self):
self.source = stream.Endpoint(phy_description(32))
self.dword = PHYDword()
def send(self, dword):
self.dword = dword
def generator(self):
while True:
(yield self.source.valid.eq(1))
(yield self.source.charisk.eq(0))
for (k, v) in primitives.items():
if (v == self.dword.dat):
(yield self.source.charisk.eq(1))
(yield self.source.data.eq(self.dword.dat))
(yield) |
def test_parseInline():
md = MarkdownIt()
tokens = md.parseInline('abc\n\n> xyz')
assert (tokens == [Token(type='inline', tag='', nesting=0, attrs={}, map=[0, 1], level=0, children=[Token(type='text', tag='', nesting=0, attrs={}, map=None, level=0, children=None, content='abc', markup='', info='', meta={}, block=False, hidden=False), Token(type='softbreak', tag='br', nesting=0, attrs={}, map=None, level=0, children=None, content='', markup='', info='', meta={}, block=False, hidden=False), Token(type='softbreak', tag='br', nesting=0, attrs={}, map=None, level=0, children=None, content='', markup='', info='', meta={}, block=False, hidden=False), Token(type='text', tag='', nesting=0, attrs={}, map=None, level=0, children=None, content='> xyz', markup='', info='', meta={}, block=False, hidden=False)], content='abc\n\n> xyz', markup='', info='', meta={}, block=False, hidden=False)]) |
def Run(params):
config = params.config
join = os.path.join
temp_dir_name = '.mu.diff.git.tmp'
if os.path.exists(temp_dir_name):
n = ''
while (n not in ('y', 'n')):
n = raw_input(('Temporary dir for diff: %s already exists. Delete and continue (y) or cancel (n)? ' % (temp_dir_name,))).strip().lower()
if (n == 'y'):
RmTree(temp_dir_name)
break
if (n == 'n'):
Print('Canceling diff action.')
return
temp_working = join(temp_dir_name, 'WORKING')
temp_repo = join(temp_dir_name, 'REPO')
os.mkdir(temp_dir_name)
os.mkdir(temp_working)
os.mkdir(temp_repo)
from mu_repo import keep_files_synched
def symlink(src, target):
if os.path.isdir(src):
if os.path.exists(target):
os.rmdir(target)
shutil.copytree(src, target)
keep_files_synched.KeepInSync(src, target)
else:
if os.path.exists(target):
if os.path.isdir(target):
RmTree(target)
else:
os.remove(target)
shutil.copyfile(src, target)
keep_files_synched.KeepInSync(src, target)
try:
args = params.args
branch = ''
repos_and_curr_branch = None
if (len(args) > 1):
branch = args[1]
if (branch == '--prev'):
repos_and_curr_branch = dict(GetReposAndCurrBranch(params, verbose=False))
threads = []
for repo in config.repos:
            if (repos_and_curr_branch is not None):
                branch = (repos_and_curr_branch[repo] + '@{1}')
            thread = DoDiffOnRepoThread(config, repo, symlink, temp_working, temp_repo, branch)
threads.append(thread)
thread.start()
for thread in threads:
thread.join()
thread_pool.Join()
for thread in threads:
if (thread.entry_count != 0):
break
else:
Print('No changes found.')
return
if (sys.platform == 'win32'):
write_left = ['/wl']
if (not branch):
write_left = []
winmerge_cmd = 'WinMergeU.exe /r /u /wr /dl WORKINGCOPY /dr HEAD'.split()
cmd = ((winmerge_cmd + write_left) + [temp_working, temp_repo])
try:
subprocess.call(cmd)
except:
Print(('Error calling: %s' % (' '.join(cmd),)))
else:
write_left = ['-leftreadonly']
if (not branch):
write_left = []
cmd = (['bcomp', temp_working, temp_repo, '-expandall'] + write_left)
try:
subprocess.call(cmd)
except:
cmd = ['meld', temp_working, temp_repo]
try:
subprocess.call(cmd)
except:
Print(('Error calling: %s' % (' '.join(cmd),)))
finally:
if (keep_files_synched is not None):
keep_files_synched.StopSyncs()
def onerror(*args):
Print(('Error removing temporary directory structure: %s' % (args,)))
RmTree(temp_dir_name, onerror=onerror) |
def test_building_shared_agent_policies():
env = build_dummy_structured_env()
model_config = _dummy_model_config()
policy = _dummy_policy_for(model_config, env, agent_counts_dict={0: 1, 1: 3})
assert (len(policy.networks) == 2)
assert ([0, 1] == list(policy.networks.keys()))
assert isinstance(policy.network_for(actor_id=ActorID(0, 0)), DummyPolicyNet)
assert isinstance(policy.network_for(actor_id=ActorID(1, 0)), DummyPolicyNet) |
.parametrize('expr', ['revert()', 'require(false)'])
.parametrize('func', REVERT_FUNCTIONS_NO_INPUT)
def test_final_stmt_revert_no_input_no_msg(console_mode, evmtester, accounts, expr, func):
func = func.format(expr)
code = f'''
pragma solidity >=0.4.22;
contract Foo {{
{func}
}}
'''
contract = compile_source(code).Foo.deploy({'from': accounts[0]})
tx = contract.foo()
assert (tx.revert_msg == 'dev: yuss')
assert (tx.dev_revert_msg == 'dev: yuss') |
class ReferenceSegmenterModelTrainingDataGenerator(AbstractDocumentModelTrainingDataGenerator):
def get_main_model(self, document_context: TrainingDataDocumentContext) -> Model:
return document_context.fulltext_models.reference_segmenter_model
def iter_model_layout_documents(self, layout_document: LayoutDocument, document_context: TrainingDataDocumentContext) -> Iterable[LayoutDocument]:
segmentation_label_result = get_segmentation_label_result(layout_document, document_context=document_context)
ref_layout_document = segmentation_label_result.get_filtered_document_by_label('<references>').remove_empty_blocks()
if (not ref_layout_document.pages):
return []
return [ref_layout_document] |
class TestObserverGraph(unittest.TestCase):
def test_equality(self):
graph1 = graph_from_nodes(1, 2, 3)
graph2 = graph_from_nodes(1, 2, 3)
self.assertEqual(graph1, graph2)
self.assertEqual(hash(graph1), hash(graph2))
def test_equality_different_type(self):
graph1 = graph_from_nodes(1, 2, 3)
self.assertNotEqual(graph1, 1)
def test_equality_different_length_children(self):
graph1 = ObserverGraph(node=1, children=[ObserverGraph(node=2), ObserverGraph(node=3)])
graph2 = ObserverGraph(node=1, children=[ObserverGraph(node=2)])
self.assertNotEqual(graph1, graph2)
def test_equality_order_of_children(self):
graph1 = ObserverGraph(node=1, children=[ObserverGraph(node=2), ObserverGraph(node=3)])
graph2 = ObserverGraph(node=1, children=[ObserverGraph(node=3), ObserverGraph(node=2)])
self.assertEqual(graph1, graph2)
self.assertEqual(hash(graph1), hash(graph2))
def test_children_ordered(self):
child_graph = ObserverGraph(node=2)
graph = ObserverGraph(node=1, children=[child_graph, ObserverGraph(node=3)])
self.assertIs(graph.children[0], child_graph)
def test_children_unique(self):
child_graph = ObserverGraph(node=2)
with self.assertRaises(ValueError) as exception_cm:
ObserverGraph(node=1, children=[child_graph, ObserverGraph(node=2)])
self.assertEqual(str(exception_cm.exception), 'Not all children are unique.')
def test_slots(self):
graph = ObserverGraph(node=1)
with self.assertRaises(AttributeError):
graph.__dict__
with self.assertRaises(AttributeError):
graph.__weakref__
def test_eval_repr_roundtrip(self):
graph = ObserverGraph(node=1, children=[ObserverGraph(node=2), ObserverGraph(node=3)])
self.assertEqual(eval(repr(graph)), graph) |
def save_funny_files(meta_data, name, id, paths, obj, uri):
meta = TaskMetaData(name, paths[1].split('.')[0])
meta.add_output_meta_data(meta_data)
file_path = '/tmp/dagster/{id}/{id}_{step}_task_meta_data.json'.format(id=id, step=paths[1].split('.')[0])
meta_data_dic = meta_data.to_dict()
meta_dic = meta.to_dict()
if (name == 'CategoryEncoding'):
meta_data_dic['dtypes'].update({'mappings': obj[1][1].mapping})
meta_dic['outputs'][0]['dtypes'].update({'mappings': obj[1][1].mapping})
if os.path.isfile(file_path):
meta_data.output_sequence = 2
with open(file_path, 'r+') as meta_file:
dat = json.load(meta_file)
meta_file.seek(0)
dat['outputs'].append(meta_data_dic)
json.dump(dat, meta_file)
meta_file.truncate()
else:
with open(file_path, 'w+') as meta_file:
meta_file.write(json.dumps(meta_dic, default=str)) |
class FeatureLibErrorTest(unittest.TestCase):
def test_str(self):
err = FeatureLibError('Squeak!', FeatureLibLocation('foo.fea', 23, 42))
self.assertEqual(str(err), 'foo.fea:23:42: Squeak!')
def test_str_nolocation(self):
err = FeatureLibError('Squeak!', None)
self.assertEqual(str(err), 'Squeak!') |
class TraceSymbolTableTestCase(unittest.TestCase):
def setUp(self) -> None:
self.symbols_1: List[str] = ['a', 'b', 'c', 'b1']
self.symbols_2: List[str] = ['a', 'b', 'c', 'b2', 'd1']
self.symbols_3: List[str] = ['a', 'b', 'f', 'b3', 'd2']
ss: Set[str] = set()
self.symbols_list = [self.symbols_1, self.symbols_2, self.symbols_3]
for s in self.symbols_list:
ss = ss.union(set(s))
self.symbols: List[str] = sorted(list(ss))
def test_add_symbols_single_process(self):
st = TraceSymbolTable()
for symbols in [self.symbols_1, self.symbols_2, self.symbols_3]:
st.add_symbols(symbols)
self.assertListEqual(sorted(st.get_sym_table()), self.symbols)
self.assertTrue(check_symbol_table(st))
def test_add_symbols_multi_processing(self):
st = TraceSymbolTable()
st.add_symbols_mp(self.symbols_list)
self.assertListEqual(sorted(st.get_sym_table()), self.symbols)
self.assertTrue(check_symbol_table(st))
def test_query_symbols_multi_processes(self):
st = TraceSymbolTable()
st.add_symbols_mp(self.symbols_list)
decoder = SymbolDecoder(st)
indices = [i for (i, _) in enumerate(st.get_sym_table())]
np = 4
        with mp.Pool(np) as pool:
            decoded_symbols = pool.map(decoder, indices)
            # close the pool before joining; join() on a running pool raises ValueError
            pool.close()
            pool.join()
self.assertEqual(len(decoded_symbols), len(self.symbols))
sym_id_map = st.get_sym_id_map()
is_consistent = [(sym_id_map[sym] == idx) for (sym, idx) in zip(decoded_symbols, indices)]
self.assertTrue(all(is_consistent)) |
_member_required
def view_tag_d3(request, tag=None):
nodes = {}
links = []
def add_node(n):
nodes[n.uuid] = {'id': n.data, 'uuid': str(n.uuid), 'group': None}
if tag:
try:
u = uuid_lib.UUID(tag)
tag_m = get_object_or_404(TextMetadata, uuid=u)
tag = tag_m.data
except ValueError:
tag_m = get_object_or_404(TextMetadata, name=TAG_NAME, data=tag)
max_depth = 3
else:
tag_m = TextMetadata.objects.all().filter(name=TAG_NAME).first()
max_depth = 9999999
visited = set()
group = 0
def BFS(tag_m, visited, depth):
if (tag_m.uuid in visited):
return
add_node(tag_m)
visited.add(tag_m.uuid)
nodes[tag_m.uuid]['group'] = group
if (depth == max_depth):
return
siblings = tag_m.documents.all().filter(document__text_metadata__metadata__name=TAG_NAME).values_list('document__text_metadata__metadata__uuid', flat=True)
siblings = [s for s in siblings if (s not in visited)]
siblings = TextMetadata.objects.all().filter(uuid__in=siblings).all()
for s in siblings:
BFS(s, visited, (depth + 1))
links.append({'source': nodes[tag_m.uuid]['id'], 'target': nodes[s.uuid]['id'], 'value': 2})
return
if (tag_m is not None):
BFS(tag_m, visited, 0)
group += 1
if (tag is None):
orphans = TextMetadata.objects.all().filter(name=TAG_NAME).exclude(uuid__in=visited)
for orph in orphans:
BFS(orph, visited, 0)
group += 1
context = {'tag': tag, 'add_document_form': AddDocument(), 'node_data': {'nodes': [nodes[k] for k in nodes], 'links': links}}
template = loader.get_template('tag_d3.html')
return HttpResponse(template.render(context, request)) |
.django_db(transaction=True)
def test_pstxt_download_awards_without_columns(client, monkeypatch, download_test_data, elasticsearch_award_index):
setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string())
resp = client.post('/api/v2/download/awards/', content_type='application/json', data=json.dumps({'filters': {'award_type_codes': ['A']}, 'columns': [], 'file_format': 'pstxt'}))
assert (resp.status_code == status.HTTP_200_OK)
assert ('.zip' in resp.json()['file_url']) |
def extractSetsunastranslationWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
(expression='^Plot .*/(?P<filename>(?P<name>plot-k(?P<size>\\d+)-(?P<year>\\d+)-(?P<month>\\d+)-(?P<day>\\d+)-(?P<hour>\\d+)-(?P<minute>\\d+)-(?P<plot_id>\\w+)).plot) .*')
def plot_name_line(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
return attr.evolve(info, plot_size=int(match.group('size')), plot_name=match.group('name'), started_at=pendulum.datetime(year=int(match.group('year')), month=int(match.group('month')), day=int(match.group('day')), hour=int(match.group('hour')), minute=int(match.group('minute')), tz=None), filename=match.group('filename'), plot_id=match.group('plot_id')) |
class OptionPlotoptionsVariwideSonificationDefaultinstrumentoptionsPointgrouping(Options):
def algorithm(self):
return self._config_get('minmax')
def algorithm(self, text: str):
self._config(text, js_type=False)
def enabled(self):
return self._config_get(True)
def enabled(self, flag: bool):
self._config(flag, js_type=False)
def groupTimespan(self):
return self._config_get(15)
def groupTimespan(self, num: float):
self._config(num, js_type=False)
def prop(self):
return self._config_get('y')
def prop(self, text: str):
self._config(text, js_type=False) |
def get_asec(element, zoom=(0, 0)):
eid = element[0]
if (eid in ['nodebox', 'rect', 'array', 'text', 'triangle', 'html', 'img']):
return element[1]
elif (eid == 'outline'):
(r, a, dr, da) = element[1]
return Box(r, a, dr, da)
elif eid.startswith('pixi-'):
(x, y, dx, dy) = element[1]
return Box(x, y, dx, dy)
elif (eid == 'rhombus'):
points = element[1]
r = points[3][0]
a = points[0][1]
dr = (points[2][0] - r)
        da = (points[2][1] - a)
return Box(r, a, dr, da)
elif (eid == 'polygon'):
min_x = min((p[0] for p in element[1]))
max_x = max((p[0] for p in element[1]))
min_y = min((p[1] for p in element[1]))
max_y = max((p[1] for p in element[1]))
return Box(min_x, min_y, (max_x - min_x), (max_y - min_y))
elif (eid in ['line', 'arc']):
((x1, y1), (x2, y2)) = (element[1], element[2])
rect = Box(min(x1, x2), min(y1, y2), abs((x2 - x1)), abs((y2 - y1)))
return dh.circumasec(rect)
elif (eid == 'circle'):
z = zoom[0]
((x, y), r) = (dh.cartesian(element[1]), (element[2] / z))
rect = Box((x - r), (y - r), (2 * r), (2 * r))
return dh.circumasec(rect)
elif (eid == 'ellipse'):
(x, y) = dh.cartesian(element[1])
z = zoom[0]
(rx, ry) = ((element[2] / z), (element[3] / z))
rect = Box((x - rx), (y - ry), (2 * rx), (2 * ry))
return dh.circumasec(rect)
elif (eid == 'slice'):
z = zoom[0]
((x, y), r) = (dh.cartesian(element[1][0]), (element[1][1] / z))
rect = Box((x - r), (y - r), (2 * r), (2 * r))
return dh.circumasec(rect)
else:
raise ValueError(f'unrecognized element: {element!r}') |
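# Example grounded in the 'polygon' branch above: the bounding box of a triangle.
# get_asec(('polygon', [(0, 0), (4, 0), (2, 3)]))  ->  Box(0, 0, 4, 3) |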
def extractZiruTranslations(item):
if ('espanol' in item['tags']):
return None
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol or frag)) or ('preview' in item['title'].lower())):
return None
if (('Suterareta Yuusha no Eiyuutan' in item['tags']) or ('Suterareta Yuusha no Eyuutan' in item['tags']) or ('Suterurareta Yuusha no Eiyuutan' in item['tags'])):
extract = re.search('Suterareta Yuusha no Ei?yuutan \\((\\d+)\\-(.+?)\\)', item['title'])
if extract:
vol = int(extract.group(1))
try:
chp = int(extract.group(2))
postfix = ''
except ValueError:
chp = None
postfix = extract.group(2)
return buildReleaseMessageWithType(item, 'Suterareta Yuusha no Eiyuutan', vol, chp, postfix=postfix)
tagmap = [('Demon Sword Maiden', 'Demon Sword Maiden', 'translated'), ('No Protection Tonight', 'No Protection Tonight', 'translated'), ('Inside the Cave of Obscenity', 'Inside the Cave of Obscenity', 'translated'), ('Dragon Bloodline', 'Dragon Bloodline', 'translated'), ("Dragon's Bloodline", 'Dragon Bloodline', 'translated'), ('Lazy Dungeon Master', 'Lazy Dungeon Master', 'translated'), ('kuro no maou', 'Kuro no Maou', 'translated'), ('Happy Peach', 'Happy Peach', 'translated'), ("The Guild's Cheat Receptionist", "The Guild's Cheat Receptionist", 'translated'), ('Suterareta Yuusha no Eiyuutan', 'Suterareta Yuusha no Eiyuutan', 'translated'), ('The Magus of Genesis', 'The Magus of Genesis', 'translated'), ('atelier may', 'Easygoing Atelier Life ~May and the Fluffy Miniature Garden~', 'translated'), ('The Forsaken Hero', 'The Forsaken Hero', 'translated'), ('The Restart', 'The Restart', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
titlemap = [('Dragons Bloodline Chapter ', 'Dragon Bloodline', 'translated'), ('Lazy Dungeon Master ', 'Lazy Dungeon Master', 'translated'), ('Happy Peach ', 'Happy Peach', 'translated'), ('The Guilds Cheat Receptionist ', "The Guild's Cheat Receptionist", 'translated'), ('Inside the Cave of Obscenity ', 'Inside the Cave of Obscenity', 'translated')]
    for (titlecomponent, name, tl_type) in titlemap:
if (titlecomponent.lower() in item['title'].lower()):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
class Net():
def __init__(self, domain, username, password, options):
self.__domain = domain
self.__username = username
self.__password = password
self.__options = options
self.__action = options.entry.lower()
self.__lmhash = ''
self.__nthash = ''
self.__aesKey = options.aesKey
self.__doKerberos = options.k
self.__kdcHost = options.dc_ip
self.__smbConnection = None
if (options.hashes is not None):
(self.__lmhash, self.__nthash) = options.hashes.split(':')
def connect(self, remoteName, remoteHost):
self.__smbConnection = SMBConnection(remoteName, remoteHost, sess_port=int(self.__options.port))
if self.__doKerberos:
self.__smbConnection.kerberosLogin(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash, self.__aesKey, self.__kdcHost)
else:
self.__smbConnection.login(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash)
def disconnect(self):
self.__smbConnection.close()
self.__smbConnection = None
def run(self, remoteName, remoteHost):
self.connect(remoteName, remoteHost)
actionClass = self.__get_action_class(self.__action)
actionObject = actionClass(self.__smbConnection)
if self.__is_option_present(self.__options, 'create'):
print("[*] Creating {} account '{}'".format(self.__action, self.__options.create))
actionObject.Create(self.__options.create, self.__options.newPasswd)
            print('[+] {} account created successfully: {}:{}'.format(self.__action, self.__options.create, self.__options.newPasswd))
elif self.__is_option_present(self.__options, 'remove'):
print("[*] Deleting {} account '{}'".format(self.__action, self.__options.remove))
actionObject.Remove(self.__options.remove)
            print('[+] {} account deleted successfully!'.format(self.__action))
elif self.__is_option_present(self.__options, 'join'):
print("[*] Adding user account '{}' to group '{}'".format(self.__options.join, self.__options.name))
actionObject.Join(self.__options.name, self.__options.join)
            print('[+] User account added to {} successfully!'.format(self.__options.name))
elif self.__is_option_present(self.__options, 'unjoin'):
print("[*] Removing user account '{}' from group '{}'".format(self.__options.unjoin, self.__options.name))
actionObject.UnJoin(self.__options.name, self.__options.unjoin)
            print('[+] User account removed from {} successfully!'.format(self.__options.name))
elif self.__is_option_present(self.__options, 'name'):
info = actionObject.Query(self.__options.name)
if (type(info) == list):
i = 1
for member in info:
print(' {0}. {1}'.format(i, member))
i += 1
else:
print('User name'.ljust(30), info['UserName'])
print('Full name'.ljust(30), info['FullName'])
print('Comment'.ljust(30), info['AdminComment'])
print("User's comment".ljust(30), info['UserComment'])
print('Country/region code'.ljust(30), ('000 (System Default)' if (info['CountryCode'] == 0) else info['CountryCode']))
print('Account active'.ljust(30), self.__b2s(((info['WhichFields'] & samr.USER_ACCOUNT_DISABLED) == samr.USER_ACCOUNT_DISABLED)))
print('Account expires'.ljust(30), self.__get_time_string(info['AccountExpires']))
print('')
print('Password last set'.ljust(30), self.__get_time_string(info['PasswordLastSet']))
print('Password expires'.ljust(30), self.__get_time_string(info['PasswordMustChange']))
print('Password changeable'.ljust(30), self.__get_time_string(info['PasswordCanChange']))
print('Password required'.ljust(30), self.__b2s(((info['WhichFields'] & samr.USER_PASSWORD_NOT_REQUIRED) == samr.USER_PASSWORD_NOT_REQUIRED)))
print('User may change password'.ljust(30), self.__b2s(((info['WhichFields'] & samr.UF_PASSWD_CANT_CHANGE) == samr.UF_PASSWD_CANT_CHANGE)))
print('')
print('Workstations allowed'.ljust(30), ('All' if (not info['WorkStations']) else info['WorkStations']))
print('Logon script'.ljust(30), info['ScriptPath'])
print('User profile'.ljust(30), info['ProfilePath'])
print('Home directory'.ljust(30), info['HomeDirectory'])
print('Last logon'.ljust(30), self.__get_time_string(info['LastLogon']))
print('Logon count'.ljust(30), info['LogonCount'])
print('')
print('Logon hours allowed'.ljust(30), self.__format_logon_hours(info['LogonHours']['LogonHours']))
print('')
print('Local Group Memberships')
for group in info['LocalGroups']:
print(' * {}'.format(group))
print('')
print('Global Group memberships')
for group in info['GlobalGroups']:
print(' * {}'.format(group))
else:
print('[*] Enumerating {}s ..'.format(self.__action))
i = 1
for entry in actionObject.Enumerate():
message = ' {0}. {1}'.format(i, entry['Name'])
if self.__options.debug:
message += ' ({0})'.format(entry['RelativeId'])
print(message)
i += 1
self.disconnect()
def __getUnixTime(self, t):
# convert a Windows FILETIME (100 ns ticks since 1601-01-01) to a Unix timestamp;
# the constants are the standard epoch offset and ticks-per-second
t -= 116444736000000000
t /= 10000000
return t
def __get_time_string(self, large_integer):
time = ((large_integer['HighPart'] << 32) + large_integer['LowPart'])
if ((time == 0) or (time == 0x7FFFFFFFFFFFFFFF)):  # SAMR sentinel for 'never'
time = 'Never'
else:
time = datetime.fromtimestamp(self.__getUnixTime(time))
time = time.strftime('%m/%d/%Y %H:%M:%S %p')
return time
def __format_logon_hours(self, s):
logon_hours = ''.join(map((lambda b: b.hex()), s))
if (logon_hours == ('f' * 42)):
logon_hours = 'All'
return logon_hours
def __b2s(self, b):
return ('Yes' if b else 'No')
def __get_action_class(self, action):
return getattr(sys.modules[__name__], action.capitalize())
def __is_option_present(self, options, option):
return (hasattr(options, option) and getattr(options, option)) |
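# BLE dummy emulating a JK (Jikong) BMS for offline tests: it answers the
# \xaaU\x90\xeb\x97 device-info frame and, on the \x96 subscribe frame, replays
# captured notification messages from a background thread.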
class JKDummy():
DEVICE_INFO = b'U\xaa\xeb\x90\x03\x15JK-B2A24S20P\x00\x00\x00\x0010.X-W\x00\x0010.02\x00\x00\x00\xdc\xc6/\x00\x06\x00\x00\x00JK pw123456\x00\x00\x00\x00\x001234\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x\x00\x\x000000\x00Input Userdata\x00\x\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc5\xaaU\x90\xeb\xc8\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00D'
def __init__(self, is_new_11x=False):
self._callbacks = {}
self.logger = get_logger()
self.MSGS = [b'U\xaa\xeb\x90\x01\xd3X\x02\x00\x00(\n\x00\x00Z\n\x00\x00\xac\r\x00\x00\x16\r\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc4\t\x00\x00\xa0\x86\x01\x00\x1e\x00\x00\x00<\x00\x00\x00\xc0\xd4\x01\x00,\x01\x00\x00<\x00\x00\x00<\x00\x00\x00\xd0\x07\x00\x00\xa4\x01\x00\x00\x90\x01\x00\x00\xa4\x01\x00\x00\x90\x01\x00\x00\x00\x00\x00\x002\x00\x00\x00\x84\x03\x00\x00\xbc\x02\x00\x00\x08\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x0082\x04\x00\xdc\x05\x00\x00\xb8\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x83\xaaU\x90\xeb\xc8\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00D', (b'U\xaa\xeb\x90\x02\xb9"\r$\r%\r*\r$\r"\r%\r"\r\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x00\x00\x00$\r\x08\x00\x03\x016\x005\x004\x005\x004\x004\x005\x004\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1fi\x00\x00v\x04\x05\x00\xbb/\x00\x00\x0c\x01\xf8\x00\x1b\x01\x00\x00\x00\x00\x00A,\xc3\x02\x0082\x04\x00\x00\x00\x00\x00\x9dV\x00\x00d\x00\xa8\x02\x00\xfd/\x00\x01\x01\xd2\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x01\x00\x00\x00C\x048\x00\x00\x00\xf4\\x00\x00\x00\x00\xe2\x04\xd40\x00\x00\x00\x01\x00\x05\x00\x00#)\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00=' if (not is_new_11x) else b"U\xaa\xeb\x90\x02S\x07\r\x07\r\x07\r\x08\r\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x07\r\x01\x00\x00\x02'\x00'\x00&\x00&\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe8\x00\x00\x00\x00\x00\x1c4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc7\x00\xc5\x00\x00\x00\x00\x00\x00\x00\x00a\x02%\x04\x00\xc0E\x04\x00\x05\x00\x00\x00J\x04\x18\x00d\x00\x00\x00K\x8c'\x00\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x00\x01\x00\x00\x00\x9d\x03\x00\x00\x00\x00:\\x00\x00\x00\x006\x05\x00\x00\x00\x01\x00\x01\x00\x05\x00\x00Z\x03\n\x00\x00\x00\x00\x00\xe8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\x7f\xdc/\x01\x01\x01\x00\x00\x00\x00J")]
from bmslib.models.jikong import JKBt
self.services = [dotdict(uuid=JKBt.SERVICE_UUID, characteristics=[dotdict(uuid=JKBt.CHAR_UUID, properties='write,notify', handle=2, descriptors=[])])]
async def start_notify(self, char_specifier, callback: Callable[[int, bytearray], None]):
self._callbacks[char_specifier] = callback
async def write_gatt_char(self, char_specifier, data: Union[bytes, bytearray, memoryview], response: bool=False):
crc = data[(- 1)]
data = bytes(data[:(- 1)])
from bmslib.models.jikong import calc_crc
assert (calc_crc(data) == crc)
if data.startswith(b'\xaaU\x90\xeb\x97'):
self.logger.info('dummy query device info')
self._callbacks['0000ffe1-0000-1000-8000-00805f9b34fb'](self, bytes(self.DEVICE_INFO))
elif data.startswith(b'\xaaU\x90\xeb\x96'):
self.logger.info('dummy subscribe')
def send_data():
while True:
time.sleep(1)
for msg in self.MSGS:
self._callbacks['0000ffe1-0000-1000-8000-00805f9b34fb'](self, bytes(msg))
Thread(target=send_data, daemon=True).start()
else:
raise Exception(('JK dummy received unrecognized msg %s' % data)) |
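# AEA skill Model wiring up OEF search dialogues; every dialogue opens with this
# agent in the AGENT role (see role_from_first_message below).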
class OefSearchDialogues(Model, BaseOefSearchDialogues):
def __init__(self, **kwargs: Any) -> None:
Model.__init__(self, **kwargs)
def role_from_first_message(message: Message, receiver_address: Address) -> BaseDialogue.Role:
return BaseOefSearchDialogue.Role.AGENT
BaseOefSearchDialogues.__init__(self, self_address=str(self.context.skill_id), role_from_first_message=role_from_first_message) |
def test_save_as_sets_the_resolution_for_every_version(create_test_data, create_maya_env):
data = create_test_data
maya_env = create_maya_env
version1 = Version(task=data['task1'])
version1.extension = '.ma'
version1.update_paths()
DBSession.add(version1)
DBSession.commit()
width = data['project'].image_format.width
height = data['project'].image_format.height
pixel_aspect = data['project'].image_format.pixel_aspect
maya_env.save_as(version1)
dRes = pm.PyNode('defaultResolution')
assert (dRes.width.get() == width)
assert (dRes.height.get() == height)
assert (dRes.pixelAspect.get() == pixel_aspect)
new_width = 1280
new_height = 720
new_pixel_aspect = 1.0
dRes.width.set(new_width)
dRes.height.set(new_height)
dRes.pixelAspect.set(new_pixel_aspect)
new_version = Version(task=data['task1'])
new_version.extension = '.ma'
new_version.update_paths()
DBSession.add(new_version)
DBSession.commit()
maya_env.save_as(new_version)
assert (dRes.width.get() == width)
assert (dRes.height.get() == height)
assert (dRes.pixelAspect.get() == pixel_aspect) |
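# betterproto request message for the Temporal DescribeTaskQueue RPC; betterproto
# messages are dataclasses, so the decorator below follows its generated-code
# convention.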
@dataclass(eq=False, repr=False)
class DescribeTaskQueueRequest(betterproto.Message):
namespace: str = betterproto.string_field(1)
task_queue: v1taskqueue.TaskQueue = betterproto.message_field(2)
task_queue_type: v1enums.TaskQueueType = betterproto.enum_field(3)
include_task_queue_status: bool = betterproto.bool_field(4) |
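# Highcharts organization-chart link options exposed as read/write properties
# over _config; without the @property/@setter pairs the duplicate bare defs
# would shadow the getters.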
class OptionPlotoptionsOrganizationLink(Options):
@property
def color(self):
return self._config_get('#666666')
@color.setter
def color(self, text: str):
self._config(text, js_type=False)
@property
def lineWidth(self):
return self._config_get(1)
@lineWidth.setter
def lineWidth(self, num: float):
self._config(num, js_type=False)
@property
def radius(self):
return self._config_get(10)
@radius.setter
def radius(self, num: float):
self._config(num, js_type=False)
@property
def type(self):
return self._config_get('default')
@type.setter
def type(self, value: Any):
self._config(value, js_type=False) |
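# argparse front-end for process_image; a hypothetical invocation (filter name
# and paths assumed):
#   filter_img -i in.png -o out.png -f sepia -a 0.5 -s srgb-linear -g clip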
def main():
parser = argparse.ArgumentParser(prog='filter_img', description='Apply filter to an image.')
parser.add_argument('--input', '-i', help='Input image.')
parser.add_argument('--output', '-o', help='Output name and location.')
parser.add_argument('--filter', '-f', help='The filter to use.')
parser.add_argument('--amount', '-a', type=float, help='Amount to filter the image.')
parser.add_argument('--cvd-approach', '-c', help='CVD approach to use.')
parser.add_argument('--space', '-s', default='srgb-linear', help='Color space to filter in.')
parser.add_argument('--gamut-map', '-g', default='clip', help='Specify GMA method to use (default simple clipping)')
args = parser.parse_args()
process_image(args.input, args.output, args.filter, args.amount, args.space, args.cvd_approach, args.gamut_map)
return 0 |
class TestMetadataManager(unittest.TestCase):
def setUp(self) -> None:
self.file = 'test.db'
self._delete_db_file()
self.url = 'sqlite:///{}'.format(self.file)
init_db(self.url)
self.session = new_session(db_uri=self.url)
self.metadata_manager = MetadataManager(session=self.session)
def _delete_db_file(self):
if os.path.exists(self.file):
os.remove(self.file)
def tearDown(self) -> None:
self.session.close()
self._delete_db_file()
def session_rollback(self):
self.metadata_manager.rollback()
def test_default_namespace_already_exists(self):
with self.assertRaises(IntegrityError):
self.metadata_manager.add_namespace(name='default', properties={})
def test_namespace_operations(self):
namespace_metas = self.metadata_manager.list_namespace()
self.assertEqual(1, len(namespace_metas))
self.assertEqual('default', namespace_metas[0].name)
namespace_meta_1 = self.metadata_manager.add_namespace(name='namespace_1', properties={'a': 'a'})
self.metadata_manager.commit()
self.assertEqual('namespace_1', namespace_meta_1.name)
self.assertEqual('a', namespace_meta_1.get_properties()['a'])
namespace_meta_1 = self.metadata_manager.update_namespace(name='namespace_1', properties={'a': 'b'})
self.metadata_manager.commit()
self.assertEqual('b', namespace_meta_1.get_properties()['a'])
namespace_meta_1 = self.metadata_manager.get_namespace(name='namespace_1')
self.assertEqual('b', namespace_meta_1.get_properties()['a'])
namespace_meta_2 = self.metadata_manager.add_namespace(name='namespace_2', properties={'c': 'c'})
self.metadata_manager.commit()
namespace_metas = self.metadata_manager.list_namespace()
self.assertEqual(3, len(namespace_metas))
self.metadata_manager.delete_namespace('namespace_1')
self.metadata_manager.commit()
namespace_metas = self.metadata_manager.list_namespace()
self.assertEqual(2, len(namespace_metas))
self.metadata_manager.delete_namespace('namespace_2')
self.metadata_manager.commit()
namespace_metas = self.metadata_manager.list_namespace()
self.assertEqual(1, len(namespace_metas))
def test_add_workflow_without_namespace(self):
namespace_name = 'namespace'
content = 'source of workflow'
workflow_object = cloudpickle.dumps(content)
with self.assertRaises(Exception) as context:
workflow_meta_1 = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow_1', content=content, workflow_object=workflow_object)
self.metadata_manager.commit()
self.assertTrue(('constraint failed' in str(context.exception)))
def test_add_update_delete_workflows(self):
namespace_name = 'namespace'
content = 'source of workflow'
workflow_object = cloudpickle.dumps(content)
namespace_meta = self.metadata_manager.add_namespace(name=namespace_name, properties={'a': 'a'})
workflow_meta = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow_1', content=content, workflow_object=workflow_object)
self.metadata_manager.commit()
workflow_meta = self.metadata_manager.get_workflow_by_id(workflow_meta.id)
create_time_1 = workflow_meta.create_time
update_time_1 = workflow_meta.update_time
self.metadata_manager.update_workflow(namespace=namespace_name, name='workflow_1', is_enabled=False)
self.metadata_manager.commit()
workflow_meta = self.metadata_manager.get_workflow_by_id(workflow_meta.id)
update_time_2 = workflow_meta.update_time
self.assertEqual(False, workflow_meta.is_enabled)
self.assertEqual(create_time_1, workflow_meta.create_time)
self.assertLess(update_time_1, update_time_2)
self.metadata_manager.delete_workflow_by_id(workflow_meta.id)
self.metadata_manager.commit()
workflow_meta = self.metadata_manager.get_workflow_by_id(workflow_meta.id)
self.assertEqual(None, workflow_meta)
workflow_meta = self.metadata_manager.get_workflow_by_name(namespace=namespace_name, name='workflow_1')
self.assertEqual(None, workflow_meta)
def test_get_and_list_workflows(self):
namespace_name = 'namespace'
content = 'source of workflow'
workflow_object = cloudpickle.dumps(content)
namespace_meta = self.metadata_manager.add_namespace(name=namespace_name, properties={'a': 'a'})
workflow_meta_1 = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow_1', content=content, workflow_object=workflow_object)
self.metadata_manager.commit()
workflow_meta_1_1 = self.metadata_manager.get_workflow_by_id(workflow_meta_1.id)
self.assertEqual(workflow_meta_1.name, workflow_meta_1_1.name)
self.assertEqual(namespace_name, workflow_meta_1_1.namespace)
workflow_meta_2 = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow_2', content=content, workflow_object=workflow_object)
self.metadata_manager.commit()
workflow_metas = self.metadata_manager.list_workflows(namespace=namespace_name)
self.assertEqual(2, len(workflow_metas))
workflow_metas = self.metadata_manager.list_workflows(namespace=namespace_name, page_size=1)
self.assertEqual(1, len(workflow_metas))
workflow_metas = self.metadata_manager.list_workflows(namespace=namespace_name, offset=1)
self.assertEqual(1, len(workflow_metas))
workflow_metas = self.metadata_manager.list_workflows(namespace=namespace_name, filters=Filters(filters=[(FilterEqual('name'), 'workflow_1')]))
self.assertEqual(1, len(workflow_metas))
self.assertEqual('workflow_1', workflow_metas[0].name)
workflow_metas = self.metadata_manager.list_workflows(namespace=namespace_name, filters=Filters(filters=[(FilterIn('name'), ['workflow_1', 'workflow_2'])]))
self.assertEqual(2, len(workflow_metas))
self.assertTrue((workflow_metas[0].name in {'workflow_1', 'workflow_2'}))
self.assertTrue((workflow_metas[1].name in {'workflow_1', 'workflow_2'}))
workflow_metas = self.metadata_manager.list_workflows(namespace=namespace_name, orders=Orders(orders=[(OrderBy('id'), 'ascend')]))
self.assertEqual(2, len(workflow_metas))
self.assertEqual('workflow_1', workflow_metas[0].name)
workflow_metas = self.metadata_manager.list_workflows(namespace=namespace_name, orders=Orders(orders=[(OrderBy('id'), 'descend')]))
self.assertEqual(2, len(workflow_metas))
self.assertEqual('workflow_2', workflow_metas[0].name)
def test_workflow_snapshot_operations(self):
namespace_name = 'namespace'
content = 'source of workflow'
workflow_object = cloudpickle.dumps(content)
namespace_meta = self.metadata_manager.add_namespace(name=namespace_name, properties={'a': 'a'})
workflow_meta = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow', content=content, workflow_object=workflow_object)
self.metadata_manager.commit()
snapshot_meta = self.metadata_manager.get_latest_snapshot(workflow_id=workflow_meta.id)
self.assertIsNone(snapshot_meta)
for i in range(3):
snapshot_meta = self.metadata_manager.add_workflow_snapshot(workflow_id=workflow_meta.id, workflow_object=workflow_meta.workflow_object, uri='url', signature=str(i))
self.metadata_manager.commit()
snapshot_meta = self.metadata_manager.get_workflow_snapshot(1)
self.assertEqual('0', snapshot_meta.signature)
snapshot_metas = self.metadata_manager.list_workflow_snapshots(workflow_id=workflow_meta.id)
self.assertEqual(3, len(snapshot_metas))
self.metadata_manager.delete_workflow_snapshot(1)
snapshot_metas = self.metadata_manager.list_workflow_snapshots(workflow_id=workflow_meta.id)
self.assertEqual(2, len(snapshot_metas))
snapshot_meta = self.metadata_manager.get_latest_snapshot(workflow_id=workflow_meta.id)
self.assertEqual(3, snapshot_meta.id)
def test_workflow_schedule_operations(self):
namespace_name = 'namespace'
content = 'source of workflow'
with Workflow(name='workflow') as workflow:
BashOperator(name='bash1', bash_command='echo 1')
workflow_object = cloudpickle.dumps(workflow)
namespace_meta = self.metadata_manager.add_namespace(name=namespace_name, properties={'a': 'a'})
self.metadata_manager.commit()
workflow_meta = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow', content=content, workflow_object=workflow_object)
self.metadata_manager.commit()
for i in range(3):
self.metadata_manager.add_workflow_schedule(workflow_id=workflow_meta.id, expression='*/1 * * * *')
self.metadata_manager.commit()
meta = self.metadata_manager.get_workflow_schedule(1)
self.assertEqual('*/1 * * * *', meta.expression)
metas = self.metadata_manager.list_workflow_schedules(workflow_id=workflow_meta.id)
self.assertEqual(3, len(metas))
self.metadata_manager.delete_workflow_schedule(1)
self.metadata_manager.commit()
metas = self.metadata_manager.list_workflow_schedules(workflow_id=workflow_meta.id)
self.assertEqual(2, len(metas))
self.metadata_manager.pause_workflow_schedule(2)
self.metadata_manager.commit()
meta = self.metadata_manager.get_workflow_schedule(2)
self.assertTrue(meta.is_paused)
self.metadata_manager.resume_workflow_schedule(2)
self.metadata_manager.commit()
meta = self.metadata_manager.get_workflow_schedule(2)
self.assertFalse(meta.is_paused)
def test_workflow_trigger_operations(self):
namespace_name = 'namespace'
content = 'source of workflow'
workflow_object = cloudpickle.dumps(content)
namespace_meta = self.metadata_manager.add_namespace(name=namespace_name, properties={'a': 'a'})
self.metadata_manager.commit()
workflow_meta = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow', content=content, workflow_object=workflow_object)
self.metadata_manager.commit()
for i in range(3):
self.metadata_manager.add_workflow_trigger(workflow_id=workflow_meta.id, rule=bytes(str(i), 'UTF-8'))
self.metadata_manager.commit()
meta = self.metadata_manager.get_workflow_trigger(1)
self.assertEqual('0', meta.rule.decode('utf-8'))
metas = self.metadata_manager.list_workflow_triggers(workflow_id=workflow_meta.id)
self.assertEqual(3, len(metas))
self.metadata_manager.delete_workflow_trigger(1)
self.metadata_manager.commit()
metas = self.metadata_manager.list_workflow_triggers(workflow_id=workflow_meta.id)
self.assertEqual(2, len(metas))
self.metadata_manager.pause_workflow_trigger(2)
self.metadata_manager.commit()
meta = self.metadata_manager.get_workflow_trigger(2)
self.assertTrue(meta.is_paused)
self.metadata_manager.resume_workflow_trigger(2)
self.metadata_manager.commit()
meta = self.metadata_manager.get_workflow_trigger(2)
self.assertFalse(meta.is_paused)
def test_workflow_execution_operations(self):
namespace_name = 'namespace'
content = 'source of workflow'
with Workflow(name='workflow') as workflow:
BashOperator(name='bash1', bash_command='echo 1')
workflow_object = cloudpickle.dumps(workflow)
namespace_meta = self.metadata_manager.add_namespace(name=namespace_name, properties={'a': 'a'})
self.metadata_manager.commit()
workflow_meta = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow', content=content, workflow_object=workflow_object)
self.metadata_manager.commit()
with self.assertRaises(Exception) as context:
self.metadata_manager.add_workflow_execution(workflow_id=workflow_meta.id, snapshot_id=1, run_type=ExecutionType.MANUAL.value)
self.metadata_manager.commit()
self.assertTrue(('FOREIGN KEY constraint failed' in str(context.exception)))
self.session_rollback()
snapshot = self.metadata_manager.add_workflow_snapshot(workflow_id=workflow_meta.id, workflow_object=workflow_meta.workflow_object, uri='url', signature='xxx')
self.metadata_manager.commit()
for i in range(3):
self.metadata_manager.add_workflow_execution(workflow_id=workflow_meta.id, snapshot_id=snapshot.id, run_type=ExecutionType.MANUAL.value)
self.metadata_manager.commit()
metas = self.metadata_manager.list_workflow_executions(workflow_id=workflow_meta.id)
self.assertEqual(3, len(metas))
meta = self.metadata_manager.get_workflow_execution(workflow_execution_id=metas[0].id)
self.assertIsNone(meta.end_date)
meta = self.metadata_manager.get_workflow_execution(workflow_execution_id=meta.id)
self.assertEqual(WorkflowStatus.INIT.value, meta.status)
meta = self.metadata_manager.update_workflow_execution(workflow_execution_id=meta.id, status=WorkflowStatus.SUCCESS.value)
self.metadata_manager.commit()
self.assertIsNotNone(meta.end_date)
meta = self.metadata_manager.get_workflow_execution(workflow_execution_id=meta.id)
self.assertEqual(WorkflowStatus.SUCCESS.value, meta.status)
meta = self.metadata_manager.delete_workflow_execution(workflow_execution_id=meta.id)
self.metadata_manager.commit()
metas = self.metadata_manager.list_workflow_executions(workflow_id=workflow_meta.id)
self.assertEqual(2, len(metas))
def test_task_execution_operations(self):
namespace_name = 'namespace'
content = 'source of workflow'
workflow_object = cloudpickle.dumps(content)
namespace_meta = self.metadata_manager.add_namespace(name=namespace_name, properties={'a': 'a'})
self.metadata_manager.commit()
workflow_meta = self.metadata_manager.add_workflow(namespace=namespace_meta.name, name='workflow', content=content, workflow_object=workflow_object)
self.metadata_manager.commit()
snapshot = self.metadata_manager.add_workflow_snapshot(workflow_id=workflow_meta.id, workflow_object=workflow_meta.workflow_object, uri='url', signature='xxx')
self.metadata_manager.commit()
with self.assertRaises(Exception) as context:
self.metadata_manager.add_task_execution(workflow_execution_id=1, task_name='task')
self.metadata_manager.commit()
self.assertTrue(('FOREIGN KEY constraint failed' in str(context.exception)))
self.session_rollback()
workflow_execution_meta = self.metadata_manager.add_workflow_execution(workflow_id=workflow_meta.id, snapshot_id=snapshot.id, run_type=ExecutionType.MANUAL.value)
self.metadata_manager.commit()
seq_num = self.metadata_manager.get_latest_sequence_number(workflow_execution_id=workflow_execution_meta.id, task_name='task')
self.assertEqual(0, seq_num)
for i in range(3):
self.metadata_manager.add_task_execution(workflow_execution_id=workflow_execution_meta.id, task_name='task')
self.metadata_manager.commit()
metas = self.metadata_manager.list_task_executions(workflow_execution_id=workflow_execution_meta.id)
self.assertEqual(3, len(metas))
seq_num = self.metadata_manager.get_latest_sequence_number(workflow_execution_id=workflow_execution_meta.id, task_name='task')
self.assertEqual(3, seq_num)
for i in range(3):
self.assertEqual((i + 1), metas[i].sequence_number)
meta = self.metadata_manager.get_task_execution_by_id(task_execution_id=metas[0].id)
self.assertEqual(TaskStatus.INIT.value, meta.status)
self.assertEqual(1, meta.try_number)
meta = self.metadata_manager.update_task_execution(task_execution_id=meta.id, try_number=2, status=TaskStatus.SUCCESS.value)
self.metadata_manager.commit()
meta = self.metadata_manager.get_task_execution(workflow_execution_id=meta.workflow_execution_id, task_name=meta.task_name, sequence_number=meta.sequence_number)
self.assertEqual(WorkflowStatus.SUCCESS.value, meta.status)
self.assertEqual(2, meta.try_number)
meta = self.metadata_manager.delete_task_execution(task_execution_id=meta.id)
self.metadata_manager.commit()
metas = self.metadata_manager.list_task_executions(workflow_execution_id=workflow_execution_meta.id)
self.assertEqual(2, len(metas))
def test_event_offset_operations(self):
namespace_name = 'namespace'
content = 'source of workflow'
workflow_object = cloudpickle.dumps(content)
namespace_meta = self.metadata_manager.add_namespace(name=namespace_name, properties={'a': 'a'})
self.metadata_manager.commit()
workflow_meta = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow_1', content=content, workflow_object=workflow_object)
self.metadata_manager.commit()
workflow_meta = self.metadata_manager.get_workflow_by_id(workflow_meta.id)
snapshot = self.metadata_manager.add_workflow_snapshot(workflow_id=workflow_meta.id, workflow_object=workflow_meta.workflow_object, uri='url', signature='xxx')
self.metadata_manager.commit()
workflow_execution_meta = self.metadata_manager.add_workflow_execution(workflow_id=workflow_meta.id, snapshot_id=snapshot.id, run_type=ExecutionType.MANUAL.value)
self.metadata_manager.commit()
offset = self.metadata_manager.get_workflow_event_offset(workflow_id=workflow_meta.id)
self.assertEqual((- 1), offset)
self.metadata_manager.set_workflow_event_offset(workflow_id=workflow_meta.id, event_offset=5)
self.metadata_manager.commit()
offset = self.metadata_manager.get_workflow_event_offset(workflow_id=workflow_meta.id)
self.assertEqual(5, offset)
offset = self.metadata_manager.get_workflow_execution_event_offset(workflow_execution_id=workflow_execution_meta.id)
self.assertEqual((- 1), offset)
self.metadata_manager.set_workflow_execution_event_offset(workflow_execution_id=workflow_execution_meta.id, event_offset=5)
self.metadata_manager.commit()
offset = self.metadata_manager.get_workflow_execution_event_offset(workflow_execution_id=workflow_execution_meta.id)
self.assertEqual(5, offset)
def test_state_operations(self):
namespace_name = 'namespace'
content = 'source of workflow'
workflow_object = cloudpickle.dumps(content)
namespace_meta = self.metadata_manager.add_namespace(name=namespace_name, properties={'a': 'a'})
self.metadata_manager.commit()
workflow_meta = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow_1', content=content, workflow_object=workflow_object)
self.metadata_manager.commit()
workflow_meta = self.metadata_manager.get_workflow_by_id(workflow_meta.id)
snapshot = self.metadata_manager.add_workflow_snapshot(workflow_id=workflow_meta.id, workflow_object=workflow_meta.workflow_object, uri='url', signature='xxx')
self.metadata_manager.commit()
workflow_execution_meta = self.metadata_manager.add_workflow_execution(workflow_id=workflow_meta.id, snapshot_id=snapshot.id, run_type=ExecutionType.MANUAL.value)
self.metadata_manager.commit()
state = self.metadata_manager.get_or_create_workflow_state(workflow_id=workflow_meta.id, descriptor=ValueStateDescriptor(name='s1'))
self.metadata_manager.commit()
self.assertTrue(isinstance(state, ValueState))
self.assertIsNone(state.value())
state.update('xx')
self.assertEqual('xx', state.value())
state = self.metadata_manager.get_or_create_workflow_execution_state(workflow_execution_id=workflow_execution_meta.id, descriptor=ValueStateDescriptor(name='s1'))
self.metadata_manager.commit()
self.assertTrue(isinstance(state, ValueState))
self.assertIsNone(state.value())
state.update('xx')
self.assertEqual('xx', state.value())
def test_max_event_offset(self):
namespace_name = 'namespace'
content = 'source of workflow'
workflow_object = cloudpickle.dumps(content)
self.metadata_manager.add_namespace(name=namespace_name, properties={'a': 'a'})
workflow_meta1 = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow_1', content=content, workflow_object=workflow_object)
self.metadata_manager.set_workflow_event_offset(workflow_meta1.id, 1)
snapshot1 = self.metadata_manager.add_workflow_snapshot(workflow_id=workflow_meta1.id, workflow_object=workflow_meta1.workflow_object, uri='url', signature='xxx')
workflow_meta2 = self.metadata_manager.add_workflow(namespace=namespace_name, name='workflow_2', content=content, workflow_object=workflow_object)
self.metadata_manager.set_workflow_event_offset(workflow_meta2.id, 2)
snapshot2 = self.metadata_manager.add_workflow_snapshot(workflow_id=workflow_meta2.id, workflow_object=workflow_meta2.workflow_object, uri='url', signature='xxx')
execution_meta1 = self.metadata_manager.add_workflow_execution(workflow_id=workflow_meta1.id, snapshot_id=snapshot1.id, run_type=ExecutionType.MANUAL.value)
self.metadata_manager.set_workflow_execution_event_offset(execution_meta1.id, 3)
execution_meta2 = self.metadata_manager.add_workflow_execution(workflow_id=workflow_meta2.id, snapshot_id=snapshot2.id, run_type=ExecutionType.MANUAL.value)
self.metadata_manager.set_workflow_execution_event_offset(execution_meta2.id, 4)
self.assertEqual(2, self.metadata_manager.get_max_event_offset_of_workflow())
self.assertEqual(4, self.metadata_manager.get_max_event_offset_of_workflow_execution()) |
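# Thin wrapper over the Telegram Bot API editMessageText method; a hedged usage
# sketch (token and ids hypothetical):
#   edit_message_text(TOKEN, 'updated text', chat_id=12345, message_id=678, parse_mode='HTML')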
def edit_message_text(token, text, chat_id=None, message_id=None, inline_message_id=None, parse_mode=None, entities=None, disable_web_page_preview=None, reply_markup=None):
method_url = 'editMessageText'
payload = {'text': text}
if chat_id:
payload['chat_id'] = chat_id
if message_id:
payload['message_id'] = message_id
if inline_message_id:
payload['inline_message_id'] = inline_message_id
if parse_mode:
payload['parse_mode'] = parse_mode
if entities:
payload['entities'] = json.dumps(types.MessageEntity.to_list_of_dicts(entities))
if (disable_web_page_preview is not None):
payload['disable_web_page_preview'] = disable_web_page_preview
if reply_markup:
payload['reply_markup'] = _convert_markup(reply_markup)
return _make_request(token, method_url, params=payload, method='post') |
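# Flask-style view: reads the online-user set from Redis when enabled, otherwise
# falls back to a lastseen query against the User table.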
class WhoIsOnline(MethodView):
def get(self):
if current_app.config['REDIS_ENABLED']:
online_users = get_online_users()
else:
online_users = User.query.filter((User.lastseen >= time_diff())).all()
return render_template('forum/online_users.html', online_users=online_users) |
class TestTotalsByEntityType(ApiBaseTest):
first_pac_total = {'committee_id': 'C', 'committee_type': 'O', 'cycle': 2018, 'committee_designation': 'A', 'all_loans_received': 1, 'allocated_federal_election_levin_share': 2, 'treasurer_name': 'Treasurer, Trudy', 'committee_state': 'DC', 'filing_frequency': 'Q', 'filing_frequency_full': 'Quarterly filer', 'first_file_date': datetime.date.fromisoformat('1982-12-31'), 'receipts': 50, 'disbursements': 200, 'sponsor_candidate_ids': ['H'], 'organization_type': 'C', 'organization_type_full': 'Corporation', 'first_f1_date': datetime.date.fromisoformat('1983-02-01')}
second_pac_total = {'committee_id': 'C', 'committee_type': 'N', 'cycle': 2016, 'committee_designation': 'B', 'all_loans_received': 10, 'allocated_federal_election_levin_share': 20, 'treasurer_name': 'Treasurer, Tom', 'committee_state': 'CT', 'filing_frequency': 'M', 'filing_frequency_full': 'Monthly filer', 'first_file_date': datetime.date.fromisoformat('1984-12-31'), 'receipts': 200, 'disbursements': 50, 'sponsor_candidate_ids': ['H'], 'organization_type': 'T', 'organization_type_full': 'Trade', 'first_f1_date': datetime.date.fromisoformat('1984-12-31')}
def test_pac_total_by_entity_type(self):
factories.TotalsPacFactory(**self.first_pac_total)
factories.TotalsPacFactory(**self.second_pac_total)
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac'))
assert (len(results) == 2)
assert (results[0]['committee_id'] == 'C')
assert (results[1]['committee_id'] == 'C')
result_first_file_date = results[1].pop('first_file_date')
expected_first_file_date = self.second_pac_total.pop('first_file_date').isoformat()
self.assertEqual(result_first_file_date, expected_first_file_date)
result_first_f1_date = results[1].pop('first_f1_date')
expected_first_f1_date = self.second_pac_total.pop('first_f1_date').isoformat()
self.assertEqual(result_first_f1_date, expected_first_f1_date)
test_subset = {k: v for (k, v) in results[1].items() if (k in self.second_pac_total)}
self.assertEqual(test_subset, self.second_pac_total)
def test_cycle_filter(self):
presidential_fields = {'committee_id': 'C', 'cycle': 2016, 'candidate_contribution': 1, 'exempt_legal_accounting_disbursement': 2, 'federal_funds': 300, 'committee_type': 'P'}
factories.CommitteeTotalsPerCycleFactory(**presidential_fields)
results = self._results(api.url_for(TotalsByEntityTypeView, cycle=2016, entity_type='presidential'))
assert (len(results) == 1)
self.assertEqual(results[0]['cycle'], presidential_fields['cycle'])
def test_designation_filter(self):
party_fields = {'committee_id': 'C', 'cycle': 2014, 'committee_name': 'REPUBLICAN COMMITTEE', 'committee_designation': 'U', 'committee_type': 'X', 'all_loans_received': 1, 'allocated_federal_election_levin_share': 2}
factories.TotalsPacFactory(**party_fields)
results = self._results(api.url_for(TotalsByEntityTypeView, committee_designation='U', entity_type='party'))
assert (len(results) == 1)
self.assertEqual(results[0]['committee_designation'], party_fields['committee_designation'])
def test_pac_party_multi_filters(self):
factories.TotalsPacFactory(**self.first_pac_total)
factories.TotalsPacFactory(**self.second_pac_total)
filters = ['committee_type', 'cycle', 'committee_id', 'committee_designation', 'committee_state', 'committee_id', 'filing_frequency', 'organization_type']
for field in filters:
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac-party', **{field: self.second_pac_total.get(field)}))
assert (len(results) == 1)
assert (results[0][field] == self.second_pac_total.get(field))
def test_pac_party_multi_committee_id(self):
factories.TotalsPacFactory(**self.first_pac_total)
factories.TotalsPacFactory(**self.second_pac_total)
factories.TotalsPacFactory(**{'committee_id': 'C', 'committee_type': 'Q', 'cycle': 2016, 'committee_designation': 'B', 'all_loans_received': 10, 'allocated_federal_election_levin_share': 20, 'treasurer_name': 'Treasurer, Tom', 'committee_state': 'CT', 'filing_frequency': 'M', 'filing_frequency_full': 'Monthly filer', 'first_file_date': datetime.date.fromisoformat('1984-12-31'), 'receipts': 200, 'disbursements': 50, 'sponsor_candidate_ids': ['H'], 'organization_type': 'T', 'organization_type_full': 'Trade'})
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac-party', committee_id=[self.first_pac_total.get('committee_id'), self.second_pac_total.get('committee_id')]))
assert (len(results) == 2)
self.assertTrue(all(((each['committee_id'] != 'C') for each in results)))
def test_filter_receipts(self):
factories.TotalsPacFactory(**self.first_pac_total)
factories.TotalsPacFactory(**self.second_pac_total)
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac-party', min_receipts=100))
self.assertTrue(all(((each['receipts'] >= 100) for each in results)))
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac-party', max_receipts=150))
self.assertTrue(all(((each['receipts'] <= 150) for each in results)))
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac-party', min_receipts=60, max_receipts=100))
self.assertTrue(all(((60 <= each['receipts'] <= 100) for each in results)))
def test_filter_disbursements(self):
factories.TotalsPacFactory(**self.first_pac_total)
factories.TotalsPacFactory(**self.second_pac_total)
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac-party', min_disbursements=100))
self.assertTrue(all(((each['disbursements'] >= 100) for each in results)))
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac-party', max_disbursements=150))
self.assertTrue(all(((each['disbursements'] <= 150) for each in results)))
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac-party', min_disbursements=60, max_disbursements=100))
self.assertTrue(all(((60 <= each['disbursements'] <= 100) for each in results)))
def test_treasurer_filter(self):
factories.TotalsPacFactory(**utils.extend(self.first_pac_total, {'treasurer_text': sa.func.to_tsvector('Treasurer, Trudy')}))
factories.TotalsPacFactory(**utils.extend(self.second_pac_total, {'treasurer_text': sa.func.to_tsvector('Treasurer, Tom')}))
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac-party', treasurer_name='Tom'))
assert (len(results) == 1)
assert (results[0]['committee_id'] == self.second_pac_total.get('committee_id'))
def test_sponsor_candidate_id_filter(self):
factories.TotalsPacFactory(**self.first_pac_total)
factories.TotalsPacFactory(**self.second_pac_total)
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac-party', sponsor_candidate_id='H'))
assert (len(results) == 1)
assert (results[0]['committee_id'] == self.second_pac_total.get('committee_id'))
def test_field_sponsor_candidate_list(self):
committee = factories.TotalsPacFactory(**self.first_pac_total)
factories.PacSponsorCandidatePerCycleFactory(committee_id=committee.committee_id, cycle=committee.cycle, sponsor_candidate_id='H', sponsor_candidate_name='Sponsor A')
factories.PacSponsorCandidatePerCycleFactory(committee_id='C', cycle=(committee.cycle + 2), sponsor_candidate_id='S', sponsor_candidate_name='Sponsor B')
factories.PacSponsorCandidatePerCycleFactory(committee_id='C', cycle=committee.cycle, sponsor_candidate_id='S', sponsor_candidate_name='Sponsor B')
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac-party', committee_id=committee.committee_id))
self.assertEqual(len(results), 1)
self.assertIn('sponsor_candidate_list', results[0])
self.assertEqual(results[0]['sponsor_candidate_list'][0]['sponsor_candidate_name'], 'Sponsor A')
self.assertEqual(results[0]['sponsor_candidate_list'][0]['sponsor_candidate_id'], 'H')
def test_first_f1_date_filter(self):
factories.TotalsPacFactory(**self.first_pac_total)
factories.TotalsPacFactory(**self.second_pac_total)
min_date = datetime.date(1982, 1, 1)
max_date = datetime.date(1984, 10, 30)
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='pac', min_first_f1_date=min_date, max_first_f1_date=max_date))
assert (len(results) == 1)
assert (results[0]['committee_id'] == 'C')
def test_entity_type_filter(self):
first_committee = {'committee_id': 'C', 'committee_type': 'H', 'cycle': 2016}
second_committee = {'committee_id': 'C', 'committee_type': 'P', 'cycle': 2016}
factories.CommitteeTotalsPerCycleFactory(**first_committee)
factories.CommitteeTotalsPerCycleFactory(**second_committee)
results = self._results(api.url_for(TotalsByEntityTypeView, entity_type='presidential'))
assert (len(results) == 1)
assert (results[0]['committee_id'] == 'C') |
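# flet layout demo: three amber-numbered columns rendered side by side to
# compare CrossAxisAlignment.START / CENTER / END.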
def main(page: ft.Page):
def items(count):
items = []
for i in range(1, (count + 1)):
items.append(ft.Container(content=ft.Text(value=str(i)), alignment=ft.alignment.center, width=50, height=50, bgcolor=ft.colors.AMBER_500))
return items
def column_with_horiz_alignment(align: ft.CrossAxisAlignment):
return ft.Column([ft.Text(str(align), size=16), ft.Container(content=ft.Column(items(3), alignment=ft.MainAxisAlignment.START, horizontal_alignment=align), bgcolor=ft.colors.AMBER_100, width=100)])
page.add(ft.Row([column_with_horiz_alignment(ft.CrossAxisAlignment.START), column_with_horiz_alignment(ft.CrossAxisAlignment.CENTER), column_with_horiz_alignment(ft.CrossAxisAlignment.END)], spacing=30, alignment=ft.MainAxisAlignment.START)) |