code stringlengths 281 23.7M |
|---|
class LoadingPipeline:
    """Pipeline stage that loads a DataFrame for an index and keeps only the
    configured columns.

    ``data_loader`` is any object exposing ``load(index)`` that returns a
    DataFrame-like object supporting column selection with a list of names.
    """

    def __init__(self, data_loader, columns: List[str]):
        self.data_loader = data_loader
        self.columns = columns  # column names to retain, in order

    def fit(self, index):
        """No-op: loading requires no fitting.

        Kept for pipeline-interface compatibility. (The original body was a
        bare ``None`` expression; an explicit no-op states the intent.)
        """
        return None

    def execute(self, index):
        """Load the data for *index* and return only the configured columns."""
        data_df = self.data_loader.load(index)
        return data_df[self.columns]
def test_parse_config_columns_sanity(raw_config_full):
    """Sanity-check column widths and the column map of a fully parsed config."""
    config = loads_configuration(raw_config_full)
    assert len(config['Columns']) == 64, 'Unexpected columns length'
    assert all(0 <= width <= 65535 for width in config['Columns']), 'Unexpected column width size'
    assert len(config['ColumnMap']) == 64, 'Unexpected column map length'
    # Non-zero widths and mapped columns must agree with the declared count.
    widths_in_use = [w for w in config['Columns'] if w > 0]
    mapped_columns = [c for c in config['ColumnMap'] if c != Column.NONE]
    assert len(widths_in_use) == len(mapped_columns) == config['ColumnCount'], 'Unexpected'
@pytest.mark.gui()
@pytest.mark.skipif(sys.platform != 'linux', reason='Linux specific test')
def test_synchronized_capture_triggers_timeout(monkeypatch, dbus_portal):
    """A grab that outlasts TIMEOUT_SECONDS must raise TimeoutError.

    NOTE: the decorators above were mangled in the original (`.gui()`,
    `.skipif(...)` with the `@pytest.mark` prefix stripped); restored here.
    """
    timeout = 1
    monkeypatch.setattr(dbus_portal, 'TIMEOUT_SECONDS', timeout)
    # Replace the real grab with one that sleeps just past the timeout.
    monkeypatch.setattr(dbus_portal.OrgFreedesktopPortalScreenshot, 'grab_full_desktop', (lambda _: time.sleep((timeout + 0.1))))
    with pytest.raises(TimeoutError):
        _ = dbus_portal._synchronized_capture(interactive=False)
@registry.llm_models('spacy.GPT-4.v2')
def openai_gpt_4_v2(config: Dict[(Any, Any)]=SimpleFrozenDict(temperature=_DEFAULT_TEMPERATURE), name: Literal[('gpt-4', 'gpt-4-0314', 'gpt-4-32k', 'gpt-4-32k-0314')]='gpt-4', strict: bool=OpenAI.DEFAULT_STRICT, max_tries: int=OpenAI.DEFAULT_MAX_TRIES, interval: float=OpenAI.DEFAULT_INTERVAL, max_request_time: float=OpenAI.DEFAULT_MAX_REQUEST_TIME, endpoint: Optional[str]=None) -> Callable[([Iterable[str]], Iterable[str])]:
    """Return an OpenAI chat model instance registered as 'spacy.GPT-4.v2'.

    NOTE(review): the original decorator line read `_models('spacy.GPT-4.v2')`
    (the `@registry.llm` prefix appears stripped); restored per the spacy-llm
    registry convention — confirm against the surrounding file.
    """
    return OpenAI(name=name, endpoint=(endpoint or Endpoints.CHAT.value), config=config, strict=strict, max_tries=max_tries, interval=interval, max_request_time=max_request_time)
class CacheUpdateManager(Thread):
    """Daemon thread that pulls (catalog, action) tasks from the refresh and
    update queues — alternating between them for fairness — and hands each task
    to a CacheUpdateWorker, tracking outstanding workers."""

    def __init__(self, log: Logger, component_cache: ComponentCacheType, refresh_queue: RefreshQueue, update_queue: UpdateQueue):
        super().__init__()
        self.daemon = True
        self.name = 'CacheUpdateManager'
        self.log: Logger = log
        self._component_cache: ComponentCacheType = component_cache
        self._refresh_queue: RefreshQueue = refresh_queue
        self._update_queue: UpdateQueue = update_queue
        # Toggled each cycle so both queues are served alternately.
        self._check_refresh_queue = False
        self._threads: List[CacheUpdateWorker] = []
        self.stop_event: Event = Event()

    def run(self):
        """Service the queues until stop() sets the stop event."""
        while not self.stop_event.is_set():
            self.manage_cache_tasks()

    def manage_cache_tasks(self):
        """Take one task from the refresh or update queue and start a worker."""
        outstanding_threads = self._has_outstanding_threads()
        try:
            # Poll (non-blocking-ish) while workers are outstanding so they
            # keep being monitored; block longer when idle.
            timeout = NONBLOCKING_TIMEOUT if outstanding_threads else BLOCKING_TIMEOUT
            self._check_refresh_queue = not self._check_refresh_queue
            if self._check_refresh_queue:
                catalog, action = self._refresh_queue.get(timeout=timeout)
            else:
                catalog, action = self._update_queue.get(timeout=timeout)
        except Empty:
            pass
        else:
            source_queue = self._refresh_queue if self._check_refresh_queue else self._update_queue
            updater_thread = CacheUpdateWorker(self._component_cache, source_queue, catalog, action)
            updater_thread.start()
            queue_clause = 'refreshing' if self._check_refresh_queue else 'updating'
            self.log.debug(f"CacheUpdateWorker {queue_clause} catalog: '{updater_thread.name}', action: '{action}'...")
            self._threads.append(updater_thread)

    def _has_outstanding_threads(self) -> bool:
        """Briefly join each worker, reap finished ones, warn about slow ones.

        Returns True when at least one worker is still running.
        """
        outstanding_threads = False
        # BUGFIX: iterate over a snapshot. The original removed finished
        # workers from self._threads while iterating the same list, which
        # silently skips the element following each removed worker.
        for thread in list(self._threads):
            thread.join(timeout=NONBLOCKING_TIMEOUT)
            cumulative_run_time = int(time.time() - thread.task_start_time)
            if thread.is_alive():
                outstanding_threads = True
                time_since_last_check = int(time.time() - thread.last_warn_time)
                if time_since_last_check > CATALOG_UPDATE_TIMEOUT:
                    thread.last_warn_time = time.time()
                    self.log.warning(f"Cache update for catalog '{thread.name}' is still processing after {cumulative_run_time} seconds ...")
            else:
                self.log.debug(f"CacheUpdateWorker completed for catalog: '{thread.name}', action: '{thread.action}'.")
                self._threads.remove(thread)
                thread.queue.task_done()
                # last_warn_time moved => we previously warned; log completion.
                if thread.last_warn_time != thread.task_start_time:
                    self.log.info(f"Cache update for catalog '{thread.name}' has completed after {cumulative_run_time} seconds")
        if len(self._threads) > WORKER_THREAD_WARNING_THRESHOLD:
            self.log.warning(f'CacheUpdateWorker outstanding threads threshold ({WORKER_THREAD_WARNING_THRESHOLD}) has been exceeded. {len(self._threads)} threads are outstanding. This may indicate a possible issue.')
        return outstanding_threads

    def is_refreshing(self) -> bool:
        """True while a refresh cycle is marked in progress."""
        return self._refresh_queue.refreshing

    def init_refresh(self) -> None:
        """Mark the start of a refresh cycle."""
        self._refresh_queue.refreshing = True

    def stop(self):
        """Clear the refreshing flag and signal the run loop to exit."""
        self._refresh_queue.refreshing = False
        self.stop_event.set()
        self.log.debug('CacheUpdateManager stopped.')
def write_json(save_path, dataset, waves, transcripts):
    """Write a JSON-lines manifest `<dataset>.json` under *save_path*.

    Each line holds {'text', 'duration', 'audio'} for one wave file, where the
    transcript is looked up by the file's basename without extension.
    """
    manifest_path = os.path.join(save_path, dataset + '.json')
    with open(manifest_path, 'w') as fid:
        for wave_file in waves:
            samples, sample_rate = torchaudio.load(wave_file)
            # NOTE(review): numel() counts samples across all channels, so this
            # duration assumes mono audio — confirm upstream data is mono.
            duration = samples.numel() / sample_rate
            key = os.path.splitext(os.path.basename(wave_file))[0]
            record = {'text': transcripts[key], 'duration': duration, 'audio': wave_file}
            json.dump(record, fid)
            fid.write('\n')
def test_template_dpa_raises_exceptions_if_matching_sf_is_not_an_attack_selection_function(sf, building_container):
    """A non-attack selection function passed to TemplateDPAAttack must raise TypeError."""
    with pytest.raises(TypeError):
        scared.TemplateDPAAttack(
            container_building=building_container,
            reverse_selection_function=sf,
            selection_function='foo',
            model=scared.HammingWeight(),
        )
def upgrade():
    """Alembic migration: add nullable 'icon' and 'large' string columns to
    the 'events' table and its history table 'events_version'."""
    op.add_column('events', sa.Column('icon', sa.String(), nullable=True))
    op.add_column('events', sa.Column('large', sa.String(), nullable=True))
    # Version-table copies are non-autoincrement mirrors of the new columns.
    op.add_column('events_version', sa.Column('icon', sa.String(), autoincrement=False, nullable=True))
    op.add_column('events_version', sa.Column('large', sa.String(), autoincrement=False, nullable=True))
class ListMixinTest(QuickbooksUnitTestCase):
    """Unit tests for ListMixin query helpers.

    NOTE: the mock decorators in the original were mangled — each mocked test
    was preceded by a bare string such as ('quickbooks.mixins.ListMixin.where');
    the `@patch(...)` form is restored here.
    """

    @patch('quickbooks.mixins.ListMixin.where')
    def test_all(self, where):
        Department.all()
        where.assert_called_once_with('', order_by='', max_results=100, start_position='', qb=None)

    def test_all_with_qb(self):
        with patch.object(self.qb_client, 'query') as query:
            Department.all(qb=self.qb_client)
            self.assertTrue(query.called)

    @patch('quickbooks.mixins.ListMixin.where')
    def test_filter(self, where):
        Department.filter(max_results=25, start_position='1', Active=True)
        where.assert_called_once_with('Active = True', max_results=25, start_position='1', order_by='', qb=None)

    def test_filter_with_qb(self):
        with patch.object(self.qb_client, 'query') as query:
            Department.filter(Active=True, qb=self.qb_client)
            self.assertTrue(query.called)

    @patch('quickbooks.mixins.ListMixin.query')
    def test_where(self, query):
        Department.where('Active=True', start_position=1, max_results=10)
        query.assert_called_once_with('SELECT * FROM Department WHERE Active=True STARTPOSITION 1 MAXRESULTS 10', qb=None)

    @patch('quickbooks.mixins.ListMixin.query')
    def test_where_start_position_0(self, query):
        Department.where('Active=True', start_position=0, max_results=10)
        query.assert_called_once_with('SELECT * FROM Department WHERE Active=True STARTPOSITION 0 MAXRESULTS 10', qb=None)

    def test_where_with_qb(self):
        with patch.object(self.qb_client, 'query') as query:
            Department.where('Active=True', start_position=1, max_results=10, qb=self.qb_client)
            self.assertTrue(query.called)

    @patch('quickbooks.mixins.QuickBooks.query')
    def test_query(self, query):
        select = 'SELECT * FROM Department WHERE Active=True'
        Department.query(select)
        query.assert_called_once_with(select)

    def test_query_with_qb(self):
        with patch.object(self.qb_client, 'query') as query:
            select = 'SELECT * FROM Department WHERE Active=True'
            Department.query(select, qb=self.qb_client)
            self.assertTrue(query.called)

    @patch('quickbooks.mixins.ListMixin.where')
    def test_choose(self, where):
        Department.choose(['name1', 'name2'], field='Name')
        where.assert_called_once_with("Name in ('name1', 'name2')", qb=None)

    def test_choose_with_qb(self):
        with patch.object(self.qb_client, 'query') as query:
            Department.choose(['name1', 'name2'], field='Name', qb=self.qb_client)
            self.assertTrue(query.called)

    @patch('quickbooks.mixins.QuickBooks.query')
    def test_count(self, query):
        count = Department.count(where_clause='Active=True', qb=self.qb_client)
        query.assert_called_once_with('SELECT COUNT(*) FROM Department WHERE Active=True')

    @patch('quickbooks.mixins.ListMixin.query')
    def test_order_by(self, query):
        Customer.filter(Active=True, order_by='DisplayName')
        query.assert_called_once_with('SELECT * FROM Customer WHERE Active = True ORDERBY DisplayName', qb=None)

    def test_order_by_with_qb(self):
        with patch.object(self.qb_client, 'query') as query:
            Customer.filter(Active=True, order_by='DisplayName', qb=self.qb_client)
            self.assertTrue(query.called)
def readiness_score_recommendation(readiness_score):
    """Map an Oura readiness score to a recommendation bucket.

    Returns '' for a score of 0, 'High'/'Mod'/'Low' against the module-level
    oura_*_threshold values, 'Rest' below the low threshold, and 'N/A' when
    the input cannot be converted to an int.
    """
    # BUGFIX: the original used a bare `except:` around the whole chain, which
    # also swallowed unrelated errors (e.g. a missing threshold constant).
    # Only conversion failures should yield 'N/A'.
    try:
        readiness_score = int(readiness_score)
    except (TypeError, ValueError):
        return 'N/A'
    if readiness_score == 0:
        return ''
    if readiness_score >= oura_high_threshold:
        return 'High'
    if readiness_score >= oura_med_threshold:
        return 'Mod'
    if readiness_score >= oura_low_threshold:
        return 'Low'
    return 'Rest'
def _get_branch_results(build):
    """Collect branch-coverage hits for the EVMTester contract, grouped by
    the expected jump direction returned from _get_branch."""
    first_eval = list(coverage.get_coverage_eval().values())[0]
    branch_false, branch_true = (sorted(entry) for entry in first_eval['EVMTester']['0'][1:])
    coverage.clear()
    branch_results = {True: [], False: []}
    for taken, hits in ((True, branch_true), (False, branch_false)):
        for hit in hits:
            key, map_ = _get_branch(build, hit, taken)
            branch_results[key].append(map_)
    return branch_results
def update_function_text(feedrow, new_func):
    """Validate and store a new function body on *feedrow*.

    Returns a dict with 'error', 'message' and 'reload' keys describing the
    outcome; the row is updated only when the new text compiles.
    """
    current = feedrow.func.strip()
    new_func = new_func.strip()
    print('New function:', new_func)
    print('Current function:', current)
    if current == new_func:
        return {'error': True, 'message': 'Function has not changed? Nothing to do!', 'reload': False}
    # Test-compile before committing anything to the row.
    try:
        rfdb.str_to_function(new_func, 'testing_compile')
    except Exception:
        failure_html = '<div class="center-block text-center"><h4>New function failed to compile!</h4></div>'
        failure_html += '<pre><code>' + traceback.format_exc() + '</code></pre>'
        return {'error': True, 'message': failure_html, 'reload': False}
    feedrow.func = new_func
    feedrow.last_changed = datetime.datetime.now()
    return {'error': False, 'message': 'Function updated successfully!', 'reload': True}
# NOTE: the decorator below was mangled in the original (leading `@pytest.mark`
# stripped, leaving a bare `.parametrize(...)`); restored here.
@pytest.mark.parametrize('args', [('CG', 1), ('DG', 1), EnrichedElement(HDiv(TensorProductElement(FiniteElement('RT', triangle, 2), FiniteElement('DG', interval, 1))), HDiv(TensorProductElement(FiniteElement('DG', triangle, 1), FiniteElement('CG', interval, 2)))), EnrichedElement(HCurl(TensorProductElement(FiniteElement('RT', triangle, 2), FiniteElement('CG', interval, 2))), HCurl(TensorProductElement(FiniteElement('CG', triangle, 2), FiniteElement('DG', interval, 1))))])
def test_prism_vector(mesh_prism, args):
    """Interpolate/project a linear vector field on a prism mesh and check
    point evaluations against the exact values."""
    (x, y, z) = SpatialCoordinate(mesh_prism)
    if isinstance(args, tuple):
        # (family, degree) pair: a nodal space supports direct interpolation.
        V = VectorFunctionSpace(mesh_prism, *args)
        f = Function(V).interpolate(as_vector([(0.2 + y), ((0.8 * x) + (0.2 * z)), y]))
    else:
        # Enriched HDiv/HCurl elements require an L2 projection.
        V = FunctionSpace(mesh_prism, args)
        f = Function(V).project(as_vector([(0.2 + y), ((0.8 * x) + (0.2 * z)), y]))
    assert np.allclose([0.6, 0.54, 0.4], f([0.6, 0.4, 0.3]))
    assert np.allclose([0.8, 0.32, 0.6], f([0.2, 0.6, 0.8]))
def issue_refund(payment, amount=None):
    """Refund *payment* through the service it was originally made with.

    Falls back to recording a manual negative Payment when the payment did not
    go through a supported service; raises PaymentException when the service
    is known but its credentials are not configured.
    """
    service = payment.payment_service
    if service == 'Stripe' and settings.STRIPE_SECRET_KEY:
        return stripe_issue_refund(payment, amount)
    if service == 'USAePay' and settings.USA_E_PAY_KEY:
        return usaepay_issue_refund(payment, amount)
    if service not in ('Stripe', 'USAePay'):
        logger.info("issue_refund: Payment not issued through service so we can't refund it.")
        # Record a manual offsetting payment instead of calling out to a service.
        return Payment.objects.create(
            bill=payment.bill,
            user=payment.user,
            payment_service=payment.payment_service,
            paid_amount=(-1) * payment.paid_amount,
            payment_method='Refund',
            transaction_id='Manual',
        )
    raise PaymentException('No payment system configured')
class OptionSeriesFunnelSonificationTracksMappingLowpassFrequency(Options):
    """Lowpass-frequency mapping options for funnel-series sonification tracks.

    NOTE: in the original, each getter/setter pair was two plain ``def``s with
    the same name, so the second silently shadowed the first. The standard
    ``@property`` / ``@<name>.setter`` decorators are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class ConverterDUT(Module):
    """Device-under-test wrapper for LiteDRAMNativePortConverter simulations.

    Builds user-facing and crossbar-facing native DRAM ports of different data
    widths, plus drivers and a backing memory model. With separate_rw=True the
    read and write paths get independent ports/converters; otherwise a single
    'both'-mode port is shared by reads and writes.
    """

    def __init__(self, user_data_width, native_data_width, mem_depth, separate_rw=True, read_latency=0):
        self.separate_rw = separate_rw
        if separate_rw:
            # Independent write and read paths, each with its own converter pair.
            self.write_user_port = LiteDRAMNativeWritePort(address_width=32, data_width=user_data_width)
            self.write_crossbar_port = LiteDRAMNativeWritePort(address_width=32, data_width=native_data_width)
            self.read_user_port = LiteDRAMNativeReadPort(address_width=32, data_width=user_data_width)
            self.read_crossbar_port = LiteDRAMNativeReadPort(address_width=32, data_width=native_data_width)
            self.write_driver = NativePortDriver(self.write_user_port)
            self.read_driver = NativePortDriver(self.read_user_port)
        else:
            # Shared bidirectional port: read aliases point at the write objects.
            self.write_user_port = LiteDRAMNativePort(mode='both', address_width=32, data_width=user_data_width)
            self.write_crossbar_port = LiteDRAMNativePort(mode='both', address_width=32, data_width=native_data_width)
            self.write_driver = NativePortDriver(self.write_user_port)
            self.read_user_port = self.write_user_port
            self.read_crossbar_port = self.write_crossbar_port
            self.read_driver = self.write_driver
        # Background simulation generators servicing the data phases.
        self.driver_generators = [self.write_driver.write_data_handler(), self.read_driver.read_data_handler(latency=read_latency)]
        self.memory = DRAMMemory(native_data_width, mem_depth)

    def do_finalize(self):
        # Converters are attached at finalization so ports exist beforehand.
        if self.separate_rw:
            self.submodules.write_converter = LiteDRAMNativePortConverter(self.write_user_port, self.write_crossbar_port)
            self.submodules.read_converter = LiteDRAMNativePortConverter(self.read_user_port, self.read_crossbar_port)
        else:
            self.submodules.converter = LiteDRAMNativePortConverter(self.write_user_port, self.write_crossbar_port)

    def read(self, address, **kwargs):
        """Simulation generator: read from *address* via the read driver."""
        return (yield from self.read_driver.read(address, **kwargs))

    def write(self, address, data, **kwargs):
        """Simulation generator: write *data* to *address* via the write driver.

        When down-converting (user port wider than crossbar port) the command
        must accompany the data, hence data_with_cmd.
        """
        if (self.write_user_port.data_width > self.write_crossbar_port.data_width):
            kwargs['data_with_cmd'] = True
        return (yield from self.write_driver.write(address, data, **kwargs))
def add_to_app(components: List[Standalone.Component], app_path: Union[(str, Path)], folder: str='assets', name: str='{selector}', raise_exception: bool=False, view_path: str=node.APP_FOLDER) -> dict:
    """Install standalone components into an Angular app.

    For each component: checks npm requirements, writes its files under
    src/<folder>, and collects styles/scripts/module imports. Then patches
    angular.json (build options) and app.module.ts with the new entries.

    Returns a dict with 'dependencies', 'styles', 'scripts', 'modules' plus a
    per-selector requirements entry.
    """
    result = {'dependencies': {}, 'styles': [], 'scripts': [], 'modules': {}}
    for component in components:
        result[component.selector] = npm.check_component_requirements(component, app_path, raise_exception)
        result['dependencies'].update(result[component.selector])
        assets_path = Path(app_path, 'src', folder)
        assets_path.mkdir(parents=True, exist_ok=True)
        component_files = to_component(component, name=name, out_path=str(assets_path))
        result['styles'].extend(npm.get_styles(component.requirements))
        result['scripts'].extend(npm.get_scripts(component.requirements))
        # component_files['component'][:-3] strips the '.ts' extension.
        result['modules'][component.__name__] = component.get_import(('../%s/%s' % (folder, component_files['component'][:(- 3)])), suffix='Component', root_path=Path(app_path, PROJECT_SRC_ALIAS, view_path))
    angular_config_path = Path(app_path, ANGULAR_JSON_CONFIG)
    if angular_config_path.exists():
        app_path = Path(app_path)
        with open(angular_config_path) as ap:
            # OrderedDict keeps angular.json key order stable on rewrite.
            angular_config = json.loads(ap.read(), object_pairs_hook=OrderedDict)
        for cat in ['styles', 'scripts']:
            for style in set(result[cat]):
                if (style not in angular_config['projects'][app_path.name]['architect']['build']['options'][cat]):
                    angular_config['projects'][app_path.name]['architect']['build']['options'][cat].insert(0, style)
        with open(angular_config_path, 'w') as ap:
            json.dump(angular_config, ap, indent=2)
    else:
        count_styles = len(result['styles'])
        count_scripts = len(result['scripts'])
        logging.warning(('%s styles and %s scripts not added' % (count_styles, count_scripts)))
        logging.warning(('Cannot locate file: %s' % angular_config_path))
    app_module_path = Path(app_path, PROJECT_SRC_ALIAS, view_path, 'app.module.ts')
    if app_module_path.exists():
        auto_update = False
        with open(app_module_path) as am:
            # NOTE(review): str.split('') raises ValueError at runtime — the
            # original separator (likely a marker such as '@NgModule') appears
            # to have been lost in extraction. Must be restored before use.
            (imports, config) = map((lambda x: x.strip()), am.read().split(''))
        imports = (imports + '\n\n// Auto generated')
        for module in result['modules'].values():
            if (module not in imports):
                imports = ('%s\n%s;' % (imports, module))
                auto_update = True
        if auto_update:
            with open(app_module_path, 'w') as am:
                # NOTE(review): '\%s' contains a literal backslash before the
                # config section — looks like extraction damage; verify the
                # intended separator between imports and config.
                am.write(('%s\n\%s' % (imports, config)))
    return result
def create_custom_doctype():
    """Return the 'Test Patient Feedback' custom DocType, creating it on first use."""
    if frappe.db.exists('DocType', 'Test Patient Feedback'):
        return frappe.get_doc('DocType', 'Test Patient Feedback')
    doc = frappe.get_doc({'doctype': 'DocType', 'module': 'Healthcare', 'custom': 1, 'is_submittable': 1, 'fields': [{'label': 'Date', 'fieldname': 'date', 'fieldtype': 'Date'}, {'label': 'Patient', 'fieldname': 'patient', 'fieldtype': 'Link', 'options': 'Patient'}, {'label': 'Rating', 'fieldname': 'rating', 'fieldtype': 'Rating'}, {'label': 'Feedback', 'fieldname': 'feedback', 'fieldtype': 'Small Text'}], 'permissions': [{'role': 'System Manager', 'read': 1}], 'name': 'Test Patient Feedback'})
    doc.insert()
    return doc
def generate_bigquery(bigquery_config: BigQueryConfig) -> List[Dict[(str, str)]]:
    """Generate dataset descriptors from BigQuery, translating connector
    failures into an HTTP 401 response."""
    log.info('Generating datasets from BigQuery')
    try:
        bigquery_datasets = generate_bigquery_datasets(bigquery_config)
    except ConnectorFailureException as error:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(error))
    return [dataset.dict(exclude_none=True) for dataset in bigquery_datasets]
class SourceCode(_common_models.FlyteIdlEntity):
    """Link to the source code backing a Flyte description entity.

    NOTE(review): the bare `link` annotation plus the `cls(link=...)` call in
    from_flyte_idl suggest this was originally a @dataclass whose decorator was
    lost in extraction — confirm against the surrounding file.
    """
    link: Optional[str] = None

    def to_flyte_idl(self):
        """Serialize to the protobuf SourceCode message."""
        return description_entity_pb2.SourceCode(link=self.link)

    # BUGFIX: this alternate constructor takes `cls` but had no @classmethod
    # decorator, so it would have received an instance as `cls`.
    @classmethod
    def from_flyte_idl(cls, pb2_object: description_entity_pb2.SourceCode) -> 'SourceCode':
        """Build a SourceCode from protobuf, or None when no link is set."""
        return (cls(link=pb2_object.link) if pb2_object.link else None)
class OptionSeriesSolidgaugeDataDatalabelsFilter(Options):
    """Data-label filter options for solidgauge series data.

    NOTE: the original declared each getter/setter pair as two plain ``def``s
    with the same name (the second shadowing the first); the standard
    ``@property`` / ``@<name>.setter`` decorators are restored here.
    """

    @property
    def operator(self):
        return self._config_get(None)

    @operator.setter
    def operator(self, value: Any):
        self._config(value, js_type=False)

    @property
    def property(self):
        return self._config_get(None)

    @property.setter
    def property(self, text: str):
        self._config(text, js_type=False)
class Channel(str):
    """A named signal channel: a str subclass carrying range metadata.

    Stores low/high bounds, a derived span/offset (mirrored channels keep a
    zero offset and use `high` as the span), clamping limits and a NaN
    replacement value.
    """

    def __new__(cls, name: str, low: float, high: float, mirror_range: bool=False, bound: bool=False, flags: int=0, limit: Tuple[(Optional[float], Optional[float])]=(None, None), nans: float=0.0) -> 'Channel':
        channel = super().__new__(cls, name)
        channel.low = low
        channel.high = high
        # Mirrored percent channels are symmetric about zero (|low| == high).
        is_mirrored = (flags & FLG_MIRROR_PERCENT) and abs(low) == high
        if is_mirrored:
            channel.span = high
            channel.offset = 0.0
        else:
            channel.span = high - low
            channel.offset = -low
        channel.bound = bound
        channel.flags = flags
        channel.limit = limit
        channel.nans = nans
        return channel
class Solution:
    """LeetCode 834: sum of distances from every node to all other nodes in a tree."""

    def sumOfDistancesInTree(self, N: int, edges: List[List[int]]) -> List[int]:
        """Return, for each of the N nodes, the sum of distances to all others.

        Two DFS passes: the first computes subtree sizes and root depths, the
        second re-roots the answer across each edge in O(1).
        """
        adjacency = defaultdict(set)
        for u, v in edges:
            adjacency[u].add(v)
            adjacency[v].add(u)
        subtree_size = [0] * N
        depth_of = [0] * N
        answer = [0] * N

        def count_subtree(node, parent, depth):
            # Post-order: record subtree size and depth from the root.
            total = 1
            for child in adjacency[node]:
                if child != parent:
                    total += count_subtree(child, node, depth + 1)
            subtree_size[node] = total
            depth_of[node] = depth
            return total

        def reroot(node, parent, dist_sum):
            # Moving the root across an edge changes the total by
            # N - 2 * size(child subtree).
            answer[node] = dist_sum
            for child in adjacency[node]:
                if child != parent:
                    reroot(child, node, dist_sum + N - 2 * subtree_size[child])

        count_subtree(0, -1, 0)
        reroot(0, -1, sum(depth_of))
        return answer
def _get_kwargs(*, client: Client, start_date: Union[(Unset, None, datetime.date)]=UNSET, end_date: Union[(Unset, None, datetime.date)]=UNSET) -> Dict[(str, Any)]:
    """Build httpx request kwargs for GET /billing/user_spending.

    Dates are ISO-formatted; UNSET and None parameters are dropped from the
    query string.
    """
    url = '{}/billing/user_spending'.format(client.base_url)

    def _serialize_date(value):
        # UNSET stays UNSET (filtered below); None stays None; dates -> ISO.
        if isinstance(value, Unset):
            return UNSET
        return value.isoformat() if value else None

    params: Dict[(str, Any)] = {
        'start_date': _serialize_date(start_date),
        'end_date': _serialize_date(end_date),
    }
    params = {key: value for key, value in params.items() if value is not UNSET and value is not None}
    return {
        'method': 'get',
        'url': url,
        'headers': client.get_headers(),
        'cookies': client.get_cookies(),
        'timeout': client.get_timeout(),
        'follow_redirects': client.follow_redirects,
        'params': params,
    }
def construct_ner_instruction(text: str) -> str:
    """Build an LLM prompt asking for named entities with types and confidence
    scores, embedding *text* between triple backticks and showing a worked
    example of the expected JSON-like output format."""
    sample_entities = [{'Entity': 'Barack Obama', 'Type': 'Person', 'Confidence': 0.98}, {'Entity': '44th President', 'Type': 'Position', 'Confidence': 0.95}, {'Entity': 'United States', 'Type': 'Country', 'Confidence': 0.99}]
    return f'''Please extract named entities, their types, and assign a confidence score between 0 and 1 from the following text. The text is enclosed within triple backticks.
Input Text:
```{text}```
For example, if the input text is:
```Barack Obama was the 44th President of the United States.```
Your output should be in the following format:
```
{sample_entities}
```
'''
def test_butterworth_returns_correct_value_with_highpass_filter_type_and_float32_precision(trace):
    """Compare scared's highpass Butterworth against scipy at float32 precision."""
    # NOTE(review): the numeric literals here are mangled — `.0 / (.0 / 2)`
    # evaluates to 0.0/0.0 (ZeroDivisionError), and the `.0, .0` cutoff/rate
    # arguments below look truncated (likely e.g. `1000.` / `5000.` originally).
    # Restore the intended frequencies before relying on this test.
    (b, a) = signal.butter(3, (.0 / (.0 / 2)), 'highpass')
    b = b.astype('float32')
    a = a.astype('float32')
    expected = signal.lfilter(b, a, trace)
    result = scared.signal_processing.butterworth(trace, .0, .0, filter_type=scared.signal_processing.FilterType.HIGH_PASS)
    # 1e-06 % tolerance accounts for float32 rounding differences.
    assert (max_diff_percent(expected, result) < 1e-06)
def parse_annotations(annot, annot_file):
    """Yield annotations parsed from *annot_file* when annotation is enabled.

    Lines starting with '#' are skipped; each remaining line is parsed with
    parse_annotation_line and only the annotation part is yielded. When
    *annot* is falsy, yields nothing. (The original compared `annot == True`,
    which silently ignored other truthy values; the truthiness idiom is used
    here, and the dead `else: pass` branch is removed.)
    """
    if not annot:
        return
    with open(annot_file, 'r') as annot_f:
        for line in annot_f:
            if line.startswith('#'):
                continue  # comment line
            hit, annotation = parse_annotation_line(line)
            yield annotation
def data_processor(df, logger) -> list:
    """Convert the raw US-BPA dataframe into (datetime, production-dict) pairs.

    Keeps the last 24 hours of rows (288 five-minute samples), maps column
    headers through GENERATION_MAPPING, and warns about any unrecognised
    columns so new data sources are noticed.
    """
    df = df.dropna(thresh=2)
    df.columns = df.columns.str.strip()
    df = df.tail(288)
    df['Date/Time'] = df['Date/Time'].map(timestamp_converter)
    expected_headers = GENERATION_MAPPING.keys() | {'Date/Time', 'Load'}
    unknown_keys = set(df.columns) - expected_headers
    for unknown in unknown_keys:
        logger.warning(f'New data {unknown} seen in US-BPA data source', extra={'key': 'US-BPA'})
    # 'Load' is consumption, not generation, so it is dropped alongside unknowns.
    keys_to_remove = unknown_keys | {'Load'}
    processed_data = []
    for _, row in df.iterrows():
        production = row.to_dict()
        dt = production.pop('Date/Time').to_pydatetime()
        mapped_production = {
            GENERATION_MAPPING[header]: value
            for header, value in production.items()
            if header not in keys_to_remove
        }
        processed_data.append((dt, mapped_production))
    return processed_data
class WafFirewallVersionResponseDataAttributesAllOf(ModelNormal):
    """Generated OpenAPI model for WAF firewall version response attributes.

    NOTE: the original had decorator names with the '@' (and import prefix)
    stripped — bare `_property` and `_js_args_to_python_args` lines. They are
    restored here as `@cached_property` and `@convert_js_args_to_python_args`
    per the OpenAPI-generator model template; confirm against the file's
    imports.
    """

    allowed_values = {('last_deployment_status',): {'None': None, 'NULL': 'null', 'PENDING': 'pending', 'IN_PROGRESS': 'in progress', 'COMPLETED': 'completed', 'FAILED': 'failed'}}
    validations = {}

    @cached_property
    def additional_properties_type():
        """Types accepted for properties not listed in attribute_map."""
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        """Map attribute name -> tuple of accepted types."""
        return {'active': (bool,), 'active_rules_fastly_block_count': (int,), 'active_rules_fastly_log_count': (int,), 'active_rules_fastly_score_count': (int,), 'active_rules_owasp_block_count': (int,), 'active_rules_owasp_log_count': (int,), 'active_rules_owasp_score_count': (int,), 'active_rules_trustwave_block_count': (int,), 'active_rules_trustwave_log_count': (int,), 'last_deployment_status': (str, none_type), 'deployed_at': (str,), 'error': (str,)}

    @cached_property
    def discriminator():
        return None

    attribute_map = {'active': 'active', 'active_rules_fastly_block_count': 'active_rules_fastly_block_count', 'active_rules_fastly_log_count': 'active_rules_fastly_log_count', 'active_rules_fastly_score_count': 'active_rules_fastly_score_count', 'active_rules_owasp_block_count': 'active_rules_owasp_block_count', 'active_rules_owasp_log_count': 'active_rules_owasp_log_count', 'active_rules_owasp_score_count': 'active_rules_owasp_score_count', 'active_rules_trustwave_block_count': 'active_rules_trustwave_block_count', 'active_rules_trustwave_log_count': 'active_rules_trustwave_log_count', 'last_deployment_status': 'last_deployment_status', 'deployed_at': 'deployed_at', 'error': 'error'}

    read_only_vars = {'active', 'active_rules_fastly_block_count', 'active_rules_fastly_log_count', 'active_rules_fastly_score_count', 'active_rules_owasp_block_count', 'active_rules_owasp_log_count', 'active_rules_owasp_score_count', 'active_rules_trustwave_block_count', 'active_rules_trustwave_log_count', 'last_deployment_status', 'deployed_at', 'error'}

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from server data; read-only attributes are allowed here."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when configured and no additional
            # properties are accepted.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def get_vocab_lists():
    """Return the static HSK vocabulary-list fixtures (levels 1-6)."""
    return [
        {'list_name': 'HSK Level 1', 'list_id': '1ebcad3f-5dfd-6bfe-bda4-acde', 'list_difficulty_level': 'Beginner', 'date_created': '2018-12-16T23:06:48.467526', 'created_by': 'admin'},
        {'list_name': 'HSK Level 2', 'list_id': '1ebcad3f-adc0-6f42-b8b1-acde', 'list_difficulty_level': 'Beginner', 'date_created': '2018-12-16T23:06:48.467526', 'created_by': 'admin'},
        {'list_name': 'HSK Level 3', 'list_id': '1ebcad3f-f815-6b92-b3e8-acde', 'list_difficulty_level': 'Intermediate', 'date_created': '2018-12-16T23:06:48.467526', 'created_by': 'admin'},
        {'list_name': 'HSK Level 4', 'list_id': '1ebcad40-414f-6bc8-859d-acde', 'list_difficulty_level': 'Intermediate', 'date_created': '2018-12-16T23:06:48.467526', 'created_by': 'admin'},
        {'list_name': 'HSK Level 5', 'list_id': '1ebcad40-bb9e-6ece-a366-acde', 'list_difficulty_level': 'Advanced', 'date_created': '2018-12-16T23:06:48.467526', 'created_by': 'admin'},
        {'list_name': 'HSK Level 6', 'list_id': '1ebcad41-197a-6700-95a3-acde', 'list_difficulty_level': 'Advanced', 'date_created': '2018-12-16T23:06:48.467526', 'created_by': 'admin'},
    ]
class Plugin(plugin.PluginProto):
PLUGIN_ID = 59
PLUGIN_NAME = 'Input - Rotary Encoder (TESTING)'
PLUGIN_VALUENAME1 = 'Counter'
def __init__(self, taskindex):
plugin.PluginProto.__init__(self, taskindex)
self.dtype = rpieGlobals.DEVICE_TYPE_DUAL
self.vtype = rpieGlobals.SENSOR_TYPE_DIMMER
self.valuecount = 1
self.senddataoption = True
self.timeroption = False
self.timeroptional = True
self.inverselogicoption = False
self.recdataoption = False
self.clklast = (- 1)
self.timer100ms = False
def plugin_exit(self):
if (self.enabled and (self.timer100ms == False)):
try:
gpios.HWPorts.remove_event_detect(self.taskdevicepin[0])
except:
pass
return True
def plugin_init(self, enableplugin=None):
plugin.PluginProto.plugin_init(self, enableplugin)
self.decimals[0] = 0
try:
if (float(self.uservar[0]) < int(self.taskdevicepluginconfig[1])):
self.set_value(1, self.taskdevicepluginconfig[1], False)
if (float(self.uservar[0]) > int(self.taskdevicepluginconfig[2])):
self.set_value(1, self.taskdevicepluginconfig[2], False)
except:
self.set_value(1, self.taskdevicepluginconfig[1], False)
if ((int(self.taskdevicepin[0]) >= 0) and self.enabled and (int(self.taskdevicepin[1]) >= 0)):
try:
gpios.HWPorts.remove_event_detect(self.taskdevicepin[0])
except:
pass
try:
btime = int(self.taskdevicepluginconfig[3])
if (btime < 0):
btime = 0
except:
btime = 10
try:
self.clklast = gpios.HWPorts.input(int(self.taskdevicepin[0]))
gpios.HWPorts.add_event_detect(self.taskdevicepin[0], gpios.FALLING, self.p059_handler, btime)
except:
misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'Event can not be added')
self.initialized = False
else:
self.initialized = False
def webform_load(self):
webserver.addFormNote('1st GPIO=CLK, 2nd GPIO=DT, BOTH of them needs to be INPUT-PULLUP')
choice1 = int(float(self.taskdevicepluginconfig[0]))
options = ['1', '2', '3', '4']
optionvalues = [1, 2, 3, 4]
webserver.addFormSelector('Step', 'p059_step', len(options), options, optionvalues, None, choice1)
try:
minv = int(self.taskdevicepluginconfig[1])
except:
minv = 0
webserver.addFormNumericBox('Limit min.', 'p059_min', minv, (- 65535), 65535)
try:
maxv = int(self.taskdevicepluginconfig[2])
except:
maxv = 100
if (minv >= maxv):
maxv = (minv + 1)
webserver.addFormNumericBox('Limit max.', 'p059_max', maxv, (- 65535), 65535)
try:
bt = int(self.taskdevicepluginconfig[3])
except:
bt = 10
webserver.addFormNumericBox('GPIO bounce time', 'p059_bounce', bt, 0, 1000)
webserver.addUnit('ms')
return True
def webform_save(self, params):
changed = False
par = webserver.arg('p059_step', params)
if (par == ''):
par = 1
if (str(self.taskdevicepluginconfig[0]) != str(par)):
changed = True
try:
self.taskdevicepluginconfig[0] = int(par)
except:
self.taskdevicepluginconfig[0] = 1
par = webserver.arg('p059_min', params)
if (par == ''):
par = 0
if (str(self.taskdevicepluginconfig[1]) != str(par)):
changed = True
try:
self.taskdevicepluginconfig[1] = int(par)
except:
self.taskdevicepluginconfig[1] = 0
par = webserver.arg('p059_max', params)
if (par == ''):
par = 100
if (int(self.taskdevicepluginconfig[1]) >= int(par)):
par = (int(self.taskdevicepluginconfig[1]) + 1)
if (str(self.taskdevicepluginconfig[2]) != str(par)):
changed = True
try:
self.taskdevicepluginconfig[2] = int(par)
except:
self.taskdevicepluginconfig[2] = 100
par = webserver.arg('p059_bounce', params)
try:
if (par == ''):
par = 10
else:
par = int(par)
except:
par = 10
if (par != int(self.taskdevicepluginconfig[3])):
changed = True
self.taskdevicepluginconfig[3] = par
if changed:
self.plugin_init()
return True
def p059_handler(self, channel):
    # GPIO edge-interrupt callback for the rotary encoder; ``channel`` is
    # the pin number reported by the GPIO library (not used directly).
    if (self.initialized and self.enabled):
        aclk = gpios.HWPorts.input(self.taskdevicepin[0])  # read CLK pin
        if (aclk != self.clklast):  # only act on a real CLK transition
            dtstate = gpios.HWPorts.input(self.taskdevicepin[1])  # read DT pin
            try:
                ac = float(self.uservar[0])  # current counter value
            except:
                ac = 0  # uninitialized/garbage user var -> start from 0
            # NOTE(review): both the increment and the decrement live under the
            # same ``dtstate != aclk`` direction test; the decrement fires only
            # when the counter has already hit the max limit. Confirm this is
            # the intended direction handling for this encoder wiring.
            if (dtstate != aclk):
                if (ac < int(self.taskdevicepluginconfig[2])):  # below max limit
                    ac += int(self.taskdevicepluginconfig[0])   # step up
                elif (ac > int(self.taskdevicepluginconfig[1])):  # above min limit
                    ac -= int(self.taskdevicepluginconfig[0])   # step down
            self.clklast = aclk
            self.set_value(1, ac, True)
            self._lastdataservetime = rpieTime.millis()
class CmdMap(MuxCommand):
    """Display a map of the area around the caller.

    Usage: ``map [size]`` — an optional numeric argument requests a larger
    view, capped at the configured maximum.
    """
    key = 'map'

    def func(self):
        # Default radius unless the caller supplied a numeric override.
        view_size = _BASIC_MAP_SIZE
        cap = _MAX_MAP_SIZE
        if self.args.isnumeric():
            view_size = min(cap, int(self.args))
        rendered = Map(self.caller, size=view_size).show_map()
        # Tag the outgoing message so clients can route it to a map pane.
        self.caller.msg((rendered, {'type': 'map'}))
def check_skin_installed():
    """Check whether the EmbyCon Estuary skin add-on is installed.

    Queries Kodi's JSON-RPC ``Addons.GetAddonDetails`` for
    ``skin.estuary_embycon``; when the add-on is absent, falls back to
    cloning the default skin via ``clone_default_skin()``.
    """
    params = {'addonid': 'skin.estuary_embycon', 'properties': ['version', 'enabled']}
    result = JsonRpc('Addons.GetAddonDetails').execute(params)
    log.debug('EmbyCon Skin Details: {0}', result)
    details = result.get('result')
    installed = (details is not None)
    version = 'na'  # placeholder when the version cannot be determined
    if installed:
        # Guard each level of the response: a malformed/partial JSON-RPC
        # reply must not raise AttributeError here (the original chained
        # .get(...).get(...) would crash if 'addon' were missing).
        version = (details.get('addon') or {}).get('version', 'na')
    if (not installed):
        clone_default_skin()
class OptionPlotoptionsBellcurveSonificationTracksMappingLowpassResonance(Options):
    # Generated options wrapper for the bellcurve sonification lowpass
    # resonance mapping settings.
    #
    # NOTE(review): every option below is defined twice — a zero-argument
    # reader followed by a one-argument writer. This looks like stripped
    # @property / @<name>.setter decorators; as written, the second (setter)
    # definition shadows the first. Confirm against the code generator.

    def mapFunction(self):
        # Read the 'mapFunction' option (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Write the 'mapFunction' option.
        self._config(value, js_type=False)

    def mapTo(self):
        # Read the 'mapTo' option (None when unset).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Write the 'mapTo' option.
        self._config(text, js_type=False)

    def max(self):
        # Read the 'max' option (None when unset).
        return self._config_get(None)

    def max(self, num: float):
        # Write the 'max' option.
        self._config(num, js_type=False)

    def min(self):
        # Read the 'min' option (None when unset).
        return self._config_get(None)

    def min(self, num: float):
        # Write the 'min' option.
        self._config(num, js_type=False)

    def within(self):
        # Read the 'within' option (None when unset).
        return self._config_get(None)

    def within(self, value: Any):
        # Write the 'within' option.
        self._config(value, js_type=False)
class TestErasureEmailConnectorMethods():
    # Unit tests for the generic erasure e-mail connector helpers.
    #
    # NOTE(review): the bare ``.parametrize(...)``, ``.usefixtures(...)`` and
    # ``('fides...dispatch_message')`` expression statements preceding several
    # tests look like stripped ``@pytest.mark.*`` / ``@mock.patch`` decorators;
    # the code is kept verbatim.

    # Schema fixtures covering each identity-type combination.
    email_defined = EmailSchema(third_party_vendor_name="Dawn's Bookstore", recipient_email_address='', advanced_settings=AdvancedSettings(identity_types=IdentityTypes(email=True, phone_number=False)))
    phone_defined = EmailSchema(third_party_vendor_name="Dawn's Bookstore", recipient_email_address='', advanced_settings=AdvancedSettings(identity_types=IdentityTypes(email=False, phone_number=True)))
    email_and_phone_defined = EmailSchema(third_party_vendor_name="Dawn's Bookstore", recipient_email_address='', advanced_settings=AdvancedSettings(identity_types=IdentityTypes(email=True, phone_number=True)))

    def test_get_erasure_email_connection_configs_none(self, db):
        # No erasure e-mail connection configs exist by default.
        assert (not get_erasure_email_connection_configs(db).first())

    def test_get_erasure_email_connection_configs(self, db, attentive_email_connection_config):
        # An enabled config is returned; disabling it removes it from results.
        assert (get_erasure_email_connection_configs(db).count() == 1)
        assert (get_erasure_email_connection_configs(db).first().name == attentive_email_connection_config.name)
        attentive_email_connection_config.disabled = True
        attentive_email_connection_config.save(db=db)
        assert (not get_erasure_email_connection_configs(db).first())

    def test_get_erasure_email_connection_configs_read_only(self, db, sovrn_email_connection_config):
        # Read-only configs are excluded from the erasure connector query.
        sovrn_email_connection_config.access = AccessLevel.read
        sovrn_email_connection_config.save(db=db)
        assert (not get_erasure_email_connection_configs(db).first())

    .parametrize('email_schema, identity_types', [(email_defined, ['email']), (phone_defined, ['phone_number']), (email_and_phone_defined, ['email', 'phone_number'])])
    def test_get_identity_types_for_connector_both_types_supplied(self, email_schema, identity_types):
        # The schema's advanced settings determine the supported identity types.
        assert (get_identity_types_for_connector(email_schema) == identity_types)

    .parametrize('email_schema, user_identities, filtered_identities', [(email_defined, {'email': ''}, {'email': ''}), (email_defined, {'phone_number': ''}, {}), (email_defined, {'email': '', 'phone_number': ''}, {'email': ''}), (phone_defined, {'email': ''}, {}), (phone_defined, {'phone_number': ''}, {'phone_number': ''}), (phone_defined, {'email': '', 'phone_number': ''}, {'phone_number': ''}), (email_and_phone_defined, {'email': ''}, {'email': ''}), (email_and_phone_defined, {'phone_number': ''}, {'phone_number': ''}), (email_and_phone_defined, {'email': '', 'phone_number': ''}, {'email': '', 'phone_number': ''})])
    def test_get_user_identities_for_connector(self, email_schema, user_identities, filtered_identities):
        # Only identities the connector supports survive the filter.
        assert (filter_user_identities_for_connector(email_schema, user_identities) == filtered_identities)

    ('fides.api.service.connectors.erasure_email_connector.dispatch_message')
    def test_send_single_erasure_email_no_org_defined(self, mock_dispatch, db):
        # Without an organization name, sending must fail before dispatching.
        with pytest.raises(MessageDispatchException) as exc:
            send_single_erasure_email(db=db, subject_email='', subject_name='To whom it may concern', batch_identities=[''], test_mode=True)
        assert (not mock_dispatch.called)
        assert (exc.value.message == 'Cannot send an email to third-party vendor. No organization name found.')

    ('fides.api.service.connectors.erasure_email_connector.dispatch_message')
    def test_send_single_erasure_email(self, mock_dispatch, test_fides_org, db, messaging_config):
        # Happy path: verify every keyword forwarded to dispatch_message.
        send_single_erasure_email(db=db, subject_email='', subject_name='To whom it may concern', batch_identities=['customer-'], test_mode=True)
        assert mock_dispatch.called
        call_kwargs = mock_dispatch.call_args.kwargs
        assert (call_kwargs['db'] == db)
        assert (call_kwargs['action_type'] == MessagingActionType.MESSAGE_ERASURE_REQUEST_FULFILLMENT)
        assert (call_kwargs['to_identity'].email == '')
        assert (call_kwargs['to_identity'].phone_number is None)
        assert (call_kwargs['to_identity'].ga_client_id is None)
        assert (call_kwargs['service_type'] == 'mailgun')
        message_body_params = call_kwargs['message_body_params']
        assert (message_body_params.controller == 'Test Org')
        assert (message_body_params.third_party_vendor_name == 'To whom it may concern')
        assert (message_body_params.identities == ['customer-'])
        assert (call_kwargs['subject_override'] == 'Test notification of user erasure requests from Test Org')

    ('fides.api.service.connectors.erasure_email_connector.dispatch_message')
    .usefixtures('test_fides_org', 'messaging_config', 'set_notification_service_type_to_twilio_email')
    def test_send_single_erasure_email_respects_messaging_service_type(self, mock_dispatch, db):
        # The configured notification service type overrides the default.
        send_single_erasure_email(db=db, subject_email='', subject_name='To whom it may concern', batch_identities=['customer-'], test_mode=True)
        assert mock_dispatch.called
        call_kwargs = mock_dispatch.call_args.kwargs
        assert (call_kwargs['service_type'] == 'twilio_email')

    def test_needs_email(self, test_attentive_erasure_email_connector, privacy_request_with_erasure_policy):
        # Supported identity + erasure rules -> e-mail is needed.
        assert (test_attentive_erasure_email_connector.needs_email({'email': ''}, privacy_request_with_erasure_policy) is True)

    def test_needs_email_without_erasure_rules(self, test_attentive_erasure_email_connector, privacy_request):
        # No erasure rules on the policy -> no e-mail.
        assert (test_attentive_erasure_email_connector.needs_email({'email': ''}, privacy_request) is False)

    def test_needs_email_unsupported_identity(self, test_attentive_erasure_email_connector, privacy_request_with_erasure_policy):
        # Identity type the connector does not support -> no e-mail.
        assert (test_attentive_erasure_email_connector.needs_email({'phone': ''}, privacy_request_with_erasure_policy) is False)
def load_exclude_definitions(file_globs: List[str]) -> List[FieldNestedEntry]:
    """Load field-exclusion definitions from the given file globs.

    Returns an empty list when no globs were supplied; raises ``ValueError``
    when globs were supplied but matched no definitions (an explicit
    ``--exclude`` that excludes nothing is treated as a user error).
    """
    if file_globs:
        excludes: List[FieldNestedEntry] = loader.load_definitions(file_globs)
        if excludes:
            return excludes
        raise ValueError('--exclude specified, but no exclusions found in {}'.format(file_globs))
    return []
class IPv4TCPDstMasked(MatchTest):
    """Match on a masked TCP destination port.

    Mask 254 (0xfe) ignores the least-significant bit, so dst=52 matches
    ports 52 and 53 but not 51 or 54.
    """

    def runTest(self):
        match = ofp.match([
            ofp.oxm.eth_type(2048),          # IPv4
            ofp.oxm.ip_proto(6),             # TCP
            ofp.oxm.tcp_dst_masked(52, 254),
        ])
        matching = {}
        for port in (53, 52):
            matching['tcp dport=%d' % port] = simple_tcp_packet(tcp_dport=port)
        nonmatching = {}
        for port in (54, 51):
            nonmatching['tcp dport=%d' % port] = simple_tcp_packet(tcp_dport=port)
        self.verify_match(match, matching, nonmatching)
class desc_stats_request(stats_request):
    # Wire-protocol message: description statistics request (loxigen-style
    # generated class). Fixed header fields plus the stats-request preamble;
    # the body is 4 bytes of padding only.
    version = 5     # OpenFlow wire version this class serializes
    type = 18       # message type byte
    stats_type = 0  # stats/multipart sub-type: description

    def __init__(self, xid=None, flags=None):
        # xid: transaction id (None means "assign later"); flags: request
        # flags bitmap, defaults to 0.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        return

    def pack(self):
        # Serialize to wire format. The length field at index 2 is written
        # as a placeholder first and patched once the total size is known.
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, fixed below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # pad
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        # Deserialize from a buffer reader and return a new instance.
        # NOTE(review): takes the reader as its first argument and builds a
        # fresh object — in loxigen output this is a @staticmethod; the
        # decorator appears to have been stripped here.
        obj = desc_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)  # bound reads to this message
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 0)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        return obj

    def __eq__(self, other):
        # Equality over the per-instance fields only (version/type/stats_type
        # are fixed at class level).
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        return True

    def pretty_print(self, q):
        # Render a human-readable dump via the pretty-printer ``q``.
        q.text('desc_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REQ_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
            q.breakable()
        q.text('}')
def use_experimental_feature(ctx, param, value):
    """Click option callback that enables an experimental bench CLI feature.

    Args:
        ctx: click context (unused, required by the callback signature).
        param: click parameter (unused, required by the callback signature).
        value: name of the feature to enable; a falsy value is a no-op.

    Raises:
        FeatureDoesNotExistError: when ``value`` names an unknown feature.
    """
    if (not value):
        return
    if (value == 'dynamic-feed'):
        # Flip the module-level switches that the rendering code reads.
        import bench.cli
        bench.cli.dynamic_feed = True
        bench.cli.verbose = True
    else:
        from bench.exceptions import FeatureDoesNotExistError
        raise FeatureDoesNotExistError(f'Feature {value} does not exist')
    from bench.cli import is_envvar_warn_set
    if is_envvar_warn_set:
        # An env var suppresses the experimental-feature warning.
        return
    # Fix: "it's" -> "its" in the user-facing warning message.
    click.secho(f"WARNING: bench is using its new CLI rendering engine. This behaviour has been enabled by passing --{value} in the command. This feature is experimental and may not be implemented for all commands yet.", fg='yellow')
class BstDfs(Bst):
    """Depth-first traversals for a binary search tree.

    Each traversal walks the subtree rooted at ``node`` and calls
    ``visit_func`` once per node; ``None`` denotes an empty subtree.
    """

    def in_order_traversal(self, node, visit_func):
        # left subtree, then node, then right subtree
        if node is None:
            return
        self.in_order_traversal(node.left, visit_func)
        visit_func(node)
        self.in_order_traversal(node.right, visit_func)

    def pre_order_traversal(self, node, visit_func):
        # node first, then left subtree, then right subtree
        if node is None:
            return
        visit_func(node)
        self.pre_order_traversal(node.left, visit_func)
        self.pre_order_traversal(node.right, visit_func)

    def post_order_traversal(self, node, visit_func):
        # left subtree, then right subtree, then node
        if node is None:
            return
        self.post_order_traversal(node.left, visit_func)
        self.post_order_traversal(node.right, visit_func)
        visit_func(node)
class BaseFileAdmin(BaseView, ActionsMixin):
    # File-manager admin view backed by a pluggable ``storage`` object.
    # Provides browsing, upload, download, rename, delete, mkdir and in-place
    # editing; subclasses customise behaviour via the class-level flags and
    # the ``on_*`` / ``before_*`` hooks.
    #
    # NOTE(review): the bare parenthesised expressions that precede several
    # methods (e.g. ``('/upload/', methods=('GET', 'POST'))``) look like
    # stripped ``@expose(...)`` / ``@action(...)`` decorators; likewise
    # ``_separator`` is used as an attribute (``self._separator.join``) but
    # defined as a method — presumably a stripped ``@property``. Confirm
    # against the original source before relying on this file.

    # Feature flags controlling which operations the view offers.
    can_upload = True
    can_download = True
    can_delete = True
    can_delete_dirs = True
    can_mkdir = True
    can_rename = True
    # None means every extension is allowed for upload.
    allowed_extensions = None
    # Extensions (without the dot) that may be edited in the browser.
    editable_extensions = tuple()
    # Templates for the listing page and each form page / modal variant.
    list_template = 'admin/file/list.html'
    upload_template = 'admin/file/form.html'
    upload_modal_template = 'admin/file/modals/form.html'
    mkdir_template = 'admin/file/form.html'
    mkdir_modal_template = 'admin/file/modals/form.html'
    rename_template = 'admin/file/form.html'
    rename_modal_template = 'admin/file/modals/form.html'
    edit_template = 'admin/file/form.html'
    edit_modal_template = 'admin/file/modals/form.html'
    # Base class for all generated forms.
    form_base_class = form.BaseForm
    # Whether each operation renders in a modal instead of a full page.
    rename_modal = False
    upload_modal = False
    mkdir_modal = False
    edit_modal = False
    # Columns the listing can show/sort; labels default to capitalized names.
    possible_columns = ('name', 'rel_path', 'is_dir', 'size', 'date')
    column_list = ('name', 'size', 'date')
    column_sortable_list = column_list
    default_sort_column = None
    default_desc = 0
    column_labels = dict(((column, column.capitalize()) for column in column_list))
    date_format = '%Y-%m-%d %H:%M:%S'

    def __init__(self, base_url=None, name=None, category=None, endpoint=None, url=None, verify_path=True, verify_path=True, menu_class_name=None, menu_icon_type=None, menu_icon_value=None, storage=None):
        # base_url: optional URL prefix used when redirecting downloads;
        # storage: backend providing path/file operations for this view.
        self.base_url = base_url
        self.storage = storage
        self.init_actions()
        self._on_windows = (platform.system() == 'Windows')
        # Normalize extension collections to sets for O(1) membership tests.
        if (self.allowed_extensions and (not isinstance(self.allowed_extensions, set))):
            self.allowed_extensions = set(self.allowed_extensions)
        if (self.editable_extensions and (not isinstance(self.editable_extensions, set))):
            self.editable_extensions = set(self.editable_extensions)
        super(BaseFileAdmin, self).__init__(name, category, endpoint, url, menu_class_name=menu_class_name, menu_icon_type=menu_icon_type, menu_icon_value=menu_icon_value)

    def is_accessible_path(self, path):
        """Override to restrict access to specific paths; default allows all."""
        return True

    def get_base_path(self):
        """Return the storage backend's root directory."""
        return self.storage.get_base_path()

    def get_base_url(self):
        """Return the configured base URL (may be None)."""
        return self.base_url

    def get_upload_form(self):
        """Build the upload form class; validates extension via the admin."""
        class UploadForm(self.form_base_class):
            upload = fields.FileField(lazy_gettext('File to upload'))

            def __init__(self, *args, **kwargs):
                super(UploadForm, self).__init__(*args, **kwargs)
                # The owning admin view is required for extension checks.
                self.admin = kwargs['admin']

            def validate_upload(self, field):
                if (not self.upload.data):
                    raise validators.ValidationError(gettext('File required.'))
                filename = self.upload.data.filename
                if (not self.admin.is_file_allowed(filename)):
                    raise validators.ValidationError(gettext('Invalid file type.'))
        return UploadForm

    def get_edit_form(self):
        """Build the in-browser file-content edit form class."""
        class EditForm(self.form_base_class):
            content = fields.TextAreaField(lazy_gettext('Content'), (validators.InputRequired(),))
        return EditForm

    def get_name_form(self):
        """Build the name form (used by rename/mkdir) with a safe-name check."""
        def validate_name(self, field):
            # Rejects Windows-reserved device names and filesystem-unsafe chars.
            regexp = re.compile('^(?!^(PRN|AUX|CLOCK\\$|NUL|CON|COM\\d|LPT\\d|\\..*)(\\..+)?$)[^\\x00-\\x1f\\\\?*:\\";|/]+$')
            if (not regexp.match(field.data)):
                raise validators.ValidationError(gettext('Invalid name'))

        class NameForm(self.form_base_class):
            name = fields.StringField(lazy_gettext('Name'), validators=[validators.InputRequired(), validate_name])
            path = fields.HiddenField()
        return NameForm

    def get_delete_form(self):
        """Build the delete-confirmation form class (hidden path only)."""
        class DeleteForm(self.form_base_class):
            path = fields.HiddenField(validators=[validators.InputRequired()])
        return DeleteForm

    def get_action_form(self):
        """Build the bulk-action form class."""
        class ActionForm(self.form_base_class):
            action = fields.HiddenField()
            url = fields.HiddenField()
        return ActionForm

    def upload_form(self):
        """Instantiate the upload form from the current request data."""
        upload_form_class = self.get_upload_form()
        if request.form:
            # Merge file uploads into the posted form data.
            formdata = request.form.copy()
            formdata.update(request.files)
            return upload_form_class(formdata, admin=self)
        elif request.files:
            return upload_form_class(request.files, admin=self)
        else:
            return upload_form_class(admin=self)

    def name_form(self):
        """Instantiate the name form from POST data, query args, or empty."""
        name_form_class = self.get_name_form()
        if request.form:
            return name_form_class(request.form)
        elif request.args:
            return name_form_class(request.args)
        else:
            return name_form_class()

    def edit_form(self):
        """Instantiate the edit form from the current request data."""
        edit_form_class = self.get_edit_form()
        if request.form:
            return edit_form_class(request.form)
        else:
            return edit_form_class()

    def delete_form(self):
        """Instantiate the delete form from the current request data."""
        delete_form_class = self.get_delete_form()
        if request.form:
            return delete_form_class(request.form)
        else:
            return delete_form_class()

    def action_form(self):
        """Instantiate the bulk-action form from the current request data."""
        action_form_class = self.get_action_form()
        if request.form:
            return action_form_class(request.form)
        else:
            return action_form_class()

    def is_file_allowed(self, filename):
        """Return True if the file's extension may be uploaded."""
        ext = op.splitext(filename)[1].lower()
        if ext.startswith('.'):
            ext = ext[1:]
        if (self.allowed_extensions and (ext not in self.allowed_extensions)):
            return False
        return True

    def is_file_editable(self, filename):
        """Return True if the file's extension may be edited in-browser."""
        ext = op.splitext(filename)[1].lower()
        if ext.startswith('.'):
            ext = ext[1:]
        if ((not self.editable_extensions) or (ext not in self.editable_extensions)):
            return False
        return True

    def is_in_folder(self, base_path, directory):
        """Return True if ``directory`` lies under ``base_path`` (traversal guard)."""
        return op.normpath(directory).startswith(base_path)

    def save_file(self, path, file_data):
        """Delegate file saving to the storage backend."""
        self.storage.save_file(path, file_data)

    def validate_form(self, form):
        """Return True when the request is a submit and the form validates."""
        return helpers.validate_form_on_submit(form)

    def _get_dir_url(self, endpoint, path=None, **kwargs):
        """Build a URL for a directory; empty path means the view root."""
        if (not path):
            return self.get_url(endpoint, **kwargs)
        else:
            if self._on_windows:
                path = path.replace('\\', '/')
            kwargs['path'] = path
            return self.get_url(endpoint, **kwargs)

    def _get_file_url(self, path, **kwargs):
        """Build a URL for a file: edit view if editable, else download."""
        if self._on_windows:
            path = path.replace('\\', '/')
        if self.is_file_editable(path):
            route = '.edit'
        else:
            route = '.download'
        return self.get_url(route, path=path, **kwargs)

    def _normalize_path(self, path):
        """Resolve a request path to (base_path, absolute directory, rel path).

        Aborts with 404 on traversal outside the base path or a missing path.
        """
        base_path = self.get_base_path()
        if (path is None):
            directory = base_path
            path = ''
        else:
            path = op.normpath(path)
            if base_path:
                directory = self._separator.join([base_path, path])
            else:
                directory = path
            directory = op.normpath(directory)
            if (not self.is_in_folder(base_path, directory)):
                abort(404)
        if (not self.storage.path_exists(directory)):
            abort(404)
        return (base_path, directory, path)

    def is_action_allowed(self, name):
        """Gate bulk actions on the corresponding feature flags."""
        if ((name == 'delete') and (not self.can_delete)):
            return False
        elif ((name == 'edit') and (len(self.editable_extensions) == 0)):
            return False
        return True

    # Subclass hooks; all default to no-ops.
    def on_rename(self, full_path, dir_base, filename):
        """Called after a successful rename."""
        pass

    def on_edit_file(self, full_path, path):
        """Called after a successful in-browser edit."""
        pass

    def on_file_upload(self, directory, path, filename):
        """Called after a successful upload."""
        pass

    def on_mkdir(self, parent_dir, dir_name):
        """Called after a directory is created."""
        pass

    def before_directory_delete(self, full_path, dir_name):
        """Called before a directory tree is deleted."""
        pass

    def before_file_delete(self, full_path, filename):
        """Called before a file is deleted."""
        pass

    def on_directory_delete(self, full_path, dir_name):
        """Called after a directory tree is deleted."""
        pass

    def on_file_delete(self, full_path, filename):
        """Called after a file is deleted."""
        pass

    def is_column_visible(self, column):
        """Return True if the column is shown in the listing."""
        return (column in self.column_list)

    def is_column_sortable(self, column):
        """Return True if the listing can be sorted by the column."""
        return (column in self.column_sortable_list)

    def column_label(self, column):
        """Return the display label for a column."""
        return self.column_labels[column]

    def timestamp_format(self, timestamp):
        """Format a POSIX timestamp using ``date_format``."""
        return datetime.fromtimestamp(timestamp).strftime(self.date_format)

    def _save_form_files(self, directory, path, form):
        """Save an uploaded file, refusing to overwrite an existing one."""
        filename = self._separator.join([directory, secure_filename(form.upload.data.filename)])
        if self.storage.path_exists(filename):
            secure_name = self._separator.join([path, secure_filename(form.upload.data.filename)])
            raise Exception(gettext('File "%(name)s" already exists.', name=secure_name))
        else:
            self.save_file(filename, form.upload.data)
            self.on_file_upload(directory, path, filename)

    def _separator(self):
        # NOTE(review): used elsewhere as ``self._separator.join(...)`` —
        # presumably a stripped @property; confirm.
        return self.storage.separator

    def _get_breadcrumbs(self, path):
        """Build (name, cumulative path) pairs for the breadcrumb trail."""
        accumulator = []
        breadcrumbs = []
        for n in path.split(self._separator):
            accumulator.append(n)
            breadcrumbs.append((n, self._separator.join(accumulator)))
        return breadcrumbs

    # NOTE(review): stripped route decorators (likely @expose) follow.
    ('/old_index')
    ('/old_b/<path:path>')
    def index(self, path=None):
        """Deprecated alias for :meth:`index_view`."""
        warnings.warn('deprecated: use index_view instead.', DeprecationWarning)
        return redirect(self.get_url('.index_view', path=path))

    ('/')
    ('/b/<path:path>')
    def index_view(self, path=None):
        """Render the directory listing with sorting, actions and breadcrumbs."""
        if self.can_delete:
            delete_form = self.delete_form()
        else:
            delete_form = None
        (base_path, directory, path) = self._normalize_path(path)
        if (not self.is_accessible_path(path)):
            flash(gettext('Permission denied.'), 'error')
            return redirect(self._get_dir_url('.index_view'))
        items = []
        # Add a ".." entry unless we are already at the root.
        if (directory != base_path):
            parent_path = op.normpath(self._separator.join([path, '..']))
            if (parent_path == '.'):
                parent_path = None
            items.append(('..', parent_path, True, 0, 0))
        for item in self.storage.get_files(path, directory):
            (file_name, rel_path, is_dir, size, last_modified) = item
            if self.is_accessible_path(rel_path):
                items.append(item)
        # Sorting: request args override the class defaults.
        sort_column = (request.args.get('sort', None, type=str) or self.default_sort_column)
        sort_desc = (request.args.get('desc', 0, type=int) or self.default_desc)
        if (sort_column is None):
            if self.default_sort_column:
                sort_column = self.default_sort_column
            if self.default_desc:
                sort_desc = self.default_desc
        try:
            column_index = self.possible_columns.index(sort_column)
        except ValueError:
            # Unknown sort column: fall back to the default.
            # NOTE(review): if default_sort_column is not None here,
            # ``column_index`` stays unbound and the else-branch below would
            # raise NameError — confirm against upstream.
            sort_column = self.default_sort_column
        if (sort_column is None):
            # Default ordering: name, directories first, newest first (posix).
            items.sort(key=itemgetter(0))
            items.sort(key=itemgetter(2), reverse=True)
            if (not self._on_windows):
                items.sort(key=(lambda x: (x[0], x[1], x[2], x[3], datetime.utcfromtimestamp(x[4]))), reverse=True)
        else:
            items.sort(key=itemgetter(column_index), reverse=sort_desc)
        breadcrumbs = self._get_breadcrumbs(path)
        (actions, actions_confirmation) = self.get_actions_list()
        if actions:
            action_form = self.action_form()
        else:
            action_form = None

        def sort_url(column, path, invert=False):
            # Helper passed to the template to build column-sort links.
            desc = None
            if (not path):
                path = None
            if (invert and (not sort_desc)):
                desc = 1
            return self.get_url('.index_view', path=path, sort=column, desc=desc)
        return self.render(self.list_template, dir_path=path, breadcrumbs=breadcrumbs, get_dir_url=self._get_dir_url, get_file_url=self._get_file_url, items=items, actions=actions, actions_confirmation=actions_confirmation, action_form=action_form, delete_form=delete_form, sort_column=sort_column, sort_desc=sort_desc, sort_url=sort_url, timestamp_format=self.timestamp_format)

    ('/upload/', methods=('GET', 'POST'))
    ('/upload/<path:path>', methods=('GET', 'POST'))
    def upload(self, path=None):
        """Handle the file-upload form for the given directory."""
        (base_path, directory, path) = self._normalize_path(path)
        if (not self.can_upload):
            flash(gettext('File uploading is disabled.'), 'error')
            return redirect(self._get_dir_url('.index_view', path))
        if (not self.is_accessible_path(path)):
            flash(gettext('Permission denied.'), 'error')
            return redirect(self._get_dir_url('.index_view'))
        form = self.upload_form()
        if self.validate_form(form):
            try:
                self._save_form_files(directory, path, form)
                flash(gettext('Successfully saved file: %(name)s', name=form.upload.data.filename), 'success')
                return redirect(self._get_dir_url('.index_view', path))
            except Exception as ex:
                flash(gettext('Failed to save file: %(error)s', error=ex), 'error')
        if (self.upload_modal and request.args.get('modal')):
            template = self.upload_modal_template
        else:
            template = self.upload_template
        return self.render(template, form=form, header_text=gettext('Upload File'), modal=request.args.get('modal'))

    ('/download/<path:path>')
    def download(self, path=None):
        """Serve a file, either via redirect to base_url or via the storage."""
        if (not self.can_download):
            abort(404)
        (base_path, directory, path) = self._normalize_path(path)
        base_url = self.get_base_url()
        if base_url:
            base_url = urljoin(self.get_url('.index_view'), base_url)
            return redirect(urljoin(quote(base_url), quote(path)))
        return self.storage.send_file(directory)

    ('/mkdir/', methods=('GET', 'POST'))
    ('/mkdir/<path:path>', methods=('GET', 'POST'))
    def mkdir(self, path=None):
        """Handle directory creation within the given directory."""
        (base_path, directory, path) = self._normalize_path(path)
        dir_url = self._get_dir_url('.index_view', path)
        if (not self.can_mkdir):
            flash(gettext('Directory creation is disabled.'), 'error')
            return redirect(dir_url)
        if (not self.is_accessible_path(path)):
            flash(gettext('Permission denied.'), 'error')
            return redirect(self._get_dir_url('.index_view'))
        form = self.name_form()
        if self.validate_form(form):
            try:
                self.storage.make_dir(directory, form.name.data)
                self.on_mkdir(directory, form.name.data)
                flash(gettext('Successfully created directory: %(directory)s', directory=form.name.data), 'success')
                return redirect(dir_url)
            except Exception as ex:
                flash(gettext('Failed to create directory: %(error)s', error=ex), 'error')
        else:
            helpers.flash_errors(form, message='Failed to create directory: %(error)s')
        if (self.mkdir_modal and request.args.get('modal')):
            template = self.mkdir_modal_template
        else:
            template = self.mkdir_template
        return self.render(template, form=form, dir_url=dir_url, header_text=gettext('Create Directory'))

    def delete_file(self, file_path):
        """Delegate single-file deletion to the storage backend."""
        self.storage.delete_file(file_path)

    ('/delete/', methods=('POST',))
    def delete(self):
        """Handle deletion of a file or directory tree (POST only)."""
        form = self.delete_form()
        path = form.path.data
        if path:
            return_url = self._get_dir_url('.index_view', op.dirname(path))
        else:
            return_url = self.get_url('.index_view')
        if self.validate_form(form):
            (base_path, full_path, path) = self._normalize_path(path)
            if (not self.can_delete):
                flash(gettext('Deletion is disabled.'), 'error')
                return redirect(return_url)
            if (not self.is_accessible_path(path)):
                flash(gettext('Permission denied.'), 'error')
                return redirect(self._get_dir_url('.index_view'))
            if self.storage.is_dir(full_path):
                if (not self.can_delete_dirs):
                    flash(gettext('Directory deletion is disabled.'), 'error')
                    return redirect(return_url)
                try:
                    self.before_directory_delete(full_path, path)
                    self.storage.delete_tree(full_path)
                    self.on_directory_delete(full_path, path)
                    flash(gettext('Directory "%(path)s" was successfully deleted.', path=path), 'success')
                except Exception as ex:
                    flash(gettext('Failed to delete directory: %(error)s', error=ex), 'error')
            else:
                try:
                    self.before_file_delete(full_path, path)
                    self.delete_file(full_path)
                    self.on_file_delete(full_path, path)
                    flash(gettext('File "%(name)s" was successfully deleted.', name=path), 'success')
                except Exception as ex:
                    flash(gettext('Failed to delete file: %(name)s', name=ex), 'error')
        else:
            helpers.flash_errors(form, message='Failed to delete file. %(error)s')
        return redirect(return_url)

    ('/rename/', methods=('GET', 'POST'))
    def rename(self):
        """Handle renaming of a file or directory."""
        form = self.name_form()
        path = form.path.data
        if path:
            (base_path, full_path, path) = self._normalize_path(path)
            return_url = self._get_dir_url('.index_view', op.dirname(path))
        else:
            return redirect(self.get_url('.index_view'))
        if (not self.can_rename):
            flash(gettext('Renaming is disabled.'), 'error')
            return redirect(return_url)
        if (not self.is_accessible_path(path)):
            flash(gettext('Permission denied.'), 'error')
            return redirect(self._get_dir_url('.index_view'))
        if (not self.storage.path_exists(full_path)):
            flash(gettext('Path does not exist.'), 'error')
            return redirect(return_url)
        if self.validate_form(form):
            try:
                dir_base = op.dirname(full_path)
                filename = secure_filename(form.name.data)
                self.storage.rename_path(full_path, self._separator.join([dir_base, filename]))
                self.on_rename(full_path, dir_base, filename)
                flash(gettext('Successfully renamed "%(src)s" to "%(dst)s"', src=op.basename(path), dst=filename), 'success')
            except Exception as ex:
                flash(gettext('Failed to rename: %(error)s', error=ex), 'error')
            return redirect(return_url)
        else:
            helpers.flash_errors(form, message='Failed to rename: %(error)s')
        if (self.rename_modal and request.args.get('modal')):
            template = self.rename_modal_template
        else:
            template = self.rename_template
        return self.render(template, form=form, path=op.dirname(path), name=op.basename(path), dir_url=return_url, header_text=gettext('Rename %(name)s', name=op.basename(path)))

    ('/edit/', methods=('GET', 'POST'))
    def edit(self):
        """Handle in-browser editing; supports a queue of paths to edit."""
        next_url = None
        path = request.args.getlist('path')
        if (not path):
            return redirect(self.get_url('.index_view'))
        if (len(path) > 1):
            # Multiple files selected: chain to the rest after saving.
            next_url = self.get_url('.edit', path=path[1:])
        path = path[0]
        (base_path, full_path, path) = self._normalize_path(path)
        if ((not self.is_accessible_path(path)) or (not self.is_file_editable(path))):
            flash(gettext('Permission denied.'), 'error')
            return redirect(self._get_dir_url('.index_view'))
        dir_url = self._get_dir_url('.index_view', op.dirname(path))
        next_url = (next_url or dir_url)
        form = self.edit_form()
        error = False
        if self.validate_form(form):
            # POST: persist the submitted content.
            form.process(request.form, content='')
            if form.validate():
                try:
                    self.storage.write_file(full_path, request.form['content'])
                except IOError:
                    flash(gettext('Error saving changes to %(name)s.', name=path), 'error')
                    error = True
                else:
                    self.on_edit_file(full_path, path)
                    flash(gettext('Changes to %(name)s saved successfully.', name=path), 'success')
                    return redirect(next_url)
        else:
            helpers.flash_errors(form, message='Failed to edit file. %(error)s')
            # GET (or failed validation): load the current file content.
            try:
                content = self.storage.read_file(full_path)
            except IOError:
                flash(gettext('Error reading %(name)s.', name=path), 'error')
                error = True
            except:
                flash(gettext('Unexpected error while reading from %(name)s', name=path), 'error')
                error = True
            else:
                try:
                    content = content.decode('utf8')
                except UnicodeDecodeError:
                    flash(gettext('Cannot edit %(name)s.', name=path), 'error')
                    error = True
                except:
                    flash(gettext('Unexpected error while reading from %(name)s', name=path), 'error')
                    error = True
                else:
                    form.content.data = content
            if error:
                return redirect(next_url)
        if (self.edit_modal and request.args.get('modal')):
            template = self.edit_modal_template
        else:
            template = self.edit_template
        return self.render(template, dir_url=dir_url, path=path, form=form, error=error, header_text=gettext('Editing %(path)s', path=path))

    ('/action/', methods=('POST',))
    def action_view(self):
        """Dispatch a submitted bulk action."""
        return self.handle_action()

    # NOTE(review): stripped @action decorators follow.
    ('delete', lazy_gettext('Delete'), lazy_gettext('Are you sure you want to delete these files?'))
    def action_delete(self, items):
        """Bulk-delete the selected files."""
        if (not self.can_delete):
            flash(gettext('File deletion is disabled.'), 'error')
            return
        for path in items:
            (base_path, full_path, path) = self._normalize_path(path)
            if self.is_accessible_path(path):
                try:
                    self.delete_file(full_path)
                    flash(gettext('File "%(name)s" was successfully deleted.', name=path), 'success')
                except Exception as ex:
                    flash(gettext('Failed to delete file: %(name)s', name=ex), 'error')

    ('edit', lazy_gettext('Edit'))
    def action_edit(self, items):
        """Bulk-edit: redirect to the edit view with the selected paths queued."""
        return redirect(self.get_url('.edit', path=items))
class Migration(migrations.Migration):
    # Schema reorganization: creates the raw/int/temp/rpt schemas and moves
    # existing tables and materialized views out of ``public`` into them.
    # Every RunSQL has a matching reverse_sql so the migration is reversible;
    # IF EXISTS is used for objects that may be absent in some environments.
    dependencies = [('awards', '0092_transactionfpds_entity_data_source'), ('recipient', '0020_auto__1352'), ('search', '0007_transactionsearch_parent_uei'), ('transactions', '0008_sourceprocurementtransaction_entity_data_source')]
    operations = [
        # Create the four target schemas.
        migrations.RunSQL(sql='CREATE SCHEMA IF NOT EXISTS raw;', reverse_sql='DROP SCHEMA raw;'),
        migrations.RunSQL(sql='CREATE SCHEMA IF NOT EXISTS int;', reverse_sql='DROP SCHEMA int;'),
        migrations.RunSQL(sql='CREATE SCHEMA IF NOT EXISTS temp;', reverse_sql='DROP SCHEMA temp;'),
        migrations.RunSQL(sql='CREATE SCHEMA IF NOT EXISTS rpt;', reverse_sql='DROP SCHEMA rpt;'),
        # Source/raw-layer tables -> raw.
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.broker_subaward SET SCHEMA raw;', reverse_sql='ALTER TABLE raw.broker_subaward SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.duns SET SCHEMA raw;', reverse_sql='ALTER TABLE raw.duns SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.source_assistance_transaction SET SCHEMA raw;', reverse_sql='ALTER TABLE raw.source_assistance_transaction SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.source_procurement_transaction SET SCHEMA raw;', reverse_sql='ALTER TABLE raw.source_procurement_transaction SET SCHEMA public;'),
        # Intermediate tables -> int.
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.subaward SET SCHEMA int;', reverse_sql='ALTER TABLE int.subaward SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.transaction_normalized SET SCHEMA int;', reverse_sql='ALTER TABLE int.transaction_normalized SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.transaction_fabs SET SCHEMA int;', reverse_sql='ALTER TABLE int.transaction_fabs SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.transaction_fpds SET SCHEMA int;', reverse_sql='ALTER TABLE int.transaction_fpds SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.transaction_delta SET SCHEMA int;', reverse_sql='ALTER TABLE int.transaction_delta SET SCHEMA public;'),
        # Chunked transaction_search tables and matviews -> temp.
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.transaction_search_0 SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.transaction_search_0 SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.transaction_search_1 SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.transaction_search_1 SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.transaction_search_2 SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.transaction_search_2 SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.transaction_search_3 SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.transaction_search_3 SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.transaction_search_4 SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.transaction_search_4 SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.transaction_search_5 SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.transaction_search_5 SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.transaction_search_6 SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.transaction_search_6 SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.transaction_search_7 SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.transaction_search_7 SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.transaction_search_8 SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.transaction_search_8 SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.transaction_search_9 SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.transaction_search_9 SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.mv_contract_award_search SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.mv_contract_award_search SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.mv_idv_award_search SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.mv_idv_award_search SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.mv_loan_award_search SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.mv_loan_award_search SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.mv_grant_award_search SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.mv_grant_award_search SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.mv_directpayment_award_search SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.mv_directpayment_award_search SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.mv_other_award_search SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.mv_other_award_search SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.mv_pre2008_award_search SET SCHEMA temp;', reverse_sql='ALTER TABLE IF EXISTS temp.mv_pre2008_award_search SET SCHEMA public;'),
        # Reporting tables and views -> rpt.
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.subaward_view SET SCHEMA rpt;', reverse_sql='ALTER TABLE IF EXISTS rpt.subaward_view SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.recipient_lookup SET SCHEMA rpt;', reverse_sql='ALTER TABLE rpt.recipient_lookup SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.recipient_profile SET SCHEMA rpt;', reverse_sql='ALTER TABLE rpt.recipient_profile SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.transaction_search SET SCHEMA rpt;', reverse_sql='ALTER TABLE rpt.transaction_search SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.awards SET SCHEMA rpt;', reverse_sql='ALTER TABLE rpt.awards SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE public.parent_award SET SCHEMA rpt;', reverse_sql='ALTER TABLE rpt.parent_award SET SCHEMA public;'),
        migrations.RunSQL(sql='ALTER TABLE IF EXISTS public.vw_award_search SET SCHEMA rpt;', reverse_sql='ALTER TABLE IF EXISTS rpt.vw_award_search SET SCHEMA public;'),
    ]
def test_get_columns():
    """DataDefinition.get_columns must honor type filtering and features_only."""
    cat = ColumnType.Categorical
    num = ColumnType.Numerical
    dtt = ColumnType.Datetime
    spec = [
        ('id', cat), ('datetime', dtt), ('target', cat), ('predicted', cat),
        ('class_1', num), ('class_2', num), ('class_3', num),
        ('column_1', cat), ('column_2', num), ('column_3', num),
        ('column_4', dtt), ('column_5', dtt), ('column_6', dtt),
    ]
    definition = DataDefinition(
        columns=[ColumnDefinition(name, ctype) for name, ctype in spec],
        id_column=ColumnDefinition('id', cat),
        datetime_column=ColumnDefinition('datetime', dtt),
        target=ColumnDefinition('target', cat),
        prediction_columns=PredictionColumns(
            predicted_values=ColumnDefinition('predicted', cat),
            prediction_probas=[ColumnDefinition(f'class_{i}', num) for i in (1, 2, 3)],
        ),
        task='classification',
        classification_labels=['class_1', 'class_2', 'class_3'],
        embeddings=None,
        reference_present=True,
        user_id=None,
        item_id=None,
        recommendations_type=None,
    )

    def names(**kwargs):
        # Helper: column names returned for a given get_columns() call.
        return [cd.column_name for cd in definition.get_columns(**kwargs)]

    assert names() == [name for name, _ in spec]
    assert names(filter_def=cat) == ['id', 'target', 'predicted', 'column_1']
    assert names(filter_def=num) == ['class_1', 'class_2', 'class_3', 'column_2', 'column_3']
    assert names(filter_def=dtt) == ['datetime', 'column_4', 'column_5', 'column_6']
    assert names(features_only=True) == ['column_1', 'column_2', 'column_3', 'column_4', 'column_5', 'column_6']
    assert names(filter_def=cat, features_only=True) == ['column_1']
    assert names(filter_def=num, features_only=True) == ['column_2', 'column_3']
    assert names(filter_def=dtt, features_only=True) == ['column_4', 'column_5', 'column_6']
def deprecated(warn: bool = True, alternative: Optional[Callable] = None, deprecation_text=None):
    """Decorator factory that logs a deprecation notice each time the function runs.

    Args:
        warn: If True, log at WARNING level; otherwise at INFO level.
        alternative: Optional replacement callable; its name is mentioned in the message.
        deprecation_text: Optional extra text appended to the message.

    Returns:
        A decorator that wraps the target function, logging before delegating to it.
    """
    import functools

    def decorator(function):
        # functools.wraps preserves __name__/__doc__ of the wrapped function,
        # which the original omitted (breaking introspection and doc tools).
        @functools.wraps(function)
        def wrapper(*args, **kwargs):
            info = f'`{function.__name__}` is deprecated.'
            if alternative:
                info += f' Use `{alternative.__name__}` instead'
            if deprecation_text:
                info += ' ' + deprecation_text
            if warn:
                logger.warning(info)
            else:
                logger.info(info)
            return function(*args, **kwargs)
        return wrapper
    return decorator
# NOTE(review): `_heads(...)` looks like a decorator whose `@`-prefix (and
# possibly part of its name) was lost in extraction — confirm against upstream.
_heads([JacobiTheta, JacobiThetaQ])
def tex_JacobiTheta(head, args, **kwargs):
    """Render a Jacobi theta expression as LaTeX.

    3 args: theta_index(z, tau). 4 args: additionally a differentiation order;
    small integer orders 0..3 render as prime marks, anything else as ^{(r)}.
    Raises ValueError for any other arity.
    """
    argstr = [arg.latex(**kwargs) for arg in args]
    midsep = ','
    if (len(args) == 3):
        index = args[0].latex(in_small=True)
        z = argstr[1]
        tau = argstr[2]
        return ('\\theta_{%s}\\!\\left(%s %s %s\\right)' % (index, z, midsep, tau))
    if (len(args) == 4):
        index = args[0].latex(in_small=True)
        z = argstr[1]
        tau = argstr[2]
        if args[3].is_integer():
            r = args[3]._integer
            # Orders 0..3 become prime marks: theta, theta', theta'', theta'''.
            if ((r >= 0) and (r <= 3)):
                return ('\\theta%s_{%s}\\!\\left(%s %s %s\\right)' % (("'" * r), index, z, midsep, tau))
        # General (symbolic or large) order: superscript notation.
        r = args[3].latex(in_small=True)
        return ('\\theta^{(%s)}_{%s}\\!\\left(%s %s %s\\right)' % (r, index, z, midsep, tau))
    raise ValueError
def schedule_jobs(sched):
    """Register every entry of jobs.JOBS on the scheduler as an interval job."""
    two_hours = 2 * 60 * 60
    for module, name, interval_seconds in jobs.JOBS:
        log.info('Scheduling %s to run every %s hours.', module, interval_seconds / (60 * 60))
        sched.add_job(
            run_job,
            trigger='interval',
            seconds=interval_seconds,
            start_date='2000-1-1 0:00:00',
            name=name,
            id=name,
            args=(module,),
            coalesce=True,
            max_instances=1,
            misfire_grace_time=two_hours,
        )
class RelationshipWafFirewallVersions(ModelNormal):
    """Auto-generated OpenAPI model for the `waf_firewall_versions` relationship.

    NOTE(review): the bare `_property` / `_js_args_to_python_args` lines below
    look like decorators whose `@`-prefix (e.g. `@cached_property`,
    `@convert_js_args_to_python_args`) was lost in extraction — confirm
    against the code generator's output before relying on this rendering.
    """

    # No enum restrictions or value validations apply to this model.
    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        """Types accepted for properties not listed in `attribute_map`."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False  # the model value itself may not be None

    _property
    def openapi_types():
        """Map attribute name -> tuple of accepted types (lazy to avoid import cycles)."""
        lazy_import()
        return {'waf_firewall_versions': (RelationshipWafFirewallVersionWafFirewallVersion,)}

    _property
    def discriminator():
        """No polymorphic discriminator for this model."""
        return None

    # JSON key names are identical to the Python attribute names here.
    attribute_map = {'waf_firewall_versions': 'waf_firewall_versions'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing API responses."""
        # Per-call (de)serialization options are popped before storing data.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass OpenApiModel.__new__ dispatch and build this class directly.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track visited classes to break cycles in composed-schema handling.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Silently drop unknown keys when the configuration requests it.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    # Attributes that must exist on every instance (set in the constructors above).
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Build the model from keyword arguments; positional args are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): the read-only check runs after setattr, so the value
            # is assigned before the error raises — confirm against generator intent.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def remove_index_for_records(records, space):
    """Remove each record's document from the Redis search index for ``space``.

    Deletion is best-effort: ResponseError (e.g. missing document or index)
    is swallowed per record.
    """
    from redis.exceptions import ResponseError

    cache = frappe.cache()
    for record in records:
        try:
            doc_key = cache.make_key(f'{PREFIX}{space}:{record.name}').decode()
            cache.ft(space).delete_document(doc_key)
        except ResponseError:
            # Nothing indexed for this record — nothing to remove.
            pass
def build_request_data(award_ids, subawards):
    """Serialize a spending-by-award request body for the given award ids.

    The single requested field (and sort key) is 'Sub-Award ID' when
    ``subawards`` is truthy, otherwise 'Award ID'.
    """
    id_field = 'Sub-Award ID' if subawards else 'Award ID'
    payload = {
        'filters': {
            'time_period': [{'start_date': '2007-10-01', 'end_date': '2020-09-30'}],
            'award_type_codes': ['A', 'B', 'C', 'D'],
            'award_ids': award_ids,
        },
        'fields': [id_field],
        'sort': id_field,
        'subawards': subawards,
    }
    return json.dumps(payload)
# NOTE(review): the decorator prefix was lost in extraction; restored as
# @pytest.mark.parametrize, which matches the test signature.
@pytest.mark.parametrize('primary_type, expected', (('Mail', 'Mail(Person from,Person to,string contents)Person(string name,address wallet)'), ('Person', 'Person(string name,address wallet)')))
def test_encode_type_eip712(primary_type, expected, eip712_example_types):
    """encode_type must produce the canonical EIP-712 encoded type string."""
    encoded = encode_type(primary_type, eip712_example_types)
    assert encoded == expected
class BrokenCommand(click.Command):
    """Placeholder command for a CLI plugin whose entry point failed to import.

    Instead of running, it prints the captured import traceback and exits
    with a non-zero status.
    """

    def __init__(self, name: str) -> None:
        super().__init__(name)
        util_name = os.path.basename((sys.argv and sys.argv[0]) or __file__)
        # Capture the currently-active exception traceback into the help text.
        self.help = (
            '\nWarning: entry point could not be loaded. Contact its author for help.\n\n\x08\n'
            + traceback.format_exc()
        )
        self.short_help = f' Warning: could not load plugin. See `{util_name} {self.name} --help`.'

    def invoke(self, ctx: click.Context) -> None:
        """Print the stored failure message and exit with status 1."""
        click.echo(self.help, color=ctx.color)
        ctx.exit(1)

    def parse_args(self, ctx: click.Context, args: List) -> List:
        # Accept anything unchanged: argument validation is pointless here.
        return args
class DEITP(DeltaE):
    """Delta E ITP color-difference metric (scaled distance in ICtCp space)."""

    NAME = 'itp'

    def __init__(self, scalar: float = 720) -> None:
        # Default scaling factor applied to the Euclidean distance.
        self.scalar = scalar

    def distance(self, color: Color, sample: Color, scalar: (float | None) = None, **kwargs: Any) -> float:
        """Return the scaled ICtCp distance; Ct differences are half-weighted."""
        if scalar is None:
            scalar = self.scalar
        i1, t1, p1 = color.convert('ictcp').coords(nans=False)
        i2, t2, p2 = sample.convert('ictcp').coords(nans=False)
        di = i1 - i2
        dt = t1 - t2
        dp = p1 - p2
        return scalar * math.sqrt(di * di + 0.25 * (dt * dt) + dp * dp)
class Line(CommConfigs):
    """Line-series option accessors delegating to the shared config store.

    NOTE(review): the extracted source defined each option as two plain methods
    with the same name (getter shadowed by its setter, leaving the getter
    unreachable); the @property/@setter decorators were evidently lost in
    extraction and are restored here.
    """

    @property
    def fill(self):
        """Fill mode under the line."""
        return self._get_commons()

    @fill.setter
    def fill(self, value: Union[bool, str]):
        self._set_commons(value)

    @property
    def stepped(self):
        """Stepped-line interpolation mode."""
        return self._get_commons()

    @stepped.setter
    def stepped(self, value: Union[bool, str]):
        self._set_commons(value)

    @property
    def showLine(self):
        """Whether the line itself is drawn."""
        return self._get_commons()

    @showLine.setter
    def showLine(self, flag: bool):
        self._set_commons(flag)

    @property
    def tension(self):
        """Bezier curve tension of the line."""
        return self._get_commons()

    @tension.setter
    def tension(self, value: float):
        self._set_commons(value)
def major_object_class_with_children(major_code, minor_codes):
    """Create one ObjectClass fixture per minor code under ``major_code``."""
    return [
        baker.make(
            'references.ObjectClass',
            id=minor_code,
            major_object_class=major_code,
            major_object_class_name=f'{major_code} name',
            object_class=f'000{minor_code}',
            object_class_name=f'000{minor_code} name',
        )
        for minor_code in minor_codes
    ]
def test_offset_with_connector_param_reference(response_with_body):
    """Offset pagination should read its limit from connector params and bump the page."""
    config = OffsetPaginationConfiguration(incremental_param='page', increment_by=1, limit={'connector_param': 'limit'})
    paginator = OffsetPaginationStrategy(config)
    initial_request: SaaSRequestParams = SaaSRequestParams(
        method=HTTPMethod.GET, path='/conversations', query_params={'page': 1}
    )
    next_request: Optional[SaaSRequestParams] = paginator.get_next_request(
        initial_request, {'limit': 10}, response_with_body, 'conversations'
    )
    expected = SaaSRequestParams(method=HTTPMethod.GET, path='/conversations', query_params={'page': 2})
    assert next_request == expected
class LAD(TC_base):
    """Coefficients for a scalar linear advection-diffusion equation.

    m = M*u (mass), f = B*u (advection), a = A (constant diffusion tensor);
    ``evaluate`` fills the coefficient dictionary ``c`` in place.
    """

    def __init__(self, M, A, B):
        """M: mass coefficient; A: 2x2 diffusion tensor; B: advection velocity (2-vector)."""
        TC_base.__init__(self, nc=1, variableNames=['u'], mass={0: {0: 'linear'}}, advection={0: {0: 'linear'}}, diffusion={0: {0: {0: 'constant'}}}, potential={0: {0: 'u'}}, reaction={0: {0: 'linear'}})
        self.M = M
        self.A = A
        self.B = B

    def evaluate(self, t, c):
        """Fill coefficient arrays (values and derivatives) in ``c`` at time ``t``."""
        # Mass term m = M*u and dm/du = M.
        c[('m', 0)][:] = (self.M * c[('u', 0)])
        c[('dm', 0, 0)][:] = self.M
        # Advective flux f = B*u, component-wise, and df/du = B.
        c[('f', 0)][(..., 0)] = (self.B[0] * c[('u', 0)])
        c[('f', 0)][(..., 1)] = (self.B[1] * c[('u', 0)])
        c[('df', 0, 0)][(..., 0)] = self.B[0]
        c[('df', 0, 0)][(..., 1)] = self.B[1]
        # Only diagonal entries of the constant diffusion tensor are set.
        c[('a', 0, 0)][(..., 0, 0)] = self.A[0][0]
        c[('a', 0, 0)][(..., 1, 1)] = self.A[1][1]
def extractBobateatranslationsCom(item):
    """Map a bobateatranslations.com feed item to a release message.

    Returns None for previews/untagged chapters, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('wfiltu', 'Why fall in love if you can attend Tsinghua University', 'translated'),
        ('sooew', 'Show Off Once in Every World (SOOEW)', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, series_name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def add_scorer_args(parser: argparse.ArgumentParser, cli_argument_list: Optional[List[str]] = None):
    """Register CLI arguments for every scorer selected by the metric options."""
    if cli_argument_list is None:
        args, _ = parser.parse_known_args()
    else:
        args, _ = parser.parse_known_args(cli_argument_list)
    # Each selected metric class contributes its own arguments to the parser.
    for kind, metrics in (('latency', args.latency_metrics), ('quality', args.quality_metrics)):
        for metric in metrics:
            get_scorer_class(kind, metric).add_args(parser)
def _tokenize_paren_block(string, pos):
paren_re = re.compile('[()]')
paren_level = (string[:pos].count('(') - string[:pos].count(')'))
while (paren_level > 0):
m = paren_re.search(string[pos:])
if (m.group(0) == '('):
paren_level += 1
else:
paren_level -= 1
pos += m.end(0)
return (string[:pos], string[pos:]) |
class Falcon(HuggingFace):
    """Wrapper around the tiiuae Falcon text-generation models.

    NOTE(review): ``hf_account`` and ``compile_default_configs`` (which takes
    no self/cls) look like they carried @property/@staticmethod decorators
    that were lost in extraction — confirm against the original source.
    """

    MODEL_NAMES = Literal[('falcon-rw-1b', 'falcon-7b', 'falcon-7b-instruct', 'falcon-40b-instruct')]

    def __init__(self, name: MODEL_NAMES, config_init: Optional[Dict[(str, Any)]], config_run: Optional[Dict[(str, Any)]]):
        """Load tokenizer and pipeline (via the base class), then finalize the run config."""
        self._tokenizer: Optional['transformers.AutoTokenizer'] = None
        # The base __init__ calls init_model(), which sets self._tokenizer.
        super().__init__(name=name, config_init=config_init, config_run=config_run)
        assert isinstance(self._tokenizer, transformers.PreTrainedTokenizerBase)
        # Reuse the tokenizer's pad token id for generation padding.
        self._config_run['pad_token_id'] = self._tokenizer.pad_token_id
        self._hf_config_run = transformers.GenerationConfig.from_pretrained(self._name, **self._config_run)
        # Copy max_length over to max_new_tokens on the generation config.
        self._hf_config_run.max_new_tokens = self._hf_config_run.max_length

    def init_model(self) -> Any:
        """Instantiate tokenizer and the HF text-generation pipeline."""
        self._tokenizer = transformers.AutoTokenizer.from_pretrained(self._name)
        return transformers.pipeline('text-generation', model=self._name, tokenizer=self._tokenizer, return_full_text=False, **self._config_init)

    def hf_account(self) -> str:
        """Hugging Face account hosting the Falcon checkpoints."""
        return 'tiiuae'

    def __call__(self, prompts: Iterable[str]) -> Iterable[str]:
        """Generate one continuation per prompt (prompt text itself is excluded)."""
        return [self._model(pr, generation_config=self._hf_config_run)[0]['generated_text'] for pr in prompts]

    def compile_default_configs() -> Tuple[(Dict[(str, Any)], Dict[(str, Any)])]:
        """Base defaults plus trust_remote_code=True for the init config."""
        (default_cfg_init, default_cfg_run) = HuggingFace.compile_default_configs()
        return ({**default_cfg_init, 'trust_remote_code': True}, default_cfg_run)
class ConditionNode(AbstractSyntaxTreeNode):
    """AST node representing an if-statement with optional True/False branches.

    NOTE(review): several zero-argument methods below (children, true_branch,
    false_branch, true_branch_child, false_branch_child, does_end_with_*) are
    accessed elsewhere in this class WITHOUT call parentheses, so they were
    almost certainly @property-decorated originally; the decorators appear to
    have been lost in extraction.
    """

    def __init__(self, condition: LogicCondition, reaching_condition: LogicCondition, ast: Optional[AbstractSyntaxInterface]=None):
        """Store the branch condition; the reaching condition goes to the base class."""
        super().__init__(reaching_condition, ast)
        self.condition: LogicCondition = condition

    def __hash__(self) -> int:
        return super().__hash__()

    def __eq__(self, other) -> bool:
        # Equal when base state matches and branch conditions are logically equal.
        return (super().__eq__(other) and self.condition.is_equal_to(other.condition))

    def __str__(self) -> str:
        return f'if ({str(self.condition)})'

    def __repr__(self) -> str:
        return f'''ConditionNode({self.reaching_condition})
{str(self)}
TrueNode({(type(self.true_branch_child) if self.true_branch_child else None)}
FalseNode({(type(self.false_branch_child) if self.false_branch_child else None)}))'''

    def copy(self) -> ConditionNode:
        """Copy conditions only; branch children are not copied."""
        return ConditionNode(self.condition, self.reaching_condition)

    def children(self) -> Tuple[(Union[(TrueNode, FalseNode)], ...)]:
        """Children, asserted to be exclusively True-/FalseNode wrappers."""
        children = super().children
        assert all((isinstance(c, (TrueNode, FalseNode)) for c in children)), 'A condition node has only True- and FalseNode children!'
        return children

    def true_branch(self) -> Optional[TrueNode]:
        """The (at most one) TrueNode child, or None."""
        true_nodes = [child for child in self.children if isinstance(child, TrueNode)]
        assert (len(true_nodes) < 2), f'A condition node can not have more than one True Branch'
        return (true_nodes[0] if true_nodes else None)

    def false_branch(self) -> Optional[FalseNode]:
        """The (at most one) FalseNode child, or None."""
        false_nodes = [child for child in self.children if isinstance(child, FalseNode)]
        assert (len(false_nodes) < 2), f'A condition node can not have more than one False Branch'
        return (false_nodes[0] if false_nodes else None)

    def true_branch_child(self) -> Optional[AbstractSyntaxTreeNode]:
        # The subtree attached under the True branch wrapper, if any.
        return (self.true_branch.child if self.true_branch else None)

    def false_branch_child(self) -> Optional[AbstractSyntaxTreeNode]:
        return (self.false_branch.child if self.false_branch else None)

    def get_end_nodes(self) -> Iterable[Union[(CodeNode, SwitchNode, LoopNode, ConditionNode)]]:
        """Yield terminal nodes; an if missing a branch is itself an end node."""
        if ((self.true_branch_child is not None) and (self.false_branch_child is not None)):
            (yield from super().get_end_nodes())
        else:
            (yield self)

    def does_end_with_break(self) -> bool:
        # Only a complete if/else can uniformly end with break.
        if ((self.true_branch_child is not None) and (self.false_branch_child is not None)):
            return super().does_end_with_break
        return False

    def does_end_with_continue(self) -> bool:
        if ((self.true_branch_child is not None) and (self.false_branch_child is not None)):
            return super().does_end_with_continue
        return False

    def does_end_with_return(self) -> bool:
        if ((self.true_branch_child is not None) and (self.false_branch_child is not None)):
            return super().does_end_with_return
        return False

    def get_possible_case_candidate_condition(self) -> Optional[LogicCondition]:
        """Condition under which this node executes, used for switch extraction."""
        self.clean()
        if (self.false_branch is None):
            return (self.reaching_condition & self.condition)
        return super().get_possible_case_candidate_condition()

    def simplify_reaching_condition(self, condition_handler: ConditionHandler):
        """Fold the reaching condition into the branch condition when there is no else."""
        self.clean()
        if ((self.false_branch is None) and (not self.reaching_condition.is_true)):
            self.condition &= self.reaching_condition
            self.condition.remove_redundancy(condition_handler)
            self.reaching_condition = LogicCondition.initialize_true(self.reaching_condition.context)
        super().simplify_reaching_condition(condition_handler)

    def switch_branches(self):
        """Swap True and False branches via the owning AST."""
        self._ast.switch_branches(self)

    def clean(self) -> None:
        """Remove empty branch wrappers and normalize a sole branch to be the True branch."""
        for dead_child in (child for child in self.children if (child.child is None)):
            self._ast.remove_subtree(dead_child)
        if ((len(self.children) == 1) and (self.true_branch is None)):
            self.switch_branches()
        super().clean()

    def replace_variable(self, replacee: Variable, replacement: Variable) -> None:
        self._ast.substitute_variable_in_condition(self.condition, replacee, replacement)

    def accept(self, visitor: ASTVisitorInterface[T]) -> T:
        return visitor.visit_condition_node(self)

    def get_required_variables(self, condition_map: Optional[Dict[(LogicCondition, Condition)]]=None) -> Iterable[Variable]:
        """Yield variables required by the condition, resolved via the condition map."""
        if (not condition_map):
            return
        for symbol in self.condition.get_symbols():
            if (symbol not in condition_map):
                # Unknown symbols are logged and skipped rather than raising.
                logging.warning('LogicCondition not in condition map.')
                continue
            (yield from condition_map[symbol].requirements)
def get_price(symbol: str):
    """Return (price, signed percent-change) display strings for a ticker.

    Args:
        symbol: Ticker symbol understood by yfinance.

    Returns:
        Tuple of formatted strings: last price and percent change since the
        previous close, prefixed with '+' when non-negative.
    """
    info = yfinance.Ticker(symbol).fast_info
    change = info.last_price - info.previous_close
    # Percent change is conventionally relative to the previous close;
    # the original divided by last_price, skewing the reported percentage.
    percent_change = (change / info.previous_close) * 100
    price_str = format_cents(str(info.last_price))
    percent_str = format_cents(str(percent_change))
    # startswith avoids an IndexError on an empty formatted string.
    if not percent_str.startswith('-'):
        percent_str = '+' + percent_str
    return (price_str, percent_str)
# NOTE(review): decorator prefixes were lost in extraction; restored as
# @pytest.mark.parametrize, which matches the test signature.
@pytest.mark.parametrize('input_vars_1', ['dates_full', None])
@pytest.mark.parametrize('input_vars_2', ['dates_na', ['dates_full', 'dates_na'], None])
def test_raises_error_when_nan_in_reference_in_transform(df_nan, input_vars_1, input_vars_2):
    """transform() must reject NaNs when missing_values='raise', even after a clean fit."""
    transformer = DatetimeSubtraction(variables=input_vars_1, reference=input_vars_2, missing_values='raise')
    # Fit on imputed data so that fitting itself succeeds.
    transformer.fit(df_nan.fillna('Mar-2010'))
    with pytest.raises(ValueError):
        transformer.transform(df_nan)
def get_id(view_kwargs):
    """Resolve event identifier -> event_id -> StripeAuthorization id in-place.

    Mutates and returns ``view_kwargs``.
    """
    if view_kwargs.get('event_identifier') is not None:
        event = safe_query_kwargs(Event, view_kwargs, 'event_identifier', 'identifier')
        if event.id is not None:
            view_kwargs['event_id'] = event.id
    if view_kwargs.get('event_id') is not None:
        authorization = safe_query_kwargs(StripeAuthorization, view_kwargs, 'event_id', 'event_id')
        view_kwargs['id'] = authorization.id
    return view_kwargs
def test_invalid_downstream_ref():
    """Building a subgraph with a downstream dependency on a missing node must fail."""
    nodes = [
        OperatorNode({}, {'name': 'test0', 'type': 'none', 'upstream_dependencies': ['test1']}),
        OperatorNode({}, {'name': 'test1', 'type': 'none'}),
        # 'test4' does not exist, making this reference invalid.
        OperatorNode({}, {'name': 'test2', 'type': 'none', 'downstream_dependencies': ['test4']}),
    ]
    with pytest.raises(Exception):
        _GraphUtil.build_subgraph(nodes)
# NOTE(review): the decorator name was lost in extraction; restored as
# @pytest.fixture, which matches the yield-fixture structure.
@pytest.fixture(scope='function')
def mariadb_integration_db(mariadb_integration_session):
    """Reset the MariaDB test schema, load sample data, yield the session, clean up."""
    truncate_all_tables(mariadb_integration_session)
    with open('./docker/sample_data/mariadb_example_data.sql', 'r') as query_file:
        lines = query_file.read().splitlines()
    # Drop SQL comment lines, then split the remainder into statements.
    statements = ' '.join(line for line in lines if not line.startswith('--')).split(';')
    for statement in statements:
        statement = statement.strip()
        if not statement:
            continue
        # Execute a real text clause: the original interpolated sqlalchemy.text()
        # into an f-string (collapsing it back to a plain string) and abused a
        # list comprehension for its side effects; it also executed bare ';'
        # for whitespace-only fragments.
        mariadb_integration_session.execute(sqlalchemy.text(f'{statement};'))
    yield mariadb_integration_session
    truncate_all_tables(mariadb_integration_session)
class TranslateFlinger(Flinger):
    """Flinger that maps coordinates by translating with an offset, then scaling."""

    def __init__(self, size: np.ndarray, scale: np.ndarray, offset: np.ndarray) -> None:
        super().__init__(size)
        self.scale: np.ndarray = scale
        self.offset: np.ndarray = offset

    def fling(self, coordinates: np.ndarray) -> np.ndarray:
        """Return scale * (coordinates + offset)."""
        shifted = coordinates + self.offset
        return self.scale * shifted
def remove_signing_keys(build_dir):
    """Strip signing-key configuration from Gradle and Ant property files under ``build_dir``.

    Walks the tree; for each build.gradle(.kts) it removes signingConfigs
    blocks and any line matched by gradle_line_matches, and for each known
    properties file it removes key.store*/key.alias* entries. Files are
    rewritten in place.
    """
    for (root, dirs, files) in os.walk(build_dir):
        gradlefile = None
        if ('build.gradle' in files):
            gradlefile = 'build.gradle'
        elif ('build.gradle.kts' in files):
            gradlefile = 'build.gradle.kts'
        if gradlefile:
            path = os.path.join(root, gradlefile)
            with open(path, 'r') as o:
                lines = o.readlines()
            changed = False
            # opened > 0 means we are inside a signingConfigs { ... } block.
            opened = 0
            i = 0
            with open(path, 'w') as o:
                while (i < len(lines)):
                    line = lines[i]
                    i += 1
                    # Join backslash-continued lines into one logical line.
                    while line.endswith('\\\n'):
                        line = (line.rstrip('\\\n') + lines[i])
                        i += 1
                    if gradle_comment.match(line):
                        o.write(line)
                        continue
                    if (opened > 0):
                        # Track brace nesting until the signing block closes;
                        # lines inside the block are dropped.
                        opened += line.count('{')
                        opened -= line.count('}')
                        continue
                    if gradle_signing_configs.match(line):
                        changed = True
                        opened += 1
                        continue
                    if any((s.match(line) for s in gradle_line_matches)):
                        changed = True
                        continue
                    if (opened == 0):
                        o.write(line)
            if changed:
                logging.info(('Cleaned %s of keysigning configs at %s' % (gradlefile, path)))
        for propfile in ['project.properties', 'build.properties', 'default.properties', 'ant.properties']:
            if (propfile in files):
                path = os.path.join(root, propfile)
                # Ant properties files are historically latin-1 encoded.
                with open(path, 'r', encoding='iso-8859-1') as o:
                    lines = o.readlines()
                changed = False
                with open(path, 'w', encoding='iso-8859-1') as o:
                    for line in lines:
                        if any((line.startswith(s) for s in ('key.store', 'key.alias'))):
                            changed = True
                            continue
                        o.write(line)
                if changed:
                    logging.info(('Cleaned %s of keysigning configs at %s' % (propfile, path)))
class TemporalEnsemble(BaseEnsemble):
    """Ensemble whose layers are fit with a temporal (ordered) index.

    Shuffling is disabled since ordering is exactly what the temporal
    indexer exploits.
    """

    def __init__(self, step_size=1, burn_in=None, window=None, lag=0, scorer=None, raise_on_exception=True, array_check=None, verbose=False, n_jobs=(- 1), backend='threading', model_selection=False, sample_size=20, layers=None):
        """step_size/burn_in/window/lag parameterize the temporal fold generation."""
        super(TemporalEnsemble, self).__init__(shuffle=False, random_state=None, scorer=scorer, raise_on_exception=raise_on_exception, verbose=verbose, n_jobs=n_jobs, layers=layers, backend=backend, array_check=array_check, model_selection=model_selection, sample_size=sample_size)
        # NOTE(review): __initialized__ presumably gates attribute handling in
        # the base class while the temporal parameters are assigned — confirm
        # against BaseEnsemble.
        self.__initialized__ = 0
        self.step_size = step_size
        self.burn_in = burn_in
        self.window = window
        self.lag = lag
        self.__initialized__ = 1

    def add_meta(self, estimator, **kwargs):
        """Add a final (meta) layer; shorthand for add(..., meta=True)."""
        return self.add(estimators=estimator, meta=True, **kwargs)

    def add(self, estimators, preprocessing=None, proba=False, meta=False, propagate_features=None, **kwargs):
        """Add a layer using a temporal index, or a full index when ``meta``."""
        # Per-layer overrides fall back to the ensemble-level defaults.
        s = kwargs.pop('step_size', self.step_size)
        b = kwargs.pop('burn_in', self.burn_in)
        w = kwargs.pop('window', self.window)
        l = kwargs.pop('lag', self.lag)
        if meta:
            idx = FullIndex()
        else:
            idx = TemporalIndex(s, b, w, l, raise_on_exception=self.raise_on_exception)
        return super(TemporalEnsemble, self).add(estimators=estimators, indexer=idx, preprocessing=preprocessing, proba=proba, propagate_features=propagate_features, **kwargs)
def upgrade() -> None:
    """Create the initial schema: announcements, game info, users, subscriptions, offers."""
    # Broadcast messages, delivered per channel (ALL / FEED / TELEGRAM).
    op.create_table('announcements', sa.Column('id', sa.Integer(), nullable=False), sa.Column('channel', sa.Enum('ALL', 'FEED', 'TELEGRAM', name='channel'), nullable=False), sa.Column('date', AwareDateTime(), nullable=False), sa.Column('text_markdown', sa.String(), nullable=False), sa.PrimaryKeyConstraint('id'))
    # Game metadata scraped from IGDB.
    op.create_table('igdb_info', sa.Column('id', sa.Integer(), nullable=False), sa.Column('url', sa.String(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('short_description', sa.String(), nullable=True), sa.Column('release_date', AwareDateTime(), nullable=True), sa.Column('user_score', sa.Integer(), nullable=True), sa.Column('user_ratings', sa.Integer(), nullable=True), sa.Column('meta_score', sa.Integer(), nullable=True), sa.Column('meta_ratings', sa.Integer(), nullable=True), sa.PrimaryKeyConstraint('id'))
    # Game metadata scraped from Steam.
    op.create_table('steam_info', sa.Column('id', sa.Integer(), nullable=False), sa.Column('url', sa.String(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('short_description', sa.String(), nullable=True), sa.Column('release_date', AwareDateTime(), nullable=True), sa.Column('genres', sa.String(), nullable=True), sa.Column('publishers', sa.String(), nullable=True), sa.Column('image_url', sa.String(), nullable=True), sa.Column('recommendations', sa.Integer(), nullable=True), sa.Column('percent', sa.Integer(), nullable=True), sa.Column('score', sa.Integer(), nullable=True), sa.Column('metacritic_score', sa.Integer(), nullable=True), sa.Column('metacritic_url', sa.String(), nullable=True), sa.Column('recommended_price_eur', sa.Float(), nullable=True), sa.PrimaryKeyConstraint('id'))
    # Registered (Telegram) users and their delivery state.
    op.create_table('users', sa.Column('id', sa.Integer(), nullable=False), sa.Column('registration_date', AwareDateTime(), nullable=False), sa.Column('telegram_id', sa.String(), nullable=True), sa.Column('telegram_chat_id', sa.String(), nullable=False), sa.Column('telegram_user_details', sa.JSON(), nullable=True), sa.Column('timezone_offset', sa.Integer(), nullable=True), sa.Column('inactive', sa.String(), nullable=True), sa.Column('offers_received_count', sa.Integer(), nullable=False), sa.Column('last_announcement_id', sa.Integer(), nullable=False), sa.PrimaryKeyConstraint('id'))
    # A game links its IGDB and Steam info rows (both optional).
    op.create_table('games', sa.Column('id', sa.Integer(), nullable=False), sa.Column('igdb_id', sa.Integer(), nullable=True), sa.Column('steam_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['igdb_id'], ['igdb_info.id']), sa.ForeignKeyConstraint(['steam_id'], ['steam_info.id']), sa.PrimaryKeyConstraint('id'))
    # Per-user subscription filters (source/type/duration) and read position.
    op.create_table('telegram_subscriptions', sa.Column('id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False), sa.Column('source', sa.Enum('APPLE', 'AMAZON', 'EPIC', 'GOG', 'GOOGLE', 'HUMBLE', 'ITCH', 'STEAM', 'UBISOFT', name='source'), nullable=False), sa.Column('type', sa.Enum('GAME', 'LOOT', name='offertype'), nullable=False), sa.Column('duration', sa.Enum('ALWAYS', 'CLAIMABLE', 'TEMPORARY', name='offerduration'), nullable=False), sa.Column('last_offer_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['user_id'], ['users.id']), sa.PrimaryKeyConstraint('id'))
    # Scraped offers, optionally linked to a game.
    op.create_table('offers', sa.Column('id', sa.Integer(), nullable=False), sa.Column('source', sa.Enum('APPLE', 'AMAZON', 'EPIC', 'GOG', 'GOOGLE', 'HUMBLE', 'ITCH', 'STEAM', 'UBISOFT', name='source'), nullable=False), sa.Column('type', sa.Enum('GAME', 'LOOT', name='offertype'), nullable=False), sa.Column('duration', sa.Enum('ALWAYS', 'CLAIMABLE', 'TEMPORARY', name='offerduration'), nullable=False), sa.Column('title', sa.String(), nullable=False), sa.Column('probable_game_name', sa.String(), nullable=False), sa.Column('seen_last', AwareDateTime(), nullable=False), sa.Column('rawtext', sa.JSON(), nullable=True), sa.Column('url', sa.String(), nullable=True), sa.Column('game_id', sa.Integer(), nullable=True), sa.Column('category', sa.Enum('VALID', 'CHEAP', 'DEMO', 'PRERELEASE', name='category'), nullable=False), sa.Column('img_url', sa.String(), nullable=True), sa.Column('seen_first', AwareDateTime(), nullable=True), sa.Column('valid_from', AwareDateTime(), nullable=True), sa.Column('valid_to', AwareDateTime(), nullable=True), sa.ForeignKeyConstraint(['game_id'], ['games.id']), sa.PrimaryKeyConstraint('id'))
class ReadabilityExtractor(AbstractExtractor):
    """Article extractor backed by python-readability's Document."""

    def __init__(self):
        self.name = 'readability'

    def extract(self, item):
        """Build an ArticleCandidate for ``item``, using readability for title/description."""
        # Deep-copy the response body so readability cannot mutate the original.
        doc = Document(deepcopy(item['spider_response'].body))
        description = doc.summary()
        article_candidate = ArticleCandidate()
        # NOTE(review): __init__ sets `name`, but `_name` is read here —
        # presumably `_name` comes from the base class; confirm.
        article_candidate.extractor = self._name
        article_candidate.title = doc.short_title()
        article_candidate.description = description
        # Remaining fields come from this extractor's helper methods, not readability.
        article_candidate.text = self._text(item)
        article_candidate.topimage = self._topimage(item)
        article_candidate.author = self._author(item)
        article_candidate.publish_date = self._publish_date(item)
        article_candidate.language = self._language(item)
        return article_candidate
class ArrayField(forms.CharField):
    """CharField that parses comma-separated input into a list of items."""

    widget = ArrayWidget

    def __init__(self, *args, **kwargs):
        # ``tags`` switches on sorting of the parsed items.
        self.tags = kwargs.pop('tags', False)
        super(ArrayField, self).__init__(*args, **kwargs)

    def clean(self, value):
        """Validate via CharField, then parse the comma-separated items."""
        cleaned = super(ArrayField, self).clean(value)
        try:
            return parse_items(cleaned, sort=self.tags)
        except ValueError:
            raise forms.ValidationError(_('Please provide a comma-separated list of items.'))
class OptionPlotoptionsErrorbarDragdropDraghandle(Options):
    """Options for the drag handle shown on draggable errorbar points.

    NOTE(review): the extracted source defined each option as two same-named
    plain methods (the getter was shadowed by its setter and unreachable);
    the @property/@setter decorators were evidently lost in extraction and
    are restored here.
    """

    @property
    def className(self):
        """CSS class of the drag handle (default 'highcharts-drag-handle')."""
        return self._config_get('highcharts-drag-handle')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Fill color of the drag handle (default '#fff')."""
        return self._config_get('#fff')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def cursor(self):
        """Mouse cursor shown over the handle (no default)."""
        return self._config_get(None)

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        """Stroke color of the handle outline (default 'rgba(0, 0, 0, 0.6)')."""
        return self._config_get('rgba(0, 0, 0, 0.6)')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Stroke width of the handle outline in pixels (default 1)."""
        return self._config_get(1)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def pathFormatter(self):
        """Callback producing the SVG path of the handle (no default)."""
        return self._config_get(None)

    @pathFormatter.setter
    def pathFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def zIndex(self):
        """Z-index of the drag handle (default 901)."""
        return self._config_get(901)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def extractSingletranslationsBlogspotCom(item):
    """Map a singletranslations.blogspot.com feed item to a release message.

    Returns None for previews/untagged chapters, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, series_name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesOrganizationSonificationContexttracksMappingHighpass(Options):
    """Highpass-filter mapping options for sonification context tracks.

    NOTE(review): sibling option classes in this file lost @property
    decorators in extraction; these accessors may have been properties
    too — confirm against the generator output.
    """

    def frequency(self) -> 'OptionSeriesOrganizationSonificationContexttracksMappingHighpassFrequency':
        """Sub-options for the highpass cutoff frequency."""
        return self._config_sub_data('frequency', OptionSeriesOrganizationSonificationContexttracksMappingHighpassFrequency)

    def resonance(self) -> 'OptionSeriesOrganizationSonificationContexttracksMappingHighpassResonance':
        """Sub-options for the highpass resonance."""
        return self._config_sub_data('resonance', OptionSeriesOrganizationSonificationContexttracksMappingHighpassResonance)
class PoleHook(Boxes):
    """Generator for a laser-cut hook that clamps around a pole, locked by a screw."""

    def __init__(self) -> None:
        Boxes.__init__(self)
        self.addSettingsArgs(edges.FingerJointSettings)
        self.argparser.add_argument('--diameter', action='store', type=float, default=50.0, help='diameter of the thing to hook')
        self.argparser.add_argument('--screw', action='store', type=float, default=7.8, help='diameter of the screw in mm')
        # NOTE(review): 'with' looks like a typo for 'width' in the help string
        # below (runtime text, intentionally left unchanged here).
        self.argparser.add_argument('--screwhead', action='store', type=float, default=13.0, help='with of the screw head in mm')
        self.argparser.add_argument('--screwheadheight', action='store', type=float, default=5.5, help='height of the screw head in mm')
        self.argparser.add_argument('--pin', action='store', type=float, default=4.0, help='diameter of the pin in mm')

    def fork(self, d, w, edge='e', full=True, move=None):
        """Draw one fork plate (outer width d + 2w, height 2d).

        full=True draws the closed fork with a pin hole; full=False a half fork.
        """
        tw = (d + (2 * w))
        th = (2 * d)
        # First pass only computes the placement; skip drawing.
        if self.move(tw, th, move, True):
            return
        e = self.edges.get(edge, edge)
        self.moveTo(0, e.margin())
        if (e is self.edges['e']):
            self.bedBoltHole(tw)
        else:
            e(tw, bedBolts=edges.Bolts(1))
        if full:
            self.hole(((- 0.5) * w), (2 * d), (self.pin / 2))
            self.polyline(0, 90, (2 * d), (180, (w / 2)), d, ((- 180), (d / 2)), (0.5 * d), (180, (w / 2)), (1.5 * d), 90)
        else:
            self.polyline(0, 90, d, 90, w, 90, 0, ((- 180), (d / 2)), (0.5 * d), (180, (w / 2)), (1.5 * d), 90)
        self.move(tw, th, move)

    def lock(self, l1, l2, w, move=None):
        """Draw an L-shaped locking lever with a pin hole at its pivot."""
        l1 += (w / 2)
        l2 += (w / 2)
        if self.move(l1, l2, move, True):
            return
        self.hole((w / 2), (w / 2), (self.pin / 2))
        self.moveTo((w / 2), 0)
        self.polyline((l2 - w), (180, (w / 2)), (l2 - (2 * w)), ((- 90), (w / 2)), (l1 - (2 * w)), (180, (w / 2)), (l1 - w), (90, (w / 2)))
        self.move(l1, l2, move)

    def backplate(self):
        """Callback: three rows of finger holes (with bed bolts) on the back disc."""
        tw = (self.diameter + (2 * self.ww))
        t = self.thickness
        b = edges.Bolts(1)
        bs = (0.0,)
        self.fingerHolesAt(((- tw) / 2), ((- 2) * t), tw, 0, bedBolts=b, bedBoltSettings=bs)
        self.fingerHolesAt(((- tw) / 2), 0, tw, 0, bedBolts=b, bedBoltSettings=bs)
        self.fingerHolesAt(((- tw) / 2), ((+ 2) * t), tw, 0, bedBolts=b, bedBoltSettings=bs)

    def clamp(self):
        """Callback: two straight cut lines forming the clamp slot."""
        d = (self.diameter + (2 * self.ww))
        self.moveTo(10, ((- 0.5) * d), 90)
        self.edge(d)
        self.moveTo(0, (- 8), (- 180))
        self.edge(d)

    def render(self):
        """Lay out all parts: forks, discs, knobs, and locking levers."""
        d = self.diameter
        t = self.thickness
        shh = self.screwheadheight
        self.bedBoltSettings = (self.screw, self.screwhead, shh, ((d / 4) + shh), (d / 4))
        # Fork arm width is four material thicknesses.
        self.ww = ww = (4 * t)
        self.fork(d, ww, 'f', move='right')
        self.fork(d, ww, 'f', move='right')
        self.fork(d, ww, 'f', full=False, move='right')
        self.fork(d, ww, full=False, move='right')
        self.fork(d, ww, full=False, move='right')
        self.parts.disc((d + (2 * ww)), callback=self.backplate, hole=self.screw, move='right')
        self.parts.disc((d + (2 * ww)), hole=self.screw, move='right')
        self.parts.disc((d + (2 * ww)), callback=self.clamp, hole=(self.screw + (0.5 * t)), move='right')
        self.parts.disc((d + (2 * ww)), hole=(self.screw + (0.5 * t)), move='right')
        self.parts.waivyKnob(50, callback=(lambda : self.nutHole(self.screwhead)), move='right')
        self.parts.waivyKnob(50, callback=(lambda : self.nutHole(self.screwhead)), move='right')
        self.parts.waivyKnob(50, hole=(self.screw + (0.5 * t)), move='right')
        # Lever arm length from the hook geometry (Pythagoras minus half-diameter).
        ll = ((((d ** 2) + ((0.5 * (d + ww)) ** 2)) ** 0.5) - (0.5 * d))
        for i in range(3):
            self.lock(ll, ll, ww, move='right')
        for i in range(2):
            self.parts.disc(ww, move='up')
def draw_walls(svg: svgwrite.Drawing, building: Building, segment: Segment, height: float, shift_1: np.ndarray, shift_2: np.ndarray, use_building_colors: bool) -> None:
    """Draw one wall segment of a building as a filled SVG quadrilateral.

    The wall color is shaded by segment angle (construction sites and tall
    buildings) or darkened by luminance (low buildings) to fake lighting.

    :param svg: drawing the wall path is appended to
    :param building: source of wall colors and construction state
    :param segment: wall segment (two points and a facade angle)
    :param height: building height, in units of 1/BUILDING_SCALE meters
    :param shift_1: offset of the wall's bottom edge
    :param shift_2: offset of the wall's top edge
    :param use_building_colors: take the per-building color instead of the default
    """
    # Removed a dead bare re-annotation (`color: Color`) that followed this
    # assignment in the original.
    color: Color = building.wall_color if use_building_colors else building.wall_default_color
    if building.is_construction:
        # Shade by facade angle. NOTE(review): unlike the `else` branch below,
        # components are not clamped to [0, 1] here — confirm intended.
        color_part: float = segment.angle * 0.2
        color = Color(rgb=((color.get_red() + color_part), (color.get_green() + color_part), (color.get_blue() + color_part)))
    elif height <= (0.25 / BUILDING_SCALE):
        # Copy before mutating so the building's shared color object is untouched.
        color = Color(color)
        color.set_luminance(color.get_luminance() * 0.7)
    elif height <= (0.5 / BUILDING_SCALE):
        color = Color(color)
        color.set_luminance(color.get_luminance() * 0.85)
    else:
        # Angle-based shading, clamped to valid [0, 1] channel range.
        color_part = (segment.angle * 0.2) - 0.1
        color = Color(rgb=(
            max(min(color.get_red() + color_part, 1), 0),
            max(min(color.get_green() + color_part, 1), 0),
            max(min(color.get_blue() + color_part, 1), 0),
        ))
    command: PathCommands = ['M', (segment.point_1 + shift_1), 'L', (segment.point_2 + shift_1), (segment.point_2 + shift_2), (segment.point_1 + shift_2), (segment.point_1 + shift_1), 'Z']
    path: Path = Path(d=command, fill=color.hex, stroke=color.hex, stroke_width=1, stroke_linejoin='round')
    svg.add(path)
def is_ipv4_address(sts_ipaddr):
    """Return True if *sts_ipaddr* is a dotted-quad IPv4 address string.

    Requires exactly four decimal octets in the range 0-255 and a final
    confirmation by ``socket.inet_aton``. Non-string input returns False.
    """
    if not isinstance(sts_ipaddr, str):
        return False
    octets = sts_ipaddr.split('.')
    if len(octets) != 4:
        return False
    for octet in octets:
        # isdigit() rejects signs/whitespace that int() would silently accept.
        if not octet.isdigit() or int(octet) > 255:
            return False
    # Final sanity check by the C resolver (catches oddities such as
    # non-ASCII digits that passed the checks above).
    try:
        socket.inet_aton(sts_ipaddr)
    except OSError:
        return False
    return True
class OptionPlotoptionsFunnel3dOnpointConnectoroptions(Options):
    """Connector-line options for funnel3d on-point annotations.

    NOTE(review): each option appears as a getter/setter pair with identical
    names; upstream these are presumably @property/@<name>.setter pairs whose
    decorators were stripped in this copy — confirm against the source.
    """
    def dashstyle(self):
        # None means "not configured" (library default applies).
        return self._config_get(None)
    def dashstyle(self, text: str):
        self._config(text, js_type=False)
    def stroke(self):
        return self._config_get(None)
    def stroke(self, text: str):
        self._config(text, js_type=False)
    def width(self):
        # Default connector width is 1.
        return self._config_get(1)
    def width(self, num: float):
        self._config(num, js_type=False)
def inspect_node(node):
    """Inspect a cluster node over SSH and detect its batch system.

    Returns a dict with 'type' ('slurm', 'sge', 'pbs' or 'UNKNOWN') and
    'architecture', enriched with scheduler-specific details for slurm/sge,
    or None if the SSH connection could not be established.
    """
    node_information = {}
    ssh = node.connect()
    if not ssh:
        log.error('Unable to connect to node %s', node.name)
        return
    (_in, _out, _err) = ssh.exec_command('(type >& /dev/null -a srun && echo slurm) || (type >& /dev/null -a qconf && echo sge) || (type >& /dev/null -a pbsnodes && echo pbs) || echo UNKNOWN')
    # Channel output is bytes on Python 3: decode before comparing against
    # the 'slurm'/'sge' string literals below (the bytes/str comparison
    # could never match otherwise).
    node_information['type'] = _out.read().decode().strip()
    (_in, _out, _err) = ssh.exec_command('arch')
    node_information['architecture'] = _out.read().decode().strip()
    if node_information['type'] == 'slurm':
        inspect_slurm_cluster(ssh, node_information)
    elif node_information['type'] == 'sge':
        inspect_sge_cluster(ssh, node_information)
    ssh.close()
    return node_information
def _expand_virtual_root(repo: IConfigRepository, root: DefaultsTreeNode, overrides: Overrides, skip_missing: bool) -> DefaultsTreeNode:
    """Expand the children of a synthetic (virtual) root into full defaults subtrees.

    Children are walked in reverse and the collected list is reversed again
    before assignment, preserving the original order in the resulting tree.
    Returns *root* with its children replaced by the expanded subtrees.
    """
    children: List[Union[(DefaultsTreeNode, InputDefault)]] = []
    assert (root.children is not None)
    for d in reversed(root.children):
        assert isinstance(d, InputDefault)
        new_root = DefaultsTreeNode(node=d, parent=root)
        # Virtual-root children have no real parent config path/package.
        d.update_parent('', '')
        subtree = _create_defaults_tree_impl(repo=repo, root=new_root, is_root_config=d.primary, skip_missing=skip_missing, interpolated_subtree=False, overrides=overrides)
        if (subtree.children is None):
            # Leaf: keep the input default itself instead of an empty subtree.
            children.append(d)
        else:
            children.append(subtree)
    if (len(children) > 0):
        root.children = list(reversed(children))
    return root
class BaseEditor(Editor):
    """Base class for enumeration editors.

    Keeps the cached name list and value<->name mappings in sync with the
    factory's values (static or taken from an extended trait name) and
    triggers a rebuild of the concrete editor widget when they change.
    """
    # Traits Properties backed by the _names/_mapping/_inverse_mapping caches.
    names = Property()
    mapping = Property()
    inverse_mapping = Property()
    def values_changed(self):
        """Recompute the cached names and the value<->name mappings."""
        (self._names, self._mapping, self._inverse_mapping) = enum_values_changed(self._value(), self.string_value)
    def rebuild_editor(self):
        """Rebuild the editor widget; must be implemented by toolkit subclasses."""
        raise NotImplementedError
    def init(self, parent):
        """Set up the value source and change listeners.

        If the factory names an extended trait, observe that object's items;
        otherwise fall back to the factory's static ``values`` list.
        """
        factory = self.factory
        if (factory.name != ''):
            (self._object, self._name, self._value) = self.parse_extended_name(factory.name)
            self.values_changed()
            self._object.observe(self._update_values_and_rebuild_editor, (self._name + '.items'), dispatch='ui')
        else:
            self._value = (lambda : self.factory.values)
            self.values_changed()
            factory.observe(self._update_values_and_rebuild_editor, 'values', dispatch='ui')
    def dispose(self):
        """Remove the observer installed in init() before disposing the editor."""
        if (self._object is not None):
            self._object.observe(self._update_values_and_rebuild_editor, (self._name + '.items'), remove=True, dispatch='ui')
        else:
            self.factory.observe(self._update_values_and_rebuild_editor, 'values', remove=True, dispatch='ui')
        super().dispose()
    def _get_names(self):
        # Property getter for 'names'.
        return self._names
    def _get_mapping(self):
        # Property getter for 'mapping'.
        return self._mapping
    def _get_inverse_mapping(self):
        # Property getter for 'inverse_mapping'.
        return self._inverse_mapping
    def _update_values_and_rebuild_editor(self, event):
        # Values changed at runtime: refresh caches, then rebuild the widget.
        self.values_changed()
        self.rebuild_editor()
def get_systems_managed_by_user(user_id: str, auth_header: Dict[(str, str)], server_url: str) -> List[FidesKey]:
    """Return the fides keys of every system managed by the given user."""
    endpoint = SYSTEM_MANAGER_PATH.format(user_id)
    response = requests.get(server_url + endpoint, headers=auth_header)
    # Delegate HTTP error handling before parsing the body.
    handle_cli_response(response, verbose=False)
    return [record['fides_key'] for record in response.json()]
class OptionSeriesCylinderSonificationContexttracksMappingFrequency(Options):
    """Frequency-mapping options for cylinder-series sonification context tracks.

    NOTE(review): each option appears as a getter/setter pair with identical
    names; upstream these are presumably @property/@<name>.setter pairs whose
    decorators were stripped in this copy — confirm against the source.
    """
    def mapFunction(self):
        # None means "not configured" (library default applies).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_hosts_priority():
    """Entries from the hosts file must take priority over nameserver answers."""
    hostname = 'example.com'
    nameserver_addr = '1.0.2.0'
    hosts_resolver = _make_host_resolver()
    proxy = greendns.ResolverProxy(hosts_resolver=hosts_resolver, filename=None)
    mock_base = _make_mock_base_resolver()
    mock_base.rr.address = nameserver_addr
    proxy._resolver = mock_base()
    # With an empty hosts file the nameserver answer wins.
    first_answer = greendns.resolve(hostname, _proxy=proxy).rrset[0]
    assert first_answer.address == nameserver_addr
    # Add hosts entries for both address families, then reload.
    hosts_resolver.hosts.write(b'1.2.3.4 example.com\ndead:beef::1 example.com\n')
    hosts_resolver.hosts.flush()
    hosts_resolver._load()
    ipv4_records = greendns.resolve(hostname, family=socket.AF_INET, _proxy=proxy).rrset
    assert len(ipv4_records) == 1
    assert ipv4_records[0].address == '1.2.3.4', ipv4_records[0].address
    ipv6_records = greendns.resolve(hostname, family=socket.AF_INET6, _proxy=proxy).rrset
    assert len(ipv6_records) == 1
    assert ipv6_records[0].address == 'dead:beef::1', ipv6_records[0].address
class OptionSeriesFunnelEvents(Options):
    """Event-handler options for funnel series (callbacks set as raw values).

    NOTE(review): each event appears as a getter/setter pair with identical
    names; upstream these are presumably @property/@<name>.setter pairs whose
    decorators were stripped in this copy — confirm against the source.
    All getters default to None, meaning "no handler configured".
    """
    def afterAnimate(self):
        return self._config_get(None)
    def afterAnimate(self, value: Any):
        self._config(value, js_type=False)
    def checkboxClick(self):
        return self._config_get(None)
    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)
    def click(self):
        return self._config_get(None)
    def click(self, value: Any):
        self._config(value, js_type=False)
    def hide(self):
        return self._config_get(None)
    def hide(self, value: Any):
        self._config(value, js_type=False)
    def legendItemClick(self):
        return self._config_get(None)
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    def show(self):
        return self._config_get(None)
    def show(self, value: Any):
        self._config(value, js_type=False)
class PageParking(AbstractObject):
    """Graph-API object describing parking availability for a Page."""
    def __init__(self, api=None):
        super(PageParking, self).__init__()
        self._isPageParking = True
        self._api = api
    class Field(AbstractObject.Field):
        # Field names exposed by the API for this object type.
        lot = 'lot'
        street = 'street'
        valet = 'valet'
    # API-side types of the fields above.
    _field_types = {'lot': 'unsigned int', 'street': 'unsigned int', 'valet': 'unsigned int'}
    def _get_field_enum_info(cls):
        # NOTE(review): takes 'cls' — presumably a stripped @classmethod; confirm.
        # No enum-typed fields on this object, so the mapping is empty.
        field_enum_info = {}
        return field_enum_info
def sub_pub_grouping_map(graph: lg.Graph) -> Dict[(str, str)]:
    """Map each publishing grouping to the topics it publishes and the groupings subscribing to them.

    For every stream, the longest topic path not fed by the monitor node is
    treated as the upstream (publisher) edge; all other paths are subscribers.

    NOTE(review): the declared return type is Dict[str, str], but the values
    are actually dicts of the form {'topics': set, 'subscribers': set} —
    confirm against callers before tightening the annotation.
    """
    # Renamed the local accumulator: the original shadowed the function's own name.
    mapping: Dict[str, dict] = {}
    for stream in graph.__streams__.values():
        difference = set(stream.topic_paths).difference(LabgraphMonitorNode.in_edges)
        if difference:
            upstream_edge = max(difference, key=len)
            for edge in stream.topic_paths:
                if (edge != upstream_edge):
                    edge_path = '/'.join(edge.split('/')[:(- 1)])
                    edge_grouping = type(graph.__descendants__[edge_path]).__name__
                    topic_path = edge.split('/')[(- 1)]
                    group_path = '/'.join(upstream_edge.split('/')[:(- 1)])
                    grouping = type(graph.__descendants__[group_path]).__name__
                    # setdefault merges the original if/else insert-or-update branches.
                    entry = mapping.setdefault(grouping, {'topics': set(), 'subscribers': set()})
                    entry['topics'].add(topic_path)
                    entry['subscribers'].add(edge_grouping)
    return mapping
()
('--debug', is_flag=True, help='Enable debug mode.')
('--number', type=click.Choice(['one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'ten', 'eleven', 'twelve', 'thirteen', 'fourteen', 'fifteen', 'sixteen', 'seventeen', 'eighteen', 'nineteen', 'twenty', 'twenty-one', 'twenty-two', 'twenty-three', 'twenty-four', 'twenty-five', 'twenty-six', 'twenty-seven', 'twenty-eight', 'twenty-nine', 'thirty']), show_default=True, help='This click choice has loads of options.')
def cli(debug: bool, number: str) -> None:
    """Report whether debug mode is enabled (demo command for click options)."""
    mode = 'on' if debug else 'off'
    print(f'Debug mode is {mode}')
class AnalysisPlugin(AnalysisBasePlugin):
    """FACT analysis plugin: test binaries included in firmware for executability under QEMU."""
    NAME = 'qemu_exec'
    DESCRIPTION = 'test binaries for executability in QEMU and display help if available'
    VERSION = '0.5.2'
    DEPENDENCIES = ['file_type']
    FILE = __file__
    # MIME types considered candidates for execution.
    FILE_TYPES = ['application/x-executable', 'application/x-pie-executable', 'application/x-sharedlib']
    FACT_EXTRACTION_FOLDER_NAME = 'fact_extracted'
    # Substring of the "full" file-type string -> candidate QEMU binary suffixes.
    # Ordered so that more specific keys (e.g. 'MIPS32') match before generic ones ('MIPS').
    arch_to_bin_dict = OrderedDict([('aarch64', ['aarch64']), ('ARM', ['aarch64', 'arm', 'armeb']), ('MIPS32', ['mipsel', 'mips', 'mipsn32', 'mipsn32el']), ('MIPS64', ['mips64', 'mips64el']), ('MIPS', ['mipsel', 'mips', 'mips64', 'mips64el', 'mipsn32', 'mipsn32el']), ('80386', ['i386']), ('80486', ['x86_64', 'i386']), ('x86', ['x86_64', 'i386']), ('PowerPC', ['ppc', 'ppc64', 'ppc64le']), ('PPC', ['ppc', 'ppc64', 'ppc64le']), ('Renesas SH', ['sh4', 'sh4eb'])])
    # Root of the currently unpacked container (set per _process_container call).
    root_path = None
    def __init__(self, *args, unpacker=None, **kwargs):
        # Unpacker is injectable for testing.
        self.unpacker = (Unpacker() if (unpacker is None) else unpacker)
        super().__init__(*args, **kwargs)
    def process_object(self, file_object: FileObject) -> FileObject:
        """Entry point: flag an included binary directly, or unpack and test a container."""
        if (self.NAME not in file_object.processed_analysis):
            file_object.processed_analysis[self.NAME] = {}
            file_object.processed_analysis[self.NAME]['summary'] = []
        if (file_object.processed_analysis['file_type']['result']['mime'] in self.FILE_TYPES):
            return self._process_included_binary(file_object)
        return self._process_container(file_object)
    def _process_included_binary(self, file_object: FileObject) -> FileObject:
        # The binary was (or will be) tested as part of its parent container:
        # only record a flag here.
        file_object.processed_analysis[self.NAME]['parent_flag'] = True
        return file_object
    def _process_container(self, file_object: FileObject) -> FileObject:
        """Unpack the container and run QEMU tests on all relevant included files."""
        if (not file_object.files_included):
            return file_object
        tmp_dir = self.unpacker.unpack_fo(file_object)
        extracted_files_dir = self.unpacker.get_extracted_files_dir(tmp_dir.name)
        if extracted_files_dir.is_dir():
            try:
                self.root_path = self._find_root_path(extracted_files_dir)
                file_list = self._find_relevant_files(extracted_files_dir)
                if file_list:
                    file_object.processed_analysis[self.NAME]['files'] = {}
                    self._process_included_files(file_list, file_object)
            finally:
                # Always remove the temporary unpacking directory, even on error.
                tmp_dir.cleanup()
        return file_object
    def _find_relevant_files(self, extracted_files_dir: Path):
        """Collect (absolute-in-container path, full type string) for matching files."""
        result = []
        for path in safe_rglob(extracted_files_dir):
            if (path.is_file() and (not path.is_symlink())):
                file_type = get_file_type_from_path(path.absolute())
                if self._has_relevant_type(file_type):
                    result.append((f'/{path.relative_to(Path(self.root_path))}', file_type['full']))
        return result
    def _find_root_path(self, extracted_files_dir: Path) -> Path:
        # If the extractor created its own sub-folder, descend into it.
        root_path = extracted_files_dir
        if (root_path / self.FACT_EXTRACTION_FOLDER_NAME).is_dir():
            root_path /= self.FACT_EXTRACTION_FOLDER_NAME
        return root_path
    def _has_relevant_type(self, file_type: dict):
        # file_type may be None if type detection failed.
        if ((file_type is not None) and (file_type['mime'] in self.FILE_TYPES)):
            return True
        return False
    def _process_included_files(self, file_list, file_object):
        """Run the QEMU jobs in a thread pool and merge the shared results into the file object."""
        manager = Manager()
        executor = ThreadPoolExecutor(max_workers=8)
        results_dict = manager.dict()
        jobs = self._run_analysis_jobs(executor, file_list, file_object, results_dict)
        for future in jobs:
            future.result()  # re-raises exceptions from worker threads
        executor.shutdown(wait=False)
        self._enter_results(dict(results_dict), file_object)
        self._add_tag(file_object)
        manager.shutdown()
    def _run_analysis_jobs(self, executor: ThreadPoolExecutor, file_list: list[tuple[(str, str)]], file_object: FileObject, results_dict: dict) -> list[Future]:
        """Submit one QEMU job per file and matching architecture suffix."""
        jobs = []
        for (file_path, full_type) in file_list:
            uid = self._get_uid(file_path, self.root_path)
            if self._analysis_not_already_completed(file_object, uid):
                # Try every QEMU binary matching the detected architecture.
                for arch_suffix in self._find_arch_suffixes(full_type):
                    jobs.append(executor.submit(process_qemu_job, file_path, arch_suffix, self.root_path, results_dict, uid))
        return jobs
    def _analysis_not_already_completed(self, file_object, uid):
        # True if no result was recorded for this uid yet.
        return (uid not in file_object.processed_analysis[self.NAME]['files'])
    def _get_uid(file_path, root_path: Path):
        # NOTE(review): no 'self' parameter — presumably a stripped @staticmethod; confirm.
        return create_uid(get_binary_from_file(str((root_path / file_path[1:]))))
    def _find_arch_suffixes(self, full_type):
        # First matching architecture key wins (dict order is significant).
        for arch_string in self.arch_to_bin_dict:
            if (arch_string in full_type):
                return self.arch_to_bin_dict[arch_string]
        return []
    def _enter_results(self, results, file_object):
        """Store per-file results and derive the executable flag and summary."""
        tmp = file_object.processed_analysis[self.NAME]['files'] = results
        for uid in tmp:
            tmp[uid][EXECUTABLE] = _valid_execution_in_results(tmp[uid]['results'])
        file_object.processed_analysis['qemu_exec']['summary'] = self._get_summary(tmp)
    def _add_tag(self, file_object: FileObject):
        result = file_object.processed_analysis[self.NAME]['files']
        if any((result[uid][EXECUTABLE] for uid in result)):
            # Tag propagates to parent objects.
            self.add_analysis_tag(file_object=file_object, tag_name=self.NAME, value='QEMU executable', color=TagColor.BLUE, propagate=True)
    def _get_summary(results: dict):
        # NOTE(review): no 'self' parameter — presumably a stripped @staticmethod; confirm.
        if any((results[uid][EXECUTABLE] for uid in results)):
            return [EXECUTABLE]
        return []
def zeroInflow(x):
    """Return a zero boundary-condition function for inflow boundary points.

    On the inflow half of each edge of the unit square, returns a callable
    ``(x, t) -> 0.0``; everywhere else it falls through and returns None.
    """
    def zero_bc(x, t):
        return 0.0
    on_left = (x[0] == 0.0) and (x[1] <= 0.5)
    on_right = (x[0] == 1.0) and (x[1] >= 0.5)
    on_bottom = (x[1] == 0.0) and (x[0] >= 0.5)
    on_top = (x[1] == 1.0) and (x[0] <= 0.5)
    if on_left or on_right or on_bottom or on_top:
        return zero_bc
_icmp_type(ICMP_TIME_EXCEEDED)
class TimeExceeded(_ICMPv4Payload):
    """ICMPv4 Time Exceeded message payload.

    Wire layout is ``'!xBxx'``: a reserved byte, a one-byte data length,
    two more reserved bytes, then the optional original-datagram excerpt.
    """
    _PACK_STR = '!xBxx'
    _MIN_LEN = struct.calcsize(_PACK_STR)

    def __init__(self, data_len=0, data=None):
        # data_len must fit in the single length byte.
        if not (0 <= data_len <= 255):
            raise ValueError('Specified data length (%d) is invalid.' % data_len)
        self.data_len = data_len
        self.data = data

    def parser(cls, buf, offset):
        """Parse a TimeExceeded payload from *buf* starting at *offset*.

        NOTE(review): takes 'cls' — presumably a stripped @classmethod; confirm.
        """
        (data_len,) = struct.unpack_from(cls._PACK_STR, buf, offset)
        msg = cls(data_len)
        offset += cls._MIN_LEN
        if len(buf) > offset:
            # Whatever follows the fixed header is the data excerpt.
            msg.data = buf[offset:]
        return msg

    def serialize(self):
        """Serialize the fixed header plus the trailing data, if any."""
        packed = bytearray(struct.pack(TimeExceeded._PACK_STR, self.data_len))
        if self.data is not None:
            packed += self.data
        return packed

    def __len__(self):
        trailing = len(self.data) if self.data is not None else 0
        return self._MIN_LEN + trailing
def random_range_int(low, high, _randomstate_lambda=None):
    """Decorator factory: call the wrapped method with a value drawn uniformly
    from [low, high) instead of a caller-supplied argument.

    NOTE(review): despite the name, ``uniform`` yields a float, not an int —
    confirm whether truncation is expected downstream.
    NOTE(review): the bare '(func)' and '_synthetic()' expressions below look
    like decorator lines whose '@' was stripped in this copy — confirm.
    """
    def decorator(func):
        (func)
        _synthetic()
        def decorator_inner(self, *args, **kw):
            # Resolve the random state for this instance (optionally via the lambda).
            randomstate = __get_random_state(self, _randomstate_lambda)
            value = randomstate.uniform(low=low, high=high, size=1)[0]
            return func(self, value)
        return decorator_inner
    return decorator
class TestDialogues():
    """Tests for creating agent/environment gym dialogues."""
    def setup_class(cls):
        # NOTE(review): takes 'cls' — presumably a stripped @classmethod; confirm.
        cls.agent_addr = 'agent address'
        cls.env_addr = 'env address'
        cls.agent_dialogues = AgentDialogues(cls.agent_addr)
        cls.env_dialogues = EnvironmentDialogues(cls.env_addr)
    def test_create_self_initiated(self):
        """A self-initiated dialogue is a GymDialogue with the AGENT role."""
        result = self.agent_dialogues._create_self_initiated(dialogue_opponent_addr=self.env_addr, dialogue_reference=(str(0), ''), role=GymDialogue.Role.AGENT)
        assert isinstance(result, GymDialogue)
        assert (result.role == GymDialogue.Role.AGENT), 'The role must be Agent.'
    def test_create_opponent_initiated(self):
        """An opponent-initiated dialogue is a GymDialogue with the AGENT role."""
        result = self.agent_dialogues._create_opponent_initiated(dialogue_opponent_addr=self.env_addr, dialogue_reference=(str(0), ''), role=GymDialogue.Role.AGENT)
        assert isinstance(result, GymDialogue)
        assert (result.role == GymDialogue.Role.AGENT), 'The role must be agent.'
_users_without_mfa.command()
_context
def execute(ctx):
    """Interactive menu: check enrolled MFA factors for Okta users.

    Option 1 loads previously harvested users from a JSON file, option 2
    harvests all users via the Okta API first, option 0 exits. Loops until
    a terminal option succeeds.
    """
    options = '[*] Available options\n[1] Load harvested users from a json file and check their enrolled MFA factors\n[2] Harvest all users and check their enrolled MFA factors\n[0] Exit this menu\n[*] Choose from the above options'
    while True:
        value = click.prompt(options, type=int)
        if (value == 1):
            file_path = Path(click.prompt('[*] Enter full path of file containing harvested Okta users'))
            if file_path.exists():
                msg = f'Attempting to check MFA factors for users in file, {file_path}'
                LOGGER.info(msg)
                # Audit trail: mirror every action into the ES index.
                index_event(ctx.obj.es, module=__name__, event_type='INFO', event=msg)
                click.echo(f'[*] {msg}')
                users = load_json_file(file_path)
                check_enrolled_factors(ctx, users)
                return
            else:
                # Bad path: report and re-prompt (stay in the loop).
                msg = f'File not found, {file_path}'
                LOGGER.error(msg)
                index_event(ctx.obj.es, module=__name__, event_type='ERROR', event=msg)
                click.secho(f'[!] {msg}', fg='red')
        elif (value == 2):
            if click.confirm('[*] Do you want to attempt to harvest information for all users? This may take a while to avoid exceeding API rate limits', default=True):
                msg = 'Attempting to harvest all Okta users'
                LOGGER.info(msg)
                index_event(ctx.obj.es, module=__name__, event_type='INFO', event=msg)
                click.echo(f'[*] {msg}')
                users = ctx.obj.okta.get_users(ctx)
                check_enrolled_factors(ctx, users)
                return
        elif (value == 0):
            return
        else:
            click.secho('[!] Invalid option selected', fg='red')
def burn_key_digest(esp, efuses, args):
    """Burn the (truncated) secure-boot v2 public-key digest into BLOCK_KEY0."""
    digest = espsecure._digest_sbv2_public_key(args.keyfile)[:16]
    # Route the burn through the generic key-burning path.
    args.keypurpose = ['SECURE_BOOT_DIGEST']
    args.block = ['BLOCK_KEY0']
    expected_bytes = efuses['BLOCK_KEY0_HI_128'].bit_len // 8
    if len(digest) != expected_bytes:
        raise esptool.FatalError('Incorrect digest size %d. Digest must be %d bytes (%d bits) of raw binary key data.' % (len(digest), expected_bytes, expected_bytes * 8))
    burn_key(esp, efuses, args, digest=[digest])
def test_array_type_records():
    """Round-trip an array-of-records schema and check records survive unchanged."""
    item_schema = {'type': 'record', 'name': 'test_array_type', 'fields': [{'name': 'field1', 'type': 'string'}, {'name': 'field2', 'type': 'int'}]}
    schema = {'type': 'array', 'items': item_schema}
    original = [[{'field1': 'foo', 'field2': 1}], [{'field1': 'bar', 'field2': 2}]]
    assert roundtrip(schema, original) == original
.sandbox_test
def test_walk_local_copy_to_s3(source_folder):
    """Copy a crawled local directory into a new remote FlyteDirectory and
    verify the expected number of files arrives under the sandbox S3 prefix."""
    dc = Config.for_sandbox().data_config
    # Random folder name so repeated runs do not collide.
    explicit_empty_folder = UUID(int=random.getrandbits(128)).hex
    raw_output_path = f's3://my-s3-bucket/testdata/{explicit_empty_folder}'
    provider = FileAccessProvider(local_sandbox_dir='/tmp/unittest', raw_output_prefix=raw_output_path, data_config=dc)
    ctx = FlyteContextManager.current_context()
    local_fd = FlyteDirectory(path=source_folder)
    local_fd_crawl = local_fd.crawl()
    # Materialize the generator so it can be iterated inside the context below.
    local_fd_crawl = [x for x in local_fd_crawl]
    with FlyteContextManager.with_context(ctx.with_file_access(provider)):
        fd = FlyteDirectory.new_remote()
        assert (raw_output_path in fd.path)
        # Copy each crawled file into the remote directory.
        for (root_path, suffix) in local_fd_crawl:
            new_file = fd.new_file(suffix)
            with open(os.path.join(root_path, suffix), 'rb') as r:
                with new_file.open('w') as w:
                    print(f'Writing, t {type(w)} p {new_file.path} |{suffix}|')
                    w.write(str(r.read()))
        new_crawl = fd.crawl()
        new_suffixes = [y for (x, y) in new_crawl]
        # NOTE(review): assumes the source_folder fixture provides exactly
        # two files — confirm against the fixture definition.
        assert (len(new_suffixes) == 2)
class ParentFieldListFilter(ChoicesFieldListFilter):
    """Admin list filter offering every object that is a parent of at least one other object."""
    def __init__(self, f, request, params, model, model_admin, field_path=None):
        super().__init__(f, request, params, model, model_admin, field_path)
        # Distinct ids of all objects referenced as someone's parent.
        parent_ids = model.objects.exclude(parent=None).values_list('parent__id', flat=True).order_by('parent__id').distinct()
        parents = model.objects.filter(pk__in=parent_ids).values_list('pk', 'title', 'level')
        # Indent titles by tree level and shorten long ones for display.
        self.lookup_choices = [(pk, '{}{}'.format((' ' * level), shorten_string(title, max_length=25))) for (pk, title, level) in parents]
    def choices(self, cl):
        """Yield the 'All' choice followed by one choice per parent object."""
        (yield {'selected': (self.lookup_val is None), 'query_string': cl.get_query_string({}, [self.lookup_kwarg]), 'display': _('All')})
        for (pk, title) in self.lookup_choices:
            # '0' fallback keeps int() safe when no filter value is selected.
            (yield {'selected': (pk == int((self.lookup_val or '0'))), 'query_string': cl.get_query_string({self.lookup_kwarg: pk}), 'display': mark_safe(smart_str(title))})
    def title(self):
        # NOTE(review): looks like a stripped @property — confirm upstream.
        return _('Parent')
def test_handle_timer_canceled(decision_context, workflow_clock):
    """handle_timer_canceled must forward the original event to the workflow clock."""
    history_event = HistoryEvent()
    decision_context.handle_timer_canceled(history_event)
    workflow_clock.handle_timer_canceled.assert_called_once()
    call_args, _kwargs = workflow_clock.handle_timer_canceled.call_args_list[0]
    assert call_args[0] is history_event
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.