code stringlengths 281 23.7M |
|---|
def test_global_ptr():
    """Decompile the 64-bit `globals` sample and verify global-pointer output.

    Runs `decompile.py` on the sample binary for function `global_ptr_add`
    and checks that both globals appear once and are used via local vars.
    """
    base_args = ['python', 'decompile.py', 'tests/samples/bin/systemtests/64/0/globals']
    args1 = base_args + ['global_ptr_add']
    output = str(subprocess.run(args1, check=True, capture_output=True).stdout)
    assert output.count('c = 0x0') == 1
    assert output.count('d = 0x0') == 1
    # BUG FIX: the three checks below were bare expressions — their results
    # were discarded, so they asserted nothing.
    assert len(re.findall(r'var_[0-9]+= d', output)) == 1
    assert len(re.findall(r'var_[0-9]+= c', output)) == 1
    # BUG FIX: parentheses must be escaped to match literally; unescaped they
    # formed a capture group and the pattern never matched a call site.
    assert len(re.findall(r'_add\(var_[0-9]+, var_[0-9]+\)', output)) == 1
class DcSerializer(s.InstanceSerializer):
    """Serializer for Dc (datacenter) objects.

    The `validate_*` hooks record ownership/group changes on the serializer
    instance so the calling view can react after a successful save.
    """

    _model_ = Dc
    _update_fields_ = ('alias', 'owner', 'access', 'desc', 'site', 'groups')
    _default_fields_ = ('name', 'alias', 'owner', 'site')

    # Change-tracking attributes, filled in during validation.
    owner_changed = None
    groups_changed = None
    groups_added = None
    groups_removed = None
    removed_users = None

    name = s.RegexField('^[A-Za-z0-9][A-Za-z0-9\\._-]*$', max_length=16)
    alias = s.SafeCharField(max_length=32)
    site = s.RegexField('^[a-z0-9][a-z0-9\\.:-]+[a-z0-9]$', max_length=260, min_length=1)
    owner = s.SlugRelatedField(slug_field='username', queryset=User.objects, read_only=False, required=False)
    access = s.IntegerChoiceField(choices=Dc.ACCESS, default=Dc.PRIVATE)
    desc = s.SafeCharField(max_length=128, required=False)
    created = s.DateTimeField(read_only=True, required=False)

    def __init__(self, request, instance, *args, **kwargs):
        """For single-object use, bind the owner field to the requesting user."""
        super(DcSerializer, self).__init__(request, instance, *args, **kwargs)
        if not kwargs.get('many', False):
            self.fields['owner'].default = request.user.username
            self.fields['owner'].queryset = get_owners(request, all=True)

    def validate_alias(self, attrs, source):
        # Delegate to the module-level alias validator when the field was sent.
        if source in attrs:
            validate_alias(self.object, attrs[source])
        return attrs

    def validate_site(self, attrs, source):
        # Reject a site hostname already used by a different datacenter.
        if source in attrs:
            value = attrs[source]
            unchanged = self.object.pk and self.object.site == value
            if not unchanged and Dc.objects.filter(site__iexact=value).exists():
                raise s.ValidationError(_('This site hostname is already in use. Please supply a different site hostname.'))
        return attrs

    def validate_access(self, attrs, source):
        # The default datacenter must stay publicly accessible.
        if source in attrs:
            if self.object.pk and self.object.is_default() and int(attrs[source]) != Dc.PUBLIC:
                raise s.ValidationError(_('Default datacenter must be public.'))
        return attrs

    def validate_owner(self, attrs, source):
        if source not in attrs:
            return attrs
        user = attrs[source]
        if user is None:
            # No owner given: keep the current one on update, or default to
            # the requesting user on create.
            if self.object.pk:
                del attrs['owner']
            else:
                attrs['owner'] = self.request.user
        elif self.object.pk:
            if self.object.is_default() and not user.is_staff:
                raise s.ValidationError(_('Default datacenter must be owned by user with SuperAdmin rights.'))
            if user != self.object.owner:
                # Remember the previous owner and run the shared owner-change
                # validation (module-level validate_owner).
                self.owner_changed = self.object.owner
                validate_owner(self.object, user, _('Datacenter'))
        return attrs

    def validate_groups(self, attrs, source):
        # Track which roles were added/removed and which users lose access.
        if source in attrs and self.object.pk:
            current_roles = set(self.object.roles.all())
            new_roles = set(attrs[source])
            if current_roles != new_roles:
                self.groups_added = new_roles - current_roles
                self.groups_removed = current_roles - new_roles
                self.groups_changed = current_roles.symmetric_difference(new_roles)
                self.removed_users = User.objects.distinct().filter(roles__in=self.groups_removed)
        return attrs
class OptionSeriesColumnOnpointConnectoroptions(Options):
    """Highcharts ``series.column.onPoint.connectorOptions`` option wrapper.

    NOTE(review): the original defined each getter and its setter as two
    plain methods with the same name, so the second definition silently
    replaced the first and the getters were unreachable. They are restored
    here as property getter/setter pairs, which is the obvious intent.
    """

    @property
    def dashstyle(self):
        # No framework default (None).
        return self._config_get(None)

    @dashstyle.setter
    def dashstyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def stroke(self):
        return self._config_get(None)

    @stroke.setter
    def stroke(self, text: str):
        self._config(text, js_type=False)

    @property
    def width(self):
        # Default connector width is 1.
        return self._config_get(1)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
class HealthCheckError(ABC):
    """Base class for health-check errors.

    Subclasses set `code` (a short machine-readable identifier) and override
    `process` to react to the error.
    """

    # Machine-readable error code; overridden by subclasses.
    code: Optional[str] = None

    def __init__(self, description: str):
        # Human-readable description of what failed.
        self.description = description

    async def process(self, model_instance_storage: ModelInstancesStorage, model_instance: dm.ModelInstance, alert_manager: 'BaseAlertManager'):
        """React to this error for the given model instance.

        NOTE(review): the original had no body here, which is a syntax
        error; raising NotImplementedError preserves the abstract intent
        while keeping the class instantiable for subclasses that override.
        """
        raise NotImplementedError

    def __str__(self):
        return f'[{self.code}]: {self.description}'

    def __repr__(self):
        return str(self.code)
def anda():
    """Advance the snake one step and reschedule itself while the game runs.

    Operates on module-level state: `cobra` (list of [col, row] segments,
    head first) and the canvas-like object `c` (direction, apple position,
    drawn rectangle ids). Cells are 20px squares.
    """
    # Shift every body segment into the position of the segment ahead of it.
    for i in range(len(cobra) - 1, 0, -1):
        cobra[i][0] = cobra[i - 1][0]
        cobra[i][1] = cobra[i - 1][1]
    # Move the head according to the current direction
    # (0=left, 1=up, 2=right, 3=down in grid coordinates).
    if c.direcao == 0:
        cobra[0][0] -= 1
    elif c.direcao == 1:
        cobra[0][1] -= 1
    elif c.direcao == 2:
        cobra[0][0] += 1
    elif c.direcao == 3:
        cobra[0][1] += 1
    # Head reached the apple: grow by one segment, respawn and redraw apple.
    if cobra[0] == c.maca:
        cobra.append([50, 50])
        c.maca = [1 + random.randint(0, 27), 1 + random.randint(0, 21)]
        c.delete(c.m)
        c.m = c.create_rectangle(20 * c.maca[0], 20 * c.maca[1],
                                 20 * (c.maca[0] + 1), 20 * (c.maca[1] + 1),
                                 fill='red')
    # Redraw the whole snake from scratch.
    for gomo in c.r:
        c.delete(gomo)
    for gomo in cobra:
        c.r.append(c.create_rectangle(20 * gomo[0], 20 * gomo[1],
                                      20 * (gomo[0] + 1), 20 * (gomo[1] + 1),
                                      fill='blue'))
    # Self-collision: the head must not share a cell with any body segment.
    vivo = all(cobra[0][0] != seg[0] or cobra[0][1] != seg[1] for seg in cobra[1:])
    # Keep moving while inside the playing field and alive.
    if 0 < cobra[0][0] < 29 and 0 < cobra[0][1] < 23 and vivo:
        t = Timer(0.3, anda)
        t.start()
def should_message_be_processed(event: Dict[str, Any], rules: List[str], ignore_rules: List[str]) -> ProcessingResult:
    """Decide whether an event matches the configured processing rules.

    Ignore rules are evaluated first and veto processing; otherwise the
    first matching rule accepts the event. Rule-evaluation failures are
    logged and collected into the returned result.

    NOTE(review): rules are executed with eval() against the flattened
    event — they must come from trusted configuration only.
    """
    flat_event = flatten_json(event)
    user = event['userIdentity']
    event_name = event['eventName']
    logger.debug({'Rules:': rules, 'ignore_rules': ignore_rules})
    logger.debug({'Flattened event': flat_event})
    errors = []
    for ignore_rule in ignore_rules:
        try:
            matched = eval(ignore_rule, {}, {'event': flat_event}) is True
        except Exception as exc:
            logger.exception({'Event parsing failed': {'error': exc, 'ignore_rule': ignore_rule, 'flat_event': flat_event}})
            errors.append({'error': exc, 'rule': ignore_rule})
            continue
        if matched:
            logger.info({'Event matched ignore rule and will not be processed': {'ignore_rule': ignore_rule, 'flat_event': flat_event}})
            return ProcessingResult(False, errors)
    for rule in rules:
        try:
            matched = eval(rule, {}, {'event': flat_event}) is True
        except Exception as exc:
            logger.exception({'Event parsing failed': {'error': exc, 'rule': rule, 'flat_event': flat_event}})
            errors.append({'error': exc, 'rule': rule})
            continue
        if matched:
            logger.info({'Event matched rule and will be processed': {'rule': rule, 'flat_event': flat_event}})
            return ProcessingResult(True, errors)
    logger.info({'Event did not match any rules and will not be processed': {'event': event_name, 'user': user}})
    return ProcessingResult(False, errors)
class ComputationAPI(ContextManager['ComputationAPI'], StackManipulationAPI):
    """Abstract interface for a single frame of EVM computation.

    Bundles the state, message and transaction context the frame executes
    against with gas accounting, memory/stack access, child computations,
    log entries and self-destruct bookkeeping. All methods are stubs to be
    implemented by a concrete computation class.
    """

    # -- collaborators and per-computation state ---------------------------
    logger: ExtendedDebugLogger
    state: 'StateAPI'
    msg: MessageAPI
    transaction_context: TransactionContextAPI
    code: CodeStreamAPI
    children: List['ComputationAPI']
    return_data: bytes = b''
    accounts_to_delete: Dict[Address, Address]
    _memory: MemoryAPI
    _stack: StackAPI
    _gas_meter: GasMeterAPI
    _error: VMError
    _output: bytes = b''
    _log_entries: List[Tuple[int, Address, Tuple[int, ...], bytes]]
    opcodes: Dict[int, OpcodeAPI]
    _precompiles: Dict[Address, Callable[['ComputationAPI'], 'ComputationAPI']]

    def __init__(self, state: 'StateAPI', message: MessageAPI, transaction_context: TransactionContextAPI) -> None:
        ...

    def _configure_gas_meter(self) -> GasMeterAPI:
        ...

    # -- status ------------------------------------------------------------
    def is_origin_computation(self) -> bool:
        ...

    def is_success(self) -> bool:
        ...

    def is_error(self) -> bool:
        ...

    # NOTE(review): `error` (and `output` below) were each defined twice as
    # plain same-named methods, so the setter silently replaced the getter.
    # Restored as property getter/setter pairs, which is the evident intent.
    @property
    def error(self) -> VMError:
        ...

    @error.setter
    def error(self, value: VMError) -> None:
        raise NotImplementedError

    def raise_if_error(self) -> None:
        ...

    def should_burn_gas(self) -> bool:
        ...

    def should_return_gas(self) -> bool:
        ...

    def should_erase_return_data(self) -> bool:
        ...

    # -- memory ------------------------------------------------------------
    def extend_memory(self, start_position: int, size: int) -> None:
        ...

    def memory_write(self, start_position: int, size: int, value: bytes) -> None:
        ...

    def memory_read(self, start_position: int, size: int) -> memoryview:
        ...

    def memory_read_bytes(self, start_position: int, size: int) -> bytes:
        ...

    # -- gas accounting ------------------------------------------------------
    def get_gas_meter(self) -> GasMeterAPI:
        ...

    def consume_gas(self, amount: int, reason: str) -> None:
        ...

    def return_gas(self, amount: int) -> None:
        ...

    def refund_gas(self, amount: int) -> None:
        ...

    def get_gas_used(self) -> int:
        ...

    def get_gas_remaining(self) -> int:
        ...

    # -- stack shortcuts -----------------------------------------------------
    def stack_swap(self, position: int) -> None:
        ...

    def stack_dup(self, position: int) -> None:
        ...

    # -- output --------------------------------------------------------------
    @property
    def output(self) -> bytes:
        ...

    @output.setter
    def output(self, value: bytes) -> None:
        raise NotImplementedError

    # -- opcodes and precompiles ---------------------------------------------
    def precompiles(self) -> Dict[Address, Callable[['ComputationAPI'], None]]:
        ...

    # NOTE(review): methods taking `cls` were missing @classmethod; restored.
    @classmethod
    def get_precompiles(cls) -> Dict[Address, Callable[['ComputationAPI'], None]]:
        ...

    def get_opcode_fn(self, opcode: int) -> OpcodeAPI:
        ...

    # -- child computations ----------------------------------------------------
    def prepare_child_message(self, gas: int, to: Address, value: int, data: BytesOrView, code: bytes, **kwargs: Any) -> MessageAPI:
        ...

    def apply_child_computation(self, child_msg: MessageAPI) -> 'ComputationAPI':
        ...

    def generate_child_computation(self, child_msg: MessageAPI) -> 'ComputationAPI':
        ...

    def add_child_computation(self, child_computation: 'ComputationAPI') -> None:
        ...

    # -- refunds, self-destructs and logs ---------------------------------------
    def get_gas_refund(self) -> int:
        ...

    def register_account_for_deletion(self, beneficiary: Address) -> None:
        ...

    def get_accounts_for_deletion(self) -> Tuple[Tuple[Address, Address], ...]:
        ...

    def add_log_entry(self, account: Address, topics: Tuple[int, ...], data: bytes) -> None:
        ...

    def get_raw_log_entries(self) -> Tuple[Tuple[int, bytes, Tuple[int, ...], bytes], ...]:
        ...

    def get_log_entries(self) -> Tuple[Tuple[bytes, Tuple[int, ...], bytes], ...]:
        ...

    # -- entry points ------------------------------------------------------------
    @classmethod
    def apply_message(cls, state: 'StateAPI', message: MessageAPI, transaction_context: TransactionContextAPI) -> 'ComputationAPI':
        ...

    @classmethod
    def apply_create_message(cls, state: 'StateAPI', message: MessageAPI, transaction_context: TransactionContextAPI) -> 'ComputationAPI':
        ...

    @classmethod
    def apply_computation(cls, state: 'StateAPI', message: MessageAPI, transaction_context: TransactionContextAPI) -> 'ComputationAPI':
        ...
def extractStitchietranslatorsWordpressCom(item):
    """Parser for release items from stitchietranslators.wordpress.com.

    Returns a release message for known series, None for previews or items
    without a chapter/volume, and False when nothing matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # First try the feed tags.
    tagmap = [
        ("the royal's cute little wife", "The Royal's Cute Little Wife", 'translated'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Fall back to matching on the post title.
    titlemap = [
        ('All The Male Protagonists Have Blackened Chapter ', 'All The Male Leads Have Blackened', 'translated'),
        ('All The Male Leads Have Blackened Chapter ', 'All The Male Leads Have Blackened', 'translated'),
        ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
        ('The Royals Cute Little Wife: Chapter ', "The Royal's Cute Little Wife", 'translated'),
        ('Master of Dungeon', 'Master of Dungeon', 'oel'),
    ]
    lowered_title = item['title'].lower()
    for titlecomponent, name, tl_type in titlemap:
        if titlecomponent.lower() in lowered_title:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class CatClient(NamespacedClient):
@_rewrite_parameters()
def aliases(
    self,
    *,
    name: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    error_trace: t.Optional[bool] = None,
    expand_wildcards: t.Optional[t.Union[t.Sequence[t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str]], t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str]]] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    format: t.Optional[str] = None,
    help: t.Optional[bool] = None,
    h: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    human: t.Optional[bool] = None,
    local: t.Optional[bool] = None,
    master_timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    pretty: t.Optional[bool] = None,
    s: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    v: t.Optional[bool] = None,
) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
    """``GET /_cat/aliases`` — list index aliases, optionally filtered by name.

    NOTE(review): ``_rewrite_parameters()`` was a bare no-op call in the
    original (a decompilation artifact); restored as the decorator it is in
    the elasticsearch-py client.
    """
    if name not in SKIP_IN_PATH:
        __path = f'/_cat/aliases/{_quote(name)}'
    else:
        __path = '/_cat/aliases'
    # Keep only the query parameters the caller supplied, in the original order.
    __query: t.Dict[str, t.Any] = {
        param: arg
        for param, arg in (
            ('error_trace', error_trace),
            ('expand_wildcards', expand_wildcards),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if arg is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def allocation(
    self,
    *,
    node_id: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    bytes: t.Optional[t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str]] = None,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    format: t.Optional[str] = None,
    h: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    help: t.Optional[bool] = None,
    human: t.Optional[bool] = None,
    local: t.Optional[bool] = None,
    master_timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    pretty: t.Optional[bool] = None,
    s: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    v: t.Optional[bool] = None,
) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
    """``GET /_cat/allocation`` — shard allocation per node.

    NOTE(review): restored the stripped ``@_rewrite_parameters()`` decorator.
    """
    if node_id not in SKIP_IN_PATH:
        __path = f'/_cat/allocation/{_quote(node_id)}'
    else:
        __path = '/_cat/allocation'
    # Keep only the query parameters the caller supplied, in the original order.
    __query: t.Dict[str, t.Any] = {
        param: arg
        for param, arg in (
            ('bytes', bytes),
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if arg is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def component_templates(
    self,
    *,
    name: t.Optional[str] = None,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    format: t.Optional[str] = None,
    h: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    help: t.Optional[bool] = None,
    human: t.Optional[bool] = None,
    local: t.Optional[bool] = None,
    master_timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    pretty: t.Optional[bool] = None,
    s: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    v: t.Optional[bool] = None,
) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
    """``GET /_cat/component_templates`` — list component templates.

    NOTE(review): restored the stripped ``@_rewrite_parameters()`` decorator.
    """
    if name not in SKIP_IN_PATH:
        __path = f'/_cat/component_templates/{_quote(name)}'
    else:
        __path = '/_cat/component_templates'
    # Keep only the query parameters the caller supplied, in the original order.
    __query: t.Dict[str, t.Any] = {
        param: arg
        for param, arg in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if arg is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def count(
    self,
    *,
    index: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    format: t.Optional[str] = None,
    h: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    help: t.Optional[bool] = None,
    human: t.Optional[bool] = None,
    local: t.Optional[bool] = None,
    master_timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    pretty: t.Optional[bool] = None,
    s: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    v: t.Optional[bool] = None,
) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
    """``GET /_cat/count`` — document count, cluster-wide or per index.

    NOTE(review): restored the stripped ``@_rewrite_parameters()`` decorator.
    """
    if index not in SKIP_IN_PATH:
        __path = f'/_cat/count/{_quote(index)}'
    else:
        __path = '/_cat/count'
    # Keep only the query parameters the caller supplied, in the original order.
    __query: t.Dict[str, t.Any] = {
        param: arg
        for param, arg in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if arg is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def fielddata(
    self,
    *,
    fields: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    bytes: t.Optional[t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str]] = None,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    format: t.Optional[str] = None,
    h: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    help: t.Optional[bool] = None,
    human: t.Optional[bool] = None,
    local: t.Optional[bool] = None,
    master_timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    pretty: t.Optional[bool] = None,
    s: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    v: t.Optional[bool] = None,
) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
    """``GET /_cat/fielddata`` — fielddata memory usage, optionally per field.

    NOTE(review): restored the stripped ``@_rewrite_parameters()`` decorator.
    """
    if fields not in SKIP_IN_PATH:
        __path = f'/_cat/fielddata/{_quote(fields)}'
    else:
        __path = '/_cat/fielddata'
    # Keep only the query parameters the caller supplied, in the original order.
    __query: t.Dict[str, t.Any] = {
        param: arg
        for param, arg in (
            ('bytes', bytes),
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if arg is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def health(
    self,
    *,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    format: t.Optional[str] = None,
    h: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    help: t.Optional[bool] = None,
    human: t.Optional[bool] = None,
    local: t.Optional[bool] = None,
    master_timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    pretty: t.Optional[bool] = None,
    s: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    time: t.Optional[t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str]] = None,
    ts: t.Optional[bool] = None,
    v: t.Optional[bool] = None,
) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
    """``GET /_cat/health`` — cluster health summary.

    NOTE(review): restored the stripped ``@_rewrite_parameters()`` decorator.
    """
    __path = '/_cat/health'
    # Keep only the query parameters the caller supplied, in the original order.
    __query: t.Dict[str, t.Any] = {
        param: arg
        for param, arg in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('time', time),
            ('ts', ts),
            ('v', v),
        )
        if arg is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def help(
    self,
    *,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    format: t.Optional[str] = None,
    h: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    help: t.Optional[bool] = None,
    human: t.Optional[bool] = None,
    local: t.Optional[bool] = None,
    master_timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    pretty: t.Optional[bool] = None,
    s: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    v: t.Optional[bool] = None,
) -> TextApiResponse:
    """``GET /_cat`` — list the available cat endpoints (plain text only).

    NOTE(review): restored the stripped ``@_rewrite_parameters()`` decorator.
    """
    __path = '/_cat'
    # Keep only the query parameters the caller supplied, in the original order.
    __query: t.Dict[str, t.Any] = {
        param: arg
        for param, arg in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if arg is not None
    }
    # Unlike the other cat endpoints, this one only ever returns plain text.
    __headers = {'accept': 'text/plain'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def indices(
    self,
    *,
    index: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    bytes: t.Optional[t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str]] = None,
    error_trace: t.Optional[bool] = None,
    expand_wildcards: t.Optional[t.Union[t.Sequence[t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str]], t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str]]] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    format: t.Optional[str] = None,
    h: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    health: t.Optional[t.Union["t.Literal['green', 'red', 'yellow']", str]] = None,
    help: t.Optional[bool] = None,
    human: t.Optional[bool] = None,
    include_unloaded_segments: t.Optional[bool] = None,
    local: t.Optional[bool] = None,
    master_timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    pretty: t.Optional[bool] = None,
    pri: t.Optional[bool] = None,
    s: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    time: t.Optional[t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str]] = None,
    v: t.Optional[bool] = None,
) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
    """``GET /_cat/indices`` — per-index stats, optionally filtered by index.

    NOTE(review): restored the stripped ``@_rewrite_parameters()`` decorator.
    """
    if index not in SKIP_IN_PATH:
        __path = f'/_cat/indices/{_quote(index)}'
    else:
        __path = '/_cat/indices'
    # Keep only the query parameters the caller supplied, in the original order.
    __query: t.Dict[str, t.Any] = {
        param: arg
        for param, arg in (
            ('bytes', bytes),
            ('error_trace', error_trace),
            ('expand_wildcards', expand_wildcards),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('health', health),
            ('help', help),
            ('human', human),
            ('include_unloaded_segments', include_unloaded_segments),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('pri', pri),
            ('s', s),
            ('time', time),
            ('v', v),
        )
        if arg is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def master(
    self,
    *,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    format: t.Optional[str] = None,
    h: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    help: t.Optional[bool] = None,
    human: t.Optional[bool] = None,
    local: t.Optional[bool] = None,
    master_timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    pretty: t.Optional[bool] = None,
    s: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    v: t.Optional[bool] = None,
) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
    """``GET /_cat/master`` — information about the elected master node.

    NOTE(review): restored the stripped ``@_rewrite_parameters()`` decorator.
    """
    __path = '/_cat/master'
    # Keep only the query parameters the caller supplied, in the original order.
    __query: t.Dict[str, t.Any] = {
        param: arg
        for param, arg in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if arg is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def ml_data_frame_analytics(
    self,
    *,
    id: t.Optional[str] = None,
    allow_no_match: t.Optional[bool] = None,
    bytes: t.Optional[t.Union["t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str]] = None,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    format: t.Optional[str] = None,
    h: t.Optional[t.Union[t.Sequence[t.Union["t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", str]], t.Union["t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", str]]] = None,
    help: t.Optional[bool] = None,
    human: t.Optional[bool] = None,
    local: t.Optional[bool] = None,
    master_timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    pretty: t.Optional[bool] = None,
    s: t.Optional[t.Union[t.Sequence[t.Union["t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", str]], t.Union["t.Literal['assignment_explanation', 'create_time', 'description', 'dest_index', 'failure_reason', 'id', 'model_memory_limit', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'progress', 'source_index', 'state', 'type', 'version']", str]]] = None,
    time: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    v: t.Optional[bool] = None,
) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
    """``GET /_cat/ml/data_frame/analytics`` — data frame analytics jobs.

    NOTE(review): restored the stripped ``@_rewrite_parameters()`` decorator.
    """
    if id not in SKIP_IN_PATH:
        __path = f'/_cat/ml/data_frame/analytics/{_quote(id)}'
    else:
        __path = '/_cat/ml/data_frame/analytics'
    # Keep only the query parameters the caller supplied, in the original order.
    __query: t.Dict[str, t.Any] = {
        param: arg
        for param, arg in (
            ('allow_no_match', allow_no_match),
            ('bytes', bytes),
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('time', time),
            ('v', v),
        )
        if arg is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def ml_datafeeds(
    self,
    *,
    datafeed_id: t.Optional[str] = None,
    allow_no_match: t.Optional[bool] = None,
    error_trace: t.Optional[bool] = None,
    filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
    format: t.Optional[str] = None,
    h: t.Optional[t.Union[t.Sequence[t.Union["t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", str]], t.Union["t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", str]]] = None,
    help: t.Optional[bool] = None,
    human: t.Optional[bool] = None,
    local: t.Optional[bool] = None,
    master_timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None,
    pretty: t.Optional[bool] = None,
    s: t.Optional[t.Union[t.Sequence[t.Union["t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", str]], t.Union["t.Literal['ae', 'bc', 'id', 'na', 'ne', 'ni', 'nn', 's', 'sba', 'sc', 'seah', 'st']", str]]] = None,
    time: t.Optional[t.Union["t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str]] = None,
    v: t.Optional[bool] = None,
) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
    """``GET /_cat/ml/datafeeds`` — machine-learning datafeeds.

    NOTE(review): restored the stripped ``@_rewrite_parameters()`` decorator.
    """
    if datafeed_id not in SKIP_IN_PATH:
        __path = f'/_cat/ml/datafeeds/{_quote(datafeed_id)}'
    else:
        __path = '/_cat/ml/datafeeds'
    # Keep only the query parameters the caller supplied, in the original order.
    __query: t.Dict[str, t.Any] = {
        param: arg
        for param, arg in (
            ('allow_no_match', allow_no_match),
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('time', time),
            ('v', v),
        )
        if arg is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def ml_jobs(self, *, job_id: t.Optional[str]=None, allow_no_match: t.Optional[bool]=None, bytes: t.Optional[t.Union[("t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str)]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(t.Sequence[t.Union[("t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", str)]], t.Union[("t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", str)])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(t.Sequence[t.Union[("t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", str)]], t.Union[("t.Literal['assignment_explanation', 'buckets.count', 'buckets.time.exp_avg', 'buckets.time.exp_avg_hour', 'buckets.time.max', 'buckets.time.min', 'buckets.time.total', 'data.buckets', 'data.earliest_record', 'data.empty_buckets', 'data.input_bytes', 'data.input_fields', 'data.input_records', 'data.invalid_dates', 'data.last', 'data.last_empty_bucket', 'data.last_sparse_bucket', 'data.latest_record', 'data.missing_fields', 'data.out_of_order_timestamps', 'data.processed_fields', 'data.processed_records', 'data.sparse_buckets', 'forecasts.memory.avg', 'forecasts.memory.max', 'forecasts.memory.min', 'forecasts.memory.total', 'forecasts.records.avg', 'forecasts.records.max', 'forecasts.records.min', 'forecasts.records.total', 'forecasts.time.avg', 'forecasts.time.max', 'forecasts.time.min', 'forecasts.time.total', 'forecasts.total', 'id', 'model.bucket_allocation_failures', 'model.by_fields', 'model.bytes', 'model.bytes_exceeded', 'model.categorization_status', 'model.categorized_doc_count', 'model.dead_category_count', 'model.failed_category_count', 'model.frequent_category_count', 'model.log_time', 'model.memory_limit', 'model.memory_status', 'model.over_fields', 'model.partition_fields', 'model.rare_category_count', 'model.timestamp', 'model.total_category_count', 'node.address', 'node.ephemeral_id', 'node.id', 'node.name', 'opened_time', 'state']", str)])]]=None, time: t.Optional[t.Union[("t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str)]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch anomaly-detection job info from ``/_cat/ml/anomaly_detectors``.

    BUGFIX: ``_rewrite_parameters()`` was a bare expression statement whose
    result was discarded; restored as a decorator so the method is actually
    wrapped, matching the other generated cat-API methods.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters.
    """
    if job_id not in SKIP_IN_PATH:
        __path = f'/_cat/ml/anomaly_detectors/{_quote(job_id)}'
    else:
        __path = '/_cat/ml/anomaly_detectors'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('allow_no_match', allow_no_match),
            ('bytes', bytes),
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('time', time),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters(parameter_aliases={'from': 'from_'})
def ml_trained_models(self, *, model_id: t.Optional[str]=None, allow_no_match: t.Optional[bool]=None, bytes: t.Optional[t.Union[("t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str)]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, from_: t.Optional[int]=None, h: t.Optional[t.Union[(t.Sequence[t.Union[("t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", str)]], t.Union[("t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", str)])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(t.Sequence[t.Union[("t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", str)]], t.Union[("t.Literal['create_time', 'created_by', 'data_frame_analytics_id', 'description', 'heap_size', 'id', 'ingest.count', 'ingest.current', 'ingest.failed', 'ingest.pipelines', 'ingest.time', 'license', 'operations', 'version']", str)])]]=None, size: t.Optional[int]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch trained-model info from ``/_cat/ml/trained_models``.

    BUGFIX: ``_rewrite_parameters(...)`` was a bare call whose result was
    discarded; restored as a decorator so the ``from`` -> ``from_`` alias
    rewriting wrapper is actually applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters; ``from_`` is sent as the ``from`` query parameter.
    """
    if model_id not in SKIP_IN_PATH:
        __path = f'/_cat/ml/trained_models/{_quote(model_id)}'
    else:
        __path = '/_cat/ml/trained_models'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('allow_no_match', allow_no_match),
            ('bytes', bytes),
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('from', from_),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('size', size),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def nodeattrs(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch custom node attributes from ``/_cat/nodeattrs``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters.
    """
    __path = '/_cat/nodeattrs'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def nodes(self, *, bytes: t.Optional[t.Union[("t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str)]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, full_id: t.Optional[t.Union[(bool, str)]]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, include_unloaded_segments: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch cluster node information from ``/_cat/nodes``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters.
    """
    __path = '/_cat/nodes'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('bytes', bytes),
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('full_id', full_id),
            ('h', h),
            ('help', help),
            ('human', human),
            ('include_unloaded_segments', include_unloaded_segments),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def pending_tasks(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch pending cluster tasks from ``/_cat/pending_tasks``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters.
    """
    __path = '/_cat/pending_tasks'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def plugins(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch installed-plugin information from ``/_cat/plugins``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters.
    """
    __path = '/_cat/plugins'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def recovery(self, *, index: t.Optional[t.Union[(str, t.Sequence[str])]]=None, active_only: t.Optional[bool]=None, bytes: t.Optional[t.Union[("t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str)]]=None, detailed: t.Optional[bool]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch shard recovery information from ``/_cat/recovery``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters; an ``index`` in SKIP_IN_PATH means "all indices".
    """
    if index not in SKIP_IN_PATH:
        __path = f'/_cat/recovery/{_quote(index)}'
    else:
        __path = '/_cat/recovery'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('active_only', active_only),
            ('bytes', bytes),
            ('detailed', detailed),
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def repositories(self, *, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch snapshot repository information from ``/_cat/repositories``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters.
    """
    __path = '/_cat/repositories'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def segments(self, *, index: t.Optional[t.Union[(str, t.Sequence[str])]]=None, bytes: t.Optional[t.Union[("t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str)]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch Lucene segment information from ``/_cat/segments``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters; an ``index`` in SKIP_IN_PATH means "all indices".
    """
    if index not in SKIP_IN_PATH:
        __path = f'/_cat/segments/{_quote(index)}'
    else:
        __path = '/_cat/segments'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('bytes', bytes),
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def shards(self, *, index: t.Optional[t.Union[(str, t.Sequence[str])]]=None, bytes: t.Optional[t.Union[("t.Literal['b', 'gb', 'kb', 'mb', 'pb', 'tb']", str)]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch shard allocation information from ``/_cat/shards``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters; an ``index`` in SKIP_IN_PATH means "all indices".
    """
    if index not in SKIP_IN_PATH:
        __path = f'/_cat/shards/{_quote(index)}'
    else:
        __path = '/_cat/shards'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('bytes', bytes),
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def snapshots(self, *, repository: t.Optional[t.Union[(str, t.Sequence[str])]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, ignore_unavailable: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch snapshot information from ``/_cat/snapshots``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters; a ``repository`` in SKIP_IN_PATH means "all repositories".
    """
    if repository not in SKIP_IN_PATH:
        __path = f'/_cat/snapshots/{_quote(repository)}'
    else:
        __path = '/_cat/snapshots'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('ignore_unavailable', ignore_unavailable),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def tasks(self, *, actions: t.Optional[t.Sequence[str]]=None, detailed: t.Optional[bool]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, node_id: t.Optional[t.Sequence[str]]=None, parent_task_id: t.Optional[str]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch currently-executing task information from ``/_cat/tasks``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters.
    """
    __path = '/_cat/tasks'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('actions', actions),
            ('detailed', detailed),
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('node_id', node_id),
            ('parent_task_id', parent_task_id),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def templates(self, *, name: t.Optional[str]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch index-template information from ``/_cat/templates``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters; a ``name`` in SKIP_IN_PATH means "all templates".
    """
    if name not in SKIP_IN_PATH:
        __path = f'/_cat/templates/{_quote(name)}'
    else:
        __path = '/_cat/templates'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
@_rewrite_parameters()
def thread_pool(self, *, thread_pool_patterns: t.Optional[t.Union[(str, t.Sequence[str])]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, h: t.Optional[t.Union[(str, t.Sequence[str])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(str, t.Sequence[str])]]=None, time: t.Optional[t.Union[("t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str)]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
    """Fetch thread-pool statistics from ``/_cat/thread_pool``.

    BUGFIX: ``_rewrite_parameters()`` was a bare call whose result was
    discarded; restored as a decorator so the wrapper is applied.

    Only keyword arguments that are not ``None`` are forwarded as HTTP query
    parameters; ``thread_pool_patterns`` in SKIP_IN_PATH means "all pools".
    """
    if thread_pool_patterns not in SKIP_IN_PATH:
        __path = f'/_cat/thread_pool/{_quote(thread_pool_patterns)}'
    else:
        __path = '/_cat/thread_pool'
    # Collect caller-supplied query parameters in their canonical order.
    __query: t.Dict[str, t.Any] = {
        key: val
        for key, val in (
            ('error_trace', error_trace),
            ('filter_path', filter_path),
            ('format', format),
            ('h', h),
            ('help', help),
            ('human', human),
            ('local', local),
            ('master_timeout', master_timeout),
            ('pretty', pretty),
            ('s', s),
            ('time', time),
            ('v', v),
        )
        if val is not None
    }
    __headers = {'accept': 'text/plain,application/json'}
    return self.perform_request('GET', __path, params=__query, headers=__headers)
_rewrite_parameters(parameter_aliases={'from': 'from_'})
def transforms(self, *, transform_id: t.Optional[str]=None, allow_no_match: t.Optional[bool]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, format: t.Optional[str]=None, from_: t.Optional[int]=None, h: t.Optional[t.Union[(t.Sequence[t.Union[("t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", str)]], t.Union[("t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", str)])]]=None, help: t.Optional[bool]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, s: t.Optional[t.Union[(t.Sequence[t.Union[("t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 
'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", str)]], t.Union[("t.Literal['changes_last_detection_time', 'checkpoint', 'checkpoint_duration_time_exp_avg', 'checkpoint_progress', 'create_time', 'delete_time', 'description', 'dest_index', 'docs_per_second', 'documents_deleted', 'documents_indexed', 'documents_processed', 'frequency', 'id', 'index_failure', 'index_time', 'index_total', 'indexed_documents_exp_avg', 'last_search_time', 'max_page_search_size', 'pages_processed', 'pipeline', 'processed_documents_exp_avg', 'processing_time', 'reason', 'search_failure', 'search_time', 'search_total', 'source_index', 'state', 'transform_type', 'trigger_count', 'version']", str)])]]=None, size: t.Optional[int]=None, time: t.Optional[t.Union[("t.Literal['d', 'h', 'm', 'micros', 'ms', 'nanos', 's']", str)]]=None, v: t.Optional[bool]=None) -> t.Union[(ObjectApiResponse[t.Any], TextApiResponse)]:
if (transform_id not in SKIP_IN_PATH):
__path = f'/_cat/transforms/{_quote(transform_id)}'
else:
__path = '/_cat/transforms'
__query: t.Dict[(str, t.Any)] = {}
if (allow_no_match is not None):
__query['allow_no_match'] = allow_no_match
if (error_trace is not None):
__query['error_trace'] = error_trace
if (filter_path is not None):
__query['filter_path'] = filter_path
if (format is not None):
__query['format'] = format
if (from_ is not None):
__query['from'] = from_
if (h is not None):
__query['h'] = h
if (help is not None):
__query['help'] = help
if (human is not None):
__query['human'] = human
if (local is not None):
__query['local'] = local
if (master_timeout is not None):
__query['master_timeout'] = master_timeout
if (pretty is not None):
__query['pretty'] = pretty
if (s is not None):
__query['s'] = s
if (size is not None):
__query['size'] = size
if (time is not None):
__query['time'] = time
if (v is not None):
__query['v'] = v
__headers = {'accept': 'text/plain,application/json'}
return self.perform_request('GET', __path, params=__query, headers=__headers) |
@mock.patch('src.lib.cmd_exec.run_command')
@mock.patch.object(source_tree.SourceTree, 'copy_source_directory')
def test_get_envoy_hashes_for_benchmark_additional_hashes(mock_copy_source_directory, mock_run_command):
    """Verify get_envoy_hashes_for_benchmark() includes the tags of every
    additional envoy image alongside the primary image tag and source hashes.

    NOTE(review): the `@mock.patch` / `@mock.patch.object` prefixes of the two
    decorators were lost during extraction and have been reconstructed here;
    the mock parameters map to the patches bottom-up, which matches the
    restored order — confirm against the original test module.
    """
    job_control = proto_control.JobControl(remote=False, scavenging_benchmark=True)
    _generate_default_benchmark_images(job_control)
    _generate_default_envoy_source(job_control)
    job_control.images.envoy_image = 'envoyproxy/envoy:v1.16.0'
    # Register three extra images whose tags (tag1..tag3) must appear in the result.
    for index in range(1, 4):
        job_control.images.additional_envoy_images.append('envoyproxy/envoy:tag{i}'.format(i=index))
    mock_copy_source_directory.return_value = True
    mock_run_command.side_effect = _run_command_side_effect
    manager = source_manager.SourceManager(job_control)
    hashes = manager.get_envoy_hashes_for_benchmark()
    expected_hashes = {'tag1', 'tag2', 'tag3', 'v1.16.0', 'expected_previous_commit_hash', 'expected_baseline_hash'}
    assert hashes == expected_hashes
def get_credentials(fn, url, username, allowed):
    """Ask the callable *fn* for a credential and convert it to a git_cred.

    *fn* receives the (possibly None) url string, username string and the
    bitmask of credential types the remote allows; it must return an object
    exposing ``credential_type`` and ``credential_tuple``.
    """
    creds = fn(maybe_string(url), maybe_string(username), allowed)

    # The returned object must implement the credential interface.
    if not hasattr(creds, 'credential_type') or not hasattr(creds, 'credential_tuple'):
        raise TypeError('credential does not implement interface')

    cred_type = creds.credential_type
    # Reject credential types the remote did not advertise.
    if not (allowed & cred_type):
        raise TypeError('invalid credential type')

    ccred = ffi.new('git_cred **')
    if cred_type == C.GIT_CREDTYPE_USERPASS_PLAINTEXT:
        username_val, password_val = creds.credential_tuple
        err = C.git_cred_userpass_plaintext_new(ccred, to_bytes(username_val), to_bytes(password_val))
    elif cred_type == C.GIT_CREDTYPE_SSH_KEY:
        username_val, pubkey, privkey, passphrase = creds.credential_tuple
        if pubkey is None and privkey is None:
            # No key material given: defer to the running ssh-agent.
            err = C.git_cred_ssh_key_from_agent(ccred, to_bytes(username_val))
        else:
            err = C.git_cred_ssh_key_new(ccred, to_bytes(username_val), to_bytes(pubkey), to_bytes(privkey), to_bytes(passphrase))
    else:
        raise TypeError('unsupported credential type')

    check_error(err)
    return ccred
class ImageEnumTablePopupView(QtGui.QTableView):
    """Chromeless table view used as an enum-selection popup: headers and
    grid lines are hidden and cells size themselves to their contents."""

    def __init__(self, parent):
        QtGui.QTableView.__init__(self, parent)
        fit_contents = QtGui.QHeaderView.ResizeMode.ResizeToContents
        # Configure both headers identically: size to contents, then hide.
        for header in (self.horizontalHeader(), self.verticalHeader()):
            if is_qt4:
                # Qt4 spelling; renamed to setSectionResizeMode in Qt5.
                header.setResizeMode(fit_contents)
            else:
                header.setSectionResizeMode(fit_contents)
            header.hide()
        self.setShowGrid(False)
class TestGetIdentityVerificationConfig:
    """Tests for the GET identity-verification config endpoint.

    NOTE(review): the bare ``(scope='function')`` lines in the extracted
    source were pytest fixture decorators whose ``@pytest.fixture`` prefix
    was lost; they are restored below — confirm against the original module.
    """

    @pytest.fixture(scope='function')
    def url(self) -> str:
        """Endpoint URL under test."""
        return V1_URL_PREFIX + ID_VERIFICATION_CONFIG

    @pytest.fixture(scope='function')
    def subject_identity_verification_required(self, db):
        """Force subject identity verification ON for the duration of a test."""
        config = get_config()
        original_value = config.execution.subject_identity_verification_required
        config.execution.subject_identity_verification_required = True
        ApplicationConfig.update_config_set(db, config)
        yield
        # Restore the original setting so other tests are unaffected.
        config.execution.subject_identity_verification_required = original_value
        ApplicationConfig.update_config_set(db, config)

    @pytest.fixture(scope='function')
    def subject_identity_verification_required_via_api(self, db):
        """Force subject identity verification OFF for the duration of a test."""
        config = get_config()
        original_value = config.execution.subject_identity_verification_required
        config.execution.subject_identity_verification_required = False
        ApplicationConfig.update_config_set(db, config)
        yield
        # Restore the original setting so other tests are unaffected.
        config.execution.subject_identity_verification_required = original_value
        ApplicationConfig.update_config_set(db, config)

    def test_get_config_with_verification_required_no_email_config(self, url, db, api_client: TestClient, subject_identity_verification_required):
        """Verification required, no messaging config -> email config invalid."""
        resp = api_client.get(url)
        assert resp.status_code == 200
        response_data = resp.json()
        assert response_data['identity_verification_required'] is True
        assert response_data['valid_email_config_exists'] is False

    def test_get_config_with_verification_required_with_email_config(self, url, db, api_client: TestClient, messaging_config, subject_identity_verification_required):
        """Verification required with messaging config -> email config valid."""
        resp = api_client.get(url)
        assert resp.status_code == 200
        response_data = resp.json()
        assert response_data['identity_verification_required'] is True
        assert response_data['valid_email_config_exists'] is True

    def test_get_config_with_verification_not_required_with_email_config(self, url, db, api_client: TestClient, messaging_config):
        """Verification not required but messaging config present."""
        resp = api_client.get(url)
        assert resp.status_code == 200
        response_data = resp.json()
        assert response_data['identity_verification_required'] is False
        assert response_data['valid_email_config_exists'] is True

    def test_get_config_with_verification_not_required_with_no_email_config(self, url, db, api_client: TestClient):
        """Neither verification nor messaging config configured."""
        resp = api_client.get(url)
        assert resp.status_code == 200
        response_data = resp.json()
        assert response_data['identity_verification_required'] is False
        assert response_data['valid_email_config_exists'] is False
# NOTE(review): the decorator's `@pytest.mark` prefix was lost during
# extraction (the line started with bare `.parametrize`); restored here.
@pytest.mark.parametrize('test_file, expected', [(REEK_SIM_PORO, ['PORO']), (ROFF_PROPS, ['PORV', 'PORO', 'EQLNUM', 'FIPNUM']), (ROFF_THREE_PROPS, ['Poro', 'EQLNUM', 'Facies'])])
def test_read_roff_properties(test_file, expected):
    """read_roff_properties() should yield the property names stored in each
    sample ROFF file, in file order."""
    xtg_file = _XTGeoFile(test_file)
    assert list(read_roff_properties(xtg_file)) == expected
# NOTE(review): the six lines below appear to be click-style command/option
# decorators whose `@cli.command`/`@click.option`-style prefixes were lost
# during extraction; restore them from the original build script before
# running — as written they are not valid Python.
()
_option
('--runtime', default='3.8')
('--toolkit', default='pyside6')
('--environment', default=None, help='Name of EDM environment to check.')
('--strict/--not-strict', default=False, help='Use strict configuration for flake8 [default: --not-strict]')
def flake8(edm, runtime, toolkit, environment, strict):
    """Run flake8 inside the EDM environment selected by runtime/toolkit,
    optionally with the strict configuration file."""
    parameters = get_parameters(edm, runtime, toolkit, environment)
    config = ''
    if strict:
        # Trailing space keeps the concatenated command well-formed below.
        config = '--config=flake8_strict.cfg '
    commands = [('edm run -e {environment} -- python -m flake8 ' + config)]
    execute(commands, parameters)
def test_encode_categories_in_k_binary_plus_select_vars_automatically(df_enc_big):
    """With default settings the encoder should pick the categorical variables
    itself and expand every category of each into its own binary column."""
    enc = OneHotEncoder(top_categories=None, variables=None, drop_last=False)
    dft = enc.fit_transform(df_enc_big)

    # init parameters are stored unmodified
    assert enc.top_categories is None
    assert enc.variables is None
    assert enc.drop_last is False

    # expected per-column counts of 1s after the one-hot expansion
    expected_counts = {'var_A_A': 6, 'var_A_B': 10, 'var_A_C': 4, 'var_A_D': 10, 'var_A_E': 2, 'var_A_F': 2, 'var_A_G': 6, 'var_B_A': 10, 'var_B_B': 6, 'var_B_C': 4, 'var_B_D': 10, 'var_B_E': 2, 'var_B_F': 2, 'var_B_G': 6, 'var_C_A': 4, 'var_C_B': 6, 'var_C_C': 10, 'var_C_D': 10, 'var_C_E': 2, 'var_C_F': 2, 'var_C_G': 6}

    # learned attributes
    assert enc.variables_ == ['var_A', 'var_B', 'var_C']
    assert enc.variables_binary_ == []
    assert enc.n_features_in_ == 3
    assert enc.encoder_dict_ == {'var_A': ['A', 'B', 'C', 'D', 'E', 'F', 'G'], 'var_B': ['A', 'B', 'C', 'D', 'E', 'F', 'G'], 'var_C': ['A', 'B', 'C', 'D', 'E', 'F', 'G']}

    # transformed output: correct counts, original column dropped
    assert dft.sum().to_dict() == expected_counts
    assert 'var_A' not in dft.columns
def send_email_report(report):
    """Email *report* to the configured destination over SMTP-over-SSL.

    Reads the module-level gmail_login / gmail_pass / gmail_dest / smtp_serv /
    smtp_port / now globals. Exits the whole process on any SMTP failure.
    """
    from_email = gmail_login
    to_email = [gmail_dest]
    subject = 'Virus Total Hunting Report - ' + str(now)
    text = report
    message = 'Subject: {}\n\n{}'.format(subject, text)
    try:
        server = smtplib.SMTP_SSL(smtp_serv, smtp_port)
        try:
            server.ehlo()
            server.login(from_email, gmail_pass)
            server.sendmail(from_email, to_email, message)
        finally:
            # Bug fix: previously quit() was skipped whenever login/sendmail
            # raised, leaking the SSL connection; always close it.
            server.quit()
        print('[*] Report have been sent to your email!')
    except smtplib.SMTPException as e:
        print('[!] SMTP error: ' + str(e))
        sys.exit()
def main():
    """Command-line entry point: convert Finnish dictionary TSV data into
    xerox/HFST lexc format.

    Reads lexical roots from the --master TSV files and continuation classes
    from the --continuations TSV files, filters entries according to the
    include/exclude options, and writes lexc output through the formatter
    selected with --format.
    """
    curr_lexicon = dict()
    ap = argparse.ArgumentParser(description=('Convert Finnish dictionary TSV data ' + 'into xerox/HFST lexc format'))
    ap.add_argument('--quiet', '-q', action='store_false', dest='verbose', default=False, help='do not print output to stdout while processing')
    ap.add_argument('--verbose', '-v', action='store_true', default=False, help='print each step to stdout while processing')
    ap.add_argument('--master', '-m', action='append', required=True, dest='masterfilenames', metavar='MFILE', help='read lexical roots from MFILEs')
    ap.add_argument('--continuations', '-c', action='append', required=True, dest='contfilenames', metavar='CONTFILE', help='read continuations from CONTFILEs')
    ap.add_argument('--exclude-pos', '-x', action='append', metavar='XPOS', help='exclude all XPOS parts of speech from generation')
    ap.add_argument('--include-lemmas', '-I', action='append', type=open, metavar='ILFILE', help='read lemmas to include from ILFILE')
    ap.add_argument('--exclude-blacklisted', '-B', action='append', type=str, metavar='BLIST', help='exclude lemmas in BLIST blacklist', choices=['FGK', 'PROPN-BLOCKING', 'NOUN-BLOCKING-PROPN', 'TOOSHORTFORCOMPOUND'])
    ap.add_argument('--include-origin', '-O', action='append', type=str, metavar='ORIGIN', help='include lemmas from ORIGIN source', choices=['kotus', 'omorfi', 'unihu', 'finnwordnet', 'fiwiktionary', 'omorfi++'])
    ap.add_argument('--version', '-V', action='version')
    ap.add_argument('--output', '-o', '--one-file', '-1', type=argparse.FileType('w'), required=True, metavar='OFILE', help='write output to OFILE')
    # Bug fix: without type=int a command-line value arrived as a string while
    # the default stayed an int.
    ap.add_argument('--fields', '-F', action='store', type=int, default=2, metavar='N', help='read N fields from master')
    ap.add_argument('--separator', action='store', default='\t', metavar='SEP', help='use SEP as separator')
    ap.add_argument('--comment', '-C', action='append', default=['#'], metavar='COMMENT', help='skip lines starting with COMMENT thatdo not have SEPs')
    ap.add_argument('--strip', action='store', metavar='STRIP', help='strip STRIP from fields before using')
    ap.add_argument('--format', '-f', action='store', default='omor', help='use specific output format for lexc data', choices=['omor', 'giella', 'ftb3', 'ftb1', 'none', 'apertium', 'labelsegments'])
    ap.add_argument('--omor-new-para', action='store_true', default=False, help='include NEW_PARA= in raw analyses')
    ap.add_argument('--omor-allo', action='store_true', default=False, help='include ALLO= in raw analyses')
    ap.add_argument('--omor-props', action='store_true', default=False, help='include PROPER= in raw analyses')
    ap.add_argument('--omor-sem', action='store_true', default=False, help='include SEM= in raw analyses')
    ap.add_argument('--homonyms', action='store_false', default=True, help='include HOMONYM= in raw analyses')
    ap.add_argument('--none-lemmas', action='store_true', default=False, help='include lemmas in raw analyses')
    ap.add_argument('--none-segments', action='store_true', default=False, help='include segments in raw analyses')
    ap.add_argument('--break-loops', action='store_true', default=False, help='skip continuations that may loop, comp/der/etc.')
    ap.add_argument('--splits', action='append', default=['prontype', 'adptype'], metavar='COMMENT', help='skip lines starting with COMMENT thatdo not have SEPs')
    args = ap.parse_args()
    # Pick the output formatter; some formats force extra feature splits.
    formatter = None
    if (args.format == 'omor'):
        formatter = OmorFormatter(args.verbose, newparas=args.omor_new_para, allo=args.omor_allo, props=args.omor_props, sem=args.omor_sem, homonyms=args.homonyms)
        if args.omor_sem:
            args.splits.append('sem')
        if args.omor_props:
            args.splits.append('proper_noun_class')
    elif (args.format == 'ftb3'):
        formatter = Ftb3Formatter(args.verbose)
    elif (args.format == 'apertium'):
        formatter = ApertiumFormatter(args.verbose)
        args.splits.append('sem')
        args.splits.append('proper_noun_class')
    elif (args.format == 'giella'):
        formatter = GiellaFormatter(args.verbose)
    elif (args.format == 'none'):
        formatter = NoTagsFormatter(args.verbose, lemmatise=args.none_lemmas, segment=args.none_segments)
    elif (args.format == 'labelsegments'):
        formatter = LabeledSegmentsFormatter(args.verbose)
    else:
        print('DIDNT CONVERT FORMATTER YET', args.format)
        exit(1)
    # Quote handling for the csv reader depends on the --strip character.
    if ((args.strip == '"') or (args.strip == "'")):
        quoting = csv.QUOTE_ALL
        quotechar = args.strip
    else:
        quoting = csv.QUOTE_NONE
        quotechar = None
    lemmas = []
    if args.include_lemmas:
        for lemma_file in args.include_lemmas:
            if args.verbose:
                print('including only lemmas from', lemma_file.name)
            for line in lemma_file:
                lemmas.append(line.rstrip('\n'))
            lemma_file.close()
    if (not args.exclude_pos):
        args.exclude_pos = []
    if args.verbose:
        print('Writing everything to', args.output.name)
        if args.exclude_pos:
            print('Not writing closed parts-of-speech data in', ','.join(args.exclude_pos))
    # File preamble: copyright, multichar symbols, root lexicon.
    print(formatter.copyright_lexc(), file=args.output)
    if args.verbose:
        print('Creating Multichar_Symbols and Root')
    print(formatter.multichars_lexc(), file=args.output)
    print(formatter.root_lexicon_lexc(), file=args.output)
    # Pass 1: lexical roots from the master TSV files.
    for tsv_filename in args.masterfilenames:
        if args.verbose:
            print('Reading from', tsv_filename)
        linecount = 0
        print('! Omorfi stubs generated from', tsv_filename, '\n! date:', strftime('%Y-%m-%d %H:%M:%S+%Z'), '\n! params: ', ' '.join(argv), file=args.output)
        print(formatter.copyright_lexc(), file=args.output)
        curr_lexicon = ''
        with open(tsv_filename, 'r', newline='') as tsv_file:
            tsv_reader = csv.DictReader(tsv_file, delimiter=args.separator, quoting=quoting, escapechar='\\', quotechar=quotechar, strict=True)
            # Suffixes and abbreviations are collected and emitted after the
            # regular entries, in their own lexicons.
            postponed_suffixes = list()
            postponed_abbrs = {'ABBREVIATION': list(), 'ACRONYM': list()}
            for tsv_parts in tsv_reader:
                linecount += 1
                if (args.verbose and ((linecount % 10000) == 0)):
                    print(linecount, '...', sep='', end='\r')
                if (len(tsv_parts) < 18):
                    print('Too few tabs on line', linecount, 'skipping following line completely:', file=stderr)
                    print(tsv_parts, file=stderr)
                    continue
                wordmap = tsv_parts
                if (wordmap['new_para'] == 'X_IGNORE'):
                    continue
                # Apply the include/exclude filters from the command line.
                if args.exclude_pos:
                    if (wordmap['pos'] in args.exclude_pos):
                        continue
                if args.include_lemmas:
                    if (wordmap['lemma'] not in lemmas):
                        continue
                if args.include_origin:
                    origins = wordmap['origin'].split('|')
                    origin_ok = False
                    for origin in origins:
                        if (origin in args.include_origin):
                            origin_ok = True
                    if (not origin_ok):
                        continue
                if args.exclude_blacklisted:
                    if (wordmap['blacklist'] in args.exclude_blacklisted):
                        wordmap['new_para'] = 'XXX_BLACKLISTED_SINK'
                incoming_lexicon = tsv_parts['upos']
                if tsv_parts['is_suffix']:
                    postponed_suffixes.append(tsv_parts)
                    continue
                elif tsv_parts['abbr']:
                    postponed_abbrs[tsv_parts['abbr']].append(tsv_parts)
                    continue
                # Emit a LEXICON header whenever the target lexicon changes.
                if (curr_lexicon != incoming_lexicon):
                    print('\nLEXICON', incoming_lexicon, end='\n\n', file=args.output)
                    curr_lexicon = incoming_lexicon
                if wordmap['real_pos']:
                    wordmap['pos'] = wordmap['real_pos']
                # Expand '|'-separated values of the split features into one
                # entry variant per value.
                variants = [wordmap]
                for splitfeat in args.splits:
                    if ('|' in wordmap[splitfeat]):
                        newvariants = []
                        for variant in variants:
                            for split in variant[splitfeat].split('|'):
                                newword = variant.copy()
                                newword[splitfeat] = split
                                newvariants.append(newword)
                        variants = newvariants
                for variant in variants:
                    print(formatter.wordmap2lexc(variant), file=args.output)
            if postponed_suffixes:
                print('\nLEXICON SUFFIX\n\n', file=args.output)
                for suffix in postponed_suffixes:
                    print(formatter.wordmap2lexc(suffix), file=args.output)
            for (key, words) in sorted(postponed_abbrs.items()):
                if words:
                    print('\nLEXICON', key, '\n\n', file=args.output)
                    for word in words:
                        variants = [word]
                        for splitfeat in args.splits:
                            if ('|' in word[splitfeat]):
                                newvariants = []
                                for variant in variants:
                                    for split in variant[splitfeat].split('|'):
                                        newword = variant.copy()
                                        newword[splitfeat] = split
                                        newvariants.append(newword)
                                variants = newvariants
                        for variant in variants:
                            print(formatter.wordmap2lexc(variant), file=args.output)
        if args.verbose:
            print('\n', linecount, ' entries in master db')
    # Pass 2: continuation classes from the continuation TSV files.
    for tsv_filename in args.contfilenames:
        if args.verbose:
            print('Reading from', tsv_filename)
        linecount = 0
        # Bug fix: this header previously printed `tsv_file.name` — at this
        # point `tsv_file` was still the last *master* file from the loop
        # above, so the wrong filename was written; use the continuation
        # filename being processed.
        print('! Omorfi continuations generated from', tsv_filename, '! date:', strftime('%Y-%m-%d %H:%M:%S+%Z'), '! params: ', ' '.join(argv), file=args.output)
        print(formatter.copyright_lexc(), file=args.output)
        curr_lexicon = ''
        with open(tsv_filename, 'r', newline='') as tsv_file:
            tsv_reader = csv.reader(tsv_file, delimiter=args.separator, strict=True)
            for tsv_parts in tsv_reader:
                linecount += 1
                if (len(tsv_parts) < 3):
                    print(tsv_filename, linecount, 'Too few tabs on line', 'skipping following fields:', tsv_parts, file=stderr)
                    continue
                # Part of speech is encoded as the prefix of the lexicon name.
                pos = tsv_parts[0].split('_')[0]
                if (pos not in ['ADJ', 'NOUN', 'VERB', 'PROPN', 'NUM', 'PRON', 'ADP', 'ADV', 'SYM', 'PUNCT', 'INTJ', 'X', 'DIGITS', 'CCONJ', 'SCONJ', 'AUX', 'DET']):
                    print('Cannot deduce pos from incoming cont:', tsv_parts[0])
                    continue
                if (pos in args.exclude_pos):
                    continue
                if args.break_loops:
                    if (('COMPOUND' in tsv_parts[0]) or ('DERIV' in tsv_parts[0]) or ('LOOP' in tsv_parts[0])):
                        continue
                if (curr_lexicon != tsv_parts[0]):
                    print('\nLEXICON', tsv_parts[0], end='\n\n', file=args.output)
                    curr_lexicon = tsv_parts[0]
                for cont in tsv_parts[3].split('|'):
                    print(formatter.continuation2lexc(tsv_parts[1], tsv_parts[2], cont), file=args.output)
    exit(0)
class Nodes(object):
    """Groups Maya rig nodes (controllers, joints, utilities, curves) under a
    named Network node and keeps the network attachments in sync.

    NOTE(review): in the extracted source the controllers/joints/utilities
    accessors were each defined three times with no decorators, so the later
    definitions silently overwrote the earlier ones; they were clearly
    stripped ``@property`` / ``.setter`` / ``.deleter`` decorators and are
    restored below — confirm against the original module.
    """

    def __init__(self, nodesName_in):
        self.nodesName = nodesName_in
        self._controllers = []
        self._utilities = []
        self._joints = []
        self._curves = []
        self.network = Network(self.nodesName)

    def deleteNodes(self):
        """Delete the network node and every tracked node from the scene."""
        pm.delete(self.network.nodesName)
        del self.controllers
        del self.joints
        del self.utilities
        del self

    @property
    def controllers(self):
        return self._controllers

    @controllers.setter
    def controllers(self, object_in):
        self.appendObject(self._controllers, object_in)

    @controllers.deleter
    def controllers(self):
        self.deleter(self.controllers)

    @property
    def joints(self):
        return self._joints

    @joints.setter
    def joints(self, object_in):
        self.appendObject(self._joints, object_in)

    @joints.deleter
    def joints(self):
        self.deleter(self.joints)

    @property
    def utilities(self):
        return self._utilities

    @utilities.setter
    def utilities(self, object_in):
        self.appendObject(self._utilities, object_in)

    @utilities.deleter
    def utilities(self):
        self.deleter(self.utilities)

    def appendObject(self, nodeType, object_in):
        """Append object_in (or, recursively, its members when it is a list)
        to the nodeType list and attach each item to the network."""
        if isinstance(object_in, list):
            for obj in object_in:
                if isinstance(obj, list):
                    for ob in obj:
                        self.append_to(nodeType, ob)
                else:
                    self.append_to(nodeType, obj)
        else:
            self.append_to(nodeType, object_in)

    def checkMembers(self, nodeType, object_in):
        """Return False when object_in equals nodeType (i.e. must not be
        appended to itself), True otherwise."""
        if (nodeType == object_in):
            return False
        return True

    def append_to(self, nodeType, object_in):
        if self.checkMembers(nodeType, object_in):
            nodeType.append(object_in)
            self.network.attach(object_in)

    def deleter(self, nodeList):
        """Delete every scene object that appears in nodeList, then clear it."""
        objects = pm.ls()
        for obj in objects:
            for nodes in nodeList:
                if (obj == nodes):
                    pm.delete(nodes)
        del nodeList[:]
# NOTE(review): the decorator's `@pytest.mark` prefix was lost during
# extraction (the line started with bare `.django_db`); restored here.
@pytest.mark.django_db
def test_download_contract_without_columns(client, download_test_data):
    """A contract download request without an explicit column list should
    succeed and return a zip file URL."""
    download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string())
    resp = client.post('/api/v2/download/contract/', content_type='application/json', data=json.dumps({'award_id': 456}))
    assert resp.status_code == status.HTTP_200_OK
    assert '.zip' in resp.json()['file_url']
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Dispatch an HTTP request to the Action subclass named by the
    ``action_name`` route parameter (defaulting to 'list')."""
    logging.basicConfig(format='[%(asctime)s] [%(levelname)s] %(message)s', level=logging.DEBUG)
    # Map each registered Action subclass by its declared name.
    handlers: Mapping[str, Action] = {subclass.name(): subclass for subclass in Action.__subclasses__()}
    log.info(f'Find Actions subclasses: {handlers}')
    success_message = {'output': 'This HTTP triggered function executed.'}
    try:
        action_name = req.route_params.get('action_name', 'list')
        log.info(f'Action name: `{action_name}`')
        action = handlers.get(action_name)
        if action is None:
            raise KeyError(f"Can't find required Action for `{action_name}` azure function")
        # Query-string parameters win over a JSON body.
        params: Dict = req.params or req.get_json()
        message = action().execute(**params) or success_message
        return func.HttpResponse(json.dumps(message), status_code=HTTPStatus.OK)
    except Exception as ex:
        # Top-level boundary: log the full traceback and return it as a 500.
        ex_message = f'''Exception {ex}
{''.join(tb.format_exception(etype=type(ex), value=ex, tb=ex.__traceback__))}'''
        log.error(ex_message)
        return func.HttpResponse(ex_message, status_code=HTTPStatus.INTERNAL_SERVER_ERROR)
class UnisocParser:
    """Parser for Unisoc (Spreadtrum) baseband diagnostic logs/dumps.

    Packets carry a 0x5A5A magic in their header; chan_num 1 frames wrap a
    tagged payload whose 0xf8/0x98 subtypes are partially decoded below.
    """

    # Magic number expected at the start of a Unisoc dump file.
    # TODO(review): the original constant was lost during extraction (the
    # comparison was left empty); 0x5A5A5A5A mirrors the per-packet 0x5A5A
    # magic, but must be confirmed against upstream before relying on it.
    DUMP_MAGIC = 0x5A5A5A5A

    def __init__(self):
        self.io_device = None
        self.writer = None
        self.combine_stdout = False
        self.name = 'unisoc'
        self.shortname = 'sprd'
        self.logger = logging.getLogger('scat.unisocparser')
        self.diag_log_parsers = []
        self.process = {}
        self.no_process = {}
        for p in self.diag_log_parsers:
            self.process.update(p.process)
            try:
                self.no_process.update(p.no_process)
            except AttributeError:
                # Parser without a no_process table; nothing to merge.
                pass

    def set_io_device(self, io_device):
        self.io_device = io_device

    def set_writer(self, writer):
        self.writer = writer

    def set_parameter(self, params):
        """Apply runtime options: log_level, msgs, combine-stdout."""
        for p in params:
            if (p == 'log_level'):
                self.logger.setLevel(params[p])
            elif (p == 'msgs'):
                self.msgs = params[p]
            elif (p == 'combine-stdout'):
                self.combine_stdout = params[p]

    def init_diag(self):
        pass

    def prepare_diag(self):
        pass

    def parse_diag(self, pkt):
        """Decode one framed packet (2-byte length prefix, 4-byte trailer)."""
        pkt = pkt[2:-4]
        pkt_header_struct = namedtuple('UnisocPktHeader', 'chan_num pkt_type magic csum')
        pkt_tag_header_struct = namedtuple('UnisocPktTagHeader', 'seqnr len type subtype')
        pkt_header = pkt_header_struct._make(struct.unpack('<BBHH', pkt[0:6]))
        assert (pkt_header.magic == 23130)
        if (pkt_header.chan_num == 1):
            pkt_tag_header = pkt_tag_header_struct._make(struct.unpack('<LHBB', pkt[6:14]))
            print('Chan: {:#04x}, Type: {:#04x}, CSum: {:#06x}, (SeqNr: {:#010x}/{:10d}, Type: {:#04x}, Subtype: {:#04x}): {}'.format(pkt_header.chan_num, pkt_header.pkt_type, pkt_header.csum, pkt_tag_header.seqnr, pkt_tag_header.seqnr, pkt_tag_header.type, pkt_tag_header.subtype, binascii.hexlify(pkt[14:]).decode()))
            if ((len(pkt[14:]) + 8) != pkt_tag_header.len):
                self.logger.log(logging.WARNING, 'Length mismatch: expected {}, got {}'.format(pkt_tag_header.len, (len(pkt[14:]) + 8)))
            if (pkt_tag_header.type == 248):
                # 0xf8 payload: big-endian sub-header, subtype 0x1200 is a log record.
                pkt_0xf8_struct = namedtuple('Unisoc0xf8Header', 'zero type len')
                pkt_0xf8 = pkt_0xf8_struct._make(struct.unpack('>HHH', pkt[14:20]))
                assert (pkt_0xf8.zero == 0)
                assert (pkt_0xf8.len == len(pkt[20:]))
                if (pkt_0xf8.type == 4608):
                    pkt_0xf8_0x1200 = struct.unpack('>LL', pkt[20:28])
                    pkt_0xf8_0x1200_rest = pkt[28:]
                    assert (len(pkt_0xf8_0x1200_rest) == pkt_0xf8_0x1200[1])
                    print('Log ID: {:#010x}, Args: {} {}'.format(pkt_0xf8_0x1200[0], pkt_0xf8_0x1200[1], binascii.hexlify(pkt_0xf8_0x1200_rest).decode()))
            elif (pkt_tag_header.type == 152):
                # 0x98 payload: little-endian sub-header, subtype 0x9104 is text.
                pkt_0x98_struct = namedtuple('Unisoc0x98Header', 'zero type len')
                pkt_0x98 = pkt_0x98_struct._make(struct.unpack('<HHH', pkt[14:20]))
                assert (pkt_0x98.zero == 0)
                assert (pkt_0x98.len == (len(pkt[20:]) + 4))
                if (pkt_0x98.type == 37124):
                    # Bug fix: 'replacebackslash' is not a valid codec error
                    # handler; the standard handler is 'backslashreplace'.
                    print('Log 0x9104: {}'.format(pkt[20:].decode(errors='backslashreplace')))
        else:
            self.logger.log(logging.WARNING, 'Unknown channel number {:#04x}'.format(pkt_header.chan_num))
        return

    def run_diag(self):
        pass

    def stop_diag(self):
        pass

    def run_dump(self):
        """Replay a dump file: validate the 16-byte header + sync word, then
        reassemble and parse length-prefixed packets from buffered reads."""
        self.logger.log(logging.INFO, 'Starting diag from dump')
        usoc_header_struct = namedtuple('UnisocDumpHeader', 'magic unk1 unk2 unk3')
        sync_word = b'~~~~'
        oldbuf = b''
        loop = True
        cur_pos = 0
        try:
            header_buf = self.io_device.read(16)
            usoc_header = usoc_header_struct._make(struct.unpack('<4L', header_buf))
            print(usoc_header)
            if (usoc_header.magic != self.DUMP_MAGIC):
                self.logger.log(logging.WARNING, 'Not processing due to magic mismatch: expected {:#10x}, got {:#10x}'.format(self.DUMP_MAGIC, usoc_header.magic))
                return
            buf = self.io_device.read(4)
            if (buf != sync_word):
                self.logger.log(logging.WARNING, 'End-of-packet indicator not found')
                return
            while loop:
                buf = self.io_device.read(589824)
                if (len(buf) == 0):
                    if self.io_device.block_until_data:
                        continue
                    else:
                        loop = False
                buf = (oldbuf + buf)
                cur_pos = 0
                # Walk complete packets; keep any trailing partial packet for
                # the next read.
                while (cur_pos < len(buf)):
                    if ((cur_pos + 2) > len(buf)):
                        oldbuf = buf[cur_pos:]
                        break
                    pkt_len = struct.unpack('<H', buf[cur_pos:(cur_pos + 2)])[0]
                    if ((cur_pos + pkt_len) > len(buf)):
                        oldbuf = buf[cur_pos:]
                        break
                    pkt = buf[cur_pos:((cur_pos + pkt_len) + 4)]
                    if (len(pkt) < (pkt_len + 4)):
                        oldbuf = buf[cur_pos:]
                        break
                    parse_result = self.parse_diag(pkt)
                    if (parse_result is not None):
                        self.postprocess_parse_result(parse_result)
                    cur_pos += (pkt_len + 4)
                if (cur_pos == len(buf)):
                    oldbuf = b''
        except KeyboardInterrupt:
            return

    def read_dump(self):
        while self.io_device.file_available:
            self.logger.log(logging.INFO, 'Reading from {}'.format(self.io_device.fname))
            self.run_dump()
            self.io_device.open_next_file()

    def postprocess_parse_result(self, parse_result):
        """Route decoded control/user-plane payloads and stdout text to the
        configured writer."""
        radio_id = parse_result['radio_id'] if ('radio_id' in parse_result) else 0
        ts = parse_result['ts'] if ('ts' in parse_result) else None
        if ('cp' in parse_result):
            for sock_content in parse_result['cp']:
                self.writer.write_cp(sock_content, radio_id, ts)
        if ('up' in parse_result):
            for sock_content in parse_result['up']:
                self.writer.write_up(sock_content, radio_id, ts)
        if ('stdout' in parse_result):
            if (len(parse_result['stdout']) > 0):
                if self.combine_stdout:
                    # Wrap each line as an osmocore log record inside GSMTAP.
                    for l in parse_result['stdout'].split('\n'):
                        osmocore_log_hdr = util.create_osmocore_logging_header(timestamp=ts, process_name=Path(sys.argv[0]).name, pid=os.getpid(), level=3, subsys_name=self.__class__.__name__, filename=Path(__file__).name, line_number=getframeinfo(currentframe()).lineno)
                        gsmtap_hdr = util.create_gsmtap_header(version=2, payload_type=util.gsmtap_type.OSMOCORE_LOG)
                        self.writer.write_cp(((gsmtap_hdr + osmocore_log_hdr) + l.encode('utf-8')), radio_id, ts)
                else:
                    for l in parse_result['stdout'].split('\n'):
                        print('Radio {}: {}'.format(radio_id, l))

    def parse_diag_log(self, pkt, args=None):
        pass
def main():
    """Ansible module entry point for configuring FortiOS log.syslogd4 filter.

    Builds the argument spec from the versioned schema, talks to the FortiOS
    device over the persistent httpapi connection, and exits with the change
    status (plus a version-mismatch warning when the schema and device
    firmware disagree).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # This endpoint is a singleton table, so there is no mkey.
    mkeyname = None
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'log_syslogd4_filter': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy every schema-derived option into the module's nested spec; the
    # mkey (if any) would be marked required here.
    for attribute_name in module_spec['options']:
        fields['log_syslogd4_filter']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['log_syslogd4_filter']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Compare the playbook schema against the device firmware version.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'log_syslogd4_filter')
        (is_error, has_changed, result, diff) = fortios_log_syslogd4(module.params, fos)
    else:
        # No persistent connection available (httpapi plugin not in use).
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class OptionSeriesBellcurveSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Lowpass-frequency mapping options for bellcurve sonification
    default-instrument settings.

    NOTE(review): in the extracted source every accessor was defined twice
    with no decorators, so the setter silently replaced the getter; these were
    clearly stripped ``@property`` / ``.setter`` pairs and are restored below
    — confirm against the original module.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestConnectionRegistry():
    """Tests for the SaaS connector template registry."""

    def test_get_connector_template(self):
        """A registered connector type resolves to a template with the expected assets."""
        # Unknown keys must return None rather than raise.
        assert ('mailchimp' in ConnectorRegistry.connector_types())
        assert (ConnectorRegistry.get_connector_template('bad_key') is None)
        mailchimp_template = ConnectorRegistry.get_connector_template('mailchimp')
        assert mailchimp_template
        # Template assets must match the files shipped in the repo.
        assert (mailchimp_template.config == load_yaml_as_string('data/saas/config/mailchimp_config.yml'))
        assert (mailchimp_template.dataset == load_yaml_as_string('data/saas/dataset/mailchimp_dataset.yml'))
        assert (mailchimp_template.icon == encode_file_contents('data/saas/icon/mailchimp.svg'))
        assert (mailchimp_template.human_readable == 'Mailchimp')
    # NOTE(review): the bare string expressions below are no-ops; given the
    # `*_mock_object: Mock` parameters they look like stripped
    # `@mock.patch(...)` decorators -- confirm against the upstream test file.
    ('fides.api.service.connectors.saas.connector_registry_service.replace_dataset_placeholders')
    ('fides.api.service.connectors.saas.connector_registry_service.replace_config_placeholders')
    ('fides.api.service.connectors.saas.connector_registry_service.load_config_from_string')
    def test_update_config_additions(self, load_config_from_string_mock_object: Mock, replace_config_placeholders_mock_object: Mock, replace_dataset_placeholders_mock_object: Mock, db, secondary_mailchimp_instance, tertiary_mailchimp_instance, secondary_sendgrid_instance):
        """Connector instances pick up additions made to a template's config/dataset."""
        update_config(load_config_from_string_mock_object, load_config_from_string_mocked_additions_function, replace_config_placeholders_mock_object, replace_config_placeholders_mocked_additions_function, replace_dataset_placeholders_mock_object, replace_dataset_placeholders_mocked_additions_function, validate_updated_instances_additions, db, secondary_mailchimp_instance, tertiary_mailchimp_instance, secondary_sendgrid_instance)
    ('fides.api.service.connectors.saas.connector_registry_service.replace_dataset_placeholders')
    ('fides.api.service.connectors.saas.connector_registry_service.replace_config_placeholders')
    ('fides.api.service.connectors.saas.connector_registry_service.load_config_from_string')
    def test_update_config_removals(self, load_config_from_string_mock_object: Mock, replace_config_placeholders_mock_object: Mock, replace_dataset_placeholders_mock_object: Mock, db, secondary_mailchimp_instance, tertiary_mailchimp_instance, secondary_sendgrid_instance):
        """Connector instances pick up removals made to a template's config/dataset."""
        update_config(load_config_from_string_mock_object, load_config_from_string_mocked_removals_function, replace_config_placeholders_mock_object, replace_config_placeholders_mocked_removals_function, replace_dataset_placeholders_mock_object, replace_dataset_placeholders_mocked_removals_function, validate_updated_instances_removals, db, secondary_mailchimp_instance, tertiary_mailchimp_instance, secondary_sendgrid_instance)
def test_repeat_enum():
    """Image.repeat accepts None, ImageRepeat enum members, and raw strings."""
    img = ft.Image()
    assert img.repeat is None
    assert img._get_attr('repeat') is None

    img = ft.Image(repeat=ft.ImageRepeat.REPEAT)
    assert isinstance(img.repeat, ft.ImageRepeat)
    assert img.repeat == ft.ImageRepeat.REPEAT
    assert img._get_attr('repeat') == 'repeat'

    img = ft.Image(repeat='repeatX')
    assert isinstance(img.repeat, str)
    assert img._get_attr('repeat') == 'repeatX'
class RaiseExceptionOnResponseMiddleware(ClientMiddleware):
    """Client middleware that raises once it sees a response marked by server middleware."""

    class MiddlewareProcessedResponse(Exception):
        pass

    def response(self, get_response):
        def handler(receive_timeout_in_seconds):
            request_id, response = get_response(receive_timeout_in_seconds)
            marked = bool(
                response
                and response.actions
                and response.actions[0].body.get('middleware_was_here') is True
            )
            if marked:
                raise self.MiddlewareProcessedResponse()
            return request_id, response
        return handler
def get_concatened_keywords(strings_list):
    """Split strings on '$'/'#' separators and collect the resulting keywords.

    A string that splits into several parts contributes each part longer than
    two characters; a string with no separators is kept verbatim.

    :param strings_list: iterable of strings to process
    :return: list of extracted keywords
    """
    # Compile once instead of re-parsing the pattern on every loop iteration.
    separator = re.compile('[$#]+')
    output_list = []
    for s in strings_list:
        parts = [part for part in separator.split(s) if part]
        if len(parts) > 1:
            # Keep only fragments long enough to be meaningful keywords.
            output_list.extend(part for part in parts if len(part) > 2)
        else:
            output_list.append(s)
    return output_list
def scan_types() -> None:
    """Exercise both scan() call signatures; results are intentionally discarded."""
    match_all = {'query': {'match_all': {}}}
    for _hit in scan(es, query=match_all, request_timeout=10, clear_scroll=True, scroll_kwargs={'request_timeout': 10}):
        pass
    for _hit in scan(es, raise_on_error=False, preserve_order=False, scroll='10m', size=10, request_timeout=10.0):
        pass
def update_subaward_city_county(table_name=SubawardSearch._meta.db_table):
    """Backfill subaward place-of-performance and legal-entity city/county codes.

    Joins the subaward table against the latest 'Populated Place' rows of
    ref_city_county_state_code (matched on upper-cased city name + state code)
    and updates only rows whose derived codes actually differ.

    :param table_name: target table name (defaults to the SubawardSearch table)
    :return: whatever execute_dml_sql returns for the UPDATE statement
    """
    # Raw f-string: the REGEXP_MATCH patterns contain `\d`, which in a non-raw
    # literal is an invalid escape sequence (SyntaxWarning today, an error in
    # future Python versions). `rf` keeps the bytes identical and silences it.
    sql = rf'''
    with
    address_info as (
        select distinct on (upper(feature_name), state_alpha)
            upper(feature_name) as feature_name,
            state_alpha,
            county_numeric,
            upper(county_name) as county_name,
            census_code
        from
            ref_city_county_state_code
        where
            feature_class = 'Populated Place' and
            coalesce(feature_name, '') != '' and
            coalesce(state_alpha, '') != ''
        order by
            upper(feature_name),
            state_alpha,
            county_sequence,
            coalesce(date_edited, date_created) desc,
            id desc
    )
    update
        "{table_name}" as s1
    set
        sub_place_of_perform_county_code = LPAD(CAST(CAST((REGEXP_MATCH(pop.county_numeric, '^[A-Z]*(\d+)(?:\.\d+)?$'))[1] AS smallint) AS text), 3, '0'),
        sub_place_of_perform_county_name = pop.county_name,
        sub_place_of_perform_city_code = pop.census_code,
        sub_legal_entity_county_code = LPAD(CAST(CAST((REGEXP_MATCH(rec.county_numeric, '^[A-Z]*(\d+)(?:\.\d+)?$'))[1] AS smallint) AS text), 3, '0'),
        sub_legal_entity_county_name = rec.county_name,
        sub_legal_entity_city_code = rec.census_code
    from
        "{table_name}" as s
        left outer join address_info as pop on
            pop.feature_name = UPPER(s.sub_place_of_perform_city_name) and
            pop.state_alpha = UPPER(s.sub_place_of_perform_state_code)
        left outer join address_info as rec on
            rec.feature_name = UPPER(s.sub_legal_entity_city_name) and
            rec.state_alpha = UPPER(s.sub_legal_entity_state_code)
    where
        s.broker_subaward_id = s1.broker_subaward_id and (
            pop.county_numeric is distinct from s.sub_place_of_perform_county_code or
            pop.county_name is distinct from s.sub_place_of_perform_county_name or
            pop.census_code is distinct from s.sub_place_of_perform_city_code or
            rec.county_numeric is distinct from s.sub_legal_entity_county_code or
            rec.county_name is distinct from s.sub_legal_entity_county_name or
            rec.census_code is distinct from s.sub_legal_entity_city_code
        )
    '''
    return execute_dml_sql(sql)
class FakeConnection(Connection):
    """Test double for Connection that serves a fixed envelope a limited number of times."""

    def __init__(self, envelope: Envelope, num: int, *args: Any, **kwargs: Any):
        """Store the envelope and how many receives may return it; start connected."""
        Connection.__init__(self, *args, **kwargs)
        self.num = num
        self.envelope = envelope
        self.state = ConnectionStates.connected

    async def connect(self) -> None:
        """No-op: the fake is already marked connected in __init__."""

    async def disconnect(self) -> None:
        """Mark the connection as disconnected."""
        self.state = ConnectionStates.disconnected

    async def send(self, envelope: Envelope) -> None:
        """Silently drop outgoing envelopes."""
        return None

    async def receive(self, *args: Any, **kwargs: Any) -> Optional[Envelope]:
        """Return the stored envelope up to `num` times, then sleep briefly and yield None."""
        if (self.num <= 0):
            # Budget exhausted: back off a little so callers polling in a loop don't spin.
            (await asyncio.sleep(0.1))
            return None
        self.num -= 1
        return self.envelope
class Components():
    """Entry point for Angular component factories (router, materials, primeng)."""

    def __init__(self, app: str, count: int=0):
        """Bind the Angular app wrapper and its page.

        NOTE(review): `app` is used as an object (`app.server.page`), so the
        `str` annotation looks wrong -- presumably an app wrapper; confirm.
        """
        (self._app, self.count_comp, self.page) = (app, count, app.server.page)

    def router(self):
        """Return a standards Router bound to this page (import deferred, likely to avoid cycles)."""
        from epyk.web.components.angular import standards
        return standards.Router(self.page, None)

    def materials(self):
        """Return the Angular Material components factory for this app/page."""
        from epyk.web.components.angular import materials
        return materials.Components(app=self._app, page=self.page)

    def primeng(self):
        """Return the PrimeNG components factory."""
        from epyk.web.components.angular import primeng
        return primeng.Components(self)
class BuildChrootStartedV1StompDontUseTest(unittest.TestCase):
    """Schema-validation test for BuildChrootStartedV1StompDontUse messages."""

    msg_class = schema.BuildChrootStartedV1StompDontUse

    def setUp(self):
        self.fedmsg_message = {'chroot': 'fedora-29-x86_64'}

    def test_chroot(self):
        # Building the message from the fedmsg body must pass schema validation.
        message = self.msg_class(body=self.fedmsg_message)
        message.validate()
@pytest.fixture(scope='function')
def parent_child_reindex_setup(sync_client):
    """Create parent/child join test indices and seed one question/answer pair.

    NOTE(review): the original chunk carried a bare `(scope='function')`
    expression -- a stripped `@pytest.fixture` decorator, restored here.

    :param sync_client: synchronous Elasticsearch client fixture
    """
    body = {'settings': {'number_of_shards': 1, 'number_of_replicas': 0}, 'mappings': {'properties': {'question_answer': {'type': 'join', 'relations': {'question': 'answer'}}}}}
    sync_client.indices.create(index='test-index', body=body)
    sync_client.indices.create(index='real-index', body=body)
    # Parent doc, then a child routed to the parent's shard.
    sync_client.index(index='test-index', id=42, body={'question_answer': 'question'})
    sync_client.index(index='test-index', id=47, routing=42, body={'some': 'data', 'question_answer': {'name': 'answer', 'parent': 42}})
    sync_client.indices.refresh(index='test-index')
def filter_user_identities_for_connector(secrets: EmailSchema, user_identities: Dict[str, Any]) -> Dict[str, Any]:
    """Keep only the identity entries this connector requires, dropping missing/falsy values."""
    required_identities: List[str] = get_identity_types_for_connector(secrets)
    filtered: Dict[str, Any] = {}
    for identity_type in required_identities:
        value = user_identities.get(identity_type)
        if value:
            filtered[identity_type] = value
    return filtered
def function_call_interval_limit(interval):
    """Decorator factory: suppress calls made less than `interval` seconds apart.

    The very first call of a decorated function always executes.  The first
    positional argument (typically `self`) may override the interval through a
    `function_call_interval_limit_overwrite` attribute.  Suppressed calls return
    a no-op lambda (original contract preserved).

    :param interval: minimum number of seconds between executions
    """
    import functools  # local import: this chunk cannot see the file's import block

    def decorator(func):
        # The original carried a bare `(func)` line -- a stripped
        # `@functools.wraps(func)` decorator, restored here.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            current_time = time.time()
            last_time_called_key = '_last_time_called_{0}'.format(func.__name__)
            if not hasattr(function_call_interval_limit, last_time_called_key):
                # First-ever call for this function name: record it and run.
                setattr(function_call_interval_limit, last_time_called_key, current_time)
                return func(*args, **kwargs)
            # Fix for the original UnboundLocalError: `interval` was reassigned
            # inside the wrapper, so a caught AttributeError left it unbound.
            # getattr() with a default keeps `interval` as the fallback.
            if args:
                effective_interval = getattr(args[0], 'function_call_interval_limit_overwrite', interval)
            else:
                effective_interval = interval
            if (current_time - getattr(function_call_interval_limit, last_time_called_key)) >= effective_interval:
                setattr(function_call_interval_limit, last_time_called_key, current_time)
                return func(*args, **kwargs)
            # Too soon: return a callable no-op, matching the original behavior.
            return lambda *args: None
        return wrapper
    return decorator
# The original chunk carried these as bare leading-dot expressions (a syntax
# error) -- stripped `@pytest.mark.*` decorators, restored here.
@pytest.mark.skipif((MID_MEMORY > memory), reason='Travis has too less memory to run it.')
@pytest.mark.parametrize('matrix', [matrix])
@pytest.mark.parametrize('outFileName', [outfile_aggregate_plots])
@pytest.mark.parametrize('BED', [BED])
@pytest.mark.parametrize('mode', ['intra-chr'])
@pytest.mark.parametrize('ran', ['50000:900000'])
@pytest.mark.parametrize('BED2', [BED2])
@pytest.mark.parametrize('numberOfBins', [30])
@pytest.mark.parametrize('transform', sorted(['total-counts']))
@pytest.mark.parametrize('operationType', sorted(['sum', 'mean', 'median']))
@pytest.mark.parametrize('outFilePrefixMatrix', ['outFilePrefix'])
@pytest.mark.parametrize('outFileContactPairs', ['outFileContactPairs'])
@pytest.mark.parametrize('diagnosticHeatmapFile', [diagnosticHeatmapFile])
@pytest.mark.parametrize('kmeans', [4])
@pytest.mark.parametrize('hclust', [4])
@pytest.mark.parametrize('howToCluster', sorted(['full', 'center', 'diagonal']))
@pytest.mark.parametrize('chromosomes', ['X'])
@pytest.mark.parametrize('colorMap', ['RdYlBu_r'])
@pytest.mark.parametrize('plotType', sorted(['2d', '3d']))
@pytest.mark.parametrize('vMin', [0.01])
@pytest.mark.parametrize('vMax', [1.0])
def test_aggregate_contacts(capsys, matrix, outFileName, BED, mode, ran, BED2, numberOfBins, transform, operationType, outFilePrefixMatrix, outFileContactPairs, diagnosticHeatmapFile, kmeans, hclust, howToCluster, chromosomes, colorMap, plotType, vMin, vMax):
    """Run hicAggregateContacts end-to-end over the parametrized option grid."""
    args = '--matrix {} --outFileName {} --BED {} --mode {} --range {} --BED2 {} --numberOfBins {} --transform {} --operationType {} --outFilePrefixMatrix {} --kmeans {} --hclust {} --howToCluster {} --chromosomes {} --colorMap {} --plotType {} --vMin {} --vMax {} --disable_bbox_tight'.format(matrix, outFileName.name, BED, mode, ran, BED2, numberOfBins, transform, operationType, outFilePrefixMatrix, kmeans, hclust, howToCluster, chromosomes, colorMap, plotType, vMin, vMax).split()
    hicexplorer.hicAggregateContacts.main(args)
    # NOTE(review): main() is invoked directly AND via compute() (which looks
    # like a retry helper) -- the direct call may be redundant; confirm upstream.
    compute(hicexplorer.hicAggregateContacts.main, args, 5)
    os.remove(outFileName.name)
class TreeNodeIndentedLabels():
    """Form-field mixin that renders tree nodes with depth-based '--- ' indentation."""

    def __init__(self, queryset, *args, **kwargs):
        # Annotate the queryset with tree metadata when the manager supports it.
        if hasattr(queryset, 'with_tree_fields'):
            queryset = queryset.with_tree_fields()
        # Callers may override the labelling function per-instance.
        try:
            self.label_from_instance = kwargs.pop('label_from_instance')
        except KeyError:
            pass
        super().__init__(queryset, *args, **kwargs)

    def label_from_instance(self, obj):
        """Prefix the object's string form with one '--- ' per tree level."""
        depth = getattr(obj, 'tree_depth', 0)
        return '{}{}'.format('--- ' * depth, obj)
class TestFizzBuzz(unittest.TestCase):
    """Unit tests for Solution.fizz_buzz."""

    def test_fizz_buzz(self):
        solution = Solution()
        # Invalid inputs: None and non-positive values must raise.
        self.assertRaises(TypeError, solution.fizz_buzz, None)
        self.assertRaises(ValueError, solution.fizz_buzz, 0)
        expected = ['1', '2', 'Fizz', '4', 'Buzz', 'Fizz', '7', '8',
                    'Fizz', 'Buzz', '11', 'Fizz', '13', '14', 'FizzBuzz']
        self.assertEqual(solution.fizz_buzz(15), expected)
        print('Success: test_fizz_buzz')
class OptionPlotoptionsTreemapSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Play-delay mapping options for treemap speech sonification.

    Each option is exposed as a property backed by the shared config dict.
    The original chunk defined each getter/setter pair as two plain methods
    with the same name (the setter silently shadowed the getter) -- the
    stripped ``@property`` / ``@<name>.setter`` decorators are restored here.
    """

    @property
    def mapFunction(self):
        """Mapping function for this audio parameter (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property this audio parameter is mapped to (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped value range (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped value range (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Context the mapping is evaluated within (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def split_and_write_sequences(seq, min_Ns, min_len, output_file):
    """Split a sequence on runs of N/n and write each long-enough fragment as FASTA.

    Fragments are maximal runs of non-N characters of at least `min_len` bases;
    each one written is named `<name>_<index>_<from>_<to>`.

    :param seq: sequence record with `.name` and `.sequence` attributes
    :param min_Ns: unused here -- presumably the minimum N-run length; confirm upstream
    :param min_len: minimum fragment length
    :param output_file: target passed to Fasta.write_to_file
    """
    short_name = seq.name.split(' ')[0]
    seq.name = short_name
    # Maximal runs of at least `min_len` consecutive non-N characters.
    fragments = list(re.finditer(('[^nN]{' + str(min_len)) + ',}', seq.sequence))
    n = 0
    for f in fragments:
        n += 1
        nstr = str(n).rjust(8, '0')
        (_from, _to) = f.span()
        print(short_name, _from, _to)
        # NOTE: strictly greater-than, so fragments of exactly min_len are skipped.
        if (_to - _from) > min_len:
            # Bug fix: the original wrote to an undefined name `outfile`
            # instead of the `output_file` parameter (NameError at runtime).
            Fasta('_'.join([short_name, nstr, str(_from), str(_to)]), seq.sequence[_from:_to]).write_to_file(output_file)
def _ocr_tables_standarize_row(row) -> Tuple[(Row, int)]:
    """Convert an OCR table row into the standard Row model.

    Returns the standardized Row together with its column count.
    `is_header` is always False here -- header detection is not implemented.
    """
    is_header = False
    cells: List[Cell] = []  # annotation fixed: Sequence has no append()
    for cell in row.cells:
        std_cell = _ocr_tables_standarize_cell(cell)
        cells.append(std_cell)
    num_col = len(cells)
    return (Row(cells=cells, is_header=is_header), num_col)
def test_hexary_trie_at_root_lookups():
    """Snapshots taken via at_root() must expose exactly the data present at that root."""
    operations = ((b'ab', b'b' * 32), (b'ac', b'c' * 32), (b'ac', None), (b'ad', b'd' * 32))
    present_at_root = defaultdict(set)
    absent_at_root = defaultdict(set)
    subject = HexaryTrie({})
    # Apply each change, recording what must (or must not) be visible at each root.
    for key, value in operations:
        if value is None:
            del subject[key]
            absent_at_root[subject.root_hash].add(key)
        else:
            subject[key] = value
            present_at_root[subject.root_hash].add((key, value))
    # Every recorded (key, value) pair must be readable from its snapshot...
    for root_hash, pairs in present_at_root.items():
        for key, value in pairs:
            with subject.at_root(root_hash) as view:
                assert key in view
                assert view[key] == value
    # ...and deleted keys must be absent from theirs.
    for root_hash, deleted_keys in absent_at_root.items():
        for key in deleted_keys:
            with subject.at_root(root_hash) as view:
                assert key not in view
def pow_cf_c(gen, t, srcs):
    """Emit code for (complex base) ** (float exponent) via log/exp in polar form.

    Returns a ComplexArg holding the destination registers for the result.
    """
    # NOTE(review): `nonzero` is allocated but never emitted or jumped to --
    # a zero-base special case appears to have been elided; confirm upstream.
    nonzero = gen.symbols.newLabel()
    done = gen.symbols.newLabel()
    dst_re = gen.newTemp(Float)
    dst_im = gen.newTemp(Float)
    # log of the complex base, then scale both components by the exponent.
    temp = log_c_c(gen, t, [srcs[0]])
    t_re = gen.emit_binop('*', [temp.re, srcs[1]], Float)
    t_re = gen.emit_func('exp', [t_re], Float)
    t_im = gen.emit_binop('*', [temp.im, srcs[1]], Float)
    # Convert magnitude/angle back to rectangular components.
    temp2 = polar_ff_c(gen, t, [t_re, t_im])
    gen.emit_move(temp2.re, dst_re)
    gen.emit_move(temp2.im, dst_im)
    gen.emit_label(done)
    return ComplexArg(dst_re, dst_im)
class SigningHandler(Handler):
    """Handle SigningMessages returned by the decision maker.

    Signed messages/transactions are forwarded to the negotiation counterparty,
    a ledger, or stored in shared state, depending on the dialogue state.
    """

    # Protocol this handler is registered for.
    SUPPORTED_PROTOCOL = SigningMessage.protocol_id

    def setup(self) -> None:
        """Set up the handler (nothing to do)."""

    def handle(self, message: Message) -> None:
        """Dispatch an incoming signing message to the matching performative handler.

        :param message: the SigningMessage to handle
        """
        signing_msg = cast(SigningMessage, message)
        signing_dialogues = cast(SigningDialogues, self.context.signing_dialogues)
        signing_dialogue = cast(Optional[SigningDialogue], signing_dialogues.update(signing_msg))
        if (signing_dialogue is None):
            self._handle_unidentified_dialogue(signing_msg)
            return
        self.context.logger.info('received {} from decision_maker, message={}'.format(signing_msg.performative.value, signing_msg))
        if (signing_msg.performative is SigningMessage.Performative.SIGNED_MESSAGE):
            self._handle_signed_message(signing_msg, signing_dialogue)
        elif (signing_msg.performative is SigningMessage.Performative.SIGNED_TRANSACTION):
            self._handle_signed_transaction(signing_msg, signing_dialogue)
        elif (signing_msg.performative is SigningMessage.Performative.ERROR):
            self._handle_error(signing_msg, signing_dialogue)
        else:
            self._handle_invalid(signing_msg, signing_dialogue)

    def teardown(self) -> None:
        """Tear down the handler (nothing to do)."""

    def _handle_unidentified_dialogue(self, signing_msg: SigningMessage) -> None:
        """Log a signing message that does not belong to any known dialogue."""
        self.context.logger.info('received invalid signing message={}, unidentified dialogue.'.format(signing_msg))

    def _handle_signed_message(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
        """Route a signed message according to the last FIPA message in the dialogue.

        After ACCEPT: send our signature to the counterparty via MATCH_ACCEPT_W_INFORM.
        After MATCH_ACCEPT_W_INFORM: both signatures are available -- record the
        complete transaction in shared state for the controller to pick up.
        """
        fipa_dialogue = signing_dialogue.associated_fipa_dialogue
        last_fipa_message = cast(FipaMessage, fipa_dialogue.last_incoming_message)
        enforce((last_fipa_message is not None), 'last message not recovered.')
        if (last_fipa_message.performative == FipaMessage.Performative.ACCEPT):
            fipa_msg = fipa_dialogue.reply(performative=FipaMessage.Performative.MATCH_ACCEPT_W_INFORM, target_message=last_fipa_message, info={'signature': signing_msg.signed_message.body})
            self.context.outbox.put_message(message=fipa_msg)
            self.context.logger.info('sending {} to {} (as {}), message={}.'.format(fipa_msg.performative.value, fipa_msg.to[(- 5):], fipa_dialogue.role, fipa_msg))
        elif (last_fipa_message.performative == FipaMessage.Performative.MATCH_ACCEPT_W_INFORM):
            counterparty_signature = fipa_dialogue.counterparty_signature
            tx_id = fipa_dialogue.terms.sender_hash
            if ('transactions' not in self.context.shared_state.keys()):
                self.context.shared_state['transactions'] = OrderedDict()
            tx = {'terms': fipa_dialogue.terms, 'sender_signature': signing_msg.signed_message.body, 'counterparty_signature': counterparty_signature}
            self.context.shared_state['transactions'][tx_id] = tx
            self.context.logger.info(f'sending transaction to controller, tx={tx}.')
        else:
            enforce(False, 'last message should be of performative accept or match accept.')

    def _handle_signed_transaction(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
        """Route a signed transaction (contract-based trading only).

        Submits to the ledger when this agent completes the signing round, or
        forwards the partially-signed transaction to the counterparty (FetchAI
        two-step cosm-trade flow).
        """
        strategy = cast(Strategy, self.context.strategy)
        if (not strategy.is_contract_tx):
            self.context.logger.warning('signed transaction handler only for contract case.')
            return
        cosm_trade_dialogue = signing_dialogue.associated_cosm_trade_dialogue
        if (cosm_trade_dialogue is not None):
            # Counterparty already signed first: we are second signer, submit to ledger.
            ledger_api_dialogues = cast(LedgerApiDialogues, self.context.ledger_api_dialogues)
            (ledger_api_msg, ledger_api_dialogue) = ledger_api_dialogues.create(counterparty=LEDGER_API_ADDRESS, performative=LedgerApiMessage.Performative.SEND_SIGNED_TRANSACTION, signed_transaction=signing_msg.signed_transaction)
            ledger_api_dialogue = cast(LedgerApiDialogue, ledger_api_dialogue)
            ledger_api_dialogue.associated_signing_dialogue = signing_dialogue
            self.context.logger.info('sending {} to ledger {}, message={}'.format(ledger_api_msg.performative, strategy.ledger_id, ledger_api_msg))
            self.context.outbox.put_message(message=ledger_api_msg)
            return
        fipa_dialogue = signing_dialogue.associated_fipa_dialogue
        last_fipa_message = cast(FipaMessage, fipa_dialogue.last_incoming_message)
        enforce((last_fipa_message is not None), 'last message not recovered.')
        if (last_fipa_message.performative == FipaMessage.Performative.ACCEPT):
            # We accepted: send our tx signature back to the counterparty.
            fipa_msg = fipa_dialogue.reply(performative=FipaMessage.Performative.MATCH_ACCEPT_W_INFORM, target_message=last_fipa_message, info={'tx_signature': signing_msg.signed_transaction})
            self.context.logger.info('sending {} to {} (as {}), message={}.'.format(fipa_msg.performative.value, fipa_msg.to[(- 5):], fipa_dialogue.role, fipa_msg))
            self.context.outbox.put_message(message=fipa_msg)
        elif (last_fipa_message.performative == FipaMessage.Performative.MATCH_ACCEPT_W_INFORM):
            if (strategy.ledger_id == EthereumApi.identifier):
                # Ethereum: single signature suffices, submit directly.
                ledger_api_dialogues = cast(LedgerApiDialogues, self.context.ledger_api_dialogues)
                (ledger_api_msg, ledger_api_dialogue) = ledger_api_dialogues.create(counterparty=LEDGER_API_ADDRESS, performative=LedgerApiMessage.Performative.SEND_SIGNED_TRANSACTION, signed_transaction=signing_msg.signed_transaction)
                ledger_api_dialogue = cast(LedgerApiDialogue, ledger_api_dialogue)
                ledger_api_dialogue.associated_signing_dialogue = signing_dialogue
                self.context.logger.info('sending {} to ledger {}, message={}'.format(ledger_api_msg.performative, strategy.ledger_id, ledger_api_msg))
                self.context.outbox.put_message(message=ledger_api_msg)
            elif (strategy.ledger_id == FetchAIApi.identifier):
                # FetchAI: counterparty must co-sign; forward the signed tx to them.
                cosm_trade_dialogues = cast(CosmTradeDialogues, self.context.cosm_trade_dialogues)
                (cosm_trade_msg, _) = cosm_trade_dialogues.create(counterparty=signing_dialogue.associated_fipa_dialogue.dialogue_label.dialogue_opponent_addr, performative=CosmTradeMessage.Performative.INFORM_SIGNED_TRANSACTION, signed_transaction=signing_msg.signed_transaction, fipa_dialogue_id=signing_dialogue.associated_fipa_dialogue.dialogue_label.dialogue_reference)
                self.context.logger.info('sending {} to {}, message={}.'.format(cosm_trade_msg.performative.value, cosm_trade_msg.to[(- 5):], cosm_trade_msg))
                self.context.outbox.put_message(message=cosm_trade_msg)
            else:
                enforce(False, f'Unidentified ledger id: {strategy.ledger_id}')
        else:
            enforce(False, 'last message should be of performative accept or match accept.')

    def _handle_error(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
        """Log a signing error reported by the decision maker."""
        self.context.logger.info('transaction signing was not successful. Error_code={} in dialogue={}'.format(signing_msg.error_code, signing_dialogue))

    def _handle_invalid(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
        """Log a signing message whose performative this handler cannot process."""
        self.context.logger.warning('cannot handle signing message of performative={} in dialogue={}.'.format(signing_msg.performative, signing_dialogue))
class TestFileTracker():
    """Tests for FileTracker's open-file bookkeeping."""

    def test_track_open(self, tmpdir):
        FileTracker.track()
        target = tmpdir.join('test').strpath
        handle = open(target, 'w')
        assert handle in FileTracker.get_openfiles()
        handle.close()
        assert handle not in FileTracker.get_openfiles()

    def test_track_context_open(self, tmpdir):
        FileTracker.track()
        target = tmpdir.join('test').strpath
        with open(target, 'w') as handle:
            assert handle in FileTracker.get_openfiles()
        # Leaving the context closes the file, which must drop it from tracking.
        assert handle not in FileTracker.get_openfiles()

    def test_untrack(self, tmpdir):
        FileTracker.track()
        FileTracker.untrack()
        target = tmpdir.join('test').strpath
        handle = open(target, 'w')
        assert handle not in FileTracker.get_openfiles()
def main():
    """Ansible module entry point for fortios emailfilter_mheader configuration.

    Builds the argument spec from the versioned schema, connects over the
    persistent socket, applies the configuration, and exits with change/diff
    status (warning on FortiOS/playbook schema version mismatches).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'id'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'emailfilter_mheader': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy the schema-derived options into the module spec; the mkey becomes required.
    for attribute_name in module_spec['options']:
        fields['emailfilter_mheader']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['emailfilter_mheader']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Default to no logging when the option is absent.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Compare the device's schema version against the playbook's.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'emailfilter_mheader')
        (is_error, has_changed, result, diff) = fortios_emailfilter(module.params, fos, module.check_mode)
    else:
        # No persistent connection available: cannot talk to the device.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def open_logfile(name):
    """Point the root logger at a fresh log file.

    Uses `config['log_dir']/<name>.log` when a log directory is configured,
    otherwise falls back to `config['log_file']`.  All existing handlers are
    removed and closed first so records go only to the new file.

    :param name: base name for the log file (without extension)
    """
    _format = '%(asctime)s.%(msecs)03d %(name)-10s: %(levelname)-8s: %(message)s'
    _datefmt = '%H:%M:%S'
    if config['log_dir'] is not None:  # idiom fix: identity comparison with None
        filename = (os.path.join(config['log_dir'], name) + '.log')
    else:
        filename = config['log_file']
    logger = logging.getLogger()
    # Bug fix: iterate over a copy -- removeHandler mutates logger.handlers,
    # and mutating the list while iterating it skips every other handler.
    for handler in list(logger.handlers):
        logger.removeHandler(handler)
        handler.close()
    handler = logging.FileHandler(filename, mode='a')
    handler.setFormatter(logging.Formatter(_format, _datefmt))
    logger.addHandler(handler)
class OptionSeriesTreemapSonificationDefaultspeechoptionsMappingRate(Options):
    """Speech-rate mapping options for treemap series sonification.

    Each option is exposed as a property backed by the shared config dict.
    The original chunk defined each getter/setter pair as two plain methods
    with the same name (the setter silently shadowed the getter) -- the
    stripped ``@property`` / ``@<name>.setter`` decorators are restored here.
    """

    @property
    def mapFunction(self):
        """Mapping function for this audio parameter (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property this audio parameter is mapped to (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped value range (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped value range (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Context the mapping is evaluated within (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_loop_to_sequence_rule_not_possible_continue():
    """SequenceRule must not apply to an endless loop whose condition node continues.

    Builds: while(true) { c += 5; if (a) continue; c += 10; break; }
    The `continue` makes the loop body non-linear, so the loop cannot be
    flattened into a sequence.
    """
    ast = AbstractSyntaxForest(condition_handler=condition_handler1(LogicCondition.generate_new_context()))
    root = ast.factory.create_endless_loop_node()
    body = ast.factory.create_seq_node()
    children = [ast.factory.create_code_node(stmts=[assignment_c_plus_5.copy()]), ast.factory.create_condition_node(condition=logic_cond('a', ast.factory.logic_context)), ast.factory.create_code_node(stmts=[assignment_c_plus_10.copy(), Break()])]
    true_branch = ast.factory.create_true_node()
    true_branch_child = ast.factory.create_code_node(stmts=[Continue()])
    ast._add_nodes_from([root, body, children[0], children[1], children[2], true_branch, true_branch_child])
    ast._add_edges_from([(root, body), (body, children[0]), (body, children[1]), (body, children[2]), (children[1], true_branch), (true_branch, true_branch_child)])
    # Reachability: first statement precedes the continue and the break block.
    ast._code_node_reachability_graph.add_reachability_from(((children[0], true_branch_child), (children[0], children[2]), (true_branch_child, children[2])))
    body.sort_children()
    assert (SequenceRule.can_be_applied(root) is False)
class GaussianMixtureModelTest(unittest.TestCase):
    """Checks the BMG graph emitted for the Gaussian mixture model."""

    def test_gmm_to_dot(self) -> None:
        self.maxDiff = None  # show the full diff for the long DOT string
        queried = [mixed(0)]
        no_observations = {}
        dot_actual = BMGInference().to_dot(queried, no_observations)
        dot_expected = '\ndigraph "graph" {\n  N00[label="[0.125,0.375,0.5]"];\n  N01[label=Categorical];\n  N02[label=Sample];\n  N03[label=0.0];\n  N04[label=1.0];\n  N05[label=Normal];\n  N06[label=Sample];\n  N07[label=Sample];\n  N08[label=Sample];\n  N09[label=Choice];\n  N10[label=2.0];\n  N11[label=Normal];\n  N12[label=Sample];\n  N13[label=Query];\n  N00 -> N01;\n  N01 -> N02;\n  N02 -> N09;\n  N03 -> N05;\n  N04 -> N05;\n  N05 -> N06;\n  N05 -> N07;\n  N05 -> N08;\n  N06 -> N09;\n  N07 -> N09;\n  N08 -> N09;\n  N09 -> N11;\n  N10 -> N11;\n  N11 -> N12;\n  N12 -> N13;\n}\n'
        self.assertEqual(dot_expected.strip(), dot_actual.strip())
def checkDestinations(tdb, cmdenv, calc):
    """Resolve and validate the set of candidate destination stations.

    Populates cmdenv.destinations (and cmdenv.destSystems) from --to/--end-jumps,
    or from all suitable stations when no destination was given.
    """
    cmdenv.destinations = None
    if cmdenv.destPlace:
        if (cmdenv.endJumps and (cmdenv.endJumps > 0)):
            # Expand the destination to everything within --end-jumps of it.
            cmdenv.destinations = expandForJumps(tdb, cmdenv, calc, cmdenv.destPlace.system, cmdenv.endJumps, '--to', 'destination')
            # NOTE(review): destPlace is cleared here, so the System-filter
            # branch at the bottom never fires for the endJumps case -- confirm
            # this is intentional.
            cmdenv.destPlace = None
        elif isinstance(cmdenv.destPlace, Station):
            cmdenv.DEBUG0('destPlace: Station: {}', cmdenv.destPlace.name())
            checkStationSuitability(cmdenv, calc, cmdenv.destPlace, '--to')
            cmdenv.destinations = (cmdenv.destPlace,)
        else:
            # Destination is a system: consider all its suitable stations.
            cmdenv.DEBUG0('destPlace: System: {}', cmdenv.destPlace.name())
            cmdenv.destinations = tuple((station for station in cmdenv.destPlace.stations if checkStationSuitability(cmdenv, calc, station)))
            checkForEmptyStationList('--to', cmdenv.destPlace, cmdenv.destinations, cmdenv.endJumps)
    else:
        if cmdenv.endJumps:
            raise CommandLineError('--end-jumps (-e) only works with --to')
        cmdenv.DEBUG0('Using all available destinations')
        if cmdenv.goalSystem:
            # Normalize the goal to a System object.
            dest = tdb.lookupPlace(cmdenv.goalSystem)
            cmdenv.goalSystem = dest.system
        if (cmdenv.origPlace and (cmdenv.maxJumpsPer == 0)):
            # No jumping allowed: only stations in the origin systems qualify.
            stationSrc = chain.from_iterable((system.stations for system in cmdenv.origSystems))
        else:
            stationSrc = tdb.stationByID.values()
        cmdenv.destinations = tuple((station for station in stationSrc if checkStationSuitability(cmdenv, calc, station)))
    if ((not cmdenv.endJumps) and isinstance(cmdenv.destPlace, System)):
        cmdenv.destinations = filterStationSet('--to', cmdenv, calc, cmdenv.destinations)
    cmdenv.destSystems = tuple(set((stn.system for stn in cmdenv.destinations)))
class FunctionPtrType(BaseFunctionType):
    """Model of a C function-pointer type."""

    _base_pattern = '(*&)(%s)'

    def build_backend_type(self, ffi, finishlist):
        """Build (via the global cache) the backend function type for this signature."""
        btype_result = self.result.get_cached_btype(ffi, finishlist)
        btype_args = tuple(arg.get_cached_btype(ffi, finishlist) for arg in self.args)
        return global_cache(self, ffi, 'new_function_type', btype_args, btype_result, self.ellipsis)
def safe_decode(s, enc='utf-8', errors='strict'):
    """Best-effort conversion of an arbitrary value to str.

    - str is returned unchanged
    - bytes are decoded with `enc`/`errors`
    - ints and floats are stringified
    - anything else is coerced to bytes and decoded; on failure, str() is used

    :param s: value to decode
    :param enc: encoding for bytes input
    :param errors: error-handling scheme passed to bytes.decode
    :return: a str representation of `s`
    """
    if isinstance(s, str):
        return s
    if isinstance(s, bytes):
        return s.decode(enc, errors)
    if isinstance(s, (int, float)):
        return str(s)
    try:
        return safe_decode(bytes(s), enc=enc, errors=errors)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
        pass
    return str(s)
# The original chunk carried a bare leading-dot expression (a syntax error) --
# a stripped `@pytest.mark.AnalysisPluginTestConfig` decorator, restored here.
@pytest.mark.AnalysisPluginTestConfig(plugin_class=YaraPlugin)
class TestAnalysisYaraBasePlugin():
    """Tests for the yara base analysis plugin."""

    def test_get_signature_paths(self, analysis_plugin):
        """The compiled signature file must live under the plugin's signature dir."""
        intended_signature_path = os.path.join(get_src_dir(), 'analysis/signatures', analysis_plugin.NAME)
        assert isinstance(analysis_plugin.signature_path, str), 'incorrect type'
        assert (f"{intended_signature_path.rstrip('/')}.yc" == analysis_plugin.signature_path), 'signature path is wrong'

    def test_process_object(self, analysis_plugin):
        """A matching file yields the testRule match plus a summary entry."""
        test_file = FileObject(file_path=os.path.join(get_test_data_dir(), 'yara_test_file'))
        test_file.processed_analysis.update({analysis_plugin.NAME: []})
        processed_file = analysis_plugin.process_object(test_file)
        results = processed_file.processed_analysis[analysis_plugin.NAME]
        assert (len(results) == 2), 'not all matches found'
        assert ('testRule' in results), 'testRule match not found'
        assert (results['summary'] == ['testRule'])

    def test_process_object_nothing_found(self, analysis_plugin):
        """A zero-byte file yields only an empty summary."""
        test_file = FileObject(file_path=os.path.join(get_test_data_dir(), 'zero_byte'))
        test_file.processed_analysis.update({analysis_plugin.NAME: []})
        processed_file = analysis_plugin.process_object(test_file)
        assert (len(processed_file.processed_analysis[analysis_plugin.NAME]) == 1), 'result present but should not'
        assert (processed_file.processed_analysis[analysis_plugin.NAME]['summary'] == []), 'summary not empty'
class BaseKTSolver(object):
    def __init__(self, KL_temperature, AT_beta):
        """Store knowledge-transfer hyperparameters.

        :param KL_temperature: softmax temperature used in the KL-divergence term
        :param AT_beta: weight of the attention-transfer term
        """
        self.KL_temperature = KL_temperature
        self.AT_beta = AT_beta
    # NOTE(review): no `self` parameter -- this (and the other update_* methods)
    # looks like a stripped @staticmethod; confirm before calling on an instance.
    def update_generator(same_noise, KT_loss_generator_fn, _generator, _opt_generator, _teachers, _students):
        """Run one optimization step of the generator against each student/teacher pair.

        Returns the averaged generator loss as a Python float.
        """
        x_pseudos = _generator.generate(same_noise=same_noise)
        student_logits = [_student(_x) for (_x, _student) in zip(x_pseudos, _students)]
        teacher_logits = [_teacher(_x) for (_x, _teacher) in zip(x_pseudos, _teachers)]
        # A single student is broadcast against every teacher.
        if ((len(student_logits) < len(teacher_logits)) and (len(student_logits) == 1)):
            student_logits = (student_logits * len(teacher_logits))
        generator_losses = [KT_loss_generator_fn(_student_logits, _teacher_logits) for (_student_logits, _teacher_logits) in zip(student_logits, teacher_logits)]
        _opt_generator.zero_grad()
        generator_avg_loss = (sum(generator_losses) / len(generator_losses))
        generator_avg_loss.backward()
        # Gradient clipping for training stability.
        torch.nn.utils.clip_grad_norm_(_generator.parameters(), 5)
        _opt_generator.step()
        return generator_avg_loss.item()
    # NOTE(review): no `self` parameter -- likely a stripped @staticmethod; confirm.
    def update_student(same_noise, KT_loss_student_fn, _student, _opt_student, _teachers, _generators, teacher_logits=None, weights=None):
        """Run one optimization step of the student on pseudo-data from each generator.

        Teacher logits are computed under no_grad unless supplied by the caller
        (so they can be reused across steps).  Returns (loss, teacher_logits).
        """
        x_pseudos = [_gen.generate(same_noise=same_noise)[0] for _gen in _generators]
        if (teacher_logits is None):
            with torch.no_grad():
                teacher_logits = [_teacher(_x) for (_x, _teacher) in zip(x_pseudos, _teachers)]
                # A single generator's logits are broadcast against every teacher.
                if ((len(_teachers) > len(_generators)) and (len(_generators) == 1)):
                    teacher_logits = (teacher_logits * len(_teachers))
        student_logits_activations = [(_student(_x), _student.activations) for _x in x_pseudos]
        student_losses = [KT_loss_student_fn(_student_logits, _student_activations, _teacher_logits, _teacher.activations) for ((_student_logits, _student_activations), _teacher_logits, _teacher) in zip(student_logits_activations, teacher_logits, _teachers)]
        _opt_student.zero_grad()
        # Default to a uniform weighting over the per-teacher losses.
        weights = (weights if (weights is not None) else ([(1.0 / len(student_losses))] * len(student_losses)))
        student_avg_loss = sum([(loss * weight) for (loss, weight) in zip(student_losses, weights)])
        student_avg_loss.backward()
        torch.nn.utils.clip_grad_norm_(_student.parameters(), 5)
        _opt_student.step()
        return (student_avg_loss.item(), teacher_logits)
def update_generator_ensemble(KT_loss_generator_fn, server_generator, opt_server_generator, local_teachers, server_student):
    """One optimisation step for the server-side generator.

    The teacher signal is the plain average of every local teacher's logits
    on the same pseudo-batch; the generator loss is computed against the
    server student, back-propagated with gradient clipping (norm 5), and the
    scalar loss value is returned.
    """
    pseudo_batch = server_generator.generate(same_noise=True)[0]
    student_logit = server_student(pseudo_batch)
    # Ensemble signal: mean of all local teachers' logits.
    teacher_logit = sum(teacher(pseudo_batch) for teacher in local_teachers) / len(local_teachers)
    loss = KT_loss_generator_fn(student_logit, teacher_logit)
    opt_server_generator.zero_grad()
    loss.backward()
    torch.nn.utils.clip_grad_norm_(server_generator.parameters(), 5)
    opt_server_generator.step()
    return loss.item()
def update_student_ensemble(KT_loss_student_fn, server_student, opt_server_student, local_teachers, server_generator, teacher_logit=None, teacher_activations=None):
    """One knowledge-transfer step for the server student against the teacher ensemble.

    The teacher signal is the element-wise mean of the local teachers'
    logits and of their per-layer activations. Both can be passed in to
    avoid recomputation and are returned for that purpose.

    Returns ``(loss_value, teacher_logit, teacher_activations)``.
    """
    x_pseudo = server_generator.generate(same_noise=True)[0]
    if (teacher_logit is None):
        with torch.no_grad():
            teacher_logit = (sum([teacher(x_pseudo) for teacher in local_teachers]) / len(local_teachers))
    student_logit = server_student(x_pseudo)
    if (teacher_activations is None):
        # Average each activation layer across teachers; the layer count is
        # taken from the first teacher (assumes all teachers share a depth).
        teacher_activations = [(sum([teacher.activations[idx] for teacher in local_teachers]) / len(local_teachers)) for idx in range(len(local_teachers[0].activations))]
    student_loss = KT_loss_student_fn(student_logit, server_student.activations, teacher_logit, teacher_activations)
    opt_server_student.zero_grad()
    student_loss.backward()
    torch.nn.utils.clip_grad_norm_(server_student.parameters(), 5)
    opt_server_student.step()
    return (student_loss.item(), teacher_logit, teacher_activations)
def attention(self, x):
    """Attention map of *x*: channel-wise mean of squared activations,
    flattened per sample and L2-normalised."""
    energy = x.pow(2).mean(1)
    return F.normalize(energy.view(energy.size(0), -1))
def attention_diff(self, x, y):
    """Mean squared difference between the attention maps of *x* and *y*."""
    gap = self.attention(x) - self.attention(y)
    return gap.pow(2).mean()
def divergence(self, student_logits, teacher, use_teacher_logits=True):
    """KL divergence (batchmean) between teacher and student distributions
    at temperature ``self.KL_temperature``.

    When ``use_teacher_logits`` is True, *teacher* holds raw logits and is
    softened with softmax; otherwise it is used directly as the target
    distribution.
    """
    log_student = F.log_softmax(student_logits / self.KL_temperature, dim=1)
    if use_teacher_logits:
        target = F.softmax(teacher / self.KL_temperature, dim=1)
    else:
        target = teacher
    return F.kl_div(log_student, target, reduction='batchmean')
def KT_loss_generator(self, student_logits, teacher_logits):
    """Generator objective: the negated student/teacher divergence
    (the generator is trained to maximise disagreement)."""
    return -self.divergence(student_logits, teacher_logits)
def KT_loss_student(self, student_logits, student_activations, teacher_logits, teacher_activations):
    """Student objective: logit divergence plus (optionally) an
    attention-transfer term weighted by ``self.AT_beta``.

    When ``AT_beta <= 0`` the attention term is skipped entirely.
    """
    kd_term = self.divergence(student_logits, teacher_logits)
    at_term = 0
    if self.AT_beta > 0:
        # Accumulate one weighted attention-difference term per layer;
        # indexing (not zip) preserves an IndexError on mismatched lengths.
        for i, s_act in enumerate(student_activations):
            at_term = at_term + self.AT_beta * self.attention_diff(s_act, teacher_activations[i])
    return kd_term + at_term
def check_early_stopping(self, model, model_ind, best_tracker, validated_perf, validated_perfs, perf_index, early_stopping_batches, log_fn=print, best_models=None):
best_tracker.update(perf=validated_perf['top1'], perf_location=perf_index)
if (validated_perfs is not None):
validated_perfs[model_ind].append(validated_perf)
if (best_tracker.is_best and (best_models is not None)):
best_models[model_ind] = copy.deepcopy(model).cpu()
if ((perf_index - best_tracker.get_best_perf_loc) >= early_stopping_batches):
log_fn(f' Meet the early stopping condition (batches={early_stopping_batches}): early stop!! (perf_index={perf_index}, best_perf_loc={best_tracker.get_best_perf_loc}).')
return True
else:
return False
def prepare_model(self, conf, model, device, _is_teacher):
    """Place *model* on *device* and configure it for its KT role.

    Teachers are frozen (non-trainable) and switched to eval mode; students
    are deep-copied first, made trainable, and switched to train mode.
    ``save_activations`` is enabled in both cases so attention transfer can
    read intermediate activations.
    """
    model = model.to(device)
    model.save_activations = True
    if not _is_teacher:
        # Copy so the caller's original student is left untouched.
        model = copy.deepcopy(model)
        model = agg_utils.modify_model_trainable_status(conf, model, trainable=True)
        model.train()
    else:
        model = agg_utils.modify_model_trainable_status(conf, model, trainable=False)
        model.eval()
    return model
def _modexp(data: bytes) -> int:
    """Modular exponentiation over an EIP-198-style packed payload.

    *data* carries three 32-byte lengths (read by ``extract_lengths``)
    followed, from offset 96, by the base, exponent and modulus bytes.
    Short slices are right-padded with zeros. Degenerate inputs (empty
    base, empty modulus, or modulus == 0) yield 0.
    """
    base_length, exponent_length, modulus_length = extract_lengths(data)
    # Empty base or empty modulus short-circuits to zero.
    if (base_length == 0) or (modulus_length == 0):
        return 0
    base_start = 96
    base_end = base_start + base_length
    exponent_end = base_end + exponent_length
    modulus_end = exponent_end + modulus_length
    modulus = big_endian_to_int(zpad_right(data[exponent_end:modulus_end], to_size=modulus_length))
    if modulus == 0:
        return 0
    base = big_endian_to_int(zpad_right(data[base_start:base_end], to_size=base_length))
    exponent = big_endian_to_int(zpad_right(data[base_end:exponent_end], to_size=exponent_length))
    return pow(base, exponent, modulus)
class ResourceSerializer(serializers.ModelSerializer):
    """Serializes a Resource plus a computed availability window.

    On top of the model fields (minus ``location``) the representation
    gains ``availabilities`` for a date range taken from the request's
    ``arrive``/``depart`` query params (defaulting to today + 13 days),
    ``hasFutureDrftCapacity`` and the location's ``maxBookingDays``.
    """
    class Meta():
        model = Resource
        exclude = ['location']
    def to_representation(self, obj):
        representation = super().to_representation(obj)
        request = self.context['request']
        # DRF requests expose query_params; fall back to plain Django GET.
        try:
            params = request.query_params.dict()
        except AttributeError:
            params = request.GET.dict()
        try:
            arrive = params.get('arrive', dt.datetime.today())
            # NOTE(review): maya.parse(...).date — presumably yields a date
            # object; confirm it accepts both strings and datetimes.
            arrive = maya.parse(arrive).date
            depart = params.get('depart', (arrive + timedelta(days=13)))
            depart = maya.parse(depart).date
        except ParserError:
            # Unparseable inputs fall back to a default two-week window.
            arrive = dt.date.today()
            depart = (arrive + timedelta(days=13))
        availabilities = [{'date': date, 'quantity': quantity} for (date, quantity) in obj.daily_availabilities_within(arrive, depart)]
        representation['availabilities'] = availabilities
        representation['hasFutureDrftCapacity'] = obj.has_future_drft_capacity()
        representation['maxBookingDays'] = obj.location.max_booking_days
        return representation
class TestSig():
    """Round-trip sign/verify checks for legacy, R1 and K1 EOS key formats."""
    payload = 'some important data'
    digest = hashlib.sha256(payload.encode('utf-8').rstrip()).hexdigest()
    # Each pair is (public key, private key); only the private key is signed with.
    legacy = ('EOS6JWAwA6goJmmAGwQEwbFne8zNxhuVTjgk1aLqVW9efHWhGfvwU', '5JU8RktQ72qFtJyiW3DJ54B2ZY6Ad83HdoGg78Nk8kUNMJEmCUg')
    r1 = ('PUB_R1_65vcmkCEJuxQ2rvYxBZSiUGP9FJPaqMfrLyakHduxEULWcBUxW', 'PVT_R1_2sTZXHRWPfgWfn4gTD4bXjVsKRTSYBCekebBgJq1P9SW7ckoXk')
    k1 = ('PUB_K1_6ctHgq55Tt4u3ksvDw1jadhC5tytemHs8fHM4YfFVqMe4F8XWU', 'PVT_K1_r9seSVdS9yTRmSXtLrpELLZ5dhbEqr12jLCRg5NJAWr5q8U9o')
    def _roundtrip(self, private_key):
        # Sign the shared digest with the key and verify the signature with it.
        key = EOSKey(private_key)
        sig = key.sign(self.digest)
        return key.verify(sig, self.digest)
    def test_legacy(self):
        assert self._roundtrip(self.legacy[1])
    def test_r1(self):
        assert self._roundtrip(self.r1[1])
    def test_k1(self):
        assert self._roundtrip(self.k1[1])
def get_set_cell_value_code(new_value, previous_value, zero_next_cell_if_necessary=True):
    """Emit Brainfuck code changing the current cell from *previous_value*
    to *new_value*.

    Two candidate programs are built and the shorter one returned:
      * the naive run of '+'/'-' commands, and
      * a multiplication loop using the next cell as a scratch counter,
        based on an ``a * b + c`` decomposition of the delta.

    When ``zero_next_cell_if_necessary`` is True the scratch cell is first
    cleared with ``[-]``.
    """
    def sign_char(value):
        # '+' increments, '-' decrements ('-' is also returned for zero,
        # where it is never actually emitted).
        return '+' if value > 0 else '-'

    delta = new_value - previous_value
    step = sign_char(delta)
    negative = delta < 0
    delta = abs(delta)
    naive = step * delta

    def best_factorisation(target):
        # Find (a, b, c) with a*b + c == target minimising |a| + |b| + |c|,
        # via a two-pointer sweep over candidate factor pairs.
        best = (target, 1, 0)
        best_cost = target + 1
        lo = 1
        hi = target // 2 - 1
        while hi >= lo:
            a, b = lo + 1, hi + 1
            c = target - a * b
            cost = abs(a) + abs(b) + abs(c)
            if cost < best_cost:
                best = (a, b, c)
                best_cost = cost
            if a * b > target:
                hi -= 1
            else:
                lo += 1
        return best

    a, b, c = best_factorisation(delta)
    looped = '>'
    if zero_next_cell_if_necessary:
        looped += '[-]'
    looped += '+' * a
    looped += '[<' + step * abs(b) + '>-]'
    looped += '<'
    # Correction term: its direction flips when the overall delta is negative.
    looped += sign_char(-c if negative else c) * abs(c)
    return looped if len(naive) > len(looped) else naive
class OptionSeriesTilemapSonificationTracksMappingPan(Options):
    """Config accessors for a sonification track's 'pan' mapping options.

    NOTE(review): every option below appears as a getter/setter pair sharing
    one name; the '@property' / '@<name>.setter' decorators look stripped,
    so as written only the last definition of each name survives on the
    class. Confirm against the generated Highcharts wrapper source.
    """
    def mapFunction(self):
        # Getter: current 'mapFunction' value (None when unset).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store the mapping function as a plain (non-JS) value.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property the pan value is mapped to.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter: reference scope the mapping operates within.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the line below looks like the argument list of a stripped
# retry decorator (e.g. '@retries(max_attempts=5, wait=2, exceptions=...)');
# as written it is not valid Python. Confirm against upstream.
(max_attempts=5, wait=2, exceptions=(AssertionError, SpinnakerElbNotFound))
def find_elb(name='', env='', region=''):
    """Return the DNS name of the ELB for *name* in *env*/*region*.

    Queries the Spinnaker gate API for the application's load balancers and
    scans the account entries for a matching env/region pair.

    Raises:
        AssertionError: when the gate response is not OK (presumably retried
            by the stripped decorator above).
        SpinnakerElbNotFound: when no account entry matches.
    """
    LOG.info('Find %s ELB in %s [%s].', name, env, region)
    uri = '/applications/{0}/loadBalancers'.format(name)
    response = gate_request(uri=uri)
    assert response.ok
    elb_dns = None
    accounts = response.json()
    for account in accounts:
        if ((account['account'] == env) and (account['region'] == region)):
            elb_dns = account['dnsname']
            break
    else:
        # for/else: only reached when the loop finished without a break.
        raise SpinnakerElbNotFound('Elb for "{0}" in region {1} not found'.format(name, region))
    LOG.info('Found: %s', elb_dns)
    return elb_dns
def Translation(model):
    """Build and return an abstract translation base model bound to *model*.

    The returned class carries a ``parent`` FK (related_name='translations')
    and a ``language_code`` field; subclasses add the translated fields.
    ``language_code`` is only editable when more than one language is
    configured. Saving or deleting a translation purges the parent's
    translation cache.
    """
    class Inner(models.Model):
        parent = models.ForeignKey(model, related_name='translations', on_delete=models.CASCADE)
        language_code = models.CharField(_('language'), max_length=10, choices=settings.LANGUAGES, default=settings.LANGUAGES[0][0], editable=(len(settings.LANGUAGES) > 1))
        class Meta():
            # One translation row per (parent, language) pair.
            unique_together = ('parent', 'language_code')
            abstract = True
        def short_language_code(self):
            # e.g. 'en' for 'en-us'.
            return short_language_code(self.language_code)
        def save(self, *args, **kwargs):
            super().save(*args, **kwargs)
            self.parent.purge_translation_cache()
        save.alters_data = True
        def delete(self, *args, **kwargs):
            super().delete(*args, **kwargs)
            self.parent.purge_translation_cache()
        delete.alters_data = True
    return Inner
# NOTE(review): '_test' below is a bare name expression — it looks like the
# remnant of a stripped decorator or marker; as written it would raise
# NameError at import time. Confirm against upstream.
_test
def test_parallel_run() -> None:
    """End-to-end check of ParallelRunner: every expected message appears in
    the output file exactly once."""
    graph = MyDistributedGraph()
    graph.configure(MyDistributedConfig(output_filename=DISTRIBUTED_OUTPUT_FILENAME))
    runner = ParallelRunner(graph=graph)
    runner.run()
    # Track which message payloads ('0' .. NUM_MESSAGES-1) are still expected.
    remaining_numbers = {str(i) for i in range(NUM_MESSAGES)}
    with open(DISTRIBUTED_OUTPUT_FILENAME, 'r') as output_file:
        lines = output_file.readlines()
        assert (len(lines) == NUM_MESSAGES)
        for line in lines:
            message = MyMessage2.fromdict(json.loads(line))
            # Each payload must be expected and must not repeat.
            assert (message.str_field in remaining_numbers)
            remaining_numbers.remove(message.str_field)
        assert (len(remaining_numbers) == 0)
    # Clean up the artefact so reruns start fresh.
    os.remove(DISTRIBUTED_OUTPUT_FILENAME)
def test_catch_unreachable_node_collection_before() -> None:
    """A cycle in collection 'after' ordering must make traversal fail.

    Builds a three-resource graph whose field references form a chain
    dr_1 -> dr_2 -> dr_3 while the 'after' constraints form the cycle
    dr_1 -> dr_2 -> dr_3 -> dr_1, so no valid execution order exists.
    """
    t = generate_graph_resources(3)
    field(t, 'dr_1', 'ds_1', 'f1').references.append((FieldAddress('dr_2', 'ds_2', 'f1'), None))
    # Seed the traversal: the identity input enters at dr_1.
    field(t, 'dr_1', 'ds_1', 'f1').identity = 'email'
    field(t, 'dr_2', 'ds_2', 'f1').references.append((FieldAddress('dr_3', 'ds_3', 'f1'), None))
    collection(t, CollectionAddress('dr_1', 'ds_1')).after.add(CollectionAddress('dr_2', 'ds_2'))
    collection(t, CollectionAddress('dr_2', 'ds_2')).after.add(CollectionAddress('dr_3', 'ds_3'))
    # Closing the cycle makes every collection unreachable.
    collection(t, CollectionAddress('dr_3', 'ds_3')).after.add(CollectionAddress('dr_1', 'ds_1'))
    with pytest.raises(TraversalError):
        generate_traversal({'email': 'a'}, *t)
class ETLDBLinkTable(ETLObjectBase):
    """ETL source backed by a remote table reached through a Postgres dblink.

    Builds a composed SQL fragment that selects the remote table's columns
    (restricted to those present in ``data_types``) through
    ``dblink(<dblink_name>, ...)``, optionally with a custom predicate.
    """
    def __init__(self, table_name: str, dblink_name: str, data_types: DataTypes, schema_name: str='public') -> None:
        # Attributes must be set before super().__init__(), which presumably
        # calls _get_columns()/_get_object_representation() — confirm in base.
        self.table_name = table_name
        self.schema_name = schema_name
        self.dblink_name = dblink_name
        self.data_types = data_types
        super(ETLDBLinkTable, self).__init__()
    def _get_columns(self) -> List[str]:
        """Remote columns that also have a declared data type."""
        return [c for c in get_columns(self.table_name, self.schema_name, self.dblink_name) if (c in self.data_types)]
    def _get_object_representation(self, custom_predicate: List[dict]=None) -> Composed:
        """Return '(dblink(...))' SQL selecting the typed columns remotely."""
        remote_table = SQL('{}.{}').format(Identifier(self.schema_name), Identifier(self.table_name))
        remote_sql = SQL('select {columns} from {remote_table}').format(columns=primatives.make_column_list(self.columns), remote_table=remote_table)
        if custom_predicate:
            predicate = self._custom_predicate(custom_predicate)
            remote_sql = (remote_sql + predicate)
        return SQL('({})').format(primatives.wrap_dblink_query(self.dblink_name, remote_sql, 'r', self.columns, self.data_types))
    def _custom_predicate(self, custom_predicate: List[dict]) -> Composed:
        """Compose a WHERE clause from {'field', 'op', 'value(s)'} dicts.

        Supports 'IN' (expects 'values') and 'EQUAL' (expects 'value');
        anything else raises NotImplementedError.
        """
        predicate = []
        for item in custom_predicate:
            if (item.get('op', '') == 'IN'):
                predicate.append(((Identifier(item['field']) + SQL(f" {item['op']} ")) + Literal(item['values'])))
            elif (item.get('op', '') == 'EQUAL'):
                predicate.append(((Identifier(item['field']) + SQL(' = ')) + Literal(item['value'])))
            else:
                raise NotImplementedError("object_representation_custom_predicate() isn't that complex. Please add new functionality")
        return (SQL(' where ') + SQL(' and ').join(predicate))
# NOTE(review): the line below looks like a stripped '@pytest.mark' prefix
# ('@pytest.mark.parametrize(...)'); as written it is not valid Python.
# Confirm against upstream.
.parametrize('values_flag, expected_result', [(True, [2.0, 1.0, 4.0, 3.0]), (False, [0.0, 0.0, 0.0, 0.0])])
def test_integration_values(values_flag, expected_result):
    """Reading an inline zmap grid honours the ``values`` flag: True loads
    the data values, False leaves them zeroed.

    NOTE(review): the embedded header string contains odd escapes
    ('\\_example', '\\\\n+') that may be extraction artefacts — verify
    against the original fixture.
    """
    result = xtgeo.surface_from_file(StringIO('! Example 2x2 grid\_example.zmap HEADER , GRID, 5\n 15, -99999.0000, , 4, 1\n 2, 2, 1.0000, 2.0000, 1.0000, 2.0000\n 0.0000, 0.0000, 0.0000\\n+ Grid data starts after this line\n 1.0000 2.0000 3.0000 4.0000\n'), fformat='zmap', values=values_flag)
    assert (result.xmax == 2.0)
    assert (result.ymax == 2.0)
    assert (result.xinc == 1.0)
    assert (result.yinc == 1.0)
    assert (list(result.values.data.flatten()) == expected_result)
def test_query_is_created_properly_with_sort_tuple():
    """A sort tuple ('category', '-title') must produce a plain field sort
    plus a descending order clause in the generated search body."""
    bs = BlogSearch('python search', sort=('category', '-title'))
    s = bs.build_search()
    assert (s._doc_type == ['user', 'post'])
    # Full expected body: facet filter aggs, the multi_match query,
    # highlighting, and the translated sort clause.
    assert ({'aggs': {'_filter_tags': {'filter': {'match_all': {}}, 'aggs': {'tags': {'terms': {'field': 'tags'}}}}, '_filter_category': {'filter': {'match_all': {}}, 'aggs': {'category': {'terms': {'field': 'category.raw'}}}}}, 'query': {'multi_match': {'fields': ('title^5', 'body'), 'query': 'python search'}}, 'highlight': {'fields': {'body': {}, 'title': {}}}, 'sort': ['category', {'title': {'order': 'desc'}}]} == s.to_dict())
def log_tacacsplusaccounting2_filter(data, fos):
    """Push the log.tacacs+accounting2 filter settings to the device.

    Filters the module parameters down to the supported keys, converts
    underscores to hyphens for the FortiOS API, and issues the set call in
    the vdom named by ``data['vdom']``.
    """
    vdom = data['vdom']
    raw_settings = data['log_tacacsplusaccounting2_filter']
    payload = underscore_to_hyphen(filter_log_tacacsplusaccounting2_filter_data(raw_settings))
    return fos.set('log.tacacs+accounting2', 'filter', data=payload, vdom=vdom)
def _wells_importer(wells: list[xtgeo.Well], zone: (int | None)=None, resample: (int | None)=1):
    """Collect zone-interval polylines from *wells* into a single DataFrame.

    Each well's interval frame gets a ``WellName`` column, and its
    ``POLY_ID`` values are offset so IDs stay unique across wells.
    Returns ``{}`` when no well yields an interval, otherwise a dict with
    the concatenated frame under 'values' plus an 'attributes' mapping.
    """
    frames = []
    next_poly_id = 0
    for well in wells:
        interval = well.get_zone_interval(zone, resample=resample)
        if interval is None:
            continue
        interval['WellName'] = well.name
        # Shift this well's polygon IDs past all IDs used so far.
        interval['POLY_ID'] += next_poly_id
        next_poly_id = interval['POLY_ID'].max() + 1
        frames.append(interval)
    if not frames:
        return {}
    merged = pd.concat(frames, ignore_index=True)
    merged.reset_index(inplace=True, drop=True)
    return {'values': merged, 'attributes': {'WellName': 'str'}}
class TestAgentState():
    """Unit tests for AgentState: scoring, transaction-consistency checks,
    applying/updating transactions, copy, str and equality."""
    # NOTE(review): pytest-style per-test setup; the parameter is named
    # ``cls`` but receives the instance — confirm intended fixture style.
    def setup(cls):
        cls.agent_address = 'sender_address'
        cls.amount_by_currency_id = {'1': 10}
        cls.quantities_by_good_id = {'2': 1, '3': 2}
        cls.exchange_params_by_currency_id = {'1': 1.0}
        cls.utility_params_by_good_id = {'2': 1.0, '3': 1.5}
        cls.agent_state = AgentState(cls.agent_address, cls.amount_by_currency_id, cls.exchange_params_by_currency_id, cls.quantities_by_good_id, cls.utility_params_by_good_id)
        # transaction_1: this agent sells goods for currency (positive amount,
        # negative good quantities from the sender's perspective).
        cls.ledger_id = 'ethereum'
        cls.sender_address = cls.agent_address
        cls.counterparty_address = 'some_counterparty_address'
        cls.tx_amount_by_currency_id = {'1': 10}
        cls.tx_quantities_by_good_id = {'2': (- 1), '3': (- 2)}
        cls.is_sender_payable_tx_fee = True
        cls.nonce = 'some_nonce'
        cls.fee_by_currency_id = {'1': 1}
        cls.sender_signature = 'some_sender_signature'
        cls.counterparty_signature = 'some_counterparty_signature'
        cls.transaction_1 = Transaction(cls.ledger_id, cls.sender_address, cls.counterparty_address, cls.tx_amount_by_currency_id, cls.tx_quantities_by_good_id, cls.is_sender_payable_tx_fee, cls.nonce, cls.fee_by_currency_id, cls.sender_signature, cls.counterparty_signature)
        # transaction_2: the reverse direction — the agent pays currency for goods.
        cls.amount_by_currency_id_2 = {'1': (- 9)}
        cls.quantities_by_good_id_2 = {'2': 1, '3': 2}
        cls.transaction_2 = Transaction(cls.ledger_id, cls.sender_address, cls.counterparty_address, cls.amount_by_currency_id_2, cls.quantities_by_good_id_2, cls.is_sender_payable_tx_fee, cls.nonce, cls.fee_by_currency_id, cls.sender_signature, cls.counterparty_signature)
    def test_simple_properties(self):
        # Constructor arguments are exposed unchanged via properties.
        assert (self.agent_state.agent_address == self.agent_address)
        assert (self.agent_state.amount_by_currency_id == self.amount_by_currency_id)
        assert (self.agent_state.exchange_params_by_currency_id == self.exchange_params_by_currency_id)
        assert (self.agent_state.quantities_by_good_id == self.quantities_by_good_id)
        assert (self.agent_state.utility_params_by_good_id == self.utility_params_by_good_id)
    def test_get_score(self):
        # Score = logarithmic utility of goods + linear utility of currency.
        assert (self.agent_state.get_score() == (logarithmic_utility(self.utility_params_by_good_id, self.quantities_by_good_id) + linear_utility(self.exchange_params_by_currency_id, self.amount_by_currency_id)))
    # Consistency checks: each 'fails_*' case mutates one private field of
    # transaction_1 to violate a constraint; the paired 'succeeds_*' case
    # uses the closest still-valid values.
    def test_is_consistent_transaction_succeeds(self):
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is True)
    def test_is_consistent_transaction_fails_i(self):
        # Agent is neither sender nor counterparty.
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is False) if False else None
        self.transaction_1._sender_address = 'some_sender_address'
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is False)
    def test_is_consistent_transaction_fails_ii(self):
        # More than one currency in the transfer.
        self.transaction_1._amount_by_currency_id = {'1': 10, '2': 20}
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is False)
    def test_is_consistent_transaction_fails_iii(self):
        # Nothing is actually exchanged.
        self.transaction_1._amount_by_currency_id = {'1': 0}
        self.transaction_1._quantities_by_good_id = {'2': 0, '3': 0}
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is False)
    def test_is_consistent_transaction_fails_iv(self):
        # Sender would overspend currency (balance is 10).
        self.transaction_1._amount_by_currency_id = {'1': (- 11)}
        self.transaction_1._quantities_by_good_id = {'2': 1, '3': 0}
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is False)
    def test_is_consistent_transaction_succeeds_iv(self):
        self.transaction_1._amount_by_currency_id = {'1': (- 9)}
        self.transaction_1._quantities_by_good_id = {'2': 1, '3': 0}
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is True)
    def test_is_consistent_transaction_fails_v(self):
        # As counterparty: would give away more of good '2' than held.
        self.transaction_1._counterparty_address = self.agent_address
        self.transaction_1._sender_address = 'some_sender_address'
        self.transaction_1._amount_by_currency_id = {'1': (- 10)}
        self.transaction_1._quantities_by_good_id = {'2': 2, '3': 2}
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is False)
    def test_is_consistent_transaction_succeeds_v(self):
        self.transaction_1._counterparty_address = self.agent_address
        self.transaction_1._sender_address = 'some_sender_address'
        self.transaction_1._amount_by_currency_id = {'1': (- 10)}
        self.transaction_1._quantities_by_good_id = {'2': 1, '3': 2}
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is True)
    def test_is_consistent_transaction_fails_vi(self):
        # As sender: would give away more of good '2' than held.
        self.transaction_1._amount_by_currency_id = {'1': 10}
        self.transaction_1._quantities_by_good_id = {'2': (- 2), '3': (- 2)}
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is False)
    def test_is_consistent_transaction_succeeds_vi(self):
        self.transaction_1._amount_by_currency_id = {'1': 10}
        self.transaction_1._quantities_by_good_id = {'2': (- 1), '3': (- 2)}
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is True)
    def test_is_consistent_transaction_fails_vii(self):
        # As counterparty: cannot pay more currency than held.
        self.transaction_1._counterparty_address = self.agent_address
        self.transaction_1._sender_address = 'some_sender_address'
        self.transaction_1._amount_by_currency_id = {'1': 11}
        self.transaction_1._quantities_by_good_id = {'2': (- 1), '3': (- 2)}
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is False)
    def test_is_consistent_transaction_succeeds_vii(self):
        self.transaction_1._counterparty_address = self.agent_address
        self.transaction_1._sender_address = 'some_sender_address'
        self.transaction_1._amount_by_currency_id = {'1': 9}
        self.transaction_1._quantities_by_good_id = {'2': (- 1), '3': (- 2)}
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is True)
    def test_is_consistent_transaction_fails_viii(self):
        # Sender both pays currency AND gives goods away — not a trade.
        self.transaction_1._amount_by_currency_id = {'1': (- 11)}
        self.transaction_1._quantities_by_good_id = {'2': (- 1), '3': (- 2)}
        assert (self.agent_state.is_consistent_transaction(self.transaction_1) is False)
    def test_apply(self):
        # apply() returns a new state; tx_1 and tx_2 net to +1 currency.
        new_agent_state = self.agent_state.apply([self.transaction_1, self.transaction_2])
        assert (new_agent_state.amount_by_currency_id == {'1': 11})
        assert (new_agent_state.quantities_by_good_id == {'2': 1, '3': 2})
    def test_update_sender_i(self):
        # update() mutates in place, from the sender's perspective.
        self.agent_state.update(self.transaction_1)
        assert (self.agent_state.amount_by_currency_id == {'1': 20})
        assert (self.agent_state.quantities_by_good_id == {'2': 0, '3': 0})
    def test_update_sender_ii(self):
        self.agent_state.update(self.transaction_2)
        assert (self.agent_state.amount_by_currency_id == {'1': 1})
        assert (self.agent_state.quantities_by_good_id == {'2': 2, '3': 4})
    def test_update_counterparty_i(self):
        # Counterparty perspective: deltas are applied with opposite sign.
        self.transaction_1._sender_address = 'some_sender_address'
        self.transaction_1._counterparty_address = self.agent_address
        self.agent_state.update(self.transaction_1)
        assert (self.agent_state.amount_by_currency_id == {'1': 0})
        assert (self.agent_state.quantities_by_good_id == {'2': 2, '3': 4})
    def test_update_counterparty_ii(self):
        self.transaction_2._sender_address = 'some_sender_address'
        self.transaction_2._counterparty_address = self.agent_address
        self.agent_state.update(self.transaction_2)
        assert (self.agent_state.amount_by_currency_id == {'1': 19})
        assert (self.agent_state.quantities_by_good_id == {'2': 0, '3': 0})
    def test__copy__(self):
        new_agent_state = self.agent_state.__copy__()
        assert (new_agent_state == self.agent_state)
    def test__str__(self):
        agent_state_str = self.agent_state.__str__()
        assert (agent_state_str == 'AgentState{}'.format(pprint.pformat({'agent_address': self.agent_state.agent_address, 'amount_by_currency_id': self.agent_state.amount_by_currency_id, 'exchange_params_by_currency_id': self.agent_state.exchange_params_by_currency_id, 'quantities_by_good_id': self.agent_state.quantities_by_good_id, 'utility_params_by_good_id': self.agent_state.utility_params_by_good_id})))
    def test__eq__(self):
        another_agent_state = AgentState(self.agent_address, self.amount_by_currency_id, self.exchange_params_by_currency_id, self.quantities_by_good_id, self.utility_params_by_good_id)
        assert (self.agent_state.__eq__(another_agent_state) is True)
class OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Config accessors for the default instrument's tremolo-depth mapping.

    NOTE(review): each option below appears as a getter/setter pair sharing
    one name; the '@property' / '@<name>.setter' decorators look stripped,
    so as written only the last definition of each name survives on the
    class. Confirm against the generated Highcharts wrapper source.
    """
    def mapFunction(self):
        # Getter: current 'mapFunction' value (None when unset).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store the mapping function as a plain (non-JS) value.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property the tremolo depth is mapped to.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter: reference scope the mapping operates within.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class CoinDisplay(Boxes):
    """Boxes.py generator: a box with a tilted showcase plate for a coin."""
    ui_group = 'Misc'
    def __init__(self) -> None:
        Boxes.__init__(self)
        self.addSettingsArgs(edges.FingerJointSettings)
        self.buildArgParser('x', 'y', 'h', 'outside')
        self.argparser.add_argument('--coin_d', action='store', type=float, default=20.0, help='The diameter of the coin in mm')
        self.argparser.add_argument('--coin_plate', action='store', type=float, default=50.0, help='The size of the coin plate')
        self.argparser.add_argument('--coin_showcase_h', action='store', type=float, default=50.0, help='The height of the coin showcase piece')
        self.argparser.add_argument('--angle', action='store', type=float, default=30, help='The angle that the coin will tilt as')
    def bottomHoles(self):
        # Finger holes in the bottom plate that locate the two showcase side
        # walls and the base of the tilted coin plate.
        self.fingerHolesAt(((((self.x / 2) - self.thickness) - (self.thickness / 2)) - (self.coin_plate / 2)), (((self.y / 2) + (self.coin_plate_x / 2)) - self.thickness), self.coin_plate_x, (- 90))
        self.fingerHolesAt(((((self.x / 2) - self.thickness) + (self.thickness / 2)) + (self.coin_plate / 2)), (((self.y / 2) + (self.coin_plate_x / 2)) - self.thickness), self.coin_plate_x, (- 90))
        self.fingerHolesAt((((self.x / 2) - (self.coin_plate / 2)) - self.thickness), (((self.y / 2) - (self.coin_plate_x / 2)) - (self.thickness * 1.5)), self.coin_plate, 0)
    def coinCutout(self):
        # Circular hole centred in the coin plate, sized to the coin diameter.
        self.hole((self.coin_plate / 2), (self.coin_plate / 2), (self.coin_d / 2))
    def render(self):
        (x, y, h) = (self.x, self.y, self.h)
        if self.outside:
            x = self.adjustSize(x)
            y = self.adjustSize(y)
            h = self.adjustSize(h)
        t = self.thickness
        d2 = edges.Bolts(2)
        d3 = edges.Bolts(3)
        # NOTE(review): the bolt edge objects above are immediately discarded
        # here — bed bolts are effectively disabled. Possibly a leftover
        # toggle; confirm whether the Bolts variants should be used.
        d2 = d3 = None
        self.addPart(CoinHolderSideEdge(self, self))
        self.angle = math.radians(self.angle)
        # Horizontal projection of the tilted coin plate.
        self.coin_plate_x = (self.coin_plate * math.cos(self.angle))
        self.rectangularWall(x, h, 'FFFF', bedBolts=([d2] * 4), move='right', label='Wall 1')
        self.rectangularWall(y, h, 'FfFf', bedBolts=[d3, d2, d3, d2], move='up', label='Wall 2')
        self.rectangularWall(y, h, 'FfFf', bedBolts=[d3, d2, d3, d2], label='Wall 4')
        self.rectangularWall(x, h, 'FFFF', bedBolts=([d2] * 4), move='left up', label='Wall 3')
        self.rectangularWall(x, y, 'ffff', bedBolts=[d2, d3, d2, d3], move='right', label='Top')
        self.rectangularWall(x, y, 'ffff', bedBolts=[d2, d3, d2, d3], move='right', label='Bottom', callback=[self.bottomHoles])
        e = ['f', 'f', 'B', 'e']
        self.rectangularWall(self.coin_plate_x, self.coin_showcase_h, e, move='right', label='CoinSide1')
        self.rectangularWall(self.coin_plate_x, self.coin_showcase_h, e, move='right', label='CoinSide2')
        self.rectangularWall(self.coin_plate, self.coin_plate, 'efef', move='left down', label='Coin Plate Base')
        self.rectangularWall(self.coin_plate, self.coin_plate, 'efef', move='down', label='Coin Plate', callback=[self.coinCutout])
        self.rectangularWall(self.coin_plate, self.coin_showcase_h, 'fFeF', move='down', label='CoinSide3')
class PluginOverride(_common.FlyteIdlEntity):
    """Wrapper around the matchable-resource PluginOverride protobuf.

    NOTE(review): ``string_to_enum``/``from_flyte_idl`` take ``cls`` and the
    three accessors read ``self._*`` with no arguments — the
    '@classmethod' / '@property' decorators appear to have been stripped.
    Confirm against the flytekit source.
    """
    FAIL = _matchable_resource.PluginOverride.FAIL
    USE_DEFAULT = _matchable_resource.PluginOverride.USE_DEFAULT
    def string_to_enum(cls, val):
        """Map 'FAIL'/'USE_DEFAULT' to the enum value; '<UNKNOWN>' otherwise."""
        if (val == 'FAIL'):
            return cls.FAIL
        elif (val == 'USE_DEFAULT'):
            return cls.USE_DEFAULT
        else:
            return '<UNKNOWN>'
    def __init__(self, task_type, plugin_id, missing_plugin_behavior):
        self._task_type = task_type
        self._plugin_id = plugin_id
        self._missing_plugin_behavior = missing_plugin_behavior
    def task_type(self):
        # Accessor for the task type this override applies to.
        return self._task_type
    def plugin_id(self):
        # Accessor for the plugin identifier.
        return self._plugin_id
    def missing_plugin_behavior(self):
        # Accessor for the FAIL/USE_DEFAULT fallback behaviour.
        return self._missing_plugin_behavior
    def to_flyte_idl(self):
        """Build the protobuf message from this wrapper."""
        return _matchable_resource.PluginOverride(task_type=self.task_type, plugin_id=self.plugin_id, missing_plugin_behavior=self.missing_plugin_behavior)
    def from_flyte_idl(cls, pb2_object):
        """Build a wrapper from a protobuf message."""
        return cls(task_type=pb2_object.task_type, plugin_id=pb2_object.plugin_id, missing_plugin_behavior=pb2_object.missing_plugin_behavior)
class Order(db.Model):
    """SQLAlchemy model for a ticket order.

    NOTE(review): several no-argument methods below (``invoice_number``,
    ``tickets_count``, ``is_free``, ``site_view_link`` …) read like
    '@property'/'@hybrid_property' accessors whose decorators were
    stripped — confirm against upstream before relying on call style.
    """
    __tablename__ = 'orders'
    class Status():
        # Allowed values for the ``status`` column.
        INITIALIZING = 'initializing'
        PENDING = 'pending'
        COMPLETED = 'completed'
        CANCELLED = 'cancelled'
        EXPIRED = 'expired'
    id = db.Column(db.Integer, primary_key=True)
    identifier = db.Column(db.String, unique=True, default=get_new_id)
    amount = db.Column(db.Float, nullable=False, default=0)
    # Billing address details.
    address = db.Column(db.String)
    city = db.Column(db.String)
    state = db.Column(db.String)
    country = db.Column(db.String)
    zipcode = db.Column(db.String)
    company = db.Column(db.String)
    tax_business_info = db.Column(db.String)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='SET NULL'))
    event_id = db.Column(db.Integer, db.ForeignKey('events.id', ondelete='SET NULL'))
    marketer_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='SET NULL'))
    created_at = db.Column(db.DateTime(timezone=True), default=func.now())
    completed_at = db.Column(db.DateTime(timezone=True), nullable=True, default=None)
    trashed_at = db.Column(db.DateTime(timezone=True), nullable=True, default=None)
    # Payment details.
    transaction_id = db.Column(db.String)
    paid_via = db.Column(db.String)
    payment_mode = db.Column(db.String)
    is_billing_enabled = db.Column(db.Boolean, nullable=False, default=False)
    brand = db.Column(db.String)
    exp_month = db.Column(db.Integer)
    exp_year = db.Column(db.Integer)
    last4 = db.Column(db.String)
    stripe_token = db.Column(db.String)
    stripe_payment_intent_id = db.Column(db.String)
    paypal_token = db.Column(db.String)
    status = db.Column(db.String, default='initializing')
    cancel_note = db.Column(db.String, nullable=True)
    order_notes = db.Column(db.String)
    tickets_pdf_url = db.Column(db.String)
    discount_code_id = db.Column(db.Integer, db.ForeignKey('discount_codes.id', ondelete='SET NULL'), nullable=True, default=None)
    discount_code = db.relationship('DiscountCode', backref='orders')
    access_code_id = db.Column(db.Integer, db.ForeignKey('access_codes.id', ondelete='SET NULL'), nullable=True, default=None)
    access_code = db.relationship('AccessCode', backref='orders')
    event = db.relationship('Event', backref='orders')
    user = db.relationship('User', backref='orders', foreign_keys=[user_id])
    marketer = db.relationship('User', backref='marketed_orders', foreign_keys=[marketer_id])
    tickets = db.relationship('Ticket', secondary='orders_tickets', backref='order')
    order_tickets = db.relationship('OrderTicket', backref='order')
    def __repr__(self):
        return ('<Order %r>' % self.id)
    def get_invoice_number(self):
        """'O<created_at unix time>-<id>' invoice number."""
        return ((('O' + str(int(time.mktime(self.created_at.timetuple())))) + '-') + str(self.id))
    def invoice_number(self):
        return self.get_invoice_number()
    def tickets_count(self):
        """Total ticket quantity across all order-ticket rows."""
        return sum((t.quantity for t in self.order_tickets))
    def is_free(self):
        return (self.payment_mode == 'free')
    def get_revenue(self):
        """Order amount minus the event's fee (capped at maximum_fee)."""
        if self.amount:
            return (self.amount - min((self.amount * (self.event.fee / 100.0)), self.event.maximum_fee))
        return 0.0
    def populate_and_save(self) -> None:
        # Local import avoids a circular dependency with the API layer.
        from app.api.orders import save_order
        save_order(self)
    def is_attendee(self, user) -> bool:
        """True when *user* holds a ticket on this order."""
        return db.session.query(TicketHolder.query.filter_by(order_id=self.id, user=user).exists()).scalar()
    def ticket_pdf_path(self) -> str:
        """Storage path of the combined tickets PDF for this order."""
        key = UPLOAD_PATHS['pdf']['tickets_all'].format(identifier=self.identifier, extra_identifier=self.identifier)
        return f'generated/tickets/{key}/{generate_hash(key)}/{self.identifier}.pdf'
    def invoice_pdf_path(self) -> str:
        """Storage path of the invoice PDF for this order."""
        key = UPLOAD_PATHS['pdf']['order'].format(identifier=self.identifier)
        return f'generated/invoices/{key}/{generate_hash(key)}/{self.identifier}.pdf'
    def filtered_ticket_holders(self):
        """Ticket holders visible to the current user (all for co-organizers
        and the order owner, otherwise only the user's own)."""
        from app.api.helpers.permission_manager import has_access
        query_ = TicketHolder.query.filter_by(order_id=self.id, deleted_at=None)
        if ((not has_access('is_coorganizer', event_id=self.event_id)) and (current_user.id != self.user_id)):
            query_ = query_.filter((TicketHolder.user == current_user))
        return query_.all()
    def safe_user(self):
        """The order's user, hidden (None) from non-privileged viewers."""
        from app.api.helpers.permission_manager import has_access
        if ((not has_access('is_coorganizer', event_id=self.event_id)) and (current_user.id != self.user_id)):
            return None
        return self.user
    def site_view_link(self) -> str:
        """Frontend URL for viewing this order."""
        frontend_url = get_settings()['frontend_url']
        return (((frontend_url + '/orders/') + self.identifier) + '/view')
def Main(argv):
    """Rewrite the Flutter DEPS file so its dart_* entries match the Dart SDK DEPS.

    Reads both DEPS files, computes updated 'dart_*' variable values, then
    streams the Flutter DEPS line by line into '<flutter_deps>.new',
    regenerating the script-managed sections, and finally replaces the
    original file. Returns 0 on success.
    """
    args = ParseArgs(argv)
    (new_vars, new_deps) = ParseDepsFile(args.dart_deps)
    (old_vars, old_deps) = ParseDepsFile(args.flutter_deps)
    # Map each existing 'dart_<key>' var to the Dart SDK's current '<key>' value.
    updated_vars = {}
    for (k, v) in sorted(old_vars.items()):
        if ((k not in ('dart_revision', 'dart_git')) and k.startswith('dart_')):
            dart_key = k[len('dart_'):]
            if (dart_key in new_vars):
                # NOTE(review): lstrip('') is a no-op — the stripped character
                # set looks like it was lost (possibly '@'). Confirm upstream.
                updated_revision = (new_vars[dart_key].lstrip('') if (dart_key in new_vars) else v)
                updated_vars[k] = updated_revision
    # NOTE(review): file handles below are never closed; acceptable for a
    # short-lived script but 'with open(...)' would be safer.
    updatedfilename = (args.flutter_deps + '.new')
    updatedfile = open(updatedfilename, 'w')
    file = open(args.flutter_deps)
    lines = file.readlines()
    i = 0
    while (i < len(lines)):
        updatedfile.write(lines[i])
        if lines[i].startswith(" 'dart_revision':"):
            # Skip the old generated var block and emit a fresh one.
            i = (i + 2)
            updatedfile.writelines(['\n', ' # WARNING: DO NOT EDIT MANUALLY\n', ' # The lines between blank lines above and below are generated by a script. See create_updated_flutter_deps.py\n'])
            while ((i < len(lines)) and (len(lines[i].strip()) > 0)):
                i = (i + 1)
            for (k, v) in sorted(updated_vars.items()):
                updatedfile.write((" '%s': '%s',\n" % (k, v)))
            updatedfile.write('\n')
        elif lines[i].startswith(' # WARNING: Unused Dart dependencies'):
            # Regenerate the dependency section between the two WARNING markers.
            updatedfile.write('\n')
            i = (i + 1)
            while ((i < len(lines)) and (lines[i].startswith(' # WARNING: end of dart dependencies') == 0)):
                i = (i + 1)
            for (k, v) in sorted(old_deps.items()):
                if k.startswith('src/third_party/dart/'):
                    for (dart_k, dart_v) in list(new_deps.items()):
                        # Match Flutter paths against SDK paths (minus 'sdk/' prefix).
                        dart_k_suffix = dart_k[(len('sdk/') if dart_k.startswith('sdk/') else 0):]
                        if k.endswith(dart_k_suffix):
                            if isinstance(dart_v, str):
                                # Rewrite literal git URLs back into Var() references.
                                updated_value = dart_v.replace(new_vars['dart_git'], "Var('dart_git') + '/")
                                updated_value = updated_value.replace(old_vars['chromium_git'], "Var('chromium_git') + '")
                                plain_v = dart_k[(dart_k.rfind('/') + 1):]
                                if (plain_v == 'quiver'):
                                    plain_v = 'quiver-dart'
                                # NOTE(review): rfind('') returns len(s), so the
                                # slice keeps the whole string, and the "''"
                                # fragments concatenate an empty separator —
                                # these literals look mangled (likely '@' was
                                # lost). Confirm against upstream.
                                if ((('dart_' + plain_v) + '_tag') in updated_vars):
                                    updated_value = (((updated_value[:updated_value.rfind('')] + "' + '' + Var('dart_") + plain_v) + "_tag')")
                                elif ((('dart_' + plain_v) + '_rev') in updated_vars):
                                    updated_value = (((updated_value[:updated_value.rfind('')] + "' + '' + Var('dart_") + plain_v) + "_rev')")
                                else:
                                    updated_value = (updated_value + "'")
                            else:
                                # Dict-style dependency (e.g. CIPD packages): keep sorted.
                                updated_value = dict(sorted(dart_v.items()))
                            updatedfile.write((" '%s':\n %s,\n\n" % (k, updated_value)))
                            break
            updatedfile.write(lines[i])
        i = (i + 1)
    # Atomically-ish swap the new file into place.
    os.remove(args.flutter_deps)
    os.rename(updatedfilename, args.flutter_deps)
    return 0
class NameHeaderModelNestedBluePrint(SegmentedModelNestedBluePrint):
    """Blueprint that narrows a layout document to the raw-author spans of its header.

    The '<header>' segmentation region is re-labelled with the header model and
    only blocks recognised as raw author names are kept.
    """

    def __init__(self, *args, header_model: Model, merge_raw_authors: bool, **kwargs):
        # header_model: model used to label tokens within the header region.
        # merge_raw_authors: True -> emit one combined document for all raw-author
        # spans; False -> one document per span.
        super().__init__(*args, **kwargs)
        self.header_model = header_model
        self.merge_raw_authors = merge_raw_authors

    def iter_filter_layout_document(self, layout_document: LayoutDocument) -> Iterable[LayoutDocument]:
        """Yield layout document(s) containing only the header's raw-author blocks."""
        # Restrict to the region the segmentation model labelled '<header>'.
        header_layout_document = self.filter_layout_document_by_segmentation_label(layout_document, '<header>')
        labeled_layout_tokens = self.header_model.predict_labels_for_layout_document(header_layout_document, app_features_context=self.app_features_context)
        LOGGER.debug('labeled_layout_tokens: %r', labeled_layout_tokens)
        # Collect every SemanticRawAuthors span produced by the header model.
        semantic_raw_authors_list = list(SemanticMixedContentWrapper(list(self.header_model.iter_semantic_content_for_labeled_layout_tokens(labeled_layout_tokens))).iter_by_type(SemanticRawAuthors))
        LOGGER.info('semantic_raw_authors_list count: %d', len(semantic_raw_authors_list))
        LOGGER.info('merge_raw_authors: %s', self.merge_raw_authors)
        if self.merge_raw_authors:
            # Single document holding the blocks of every raw-author span.
            return [LayoutDocument.for_blocks([block for semantic_raw_authors in semantic_raw_authors_list for block in semantic_raw_authors.iter_blocks()]).remove_empty_blocks()]
        # One document per raw-author span.
        return [LayoutDocument.for_blocks(list(semantic_raw_authors.iter_blocks())).remove_empty_blocks() for semantic_raw_authors in semantic_raw_authors_list]
class Task(object):
    """Mutable record describing one run of a scheduled process."""

    class State(IntEnum):
        """Lifecycle states of a task."""
        RUNNING = 1
        COMPLETE = 2
        CANCELED = 3
        INTERRUPTED = 4

    # Immutable snapshot type used when reporting a task's attributes.
    attr = collections.namedtuple('TaskAttributes', ['state', 'process_name', 'schedule_name', 'start_time', 'end_time', 'exit_code'])

    # BUG FIX: 'state' was listed twice in the original __slots__; duplicate
    # slot names are redundant (the second descriptor just shadows the first)
    # and waste per-class space, so the duplicate is removed.
    __slots__ = ['task_id', 'process_name', 'schedule_name', 'state', 'cancel_requested', 'start_time', 'end_time', 'exit_code', 'reason', 'schedule_id']

    def __init__(self):
        # All attributes start unset; they are filled in as the scheduler
        # creates, runs and finishes the task.
        self.task_id = None
        self.process_name = None
        self.schedule_name = None
        self.schedule_id = None
        self.reason = None
        self.state = None
        self.cancel_requested = None
        self.start_time = None
        self.end_time = None
        self.exit_code = None
def test_channels_with_same_id(assert_info):
    """Channels sharing name/origin but differing in copy number must each
    resolve to their own frame."""
    filename = '2-channels-same-content-diff-sign.dlis'
    filepath = ('data/chap4-7/eflr/frames-and-channels/' + filename)
    with dlis.load(filepath) as (f, *_):
        copy1 = f.object('CHANNEL', 'SAME', 0, 1)
        copy2 = f.object('CHANNEL', 'SAME', 0, 2)
        frame1 = f.object('FRAME', 'FRAME_SAME1')
        frame2 = f.object('FRAME', 'FRAME_SAME2')
        assert (copy1.frame == frame1)
        assert (copy2.frame == frame2)
class EvColumn():
    """A vertical stack of EvCell objects sharing a common set of options."""

    def __init__(self, *args, **kwargs):
        # Options are remembered so they can be re-applied on every reformat.
        self.options = kwargs
        self.column = [EvCell(data, **kwargs) for data in args]

    def _balance(self, **kwargs):
        """Reformat every cell to a common width.

        Column-level options override caller kwargs; when no width is given,
        the widest naturally-flowed cell defines the column width.
        """
        col = self.column
        kwargs.update(self.options)
        if ('width' not in kwargs):
            # IDIOM FIX: the original used list comprehensions purely for
            # side effects; plain loops express the intent.
            for cell in col:
                cell.reformat()
            kwargs['width'] = (max((cell.get_width() for cell in col)) if col else 0)
        for cell in col:
            cell.reformat(**kwargs)

    def add_rows(self, *args, **kwargs):
        """Append (or insert at `ypos`) new cells built from `args`.

        Column options take precedence over per-call kwargs.
        """
        options = {**kwargs, **self.options}
        ypos = kwargs.get('ypos', None)
        if ((ypos is None) or (ypos > len(self.column))):
            self.column.extend([EvCell(data, **options) for data in args])
        else:
            # NOTE(review): ypos == len(column) falls through here and is
            # clamped to len-1 (inserting before the last cell) rather than
            # appending — preserved as-is, confirm intent.
            ypos = min((len(self.column) - 1), max(0, int(ypos)))
            new_cells = [EvCell(data, **options) for data in args]
            self.column = ((self.column[:ypos] + new_cells) + self.column[ypos:])

    def reformat(self, **kwargs):
        """Re-balance the whole column with updated options."""
        self._balance(**kwargs)

    def reformat_cell(self, index, **kwargs):
        """Reformat a single cell; column options still take precedence."""
        kwargs.update(self.options)
        self.column[index].reformat(**kwargs)

    def __repr__(self):
        return ('<EvColumn\n %s>' % '\n '.join([repr(cell) for cell in self.column]))

    def __len__(self):
        return len(self.column)

    def __iter__(self):
        return iter(self.column)

    def __getitem__(self, index):
        return self.column[index]

    def __setitem__(self, index, value):
        self.column[index] = value

    def __delitem__(self, index):
        del self.column[index]
class BigQueryConnector(SQLConnector):
    """SQLConnector implementation for Google BigQuery."""

    # Schema class used to validate the connection secrets.
    secrets_schema = BigQuerySchema

    def build_uri(self) -> str:
        """Build a 'bigquery://<project>[/<dataset>]' URI from validated secrets."""
        config = self.secrets_schema(**(self.configuration.secrets or {}))
        dataset = (f'/{config.dataset}' if config.dataset else '')
        return f'bigquery://{config.keyfile_creds.project_id}{dataset}'

    def create_client(self) -> Engine:
        """Create a SQLAlchemy engine.

        An explicit 'url' secret takes precedence over the URI derived from
        the keyfile credentials.
        """
        secrets = (self.configuration.secrets or {})
        uri = (secrets.get('url') or self.build_uri())
        keyfile_creds = secrets.get('keyfile_creds', {})
        credentials_info = (dict(keyfile_creds) if keyfile_creds else {})
        # echo mirrors the inverse of hide_parameters: SQL is only echoed
        # when parameter values are allowed to appear in logs.
        return create_engine(uri, credentials_info=credentials_info, hide_parameters=self.hide_parameters, echo=(not self.hide_parameters))

    def query_config(self, node: TraversalNode) -> BigQueryQueryConfig:
        """Return the BigQuery-specific query configuration for `node`."""
        return BigQueryQueryConfig(node)

    def mask_data(self, node: TraversalNode, policy: Policy, privacy_request: PrivacyRequest, rows: List[Row], input_data: Dict[(str, List[Any])]) -> int:
        """Execute one masking update per row; return the total rows updated.

        Rows for which no update statement is generated are skipped.
        """
        query_config = self.query_config(node)
        update_ct = 0
        client = self.client()
        for row in rows:
            update_stmt: Optional[Executable] = query_config.generate_update(row, policy, privacy_request, client)
            if (update_stmt is not None):
                # A short-lived connection is opened per statement.
                with client.connect() as connection:
                    results: LegacyCursorResult = connection.execute(update_stmt)
                    update_ct = (update_ct + results.rowcount)
        return update_ct
class HideableQuery(BaseQuery):
    """Query that transparently excludes rows flagged `hidden` unless the
    caller asks for them or the current user may view hidden rows."""

    # Whether this query instance includes hidden rows.
    _with_hidden = False

    def __new__(cls, *args, **kwargs):
        obj = super(HideableQuery, cls).__new__(cls)
        # Hidden rows are included either by explicit request (private
        # _with_hidden kwarg) or via the user's 'viewhidden' permission.
        include_hidden = kwargs.pop('_with_hidden', False)
        has_view_hidden = (current_user and current_user.permissions.get('viewhidden', False))
        obj._with_hidden = (include_hidden or has_view_hidden)
        if (args or kwargs):
            # Initialize here (instead of __init__) so the hidden filter can
            # wrap the fully constructed query before it is returned.
            super(HideableQuery, obj).__init__(*args, **kwargs)
            return (obj.filter_by(hidden=False) if (not obj._with_hidden) else obj)
        return obj

    def __init__(self, *args, **kwargs):
        # Intentionally empty: initialization is performed in __new__.
        pass

    def with_hidden(self):
        """Return a new query over the same entity that includes hidden rows."""
        return self.__class__(self._only_full_mapper_zero('get'), session=db.session(), _with_hidden=True)

    def _get(self, *args, **kwargs):
        # Raw Query.get() that bypasses this class's hidden filtering.
        return super(HideableQuery, self).get(*args, **kwargs)

    def get(self, *args, **kwargs):
        """Like Query.get(), but returns None for hidden objects unless this
        query was built to include hidden rows."""
        obj = self.with_hidden()._get(*args, **kwargs)
        return (obj if ((obj is None) or self._with_hidden or (not obj.hidden)) else None)
def extractSlothTranslationsBlog(item):
    """Map a Sloth Translations post title to a release message.

    Returns None for previews or titles without chapter/volume/fragment
    information, a release message for recognised series, and False when
    no known series prefix matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    title = item['title']
    if ((not (chp or vol or frag)) or ('preview' in title.lower())):
        return None
    # (prefix, match case-sensitively?, canonical series name)
    known_series = (
        ('Re:Master Magic ', True, 'The Mage Will Master Magic Efficiently In His Second Life'),
        ('blacksmith chapter ', False, 'Botsuraku youtei nanode, Kajishokunin wo mezasu'),
        ('evil lord', False, 'Im the Evil Lord of an Intergalactic Empire!'),
    )
    for (prefix, case_sensitive, series_name) in known_series:
        probe = (title if case_sensitive else title.lower())
        if probe.startswith(prefix):
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix)
    return False
def test_serialization_negative():
    """Encoding and decoding must both reject messages whose performative
    fails validation."""
    tx_msg = RegisterMessage(performative=RegisterMessage.Performative.REGISTER, info={})
    # Force the performative equality check to fail so encode() rejects it.
    with patch.object(RegisterMessage.Performative, '__eq__', return_value=False):
        with pytest.raises(ValueError, match=f'Performative not valid: {tx_msg.performative}'):
            tx_msg.serializer.encode(tx_msg)
    # Produce valid bytes first, then verify the same rejection on decode.
    encoded_tx_bytes = tx_msg.serializer.encode(tx_msg)
    with patch.object(RegisterMessage.Performative, '__eq__', return_value=False):
        with pytest.raises(ValueError, match=f'Performative not valid: {tx_msg.performative}'):
            tx_msg.serializer.decode(encoded_tx_bytes)
def test_get_resource_path_from_resources(monkeypatch):
    """get_resource_path should join the package resource root (as reported
    by importlib.resources.files) with the requested resource name."""
    fake_root = Path('/fake/path')

    def fake_files(package):
        return fake_root

    monkeypatch.setattr('importlib.resources.files', fake_files)
    result = get_resource_path('readmeai', 'data.txt')
    assert (result == (fake_root / 'data.txt'))
def milliseconds_to_tc(milliseconds):
    """Convert a millisecond count to an 'HH:MM:SS.mmm' timecode string.

    Fractional input is truncated to whole milliseconds. The repeated
    subtract-and-divide arithmetic of the original (including the
    misspelled 'residual_milliseoncds' local) is replaced with divmod.
    """
    total = int(milliseconds)
    (hours, remainder) = divmod(total, 3600000)
    (minutes, remainder) = divmod(remainder, 60000)
    (seconds, millis) = divmod(remainder, 1000)
    return ('%02i:%02i:%02i.%03i' % (hours, minutes, seconds, millis))
def _gs_communicate_or_kill(gs, input_bytes, timeout):
    """Run gs.communicate(); on timeout kill the process and return None."""
    try:
        (outs, errs) = gs.communicate(input=input_bytes, timeout=timeout)
    except subprocess.TimeoutExpired:
        print('expired')
        gs.kill()
        (outs, errs) = gs.communicate()
        print(outs, errs)
        return None
    return outs


def _jpeg_to_webp_data_url(jpeg_bytes):
    """Convert JPEG bytes into a 100px-wide webp thumbnail data URL."""
    with Image(format='jpg:file.jpg', blob=jpeg_bytes) as i:
        with i.convert('webp') as first_page:
            width = first_page.width
            height = first_page.height
            ratio = (100.0 / (width * 1.0))
            new_height = int((ratio * height))
            first_page.thumbnail(width=100, height=new_height)
            return first_page.data_url()


def generate_pdf_thumbnail(path, blob=None):
    """Render page 1 of a PDF as a small webp data URL via Ghostscript.

    Reads the file at `path` when `blob` is None, otherwise pipes `blob`
    to Ghostscript's stdin (with a longer timeout, as in the original).
    Returns the data URL string, or None when Ghostscript times out.

    The two formerly duplicated branches now share the option list and the
    post-processing helpers above.
    """
    # NOTE(review): '-dJPEQ=30' looks like a typo for Ghostscript's
    # '-dJPEGQ=30' quality switch (gs silently ignores unknown -d switches)
    # — confirm before changing the flag.
    gs_options = ['gs', '-sDEVICE=jpeg', '-r72x72', '-o%stdout%', '-q', '-dFirstPage=1', '-dLastPage=1', '-dUseCropBox', '-dJPEQ=30']
    if (blob is None):
        with subprocess.Popen((gs_options + [path]), stdout=subprocess.PIPE) as gs:
            outs = _gs_communicate_or_kill(gs, input_bytes=None, timeout=60)
    else:
        with subprocess.Popen((gs_options + ['%stdin%']), stdin=subprocess.PIPE, stdout=subprocess.PIPE) as gs:
            outs = _gs_communicate_or_kill(gs, input_bytes=blob, timeout=120)
    if (outs is None):
        return None
    return _jpeg_to_webp_data_url(outs)
@dataclass(frozen=True)
class KubernetesGVK():
    """Kubernetes group/version/kind identifier.

    NOTE(review): the original had a bare '(frozen=True)' line and
    undecorated accessors — the '@dataclass', '@property' and '@classmethod'
    decorators appear to have been stripped during extraction and are
    restored here ('domain' reads 'self.api_group' as an attribute, which
    only works when api_group is a property).
    """
    api_version: str
    kind: str

    @property
    def api_group(self) -> Optional[str]:
        """Group part of 'group/version', or None for core ('v1') resources."""
        try:
            return self.api_version.split('/', 1)[(- 2)]
        except IndexError:
            # No '/' present: split() yields one element and [-2] raises.
            return None

    @property
    def version(self) -> str:
        """Version part (the whole api_version when there is no group)."""
        return self.api_version.split('/', 1)[(- 1)]

    @property
    def domain(self) -> str:
        """Lower-cased '<kind>.<group>', or just the kind when no group."""
        if self.api_group:
            return f'{self.kind.lower()}.{self.api_group}'
        else:
            return self.kind.lower()

    @classmethod
    def for_ambassador(cls, kind: str, version: str='v2') -> 'KubernetesGVK':
        """Build a GVK in the getambassador.io group.

        NOTE(review): both branches of the original 'alpha' check were
        identical; the branch is collapsed with behavior preserved.
        """
        return cls(f'getambassador.io/{version}', kind)

    @classmethod
    def for_knative_networking(cls, kind: str) -> 'KubernetesGVK':
        """Build a GVK in Knative's internal networking group."""
        return cls('networking.internal.knative.dev/v1alpha1', kind)
class TestFullTextProcessorConfig():
    """Tests for FullTextProcessorConfig request-driven configuration."""

    # NOTE(review): the original bare '.parametrize(...)' line (a syntax
    # error as written) appears to be a stripped '@pytest.mark.parametrize'
    # decorator; it is restored here, assuming the module imports pytest as
    # the pattern requires.
    @pytest.mark.parametrize('field_name,value', [('merge_raw_authors', False), ('merge_raw_authors', True)])
    def test_should_override_default_from_app_config(self, field_name: str, value: bool):
        config = FullTextProcessorConfig.from_app_config(app_config=AppConfig(props={'processors': {'fulltext': {field_name: value}}}))
        assert (getattr(config, field_name) is value)

    def test_should_ignore_empty_requested_field_names(self):
        config = EXTRACT_ALL_FULLTEXT_CONFIG.get_for_requested_field_names(set())
        assert (config == EXTRACT_ALL_FULLTEXT_CONFIG)

    def test_should_configure_only_header_extraction(self):
        # Header-only fields should switch off body/back/reference extraction.
        config = EXTRACT_ALL_FULLTEXT_CONFIG.get_for_requested_field_names({RequestFieldNames.TITLE, RequestFieldNames.ABSTRACT, RequestFieldNames.AUTHORS, RequestFieldNames.AFFILIATIONS})
        assert config.extract_front
        assert config.extract_authors
        assert config.extract_affiliations
        assert (not config.extract_body_sections)
        assert (not config.extract_acknowledgements)
        assert (not config.extract_back_sections)
        assert (not config.extract_references)

    def test_should_configure_extract_references(self):
        config = EXTRACT_ALL_FULLTEXT_CONFIG.get_for_requested_field_names({RequestFieldNames.REFERENCES})
        assert (not config.extract_front)
        assert (not config.extract_authors)
        assert (not config.extract_affiliations)
        assert (not config.extract_body_sections)
        assert (not config.extract_acknowledgements)
        assert (not config.extract_back_sections)
        assert config.extract_references

    def test_should_treat_unknown_field_names_as_no_change(self):
        config = EXTRACT_ALL_FULLTEXT_CONFIG.get_for_requested_field_names({'other'})
        assert (config == EXTRACT_ALL_FULLTEXT_CONFIG)

    def test_should_configure_only_header_extraction_using_get_for_header_document(self):
        config = EXTRACT_ALL_FULLTEXT_CONFIG.get_for_header_document()
        assert config.extract_front
        assert config.extract_authors
        assert config.extract_affiliations
        assert (not config.extract_body_sections)
        assert (not config.extract_acknowledgements)
        assert (not config.extract_back_sections)
        assert (not config.extract_references)
        assert (not config.extract_graphic_bounding_boxes)
class BaseMetricsService(Service, MetricsServiceAPI):
    """Service that reports metrics to an InfluxDB server, rate-limiting
    repeated error logging while the server is unreachable."""

    # Minimum interval between logged reporting errors; failures inside the
    # window are remembered and logged once the window elapses.
    MIN_SECONDS_BETWEEN_ERROR_LOGS = 60

    def __init__(self, influx_server: str, influx_user: str, influx_password: str, influx_database: str, host: str, port: int, protocol: str, reporting_frequency: int) -> None:
        # Last reporting failure not yet logged (None when none pending).
        self._unreported_error: Exception = None
        # time.monotonic() of the last logged error; 0.0 means "never".
        self._last_time_reported: float = 0.0
        self._influx_server = influx_server
        self._reporting_frequency = reporting_frequency
        self._registry = HostMetricsRegistry(host)
        self.reporter = ExtendedInfluxReporter(database=influx_database, username=influx_user, password=influx_password, protocol=protocol, port=port, server=self._influx_server, registry=self._registry, async_post=self.async_post)
    logger = get_logger('trinity.components.builtin.metrics.MetricsService')
    def registry(self) -> HostMetricsRegistry:
        # NOTE(review): likely exposed as a @property upstream — confirm.
        return self._registry

    async def send_annotation(self, annotation_data: str) -> None:
        """Send an annotation; network failures are logged, not raised."""
        try:
            (await self.reporter.send_annotation(annotation_data))
        except (HTTPException, ConnectionError) as exc:
            self.logger.warning('Unable to report annotations: %s', exc)

    async def run(self) -> None:
        """Start continuous reporting and wait for the service to finish."""
        self.logger.info('Reporting metrics to %s', self._influx_server)
        self.manager.run_daemon_task(self.continuously_report)
        (await self.manager.wait_finished())

    async def report_now(self) -> None:
        """Report metrics once; errors are logged at most once per
        MIN_SECONDS_BETWEEN_ERROR_LOGS seconds."""
        try:
            (await self.reporter.report_metrics())
        except (HTTPException, ConnectionError) as exc:
            if self._is_justified_to_log_error():
                self._log_and_clear(exc)
            else:
                # Too soon to log again; remember the error for later.
                self._unreported_error = exc
        else:
            # Success: flush a previously suppressed error if the window allows.
            if ((self._unreported_error is not None) and self._is_justified_to_log_error()):
                self._log_and_clear(self._unreported_error)

    def _log_and_clear(self, error: Exception) -> None:
        # Log the failure and restart the rate-limit window.
        self.logger.warning('Unable to report metrics: %s', error)
        self._unreported_error = None
        self._last_time_reported = time.monotonic()

    def _is_justified_to_log_error(self) -> bool:
        # True when nothing was ever logged or the window has elapsed.
        return ((self._last_time_reported == 0.0) or ((time.monotonic() - self._last_time_reported) > self.MIN_SECONDS_BETWEEN_ERROR_LOGS))

    async def continuously_report(self) -> None:
        # Abstract: provided by concrete subclasses.
        ...

    async def async_post(self, data: str) -> None:
        # Abstract: provided by concrete subclasses.
        ...
def get_hmmer_alignments(hmmer_path, mali_root):
    """Load HMMER hits and keep only (query_id, hit_id) pairs that also
    appear in the manual alignments under `mali_root`.

    Returns the filtered HMMER dataframe with an added 'aln' column.
    """
    hmmer_df = parse_hmmer_text(hmmer_path)
    # Keep the best-scoring hit per (query_id, hit_id) pair.
    hmmer_df = hmmer_df.groupby(['query_id', 'hit_id']).apply(hit_argmax)
    manual = read_mali(mali_root, tool='manual', report_ids=True)
    # IDIOM FIX: both operands were already sets; the redundant
    # set(list(...)) / set() re-wrapping of the original is removed.
    manual_idx = set(map(tuple, manual[['query_id', 'hit_id']].values))
    idx = list((manual_idx & set(hmmer_df.index)))
    hmmer_df = hmmer_df.loc[idx]
    states = list(map(state_f, zip(list(hmmer_df['query_string']), list(hmmer_df['hit_string']))))
    # NOTE(review): joining into one string broadcasts the same value to
    # every row of 'aln' — confirm this is intended rather than a per-row
    # assignment.
    states = ''.join(list(map(revstate_f, states)))
    hmmer_df['aln'] = states
    return hmmer_df
class OptionPlotoptionsSunburstSonificationTracksMappingTremoloSpeed(Options):
    """Highcharts sunburst sonification 'tremoloSpeed' mapping options.

    NOTE(review): each duplicated method name in the original is a
    getter/setter pair; the '@property' / '@<name>.setter' decorators appear
    to have been stripped during extraction and are restored here.
    """

    @property
    def mapFunction(self):
        """Configured mapping function, or None when unset."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Configured mapping target, or None when unset."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Configured maximum mapped value, or None when unset."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Configured minimum mapped value, or None when unset."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Configured 'within' range reference, or None when unset."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
@dataclass
class FutureGroup():
    """Tracks one task group's futures through its lifecycle:
    pre-hooks -> main task -> post-hooks.

    NOTE(review): the 'field(default_factory=list)' declarations and the
    '__post_init__' hook only function under '@dataclass'; the decorator
    appears to have been stripped during extraction and is restored here.
    """
    args: argparse.Namespace
    fal_dbt: FalDbt
    task_group: TaskGroup
    executor: Executor
    futures: List[Future] = field(default_factory=list)
    # Bitwise-OR accumulation of task exit statuses (SUCCESS == 0).
    status: int = SUCCESS
    state: Optional[State] = None

    def __post_init__(self) -> None:
        # Start with pre-hooks when present, otherwise run the task directly.
        if self.task_group.pre_hooks:
            self.switch_to(State.PRE_HOOKS)
        else:
            self.switch_to(State.MAIN_TASK)

    def switch_to(self, group_status: State) -> None:
        """Enter a lifecycle state and submit that state's tasks."""
        self.state = group_status
        if (group_status is State.PRE_HOOKS):
            self._add_tasks(*self.task_group.pre_hooks)
        elif (group_status is State.MAIN_TASK):
            self._add_tasks(self.task_group.task)
        else:
            assert (group_status is State.POST_HOOKS)
            self._add_tasks(*self.task_group.post_hooks)

    def process(self, future: Future) -> None:
        """Record a finished future; advance the lifecycle once every future
        of the current state has completed."""
        assert (future in self.futures)
        self.futures.remove(future)
        self.status |= future.result()
        if self.futures:
            # Still waiting for siblings in this state.
            return None
        if (self.state is State.PRE_HOOKS):
            if (self.status == SUCCESS):
                self.switch_to(State.MAIN_TASK)
            else:
                # A failed pre-hook skips the main task; post-hooks still run.
                self.switch_to(State.POST_HOOKS)
        elif (self.state is State.MAIN_TASK):
            self.switch_to(State.POST_HOOKS)
        else:
            assert (self.state is State.POST_HOOKS)
            return None

    def _add_tasks(self, *tasks: Task) -> None:
        # Submit each task and tag the future with its origin so callbacks
        # can route results back to this group.
        for task in tasks:
            future = self.executor.submit(task.execute, args=self.args, fal_dbt=self.fal_dbt)
            (future.task, future.group) = (task, self)
            self.futures.append(future)

    def is_done(self) -> int:
        """Non-zero (True) when no futures remain outstanding."""
        return (len(self.futures) == 0)
# Per-action jog table: action -> (command index, step, limit, clamped value,
# send_coord axis id (None = original code never sent the coord), print label
# (None = silent)).  Values reproduce the original branch-by-branch behavior.
_JOG_ACTIONS = {
    1: (0, 2, 285, 280, 1, 'x + coords---->:'),
    2: (0, (- 2), (- 285), (- 280), 1, 'x - coords---->:'),
    3: (1, 2, 285, 280, 2, 'y + coords---->:'),
    4: (1, (- 2), (- 285), (- 280), 2, 'y - coords---->:'),
    5: (2, 2, 450, 445, 3, None),
    6: (2, (- 2), (- 120), (- 118), 3, None),
    8: (2, 2, 450, 445, 3, 'z + coords---->:'),
    9: (2, (- 2), (- 120), (- 118), 3, 'z - coords---->:'),
    12: (3, 10, 285, 280, 4, None),
    13: (3, (- 10), (- 285), (- 280), 4, None),
    14: (4, 10, 285, 280, 5, None),
    15: (4, (- 10), (- 285), (- 280), 5, None),
    # NOTE(review): the original actions 16/17 update command[5] but never
    # call send_coord — preserved (axis id None); confirm whether a
    # send_coord(6, ...) was intended.
    16: (5, 10, 450, 440, None, None),
    17: (5, (- 10), (- 120), (- 118), None, None),
}


def _jog_axis(index, step, limit, clamped, axis, label):
    """Nudge one coordinate by `step`, clamp it at the work-area limit,
    optionally print the new value and send it to the robot."""
    command[index] += step
    if ((step > 0) and (command[index] > limit)):
        command[index] = clamped
    elif ((step < 0) and (command[index] < limit)):
        command[index] = clamped
    if (label is not None):
        print(label, command[index])
    if (axis is not None):
        mc.send_coord(axis, command[index], speed)


def _blink_status(red, green, blue):
    """Blink the robot LED three times in the given color, ending lit.

    Reproduces the original off/color alternation with 0.5s pauses between
    every change (no trailing pause after the final color).
    """
    for i in range(3):
        mc.set_color(0, 0, 0)
        time.sleep(0.5)
        mc.set_color(red, green, blue)
        if (i < 2):
            time.sleep(0.5)


def control():
    """Background loop translating the shared `action` code into robot moves.

    Mutates the module-level `command` coordinate list and drives the robot
    `mc`.  Runs forever; intended to be started in its own thread.  The
    previously duplicated jog branches and LED blink sequences are factored
    into _jog_axis / _blink_status with identical behavior.
    """
    global command, action, gripper_value, is_enable, is_zero
    while True:
        if (is_enable and is_zero):
            if (action in _JOG_ACTIONS):
                # Continuous jog: repeats every loop while `action` stays set.
                _jog_axis(*_JOG_ACTIONS[action])
            elif (action == 7):
                # Toggle servo state: blink red when servos are not enabled,
                # green when they are.
                if (mc.is_all_servo_enable() != 1):
                    _blink_status(255, 0, 0)
                    is_enable = False
                else:
                    _blink_status(0, 255, 0)
                    is_enable = True
                action = 0
            elif (action == 10):
                gripper_value = 95
                mc.set_gripper_value(gripper_value, speed)
                action = 0
            elif (action == 11):
                gripper_value = 5
                mc.set_gripper_value(gripper_value, speed)
                action = 0
            elif (action == 18):
                # Debounced "go home": only act if still requested after 2s.
                time.sleep(2)
                if (action == 18):
                    mc.send_angles([0, 0, 0, 0, 0, 0, 0], 30)
                    time.sleep(2)
                    action = 0
            elif (action == 19):
                time.sleep(2)
                if (action == 19):
                    mc.release_all_servos()
                    action = 0
            elif (action == 20):
                time.sleep(2)
                if (action == 20):
                    mc.power_on()
                    action = 0
            elif (action == 21):
                time.sleep(2)
                if (action == 21):
                    mc.send_angles([(- 30), 0, 0, (- 90), 0, (- 90), 0], 30)
                    time.sleep(3)
                    command = zero.copy()
                    print('command', command)
                    action = 0
            else:
                pass
        elif (action == 7):
            # Robot disabled or not zeroed: poll servo state (up to 3 tries),
            # then blink red (failure) or green + power on (success).
            status = 0
            for _ in range(3):
                status = mc.is_all_servo_enable()
                if status:
                    break
                time.sleep(0.1)
            if (status in [0, (- 1)]):
                _blink_status(255, 0, 0)
                is_enable = False
            else:
                _blink_status(0, 255, 0)
                mc.power_on()
                is_enable = True
            action = 0
        elif (action == 21):
            # Debounced "return to zero pose" while not yet zeroed.
            time.sleep(2)
            if (action == 21):
                mc.send_coords(zero, 20)
                command = zero.copy()
                action = 0
                is_zero = True
        time.sleep(0.05)
def test_same_statements_have_the_same_order():
    """Two statements with identical content compare equal and neither
    sorts strictly before the other."""
    def make_statement():
        return Statement(Effect='Allow', Action=[], Resource=['a'])

    (left, right) = (make_statement(), make_statement())
    assert (not (left < right))
    assert (not (left > right))
    assert (left == right)
def test_edit_stream_rooms_different_admin_error(db, client, admin_jwt):
    """Even an admin must not attach rooms from two different events to one
    video stream; the API should answer 403."""
    stream = get_stream(db)
    old_room = stream.rooms[0]
    # Factory creates a room belonging to a different event than old_room.
    room = MicrolocationSubVideoStreamFactory()
    db.session.commit()
    # Patch payload tries to attach both rooms to the stream at once.
    data = json.dumps({'data': {'id': str(stream.id), 'type': 'video-stream', 'relationships': {'rooms': {'data': [{'id': str(room.id), 'type': 'microlocation'}, {'id': str(old_room.id), 'type': 'microlocation'}]}}}})
    assert (len(stream.rooms) == 1)
    response = client.patch(f'/v1/video-streams/{stream.id}', content_type='application/vnd.api+json', headers=admin_jwt, data=data)
    assert (response.status_code == 403)
    assert (json.loads(response.data)['errors'][0]['detail'] == 'Video Stream can only be created/edited with rooms of a single event')
class Data(HasTraits):
    """Van der Waals gas model with a live Chaco pressure-vs-volume plot.

    NOTE(review): the bare "('pressure')" / "('plot_type')" expressions in
    the original are stripped '@observe' decorators (without them the update
    handlers never fire); they are restored here, assuming 'observe' is
    imported from traits alongside HasTraits.
    """
    volume = Array()
    # Recomputed whenever any of the observed inputs change.
    pressure = Property(Array, observe=['temperature', 'attraction', 'tot_volume'])
    attraction = Range(low=(- 50.0), high=50.0, value=0.0)
    tot_volume = Range(low=0.01, high=100.0, value=0.01)
    temperature = Range(low=(- 50.0), high=50.0, value=50.0)
    r_constant = Float(8.314472)
    plot_type = Enum('line', 'scatter')
    plot = Instance(Plot)

    def _plot_default(self):
        """Build the initial pressure-vs-volume plot."""
        self.plotdata = ArrayPlotData(x=self.volume, y=self.pressure)
        plot = Plot(self.plotdata)
        plot.title = 'Pressure vs. Volume'
        plot.x_axis.title = 'Volume'
        plot.y_axis.title = 'Pressure'
        plot.range2d.set_bounds(((- 10), (- 2000)), (120, 4000))
        plot.padding_left = 80
        plot.plot(('x', 'y'), type=self.plot_type, name=self.plot_type, color='blue')
        return plot

    def _volume_default(self):
        # Default volume samples; starts at 0.1 to avoid division by zero.
        return np.arange(0.1, 100)

    def _get_pressure(self):
        """Van der Waals equation: P = RT/(V - b) - a/V^2."""
        return (((self.r_constant * self.temperature) / (self.volume - self.tot_volume)) - (self.attraction / (self.volume * self.volume)))

    @observe('pressure')
    def _update_plot(self, event):
        # Push recomputed pressure values into the plot data.
        self.plotdata.set_data('y', self.pressure)

    @observe('plot_type')
    def _update_plot_type(self, event):
        # Swap the renderer when the user toggles line/scatter.
        (old_plot_type, new_plot_type) = (event.old, event.new)
        self.plot.delplot(old_plot_type)
        self.plot.plot(('x', 'y'), type=new_plot_type, name=new_plot_type, color='blue')
        self.plot.invalidate_and_redraw()
    traits_view = View(UItem('plot', editor=ComponentEditor(), resizable=True), Item(name='attraction'), Item(name='tot_volume'), Item(name='temperature'), Item(name='r_constant', style='readonly'), Item(name='plot_type'), resizable=True, buttons=['OK'], title='Van der Waal Equation', width=900, height=800)
def test_exclude_coverage(coverage_mode, tester, config):
    """Setting 'exclude_contracts' must change the generated coverage report."""
    tester.useSafeMath(5, 10)
    coverage_eval = coverage.get_merged_coverage_eval()
    report = _build_coverage_output(coverage_eval)
    # Building twice with unchanged settings is deterministic...
    assert (_build_coverage_output(coverage_eval) == report)
    # ...but excluding a contract alters the output.
    config.settings['reports']['exclude_contracts'] = 'SafeMath'
    assert (_build_coverage_output(coverage_eval) != report)
class NonlinearVariationalProblemMixin():
    """Adjoint-annotation mixin for a nonlinear variational problem."""

    def _ad_annotate_init(init):
        # NOTE(review): the two bare statements below look like stripped
        # decorators (plausibly '@no_annotations' and '@wraps(init)');
        # as written, '_annotations' raises NameError at call time and
        # '(init)' is a no-op — confirm against the upstream source.
        _annotations
        (init)
        def wrapper(self, *args, **kwargs):
            """Call the wrapped __init__, then record the form, solution,
            bcs, Jacobian and (where derivable) the adjoint Jacobian needed
            for adjoint annotation."""
            from firedrake import derivative, adjoint, TrialFunction
            init(self, *args, **kwargs)
            self._ad_F = self.F
            self._ad_u = self.u
            self._ad_bcs = self.bcs
            self._ad_J = self.J
            try:
                # Adjoint of dF/du; not every form supports this.
                dFdu = derivative(self.F, self.u, TrialFunction(self.u.function_space()))
                self._ad_adj_F = adjoint(dFdu)
            except (TypeError, NotImplementedError):
                self._ad_adj_F = None
            self._ad_kwargs = {'Jp': self.Jp, 'form_compiler_parameters': self.form_compiler_parameters, 'is_linear': self.is_linear}
            self._ad_count_map = {}
        return wrapper

    def _ad_count_map_update(self, updated_ad_count_map):
        # Replace the block-variable count map used by the adjoint tape.
        self._ad_count_map = updated_ad_count_map
class JsBase(JsPackage):
    """Accumulates dc.js chart API calls as JavaScript fragments in self._js.

    NOTE(review): many methods below had their bodies stripped during
    extraction (bare 'def' lines with no statements, which do not parse);
    they are restored here as documented no-ops so the class is importable.
    Confirm their real bodies against the upstream source.
    """

    def version(self, no=None):
        # NOTE(review): the original body was the bare expression 'self'
        # followed by 'return' — it looks truncated; the net behavior
        # (returning None) is preserved.
        if (no is not None):
            self
        return

    def addFilterHandler(self, addFilterHandler):
        """Register a JS filter handler expression."""
        self._js.append(('addFilterHandler(%s)' % addFilterHandler))
        return self

    def chartGroup(self, groupId=None):
        # No JS emitted (original behavior).
        return

    def data(self, callback):
        """Stub: body stripped in the original source."""

    def chartID(self):
        """Stub: body stripped in the original source."""

    def x(self, xScale):
        """Set the chart's x scale."""
        self._js.append(('x(%s)' % xScale))
        return self

    def y(self, yScale):
        """Set the chart's y scale."""
        self._js.append(('y(%s)' % yScale))
        return self

    def yAxis(self, yAxis=None):
        """Configure the y axis."""
        self._js.append(('yAxis(%s)' % yAxis))
        return self

    def xUnits(self, unit=None):
        """Configure the x-axis units."""
        self._js.append(('xUnits(%s)' % unit))
        return self

    def brushOn(self, brushOn=True):
        # BUG FIX: the original appended the literal template 'brushOn(%s)'
        # without interpolating the flag, emitting broken JS.
        # NOTE(review): Python bools interpolate as 'True'/'False', not the
        # JS literals 'true'/'false' — confirm the expected output format.
        self._js.append(('brushOn(%s)' % brushOn))
        return self

    def dimension(self, dimension):
        """Stub: body stripped in the original source."""

    def expireCache(self):
        """Stub: body stripped in the original source."""

    def filter(self, filter):
        """Stub: body stripped in the original source."""

    def filterAll(self):
        """Stub: body stripped in the original source."""

    def filterHandler(self, filterHandler):
        """Stub: body stripped in the original source."""

    def filterPrinter(self, filterPrinter):
        """Stub: body stripped in the original source."""

    def filters(self):
        """Stub: body stripped in the original source."""

    def group(self):
        """Stub: body stripped in the original source."""

    def hasFilter(self, filter=None):
        """Stub: body stripped in the original source."""

    def height(self, height=None):
        """Set the chart height."""
        self._js.append(('height(%s)' % height))
        return self

    def width(self, width=None):
        """Set the chart width."""
        self._js.append(('width(%s)' % width))
        return self

    def label(self):
        """Stub: body stripped in the original source."""

    def render(self):
        """Emit the render() call."""
        self._js.append('render()')
        return self
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.