hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f736f2f0ce52820334da65475998c88bb20ab73a | 423 | py | Python | util/log.py | icyda17/MNT | f88a0a199e79d6d4201ec724478c710380e71ea3 | [
"MIT"
] | null | null | null | util/log.py | icyda17/MNT | f88a0a199e79d6d4201ec724478c710380e71ea3 | [
"MIT"
] | null | null | null | util/log.py | icyda17/MNT | f88a0a199e79d6d4201ec724478c710380e71ea3 | [
"MIT"
] | 1 | 2021-02-24T11:32:09.000Z | 2021-02-24T11:32:09.000Z | import time
def timeit(method):
    """Decorator that reports a function's wall-clock runtime in milliseconds.

    If the call carries a ``log_time`` dict keyword, the elapsed integer
    milliseconds are stored in it under ``log_name`` (or the upper-cased
    function name); otherwise the timing is printed to stdout.
    """
    def timed(*args, **kw):
        start = time.time()
        result = method(*args, **kw)
        elapsed_ms = (time.time() - start) * 1000
        if 'log_time' in kw:
            label = kw.get('log_name', method.__name__.upper())
            kw['log_time'][label] = int(elapsed_ms)
        else:
            print('%r %2.2f ms' % (method.__name__, elapsed_ms))
        return result
    return timed
def timeit(method):
def timed(*args, **kw):
ts = time.time()
result = method(*args, **kw)
te = time.time()
if 'log_time' in kw:
name = kw.get('log_name', method.__name__.upper())
kw['log_time'][name] = int((te - ts) * 1000)
else:
print('%r %2.2f ms' % (method.__name__, (te - ts) * 1000))
return result
return timed | true | true |
f736f4001a8075f4f0fe86f72ce2a6273e994743 | 95 | py | Python | exercicios_curso_em_video/Exercicio 12.py | Sposigor/Caminho_do_Python | e84d74e9dc89c0966f931a94cb9ebe3ee4671b6d | [
"MIT"
] | 1 | 2021-01-13T18:07:46.000Z | 2021-01-13T18:07:46.000Z | exercicios_curso_em_video/Exercicio 12.py | Sposigor/Caminho_do_Python | e84d74e9dc89c0966f931a94cb9ebe3ee4671b6d | [
"MIT"
] | null | null | null | exercicios_curso_em_video/Exercicio 12.py | Sposigor/Caminho_do_Python | e84d74e9dc89c0966f931a94cb9ebe3ee4671b6d | [
"MIT"
] | null | null | null | preco = float(input('Preço: '))
print(f'Depois do desconto de 5% aplicado: {preco * 0.95:.2f}') | 47.5 | 63 | 0.663158 | preco = float(input('Preço: '))
print(f'Depois do desconto de 5% aplicado: {preco * 0.95:.2f}') | true | true |
f736f4ba4224fe905a2780f231825d898cf9133f | 2,291 | py | Python | code/distance/mmd_pytorch.py | Flsahkong/transferlearning | fdc76a7e03d7771517ea938cb5b90aa5dfb8dfbd | [
"MIT"
] | 9 | 2019-12-26T08:51:30.000Z | 2021-04-16T06:44:52.000Z | code/distance/mmd_pytorch.py | clxiao/transferlearning | 0fe84de59dcb2871e2dca24130dc24e1ccce8506 | [
"MIT"
] | null | null | null | code/distance/mmd_pytorch.py | clxiao/transferlearning | 0fe84de59dcb2871e2dca24130dc24e1ccce8506 | [
"MIT"
] | 5 | 2021-09-19T08:02:58.000Z | 2022-03-30T07:20:18.000Z | # Compute MMD distance using pytorch
import torch
import torch.nn as nn
class MMD_loss(nn.Module):
    '''Maximum Mean Discrepancy (MMD) between two batches of samples.

    kernel_type selects either a linear kernel ('linear') or a mixture of
    kernel_num RBF kernels whose bandwidths are geometrically spaced by
    factors of kernel_mul ('rbf', the default).
    '''
    def __init__(self, kernel_type='rbf', kernel_mul=2.0, kernel_num=5):
        super(MMD_loss, self).__init__()
        self.kernel_num = kernel_num    # number of RBF bandwidths in the mixture
        self.kernel_mul = kernel_mul    # geometric spacing between bandwidths
        self.fix_sigma = None           # None -> bandwidth estimated from the data
        self.kernel_type = kernel_type
    def guassian_kernel(self, source, target, kernel_mul=2.0, kernel_num=5, fix_sigma=None):
        '''Sum of RBF kernel matrices over the stacked [source; target] rows.

        Name is a typo of "gaussian_kernel"; kept for API compatibility.
        '''
        n_samples = int(source.size()[0]) + int(target.size()[0])
        total = torch.cat([source, target], dim=0)
        # Broadcast to all pairwise differences: result shape (N, N, D).
        total0 = total.unsqueeze(0).expand(
            int(total.size(0)), int(total.size(0)), int(total.size(1)))
        total1 = total.unsqueeze(1).expand(
            int(total.size(0)), int(total.size(0)), int(total.size(1)))
        L2_distance = ((total0-total1)**2).sum(2)  # squared Euclidean distances
        if fix_sigma:
            bandwidth = fix_sigma
        else:
            # Bandwidth heuristic: mean pairwise squared distance,
            # excluding the n_samples zero self-distances.
            bandwidth = torch.sum(L2_distance.data) / (n_samples**2-n_samples)
        # Center the geometric ladder of bandwidths around the estimate.
        bandwidth /= kernel_mul ** (kernel_num // 2)
        bandwidth_list = [bandwidth * (kernel_mul**i)
                          for i in range(kernel_num)]
        kernel_val = [torch.exp(-L2_distance / bandwidth_temp)
                      for bandwidth_temp in bandwidth_list]
        return sum(kernel_val)
    def linear_mmd2(self, f_of_X, f_of_Y):
        '''Linear-kernel MMD^2: squared norm of the difference of batch means.'''
        loss = 0.0
        delta = f_of_X.float().mean(0) - f_of_Y.float().mean(0)
        loss = delta.dot(delta.T)
        return loss
    def forward(self, source, target):
        '''Return the MMD estimate between the source and target batches.'''
        if self.kernel_type == 'linear':
            return self.linear_mmd2(source, target)
        elif self.kernel_type == 'rbf':
            batch_size = int(source.size()[0])
            kernels = self.guassian_kernel(
                source, target, kernel_mul=self.kernel_mul, kernel_num=self.kernel_num, fix_sigma=self.fix_sigma)
            # NOTE(review): the estimator below runs under no_grad, so no
            # gradient flows through the RBF branch -- confirm this is the
            # intended training behaviour.
            with torch.no_grad():
                XX = torch.mean(kernels[:batch_size, :batch_size])
                YY = torch.mean(kernels[batch_size:, batch_size:])
                XY = torch.mean(kernels[:batch_size, batch_size:])
                YX = torch.mean(kernels[batch_size:, :batch_size])
                loss = torch.mean(XX + YY - XY - YX)
            torch.cuda.empty_cache()
            return loss
| 41.654545 | 113 | 0.594937 |
import torch
import torch.nn as nn
class MMD_loss(nn.Module):
def __init__(self, kernel_type='rbf', kernel_mul=2.0, kernel_num=5):
super(MMD_loss, self).__init__()
self.kernel_num = kernel_num
self.kernel_mul = kernel_mul
self.fix_sigma = None
self.kernel_type = kernel_type
def guassian_kernel(self, source, target, kernel_mul=2.0, kernel_num=5, fix_sigma=None):
n_samples = int(source.size()[0]) + int(target.size()[0])
total = torch.cat([source, target], dim=0)
total0 = total.unsqueeze(0).expand(
int(total.size(0)), int(total.size(0)), int(total.size(1)))
total1 = total.unsqueeze(1).expand(
int(total.size(0)), int(total.size(0)), int(total.size(1)))
L2_distance = ((total0-total1)**2).sum(2)
if fix_sigma:
bandwidth = fix_sigma
else:
bandwidth = torch.sum(L2_distance.data) / (n_samples**2-n_samples)
bandwidth /= kernel_mul ** (kernel_num // 2)
bandwidth_list = [bandwidth * (kernel_mul**i)
for i in range(kernel_num)]
kernel_val = [torch.exp(-L2_distance / bandwidth_temp)
for bandwidth_temp in bandwidth_list]
return sum(kernel_val)
def linear_mmd2(self, f_of_X, f_of_Y):
loss = 0.0
delta = f_of_X.float().mean(0) - f_of_Y.float().mean(0)
loss = delta.dot(delta.T)
return loss
def forward(self, source, target):
if self.kernel_type == 'linear':
return self.linear_mmd2(source, target)
elif self.kernel_type == 'rbf':
batch_size = int(source.size()[0])
kernels = self.guassian_kernel(
source, target, kernel_mul=self.kernel_mul, kernel_num=self.kernel_num, fix_sigma=self.fix_sigma)
with torch.no_grad():
XX = torch.mean(kernels[:batch_size, :batch_size])
YY = torch.mean(kernels[batch_size:, batch_size:])
XY = torch.mean(kernels[:batch_size, batch_size:])
YX = torch.mean(kernels[batch_size:, :batch_size])
loss = torch.mean(XX + YY - XY - YX)
torch.cuda.empty_cache()
return loss
| true | true |
f736f71da9307986982ce5b8734e9109c8e23e6c | 34,055 | py | Python | salt/modules/boto_rds.py | fictivekin/salt | f4b6f815d47ab8c790028e8ddad64ee0f8bb3f00 | [
"Apache-2.0"
] | null | null | null | salt/modules/boto_rds.py | fictivekin/salt | f4b6f815d47ab8c790028e8ddad64ee0f8bb3f00 | [
"Apache-2.0"
] | null | null | null | salt/modules/boto_rds.py | fictivekin/salt | f4b6f815d47ab8c790028e8ddad64ee0f8bb3f00 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Connection module for Amazon RDS
.. versionadded:: 2015.8.0
:configuration: This module accepts explicit rds credentials but can also
utilize IAM roles assigned to the instance through Instance Profiles.
Dynamic credentials are then automatically obtained from AWS API and no
further configuration is necessary. More Information available at:
.. code-block:: text
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
If IAM roles are not used you need to specify them either in a pillar or
in the minion's config file:
.. code-block:: yaml
rds.keyid: GKTADJGHEIQSXMKKRBJ08H
rds.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
A region may also be specified in the configuration:
.. code-block:: yaml
rds.region: us-east-1
If a region is not specified, the default is us-east-1.
It's also possible to specify key, keyid and region via a profile, either
as a passed in dict, or as a string to pull from pillars or minion config:
.. code-block:: yaml
myprofile:
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
region: us-east-1
:depends: boto3
'''
# keep lint from choking on _get_conn and _cache_id
#pylint: disable=E0602
# Import Python libs
from __future__ import absolute_import
import logging
from salt.exceptions import SaltInvocationError
from distutils.version import LooseVersion as _LooseVersion # pylint: disable=import-error,no-name-in-module
from time import time, sleep
# Import Salt libs
import salt.utils.boto3
import salt.utils.compat
import salt.utils.odict as odict
import salt.utils
import salt.ext.six as six
log = logging.getLogger(__name__)
# Import third party libs
# pylint: disable=import-error
try:
#pylint: disable=unused-import
import boto
import boto3
#pylint: enable=unused-import
from botocore.exceptions import ClientError
logging.getLogger('boto').setLevel(logging.CRITICAL)
logging.getLogger('boto3').setLevel(logging.CRITICAL)
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
# pylint: enable=import-error
# Maps this module's snake_case keyword arguments to the corresponding boto3
# CamelCase parameter name plus the callable each value is cast with before
# being sent to AWS (consumed via locals() intersection in create() and
# modify_db_instance()).
boto3_param_map = {
    'allocated_storage': ('AllocatedStorage', int),
    'allow_major_version_upgrade': ('AllowMajorVersionUpgrade', bool),
    'apply_immediately': ('ApplyImmediately', bool),
    'auto_minor_version_upgrade': ('AutoMinorVersionUpgrade', bool),
    'availability_zone': ('AvailabilityZone', str),
    'backup_retention_period': ('BackupRetentionPeriod', int),
    'ca_certificate_identifier': ('CACertificateIdentifier', str),
    'character_set_name': ('CharacterSetName', str),
    'copy_tags_to_snapshot': ('CopyTagsToSnapshot', bool),
    'db_cluster_identifier': ('DBClusterIdentifier', str),
    'db_instance_class': ('DBInstanceClass', str),
    'db_name': ('DBName', str),
    'db_parameter_group_name': ('DBParameterGroupName', str),
    'db_port_number': ('DBPortNumber', int),
    'db_security_groups': ('DBSecurityGroups', list),
    'db_subnet_group_name': ('DBSubnetGroupName', str),
    'domain': ('Domain', str),
    'domain_iam_role_name': ('DomainIAMRoleName', str),
    'engine': ('Engine', str),
    'engine_version': ('EngineVersion', str),
    'iops': ('Iops', int),
    'kms_key_id': ('KmsKeyId', str),
    'license_model': ('LicenseModel', str),
    'master_user_password': ('MasterUserPassword', str),
    'master_username': ('MasterUsername', str),
    'monitoring_interval': ('MonitoringInterval', int),
    'monitoring_role_arn': ('MonitoringRoleArn', str),
    'multi_az': ('MultiAZ', bool),
    'name': ('DBInstanceIdentifier', str),
    'new_db_instance_identifier': ('NewDBInstanceIdentifier', str),
    'option_group_name': ('OptionGroupName', str),
    'port': ('Port', int),
    'preferred_backup_window': ('PreferredBackupWindow', str),
    'preferred_maintenance_window': ('PreferredMaintenanceWindow', str),
    'promotion_tier': ('PromotionTier', int),
    'publicly_accessible': ('PubliclyAccessible', bool),
    'storage_encrypted': ('StorageEncrypted', bool),
    'storage_type': ('StorageType', str),
    'taglist': ('Tags', list),
    'tde_credential_arn': ('TdeCredentialArn', str),
    'tde_credential_password': ('TdeCredentialPassword', str),
    'vpc_security_group_ids': ('VpcSecurityGroupIds', list),
}
def __virtual__():
    '''
    Only load if boto libraries exist and if boto libraries are greater than
    a given version.
    '''
    required_boto3_version = '1.3.1'
    # No boto at all: refuse to load.
    if not HAS_BOTO:
        return (False, 'The boto_rds module could not be loaded: '
                'boto libraries not found')
    # boto3 present but too old: refuse to load.
    if _LooseVersion(boto3.__version__) < _LooseVersion(required_boto3_version):
        return (False, 'The boto_rds module could not be loaded: '
                'boto version {0} or later must be installed.'.format(required_boto3_version))
    return True
def __init__(opts):
    # Salt module init hook: populate the salt dunder globals, then (when
    # boto is importable) attach the boto3 helper functions for the 'rds'
    # service to this module -- this is what provides the _get_conn() and
    # _cache_id() used throughout the file.
    salt.utils.compat.pack_dunder(__name__)
    if HAS_BOTO:
        __utils__['boto3.assign_funcs'](__name__, 'rds')
def exists(name, tags=None, region=None, key=None, keyid=None, profile=None):
    '''
    Check to see if an RDS exists.

    CLI example::

        salt myminion boto_rds.exists myrds region=us-east-1
    '''
    # 'tags' is accepted for signature parity with sibling functions but is
    # not used by this check.
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        rds = conn.describe_db_instances(DBInstanceIdentifier=name)
        return {'exists': bool(rds)}
    except ClientError as e:
        # NOTE(review): a missing instance surfaces as a ClientError, so the
        # caller receives {'error': ...} with no 'exists' key -- callers must
        # use res.get('exists') rather than truth-testing the dict.
        return {'error': salt.utils.boto3.get_error(e)}
def option_group_exists(name, tags=None, region=None, key=None, keyid=None,
                        profile=None):
    '''
    Check to see if an RDS option group exists.

    CLI example::

        salt myminion boto_rds.option_group_exists myoptiongr region=us-east-1
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        result = conn.describe_option_groups(OptionGroupName=name)
    except ClientError as e:
        # Missing groups (and any other API failure) arrive here.
        return {'error': salt.utils.boto3.get_error(e)}
    return {'exists': bool(result)}
def parameter_group_exists(name, tags=None, region=None, key=None, keyid=None,
                           profile=None):
    '''
    Check to see if an RDS parameter group exists.

    CLI example::

        salt myminion boto_rds.parameter_group_exists myparametergroup \
                region=us-east-1
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        rds = conn.describe_db_parameter_groups(DBParameterGroupName=name)
        return {'exists': bool(rds), 'error': None}
    except ClientError as e:
        resp = {}
        # A missing group is a normal outcome: report exists=False.
        if e.response['Error']['Code'] == 'DBParameterGroupNotFound':
            resp['exists'] = False
        # NOTE(review): any other ClientError returns only 'error' (no
        # 'exists' key), so callers using res.get('exists') see None.
        resp['error'] = salt.utils.boto3.get_error(e)
        return resp
def subnet_group_exists(name, tags=None, region=None, key=None, keyid=None,
                        profile=None):
    '''
    Check to see if an RDS subnet group exists.

    CLI example::

        salt myminion boto_rds.subnet_group_exists my-param-group \
                region=us-east-1
    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'exists': bool(conn)}
        result = conn.describe_db_subnet_groups(DBSubnetGroupName=name)
        return {'exists': bool(result)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def create(name, allocated_storage, db_instance_class, engine,
           master_username, master_user_password, db_name=None,
           db_security_groups=None, vpc_security_group_ids=None,
           availability_zone=None, db_subnet_group_name=None,
           preferred_maintenance_window=None, db_parameter_group_name=None,
           backup_retention_period=None, preferred_backup_window=None,
           port=None, multi_az=None, engine_version=None,
           auto_minor_version_upgrade=None, license_model=None, iops=None,
           option_group_name=None, character_set_name=None,
           publicly_accessible=None, wait_status=None, tags=None,
           db_cluster_identifier=None, storage_type=None,
           tde_credential_arn=None, tde_credential_password=None,
           storage_encrypted=None, kms_key_id=None, domain=None,
           copy_tags_to_snapshot=None, monitoring_interval=None,
           monitoring_role_arn=None, domain_iam_role_name=None, region=None,
           promotion_tier=None, key=None, keyid=None, profile=None):
    '''
    Create an RDS instance.

    Optional keyword arguments are translated to boto3 parameters through
    ``boto3_param_map``; when ``wait_status`` is given the call blocks until
    the instance reaches that status.

    CLI example to create an RDS::

        salt myminion boto_rds.create myrds 10 db.t2.micro MySQL sqlusr sqlpassw
    '''
    if not allocated_storage:
        raise SaltInvocationError('allocated_storage is required')
    if not db_instance_class:
        raise SaltInvocationError('db_instance_class is required')
    if not engine:
        raise SaltInvocationError('engine is required')
    if not master_username:
        raise SaltInvocationError('master_username is required')
    if not master_user_password:
        raise SaltInvocationError('master_user_password is required')
    if availability_zone and multi_az:
        raise SaltInvocationError('availability_zone and multi_az are mutually'
                                  ' exclusive arguments.')
    if wait_status:
        wait_statuses = ['available', 'modifying', 'backing-up']
        if wait_status not in wait_statuses:
            raise SaltInvocationError('wait_status can be one of: '
                                      '{0}'.format(wait_statuses))
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        # Build the tag list *before* snapshotting locals() below.  It used
        # to be computed after the snapshot, so 'taglist' was never seen by
        # the parameter mapping and tags were silently dropped.
        taglist = _tag_doc(tags)
        kwargs = {}
        boto_params = set(boto3_param_map.keys())
        keys = set(locals().keys())
        # Loop variable renamed from 'key' so the AWS secret-key argument is
        # not clobbered.
        for param in keys.intersection(boto_params):
            val = locals()[param]
            if val is not None:
                mapped = boto3_param_map[param]
                kwargs[mapped[0]] = mapped[1](val)
        # Validation doesn't want parameters that are None
        # https://github.com/boto/boto3/issues/400
        kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v is not None)
        rds = conn.create_db_instance(**kwargs)
        if not rds:
            return {'created': False}
        if not wait_status:
            return {'created': True, 'message':
                    'Created RDS instance {0}.'.format(name)}
        # Poll until the instance reaches the requested status.
        while True:
            log.info('Waiting 10 secs...')
            sleep(10)
            # .get('rds') instead of ['rds']: describe() returns an
            # exists/message dict (no 'rds' key) while the instance is
            # still propagating.
            _describe = describe(name=name, tags=tags, region=region, key=key,
                                 keyid=keyid, profile=profile).get('rds')
            if not _describe:
                return {'created': True}
            if _describe['DBInstanceStatus'] == wait_status:
                return {'created': True, 'message':
                        'Created RDS {0} with current status '
                        '{1}'.format(name, _describe['DBInstanceStatus'])}
            log.info('Current status: {0}'.format(_describe['DBInstanceStatus']))
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def create_read_replica(name, source_name, db_instance_class=None,
                        availability_zone=None, port=None,
                        auto_minor_version_upgrade=None, iops=None,
                        option_group_name=None, publicly_accessible=None,
                        tags=None, db_subnet_group_name=None,
                        storage_type=None, copy_tags_to_snapshot=None,
                        monitoring_interval=None, monitoring_role_arn=None,
                        region=None, key=None, keyid=None, profile=None):
    '''
    Create an RDS read replica of ``source_name``.

    CLI example to create an RDS read replica::

        salt myminion boto_rds.create_read_replica replicaname source_name
    '''
    # The old guard here referenced an undefined 'backup_retention_period'
    # (a guaranteed NameError); a replica inherits that setting from its
    # source instance, so no such check belongs in this function.
    res = __salt__['boto_rds.exists'](source_name, tags, region, key, keyid, profile)
    if not res.get('exists'):
        # exists() returns a dict, which is always truthy -- report the real
        # boolean, not bool(res).
        return {'exists': False, 'message':
                'RDS instance source {0} does not exists.'.format(source_name)}
    res = __salt__['boto_rds.exists'](name, tags, region, key, keyid, profile)
    if res.get('exists'):
        return {'exists': True, 'message':
                'RDS replica instance {0} already exists.'.format(name)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        # Optional arguments mapped to their boto3 names.  The previous code
        # indexed locals() with the CamelCase names (KeyError) and passed
        # None values positionally (boto3 parameter-validation error).
        optional = {
            'DBInstanceClass': db_instance_class,
            'AvailabilityZone': availability_zone,
            'Port': port,
            'AutoMinorVersionUpgrade': auto_minor_version_upgrade,
            'Iops': iops,
            'OptionGroupName': option_group_name,
            'PubliclyAccessible': publicly_accessible,
            'DBSubnetGroupName': db_subnet_group_name,
            'StorageType': storage_type,
            'CopyTagsToSnapshot': copy_tags_to_snapshot,
            'MonitoringInterval': monitoring_interval,
            'MonitoringRoleArn': monitoring_role_arn,
        }
        # boto3 rejects None-valued parameters; send only what was supplied.
        kwargs = dict((k, v) for k, v in six.iteritems(optional)
                      if v is not None)
        rds_replica = conn.create_db_instance_read_replica(
            DBInstanceIdentifier=name,
            SourceDBInstanceIdentifier=source_name,
            Tags=_tag_doc(tags),
            **kwargs)
        return {'exists': bool(rds_replica)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def create_option_group(name, engine_name, major_engine_version,
                        option_group_description, tags=None, region=None,
                        key=None, keyid=None, profile=None):
    '''
    Create an RDS option group.

    CLI example to create an RDS option group::

        salt myminion boto_rds.create_option_group my-opt-group mysql 5.6 \
                "group description"
    '''
    # Nothing to do when the group is already there.
    res = __salt__['boto_rds.option_group_exists'](name, tags, region, key, keyid,
                                                   profile)
    if res.get('exists'):
        return {'exists': bool(res)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        created = conn.create_option_group(
            OptionGroupName=name,
            EngineName=engine_name,
            MajorEngineVersion=major_engine_version,
            OptionGroupDescription=option_group_description,
            Tags=_tag_doc(tags))
        return {'exists': bool(created)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def create_parameter_group(name, db_parameter_group_family, description,
                           tags=None, region=None, key=None, keyid=None,
                           profile=None):
    '''
    Create an RDS parameter group.

    CLI example to create an RDS parameter group::

        salt myminion boto_rds.create_parameter_group my-param-group mysql5.6 \
                "group description"
    '''
    # Short-circuit when the group already exists.
    res = __salt__['boto_rds.parameter_group_exists'](name, tags, region, key,
                                                      keyid, profile)
    if res.get('exists'):
        return {'exists': bool(res)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        taglist = _tag_doc(tags)
        rds = conn.create_db_parameter_group(DBParameterGroupName=name,
                                           DBParameterGroupFamily=db_parameter_group_family,
                                           Description=description,
                                           Tags=taglist)
        if not rds:
            return {'created': False, 'message':
                    'Failed to create RDS parameter group {0}'.format(name)}
        # NOTE(review): success reports under the 'exists' key while failure
        # uses 'created' -- callers must check both.
        return {'exists': bool(rds), 'message':
                'Created RDS parameter group {0}'.format(name)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def create_subnet_group(name, description, subnet_ids, tags=None,
                        region=None, key=None, keyid=None, profile=None):
    '''
    Create an RDS subnet group.

    CLI example to create an RDS subnet group::

        salt myminion boto_rds.create_subnet_group my-subnet-group \
            "group description" '[subnet-12345678, subnet-87654321]' \
            region=us-east-1
    '''
    # Nothing to do when the group is already there.
    res = __salt__['boto_rds.subnet_group_exists'](name, tags, region, key,
                                                   keyid, profile)
    if res.get('exists'):
        return {'exists': bool(res)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        result = conn.create_db_subnet_group(DBSubnetGroupName=name,
                                             DBSubnetGroupDescription=description,
                                             SubnetIds=subnet_ids,
                                             Tags=_tag_doc(tags))
        return {'created': bool(result)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def update_parameter_group(name, parameters, apply_method="pending-reboot",
                           tags=None, region=None, key=None, keyid=None,
                           profile=None):
    '''
    Update an RDS parameter group.

    ``parameters`` is a mapping of parameter name to new value; every entry
    gets the same ``apply_method``.

    CLI example::

        salt myminion boto_rds.update_parameter_group my-param-group \
                parameters='{"back_log":1, "binlog_cache_size":4096}' \
                region=us-east-1
    '''
    res = __salt__['boto_rds.parameter_group_exists'](name, tags, region, key,
                                                      keyid, profile)
    if not res.get('exists'):
        return {'exists': bool(res), 'message':
                'RDS parameter group {0} does not exist.'.format(name)}
    # Loop variable must not be called 'key': it used to clobber the AWS
    # secret-key argument before it was handed to _get_conn() below.
    param_list = []
    for param_name, param_value in six.iteritems(parameters):
        # boto3 expects Parameters as a list of dicts, not tuples, and
        # ParameterValue must be a string.
        param_list.append({'ParameterName': param_name,
                           'ParameterValue': str(param_value),
                           'ApplyMethod': apply_method})
    if not param_list:
        return {'results': False}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        res = conn.modify_db_parameter_group(DBParameterGroupName=name,
                                             Parameters=param_list)
        return {'results': bool(res)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def describe(name, tags=None, region=None, key=None, keyid=None,
             profile=None):
    '''
    Return RDS instance details as ``{'rds': {...}}`` (``{'rds': None}``
    when no matching instance is found).

    CLI example::

        salt myminion boto_rds.describe myrds
    '''
    res = __salt__['boto_rds.exists'](name, tags, region, key, keyid,
                                      profile)
    if not res.get('exists'):
        return {'exists': bool(res), 'message':
                'RDS instance {0} does not exist.'.format(name)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        resp = conn.describe_db_instances(DBInstanceIdentifier=name)
        matches = [
            i for i in resp.get('DBInstances', [])
            if i.get('DBInstanceIdentifier') == name
        ]
        if not matches:
            return {'rds': None}
        inst = matches[0]
        # Project the large API response down to a stable set of fields.
        keys = ('DBInstanceIdentifier', 'DBInstanceClass', 'Engine',
                'DBInstanceStatus', 'DBName', 'AllocatedStorage',
                'PreferredBackupWindow', 'BackupRetentionPeriod',
                'AvailabilityZone', 'PreferredMaintenanceWindow',
                'LatestRestorableTime', 'EngineVersion',
                'AutoMinorVersionUpgrade', 'LicenseModel',
                'Iops', 'CharacterSetName', 'PubliclyAccessible',
                'StorageType', 'TdeCredentialArn', 'DBInstancePort',
                'DBClusterIdentifier', 'StorageEncrypted', 'KmsKeyId',
                'DbiResourceId', 'CACertificateIdentifier',
                'CopyTagsToSnapshot', 'MonitoringInterval',
                'MonitoringRoleArn', 'PromotionTier',
                'DomainMemberships')
        # Read values from the matched instance dict.  The old code re-read
        # 'DBInstances' from the already-popped item, so the default [{}]
        # was used and every field came back None.
        return {'rds': dict([(k, inst.get(k)) for k in keys])}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
    except IndexError:
        return {'rds': None}
def get_endpoint(name, tags=None, region=None, key=None, keyid=None,
                 profile=None):
    '''
    Return the endpoint ('address:port') of an RDS instance.

    CLI example::

        salt myminion boto_rds.get_endpoint myrds
    '''
    # Sentinel returned when the API response carries no instance data.
    endpoint = 'None'
    res = __salt__['boto_rds.exists'](name, tags, region, key, keyid,
                                      profile)
    # exists() returns a dict, which is always truthy; the old 'if not res'
    # check could never fire.  Test the 'exists' flag instead.
    if not res.get('exists'):
        return {'exists': bool(res.get('exists')), 'message':
                'RDS instance {0} does not exist.'.format(name)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        rds = conn.describe_db_instances(DBInstanceIdentifier=name)
        if rds:
            inst = rds['DBInstances'][0]['Endpoint']
            endpoint = '{0}:{1}'.format(inst.get('Address'), inst.get('Port'))
        # NOTE(review): success returns a plain string while failures return
        # dicts -- kept for backward compatibility.
        return endpoint
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def delete(name, skip_final_snapshot=None, final_db_snapshot_identifier=None,
           region=None, key=None, keyid=None, profile=None,
           wait_for_deletion=True, timeout=180):
    '''
    Delete an RDS instance, optionally waiting until deletion finishes.

    CLI example::

        salt myminion boto_rds.delete myrds skip_final_snapshot=True \
                region=us-east-1
    '''
    # Taking a final snapshot is slow; stretch the default timeout for it.
    if timeout == 180 and not skip_final_snapshot:
        timeout = 420
    if not skip_final_snapshot and not final_db_snapshot_identifier:
        raise SaltInvocationError('At least one of the following must'
                                  ' be specified: skip_final_snapshot'
                                  ' final_db_snapshot_identifier')
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'deleted': bool(conn)}
        kwargs = {}
        if skip_final_snapshot is not None:
            kwargs['SkipFinalSnapshot'] = bool(skip_final_snapshot)
        if final_db_snapshot_identifier is not None:
            kwargs['FinalDBSnapshotIdentifier'] = str(final_db_snapshot_identifier)
        res = conn.delete_db_instance(DBInstanceIdentifier=name, **kwargs)
        if not wait_for_deletion:
            return {'deleted': bool(res), 'message':
                    'Deleted RDS instance {0}.'.format(name)}
        start_time = time()
        while True:
            # exists() returns a dict, which is always truthy, so the old
            # 'if not exists(...)' never fired and deletion was never
            # detected.  Inspect the 'exists' flag instead (absent once the
            # instance is gone and the API raises DBInstanceNotFound).
            if not __salt__['boto_rds.exists'](name=name, region=region,
                                               key=key, keyid=keyid,
                                               profile=profile).get('exists'):
                return {'deleted': bool(res), 'message':
                        'Deleted RDS instance {0} completely.'.format(name)}
            if time() - start_time > timeout:
                raise SaltInvocationError('RDS instance {0} has not been '
                                          'deleted completely after {1} '
                                          'seconds'.format(name, timeout))
            sleep(10)
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def delete_option_group(name, region=None, key=None, keyid=None, profile=None):
    '''
    Delete an RDS option group.

    CLI example::

        salt myminion boto_rds.delete_option_group my-opt-group \
                region=us-east-1
    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'deleted': bool(conn)}
        res = conn.delete_option_group(OptionGroupName=name)
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
    if not res:
        return {'deleted': bool(res), 'message':
                'Failed to delete RDS option group {0}.'.format(name)}
    return {'deleted': bool(res), 'message':
            'Deleted RDS option group {0}.'.format(name)}
def delete_parameter_group(name, region=None, key=None, keyid=None,
                           profile=None):
    '''
    Delete an RDS parameter group.

    CLI example::

        salt myminion boto_rds.delete_parameter_group my-param-group \
                region=us-east-1
    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        deleted = conn.delete_db_parameter_group(DBParameterGroupName=name)
        return {'deleted': bool(deleted), 'message':
                'Deleted RDS parameter group {0}.'.format(name)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def delete_subnet_group(name, region=None, key=None, keyid=None,
                        profile=None):
    '''
    Delete an RDS subnet group.

    CLI example::

        salt myminion boto_rds.delete_subnet_group my-subnet-group \
                region=us-east-1
    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        deleted = conn.delete_db_subnet_group(DBSubnetGroupName=name)
        return {'deleted': bool(deleted), 'message':
                'Deleted RDS subnet group {0}.'.format(name)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def describe_parameter_group(name, Filters=None, MaxRecords=None, Marker=None,
                             region=None, key=None, keyid=None, profile=None):
    '''
    Returns a list of `DBParameterGroup` descriptions.

    CLI example to description of parameter group::

        salt myminion boto_rds.describe_parameter_group parametergroupname \
                region=us-east-1
    '''
    res = __salt__['boto_rds.parameter_group_exists'](name, tags=None,
                                                      region=region, key=key,
                                                      keyid=keyid,
                                                      profile=profile)
    if not res.get('exists'):
        return {'exists': bool(res)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        # Forward only the optional paging/filtering args the caller gave.
        # (Explicit checks also stop the loop variable from shadowing the
        # AWS secret 'key' parameter, as the old locals() loop did.)
        kwargs = {}
        if Marker is not None:
            kwargs['Marker'] = str(Marker)
        if Filters is not None:
            # Filters is a list of {'Name': ..., 'Values': [...]} dicts and
            # must be passed through untouched; str()-casting it (as before)
            # produced an invalid parameter.
            kwargs['Filters'] = Filters
        if MaxRecords is not None:
            kwargs['MaxRecords'] = int(MaxRecords)
        info = conn.describe_db_parameter_groups(DBParameterGroupName=name,
                                                 **kwargs)
        if not info:
            return {'results': bool(info), 'message':
                    'Failed to get RDS description for group {0}.'.format(name)}
        return {'results': bool(info), 'message':
                'Got RDS description for group {0}.'.format(name)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def describe_parameters(name, Source=None, MaxRecords=None, Marker=None,
                        region=None, key=None, keyid=None, profile=None):
    '''
    Returns the parameters of a `DBParameterGroup`, keyed by parameter name.

    CLI example to description of parameters ::

        salt myminion boto_rds.describe_parameters parametergroupname \
                region=us-east-1
    '''
    res = __salt__['boto_rds.parameter_group_exists'](name, tags=None,
                                                      region=region, key=key,
                                                      keyid=keyid,
                                                      profile=profile)
    if not res.get('exists'):
        return {'result': False,
                'message': 'Parameter group {0} does not exist'.format(name)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'result': False,
                    'message': 'Could not establish a connection to RDS'}
        # Explicit checks instead of the old locals() loop, whose 'key' loop
        # variable shadowed the AWS secret-key parameter.
        kwargs = {}
        if Marker is not None:
            kwargs['Marker'] = str(Marker)
        if Source is not None:
            kwargs['Source'] = str(Source)
        if MaxRecords is not None:
            kwargs['MaxRecords'] = int(MaxRecords)
        r = conn.describe_db_parameters(DBParameterGroupName=name, **kwargs)
        if not r:
            return {'result': False,
                    'message': 'Failed to get RDS parameters for group {0}.'
                               .format(name)}
        results = r['Parameters']
        # 'IsModifiable' was previously misspelled 'IsModifieable', a field
        # AWS never returns, so that column was always None.
        keys = ['ParameterName', 'ParameterValue', 'Description',
                'Source', 'ApplyType', 'DataType', 'AllowedValues',
                'IsModifiable', 'MinimumEngineVersion', 'ApplyMethod']
        parameters = odict.OrderedDict()
        ret = {'result': True}
        for result in results:
            data = odict.OrderedDict()
            for k in keys:
                data[k] = result.get(k)
            parameters[result.get('ParameterName')] = data
        ret['parameters'] = parameters
        return ret
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def modify_db_instance(name,
                       allocated_storage=None,
                       allow_major_version_upgrade=None,
                       apply_immediately=None,
                       auto_minor_version_upgrade=None,
                       backup_retention_period=None,
                       ca_certificate_identifier=None,
                       character_set_name=None,
                       copy_tags_to_snapshot=None,
                       db_cluster_identifier=None,
                       db_instance_class=None,
                       db_name=None,
                       db_parameter_group_name=None,
                       db_port_number=None,
                       db_security_groups=None,
                       db_subnet_group_name=None,
                       domain=None,
                       domain_iam_role_name=None,
                       engine_version=None,
                       iops=None,
                       kms_key_id=None,
                       license_model=None,
                       master_user_password=None,
                       monitoring_interval=None,
                       monitoring_role_arn=None,
                       multi_az=None,
                       new_db_instance_identifier=None,
                       option_group_name=None,
                       preferred_backup_window=None,
                       preferred_maintenance_window=None,
                       promotion_tier=None,
                       publicly_accessible=None,
                       storage_encrypted=None,
                       storage_type=None,
                       tde_credential_arn=None,
                       tde_credential_password=None,
                       vpc_security_group_ids=None,
                       region=None, key=None, keyid=None, profile=None):
    '''
    Modify settings for a DB instance.
    Only arguments that are not None are sent to AWS; they are translated to
    their boto3 CamelCase keywords via the module-level boto3_param_map.
    CLI example to description of parameters ::
        salt myminion boto_rds.modify_db_instance db_instance_identifier region=us-east-1
    '''
    res = __salt__['boto_rds.exists'](name, tags=None, region=region, key=key, keyid=keyid, profile=profile)
    if not res.get('exists'):
        return {'modified': False, 'message':
                'RDS db instance {0} does not exist.'.format(name)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'modified': False}
        kwargs = {}
        # 'name' is passed explicitly as DBInstanceIdentifier below, so it is
        # excluded from the generic parameter mapping.
        excluded = set(('name',))
        boto_params = set(boto3_param_map.keys())
        keys = set(locals().keys())
        # Translate every non-None snake_case argument present in the map to
        # its boto3 keyword, coercing with the mapped callable.
        # NOTE(review): the loop variable shadows the AWS ``key`` credential
        # argument; harmless here because _get_conn() has already been
        # called, but fragile if statements are reordered.
        for key in keys.intersection(boto_params).difference(excluded):
            val = locals()[key]
            if val is not None:
                mapped = boto3_param_map[key]
                kwargs[mapped[0]] = mapped[1](val)
        info = conn.modify_db_instance(DBInstanceIdentifier=name, **kwargs)
        if not info:
            return {'modified': bool(info), 'message':
                    'Failed to modify RDS db instance {0}.'.format(name)}
        return {'modified': bool(info), 'message':
                'Modified RDS db instance {0}.'.format(name),
                'results': dict(info)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def _tag_doc(tags):
taglist = []
if tags is not None:
for k, v in six.iteritems(tags):
if str(k).startswith('__'):
continue
taglist.append({'Key': str(k), 'Value': str(v)})
return taglist
| 37.754989 | 112 | 0.585582 |
from __future__ import absolute_import
import logging
from salt.exceptions import SaltInvocationError
from distutils.version import LooseVersion as _LooseVersion
from time import time, sleep
import salt.utils.boto3
import salt.utils.compat
import salt.utils.odict as odict
import salt.utils
import salt.ext.six as six
log = logging.getLogger(__name__)
try:
    # Record whether the boto libraries are importable; __virtual__() checks
    # HAS_BOTO before loading this module. The very chatty boto loggers are
    # silenced down to CRITICAL.
    import boto
    import boto3
    from botocore.exceptions import ClientError
    logging.getLogger('boto').setLevel(logging.CRITICAL)
    logging.getLogger('boto3').setLevel(logging.CRITICAL)
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
# Maps this module's snake_case keyword-argument names to the corresponding
# boto3 CamelCase parameter name and the callable used to coerce the value
# before it is sent to AWS (consumed by create() and modify_db_instance()).
boto3_param_map = {
    'allocated_storage': ('AllocatedStorage', int),
    'allow_major_version_upgrade': ('AllowMajorVersionUpgrade', bool),
    'apply_immediately': ('ApplyImmediately', bool),
    'auto_minor_version_upgrade': ('AutoMinorVersionUpgrade', bool),
    'availability_zone': ('AvailabilityZone', str),
    'backup_retention_period': ('BackupRetentionPeriod', int),
    'ca_certificate_identifier': ('CACertificateIdentifier', str),
    'character_set_name': ('CharacterSetName', str),
    'copy_tags_to_snapshot': ('CopyTagsToSnapshot', bool),
    'db_cluster_identifier': ('DBClusterIdentifier', str),
    'db_instance_class': ('DBInstanceClass', str),
    'db_name': ('DBName', str),
    'db_parameter_group_name': ('DBParameterGroupName', str),
    'db_port_number': ('DBPortNumber', int),
    'db_security_groups': ('DBSecurityGroups', list),
    'db_subnet_group_name': ('DBSubnetGroupName', str),
    'domain': ('Domain', str),
    'domain_iam_role_name': ('DomainIAMRoleName', str),
    'engine': ('Engine', str),
    'engine_version': ('EngineVersion', str),
    'iops': ('Iops', int),
    'kms_key_id': ('KmsKeyId', str),
    'license_model': ('LicenseModel', str),
    'master_user_password': ('MasterUserPassword', str),
    'master_username': ('MasterUsername', str),
    'monitoring_interval': ('MonitoringInterval', int),
    'monitoring_role_arn': ('MonitoringRoleArn', str),
    'multi_az': ('MultiAZ', bool),
    'name': ('DBInstanceIdentifier', str),
    'new_db_instance_identifier': ('NewDBInstanceIdentifier', str),
    'option_group_name': ('OptionGroupName', str),
    'port': ('Port', int),
    'preferred_backup_window': ('PreferredBackupWindow', str),
    'preferred_maintenance_window': ('PreferredMaintenanceWindow', str),
    'promotion_tier': ('PromotionTier', int),
    'publicly_accessible': ('PubliclyAccessible', bool),
    'storage_encrypted': ('StorageEncrypted', bool),
    'storage_type': ('StorageType', str),
    'taglist': ('Tags', list),
    'tde_credential_arn': ('TdeCredentialArn', str),
    'tde_credential_password': ('TdeCredentialPassword', str),
    'vpc_security_group_ids': ('VpcSecurityGroupIds', list),
}
def __virtual__():
    '''
    Only load this module when boto/boto3 are importable and boto3 is at
    least the required version; otherwise return (False, reason) so Salt
    skips it.
    '''
    required_boto3_version = '1.3.1'
    if not HAS_BOTO:
        return (False, 'The boto_rds module could not be loaded: '
                'boto libraries not found')
    elif _LooseVersion(boto3.__version__) < _LooseVersion(required_boto3_version):
        return (False, 'The boto_rds module could not be loaded: '
                'boto version {0} or later must be installed.'.format(required_boto3_version))
    else:
        return True
def __init__(opts):
    '''
    Module init: pack dunder references and attach the generic boto3 helper
    functions (e.g. the _get_conn used throughout this module) for the
    'rds' service.
    '''
    salt.utils.compat.pack_dunder(__name__)
    if HAS_BOTO:
        __utils__['boto3.assign_funcs'](__name__, 'rds')
def exists(name, tags=None, region=None, key=None, keyid=None, profile=None):
    '''
    Check whether an RDS instance with the given identifier exists.
    Returns {'exists': bool} on success or {'error': ...} when the
    describe call raises a ClientError (e.g. instance not found).
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        rds = conn.describe_db_instances(DBInstanceIdentifier=name)
        return {'exists': bool(rds)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def option_group_exists(name, tags=None, region=None, key=None, keyid=None,
                        profile=None):
    '''
    Check whether an RDS option group exists.
    Returns {'exists': bool} or {'error': ...} on a ClientError.
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        rds = conn.describe_option_groups(OptionGroupName=name)
        return {'exists': bool(rds)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def parameter_group_exists(name, tags=None, region=None, key=None, keyid=None,
                           profile=None):
    '''
    Check whether an RDS parameter group exists.
    On success returns {'exists': bool, 'error': None}; when the group is
    missing, 'exists' is False and 'error' carries the boto error details.
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        rds = conn.describe_db_parameter_groups(DBParameterGroupName=name)
        return {'exists': bool(rds), 'error': None}
    except ClientError as e:
        resp = {}
        # Only a DBParameterGroupNotFound error means "does not exist";
        # other errors leave 'exists' unset.
        if e.response['Error']['Code'] == 'DBParameterGroupNotFound':
            resp['exists'] = False
        resp['error'] = salt.utils.boto3.get_error(e)
        return resp
def subnet_group_exists(name, tags=None, region=None, key=None, keyid=None,
                        profile=None):
    '''
    Check whether an RDS subnet group exists.
    Returns {'exists': bool} or {'error': ...} on a ClientError.
    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'exists': bool(conn)}
        rds = conn.describe_db_subnet_groups(DBSubnetGroupName=name)
        return {'exists': bool(rds)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def create(name, allocated_storage, db_instance_class, engine,
           master_username, master_user_password, db_name=None,
           db_security_groups=None, vpc_security_group_ids=None,
           availability_zone=None, db_subnet_group_name=None,
           preferred_maintenance_window=None, db_parameter_group_name=None,
           backup_retention_period=None, preferred_backup_window=None,
           port=None, multi_az=None, engine_version=None,
           auto_minor_version_upgrade=None, license_model=None, iops=None,
           option_group_name=None, character_set_name=None,
           publicly_accessible=None, wait_status=None, tags=None,
           db_cluster_identifier=None, storage_type=None,
           tde_credential_arn=None, tde_credential_password=None,
           storage_encrypted=None, kms_key_id=None, domain=None,
           copy_tags_to_snapshot=None, monitoring_interval=None,
           monitoring_role_arn=None, domain_iam_role_name=None, region=None,
           promotion_tier=None, key=None, keyid=None, profile=None):
    '''
    Create an RDS instance.
    Required arguments are validated up front; every other non-None
    argument is mapped to its boto3 CamelCase keyword via boto3_param_map.
    When ``wait_status`` is one of 'available', 'modifying' or 'backing-up',
    the call polls every 10 seconds until the instance reaches that status.
    '''
    if not allocated_storage:
        raise SaltInvocationError('allocated_storage is required')
    if not db_instance_class:
        raise SaltInvocationError('db_instance_class is required')
    if not engine:
        raise SaltInvocationError('engine is required')
    if not master_username:
        raise SaltInvocationError('master_username is required')
    if not master_user_password:
        raise SaltInvocationError('master_user_password is required')
    if availability_zone and multi_az:
        raise SaltInvocationError('availability_zone and multi_az are mutually'
                                  ' exclusive arguments.')
    if wait_status:
        wait_statuses = ['available', 'modifying', 'backing-up']
        if wait_status not in wait_statuses:
            raise SaltInvocationError('wait_status can be one of: '
                                      '{0}'.format(wait_statuses))
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        kwargs = {}
        boto_params = set(boto3_param_map.keys())
        keys = set(locals().keys())
        # Translate every non-None snake_case argument present in the map.
        # NOTE(review): the loop variable shadows the AWS ``key`` credential
        # argument; harmless because _get_conn() has already been called.
        for key in keys.intersection(boto_params):
            val = locals()[key]
            if val is not None:
                mapped = boto3_param_map[key]
                kwargs[mapped[0]] = mapped[1](val)
        # NOTE(review): taglist is assigned *after* the locals() mapping
        # above, so the 'taglist' -> Tags entry in boto3_param_map never
        # fires and tags are not sent to AWS here — confirm intent.
        taglist = _tag_doc(tags)
        # https://github.com/boto/boto3/issues/400
        kwargs = dict((k, v) for k, v in six.iteritems(kwargs) if v is not None)
        rds = conn.create_db_instance(**kwargs)
        if not rds:
            return {'created': False}
        if not wait_status:
            return {'created': True, 'message':
                    'Created RDS instance {0}.'.format(name)}
        # Poll until the instance reaches the requested status.
        while True:
            log.info('Waiting 10 secs...')
            sleep(10)
            _describe = describe(name=name, tags=tags, region=region, key=key,
                                 keyid=keyid, profile=profile)['rds']
            if not _describe:
                return {'created': True}
            if _describe['DBInstanceStatus'] == wait_status:
                return {'created': True, 'message':
                        'Created RDS {0} with current status '
                        '{1}'.format(name, _describe['DBInstanceStatus'])}
            log.info('Current status: {0}'.format(_describe['DBInstanceStatus']))
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def create_read_replica(name, source_name, db_instance_class=None,
                        availability_zone=None, port=None,
                        auto_minor_version_upgrade=None, iops=None,
                        option_group_name=None, publicly_accessible=None,
                        tags=None, db_subnet_group_name=None,
                        storage_type=None, copy_tags_to_snapshot=None,
                        monitoring_interval=None, monitoring_role_arn=None,
                        region=None, key=None, keyid=None, profile=None):
    '''
    Create an RDS read replica named ``name`` of instance ``source_name``.
    Returns {'exists': bool} on success, a message dict when the source is
    missing or the replica already exists, or {'error': ...} on ClientError.
    '''
    res = __salt__['boto_rds.exists'](source_name, tags, region, key, keyid, profile)
    if not res.get('exists'):
        return {'exists': bool(res), 'message':
                'RDS instance source {0} does not exists.'.format(source_name)}
    res = __salt__['boto_rds.exists'](name, tags, region, key, keyid, profile)
    if res.get('exists'):
        return {'exists': bool(res), 'message':
                'RDS replica instance {0} already exists.'.format(name)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        # Map optional snake_case arguments to their boto3 CamelCase
        # keywords, skipping anything left at None (boto3 rejects None
        # values). The previous implementation looked the CamelCase names
        # up in locals() (KeyError on every call) and validated an
        # undefined ``backup_retention_period`` name (NameError).
        kwargs = {}
        optional = (('DBInstanceClass', db_instance_class, str),
                    ('AvailabilityZone', availability_zone, str),
                    ('OptionGroupName', option_group_name, str),
                    ('MonitoringRoleArn', monitoring_role_arn, str),
                    ('DBSubnetGroupName', db_subnet_group_name, str),
                    ('StorageType', storage_type, str),
                    ('MonitoringInterval', monitoring_interval, int),
                    ('Iops', iops, int),
                    ('Port', port, int),
                    ('CopyTagsToSnapshot', copy_tags_to_snapshot, bool),
                    ('AutoMinorVersionUpgrade', auto_minor_version_upgrade, bool),
                    ('PubliclyAccessible', publicly_accessible, bool))
        for keyword, value, cast in optional:
            if value is not None:
                kwargs[keyword] = cast(value)
        taglist = _tag_doc(tags)
        rds_replica = conn.create_db_instance_read_replica(
            DBInstanceIdentifier=name,
            SourceDBInstanceIdentifier=source_name,
            Tags=taglist,
            **kwargs)
        return {'exists': bool(rds_replica)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def create_option_group(name, engine_name, major_engine_version,
                        option_group_description, tags=None, region=None,
                        key=None, keyid=None, profile=None):
    '''
    Create an RDS option group, unless one with the same name already
    exists. Returns {'exists': bool} or {'error': ...} on a ClientError.
    '''
    res = __salt__['boto_rds.option_group_exists'](name, tags, region, key, keyid,
                                                   profile)
    if res.get('exists'):
        return {'exists': bool(res)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        taglist = _tag_doc(tags)
        rds = conn.create_option_group(OptionGroupName=name,
                                       EngineName=engine_name,
                                       MajorEngineVersion=major_engine_version,
                                       OptionGroupDescription=option_group_description,
                                       Tags=taglist)
        return {'exists': bool(rds)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def create_parameter_group(name, db_parameter_group_family, description,
                           tags=None, region=None, key=None, keyid=None,
                           profile=None):
    '''
    Create an RDS parameter group, unless one with the same name already
    exists. Returns a dict describing the outcome, or {'error': ...} on a
    ClientError.
    '''
    res = __salt__['boto_rds.parameter_group_exists'](name, tags, region, key,
                                                      keyid, profile)
    if res.get('exists'):
        return {'exists': bool(res)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        taglist = _tag_doc(tags)
        rds = conn.create_db_parameter_group(DBParameterGroupName=name,
                                             DBParameterGroupFamily=db_parameter_group_family,
                                             Description=description,
                                             Tags=taglist)
        if not rds:
            return {'created': False, 'message':
                    'Failed to create RDS parameter group {0}'.format(name)}
        return {'exists': bool(rds), 'message':
                'Created RDS parameter group {0}'.format(name)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def create_subnet_group(name, description, subnet_ids, tags=None,
                        region=None, key=None, keyid=None, profile=None):
    '''
    Create an RDS subnet group from the given subnet ids, unless one with
    the same name already exists. Returns {'created': bool},
    {'exists': bool}, or {'error': ...} on a ClientError.
    '''
    res = __salt__['boto_rds.subnet_group_exists'](name, tags, region, key,
                                                   keyid, profile)
    if res.get('exists'):
        return {'exists': bool(res)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        taglist = _tag_doc(tags)
        rds = conn.create_db_subnet_group(DBSubnetGroupName=name,
                                          DBSubnetGroupDescription=description,
                                          SubnetIds=subnet_ids, Tags=taglist)
        return {'created': bool(rds)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def update_parameter_group(name, parameters, apply_method="pending-reboot",
                           tags=None, region=None, key=None, keyid=None,
                           profile=None):
    '''
    Update an RDS parameter group's parameters.
    ``parameters`` is a mapping of parameter name to new value; every entry
    is submitted with the given ``apply_method``.
    '''
    res = __salt__['boto_rds.parameter_group_exists'](name, tags, region, key,
                                                      keyid, profile)
    if not res.get('exists'):
        return {'exists': bool(res), 'message':
                'RDS parameter group {0} does not exist.'.format(name)}
    # Use a dedicated loop variable: the previous implementation reused
    # ``key`` here, clobbering the AWS secret key argument before it was
    # passed to _get_conn() below.
    param_list = []
    for param_name, param_value in six.iteritems(parameters):
        param_list.append((param_name, param_value, apply_method))
    if not param_list:
        return {'results': False}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        res = conn.modify_db_parameter_group(DBParameterGroupName=name,
                                             Parameters=param_list)
        return {'results': bool(res)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def describe(name, tags=None, region=None, key=None, keyid=None,
             profile=None):
    '''
    Return selected attributes of the named RDS instance under the 'rds'
    key, or {'rds': None} when no matching instance is found.
    '''
    res = __salt__['boto_rds.exists'](name, tags, region, key, keyid,
                                      profile)
    if not res.get('exists'):
        return {'exists': bool(res), 'message':
                'RDS instance {0} does not exist.'.format(name)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        rds = conn.describe_db_instances(DBInstanceIdentifier=name)
        matches = [
            i for i in rds.get('DBInstances', [])
            if i.get('DBInstanceIdentifier') == name
        ]
        if not matches:
            return {'rds': None}
        instance = matches[0]
        keys = ('DBInstanceIdentifier', 'DBInstanceClass', 'Engine',
                'DBInstanceStatus', 'DBName', 'AllocatedStorage',
                'PreferredBackupWindow', 'BackupRetentionPeriod',
                'AvailabilityZone', 'PreferredMaintenanceWindow',
                'LatestRestorableTime', 'EngineVersion',
                'AutoMinorVersionUpgrade', 'LicenseModel',
                'Iops', 'CharacterSetName', 'PubliclyAccessible',
                'StorageType', 'TdeCredentialArn', 'DBInstancePort',
                'DBClusterIdentifier', 'StorageEncrypted', 'KmsKeyId',
                'DbiResourceId', 'CACertificateIdentifier',
                'CopyTagsToSnapshot', 'MonitoringInterval',
                'MonitoringRoleArn', 'PromotionTier',
                'DomainMemberships')
        # The previous implementation re-indexed rds['DBInstances'][0] after
        # the matching instance had already been popped out of the response,
        # so every attribute came back as None. Read them from the matched
        # instance dict instead.
        return {'rds': dict([(k, instance.get(k)) for k in keys])}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def get_endpoint(name, tags=None, region=None, key=None, keyid=None,
                 profile=None):
    '''
    Return the endpoint of the named RDS instance as ``'address:port'``.
    Returns a message dict when the instance does not exist, or
    {'error': ...} on a ClientError.
    '''
    endpoint = 'None'
    res = __salt__['boto_rds.exists'](name, tags, region, key, keyid,
                                      profile)
    # boto_rds.exists returns a dict (always truthy); the previous check
    # ``if not res`` could never take the not-found branch.
    if not res.get('exists'):
        return {'exists': bool(res), 'message':
                'RDS instance {0} does not exist.'.format(name)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        rds = conn.describe_db_instances(DBInstanceIdentifier=name)
        if rds:
            inst = rds['DBInstances'][0]['Endpoint']
            endpoint = '{0}:{1}'.format(inst.get('Address'), inst.get('Port'))
            return endpoint
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def delete(name, skip_final_snapshot=None, final_db_snapshot_identifier=None,
           region=None, key=None, keyid=None, profile=None,
           wait_for_deletion=True, timeout=180):
    '''
    Delete an RDS instance, optionally waiting (up to ``timeout`` seconds)
    until AWS reports it gone. One of ``skip_final_snapshot`` or
    ``final_db_snapshot_identifier`` must be provided.
    '''
    # Taking a final snapshot is slower; extend the default timeout only
    # when the caller did not override it.
    if timeout == 180 and not skip_final_snapshot:
        timeout = 420
    if not skip_final_snapshot and not final_db_snapshot_identifier:
        raise SaltInvocationError('At least one of the following must'
                                  ' be specified: skip_final_snapshot'
                                  ' final_db_snapshot_identifier')
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'deleted': bool(conn)}
        kwargs = {}
        if skip_final_snapshot is not None:
            kwargs['SkipFinalSnapshot'] = bool(skip_final_snapshot)
        if final_db_snapshot_identifier is not None:
            kwargs['FinalDBSnapshotIdentifier'] = str(final_db_snapshot_identifier)
        res = conn.delete_db_instance(DBInstanceIdentifier=name, **kwargs)
        if not wait_for_deletion:
            return {'deleted': bool(res), 'message':
                    'Deleted RDS instance {0}.'.format(name)}
        # Poll every 10 seconds until the instance disappears or the
        # timeout elapses.
        start_time = time()
        while True:
            # boto_rds.exists returns a dict (always truthy), so the
            # 'exists' flag must be checked explicitly; once the instance is
            # gone the describe call errors out and 'exists' is absent.
            if not __salt__['boto_rds.exists'](name=name, region=region,
                                               key=key, keyid=keyid,
                                               profile=profile).get('exists'):
                return {'deleted': bool(res), 'message':
                        'Deleted RDS instance {0} completely.'.format(name)}
            if time() - start_time > timeout:
                raise SaltInvocationError('RDS instance {0} has not been '
                                          'deleted completely after {1} '
                                          'seconds'.format(name, timeout))
            sleep(10)
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def delete_option_group(name, region=None, key=None, keyid=None, profile=None):
    '''
    Delete an RDS option group. Returns {'deleted': bool, 'message': ...}
    or {'error': ...} on a ClientError.
    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'deleted': bool(conn)}
        res = conn.delete_option_group(OptionGroupName=name)
        if not res:
            return {'deleted': bool(res), 'message':
                    'Failed to delete RDS option group {0}.'.format(name)}
        return {'deleted': bool(res), 'message':
                'Deleted RDS option group {0}.'.format(name)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def delete_parameter_group(name, region=None, key=None, keyid=None,
                           profile=None):
    '''
    Delete an RDS parameter group. Returns {'deleted': bool, 'message': ...}
    or {'error': ...} on a ClientError.
    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        r = conn.delete_db_parameter_group(DBParameterGroupName=name)
        return {'deleted': bool(r), 'message':
                'Deleted RDS parameter group {0}.'.format(name)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def delete_subnet_group(name, region=None, key=None, keyid=None,
                        profile=None):
    '''
    Delete an RDS subnet group. Returns {'deleted': bool, 'message': ...}
    or {'error': ...} on a ClientError.
    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        r = conn.delete_db_subnet_group(DBSubnetGroupName=name)
        return {'deleted': bool(r), 'message':
                'Deleted RDS subnet group {0}.'.format(name)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def describe_parameter_group(name, Filters=None, MaxRecords=None, Marker=None,
                             region=None, key=None, keyid=None, profile=None):
    '''
    Return descriptive information about an RDS parameter group.
    ``Filters``, ``MaxRecords`` and ``Marker`` are forwarded to
    describe_db_parameter_groups when given.
    '''
    res = __salt__['boto_rds.parameter_group_exists'](name, tags=None,
                                                      region=region, key=key,
                                                      keyid=keyid,
                                                      profile=profile)
    if not res.get('exists'):
        return {'exists': bool(res)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'results': bool(conn)}
        # Build the optional boto3 keyword arguments explicitly instead of
        # poking around in locals(). Filters is passed through unmodified:
        # boto3 expects a list of dicts, and the previous str() coercion
        # would have produced an invalid parameter.
        kwargs = {}
        if Marker is not None:
            kwargs['Marker'] = str(Marker)
        if Filters is not None:
            kwargs['Filters'] = Filters
        if MaxRecords is not None:
            kwargs['MaxRecords'] = int(MaxRecords)
        info = conn.describe_db_parameter_groups(DBParameterGroupName=name,
                                                 **kwargs)
        if not info:
            return {'results': bool(info), 'message':
                    'Failed to get RDS description for group {0}.'.format(name)}
        return {'results': bool(info), 'message':
                'Got RDS description for group {0}.'.format(name)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def describe_parameters(name, Source=None, MaxRecords=None, Marker=None,
                        region=None, key=None, keyid=None, profile=None):
    '''
    Returns a list of `DBParameterGroup` parameters.
    CLI example to description of parameters ::
        salt myminion boto_rds.describe_parameters parametergroupname\
        region=us-east-1
    '''
    res = __salt__['boto_rds.parameter_group_exists'](name, tags=None,
                                                      region=region, key=key,
                                                      keyid=keyid,
                                                      profile=profile)
    if not res.get('exists'):
        return {'result': False,
                'message': 'Parameter group {0} does not exist'.format(name)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'result': False,
                    'message': 'Could not establish a connection to RDS'}
        # Build the optional boto3 keyword arguments explicitly. The previous
        # implementation indexed locals() in a loop whose variable shadowed
        # the AWS ``key`` credential parameter, which is fragile.
        kwargs = {}
        if Marker is not None:
            kwargs['Marker'] = str(Marker)
        if Source is not None:
            kwargs['Source'] = str(Source)
        if MaxRecords is not None:
            kwargs['MaxRecords'] = int(MaxRecords)
        r = conn.describe_db_parameters(DBParameterGroupName=name, **kwargs)
        if not r:
            return {'result': False,
                    'message': 'Failed to get RDS parameters for group {0}.'
                    .format(name)}
        results = r['Parameters']
        # Attribute names as returned per-parameter by boto3.
        keys = ['ParameterName', 'ParameterValue', 'Description',
                'Source', 'ApplyType', 'DataType', 'AllowedValues',
                'IsModifieable', 'MinimumEngineVersion', 'ApplyMethod']
        parameters = odict.OrderedDict()
        ret = {'result': True}
        for result in results:
            data = odict.OrderedDict()
            for k in keys:
                data[k] = result.get(k)
            parameters[result.get('ParameterName')] = data
        ret['parameters'] = parameters
        return ret
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def modify_db_instance(name,
                       allocated_storage=None,
                       allow_major_version_upgrade=None,
                       apply_immediately=None,
                       auto_minor_version_upgrade=None,
                       backup_retention_period=None,
                       ca_certificate_identifier=None,
                       character_set_name=None,
                       copy_tags_to_snapshot=None,
                       db_cluster_identifier=None,
                       db_instance_class=None,
                       db_name=None,
                       db_parameter_group_name=None,
                       db_port_number=None,
                       db_security_groups=None,
                       db_subnet_group_name=None,
                       domain=None,
                       domain_iam_role_name=None,
                       engine_version=None,
                       iops=None,
                       kms_key_id=None,
                       license_model=None,
                       master_user_password=None,
                       monitoring_interval=None,
                       monitoring_role_arn=None,
                       multi_az=None,
                       new_db_instance_identifier=None,
                       option_group_name=None,
                       preferred_backup_window=None,
                       preferred_maintenance_window=None,
                       promotion_tier=None,
                       publicly_accessible=None,
                       storage_encrypted=None,
                       storage_type=None,
                       tde_credential_arn=None,
                       tde_credential_password=None,
                       vpc_security_group_ids=None,
                       region=None, key=None, keyid=None, profile=None):
    '''
    Modify settings for a DB instance.
    Only arguments that are not None are sent to AWS; they are translated to
    their boto3 CamelCase keywords via the module-level boto3_param_map.
    CLI example to description of parameters ::
        salt myminion boto_rds.modify_db_instance db_instance_identifier region=us-east-1
    '''
    res = __salt__['boto_rds.exists'](name, tags=None, region=region, key=key, keyid=keyid, profile=profile)
    if not res.get('exists'):
        return {'modified': False, 'message':
                'RDS db instance {0} does not exist.'.format(name)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'modified': False}
        kwargs = {}
        # 'name' is passed explicitly as DBInstanceIdentifier below, so it is
        # excluded from the generic parameter mapping.
        excluded = set(('name',))
        boto_params = set(boto3_param_map.keys())
        keys = set(locals().keys())
        # Translate every non-None snake_case argument present in the map to
        # its boto3 keyword, coercing with the mapped callable.
        # NOTE(review): the loop variable shadows the AWS ``key`` credential
        # argument; harmless here because _get_conn() has already been
        # called, but fragile if statements are reordered.
        for key in keys.intersection(boto_params).difference(excluded):
            val = locals()[key]
            if val is not None:
                mapped = boto3_param_map[key]
                kwargs[mapped[0]] = mapped[1](val)
        info = conn.modify_db_instance(DBInstanceIdentifier=name, **kwargs)
        if not info:
            return {'modified': bool(info), 'message':
                    'Failed to modify RDS db instance {0}.'.format(name)}
        return {'modified': bool(info), 'message':
                'Modified RDS db instance {0}.'.format(name),
                'results': dict(info)}
    except ClientError as e:
        return {'error': salt.utils.boto3.get_error(e)}
def _tag_doc(tags):
taglist = []
if tags is not None:
for k, v in six.iteritems(tags):
if str(k).startswith('__'):
continue
taglist.append({'Key': str(k), 'Value': str(v)})
return taglist
| true | true |
f736f8aeb2c6114d285750703b0d296db06fb264 | 9,225 | py | Python | pysiaf/iando/write.py | goudfroo/pysiaf | ca8350ce814950344789a9674079b8d0168ac05e | [
"BSD-3-Clause"
] | null | null | null | pysiaf/iando/write.py | goudfroo/pysiaf | ca8350ce814950344789a9674079b8d0168ac05e | [
"BSD-3-Clause"
] | null | null | null | pysiaf/iando/write.py | goudfroo/pysiaf | ca8350ce814950344789a9674079b8d0168ac05e | [
"BSD-3-Clause"
] | 1 | 2020-11-04T16:48:44.000Z | 2020-11-04T16:48:44.000Z | """Functions to write Science Instrument Aperture Files (SIAF).
SIAF content in an aperture_collection object can be written to an xml file that can be ingested in
the PRD. Format and order of the xml fields are defined in SIAF reference files.
Writing to Microsoft Excel .xlsx format is supported.
Writing to .csv and other formats supported by astropy.table.Table.write is enabled.
Authors
-------
Johannes Sahlmann
"""
import os
import numpy as np
import lxml.etree as ET
from astropy.time import Time
from astropy.table import Table, Column
from openpyxl import Workbook
from openpyxl.styles import Font, Color
from openpyxl.styles import Alignment
from ..version import __version__
from ..constants import _JWST_TEMPORARY_ROOT
from ..aperture import PRD_REQUIRED_ATTRIBUTES_ORDERED, SIAF_XML_FIELD_FORMAT, FLOAT_ATTRIBUTES
# dictionary used to set field precision in SIAF.XML: maps each required
# aperture attribute to its format spec from the SIAF reference table
field_names = list(SIAF_XML_FIELD_FORMAT['field_name'])
xml_decimal_precision = {
    attribute: SIAF_XML_FIELD_FORMAT['pyformat'][field_names.index(attribute)]
    for attribute in PRD_REQUIRED_ATTRIBUTES_ORDERED
}
def write_jwst_siaf(aperture_collection, filename=None, basepath=None, label=None,
                    file_format='xml', verbose=True):
    """Write the content of aperture_collection into xml and xlsx files that are PRD-compliant.
    Parameters
    ----------
    aperture_collection : ApertureCollection
        dictionary of apertures
    filename
    basepath
    label
    file_format : str list
        one of ['xml', 'xlsx', 'csv', and formats supported by astropy Table.write]
    verbose
    Returns
    -------
    filenames : list
        paths of the files that were written, one per requested format
    TODO
    ----
    test support of astropy Table.write formats (FITS not working)
    """
    # Normalise a single format string into a one-element list.
    if type(file_format) == str:
        file_format = [file_format]
    aperture_names = np.array([key for key in aperture_collection.apertures.keys()])
    instrument = aperture_collection.apertures[aperture_names[0]].InstrName
    if instrument == 'NIRCAM':
        instrument = 'NIRCam'
    elif instrument == 'NIRSPEC':
        instrument = 'NIRSpec'
    if (filename is not None) and (len(list(file_format)) != 1):
        raise RuntimeError('When filename is specified, only one output format is supported')
    if label is not None:
        name_seed = instrument + '_SIAF_{}'.format(label)
    else:
        name_seed = instrument + '_SIAF'
    filenames = []
    # hostname = os.uname()[1]
    # NOTE(review): os.getlogin() raises OSError when there is no
    # controlling terminal (e.g. cron/CI); getpass.getuser() would be more
    # robust — confirm before changing.
    username = os.getlogin()
    timestamp = Time.now()
    # The loop variable deliberately rebinds the ``file_format`` parameter;
    # the list() copy above is what is actually iterated.
    for file_format in list(file_format):
        if filename is None:
            if basepath is None:
                basepath = _JWST_TEMPORARY_ROOT
            if not os.path.isdir(basepath):
                raise RuntimeError("Could not write SIAF data "
                                   "to {}. Directory does not exist.".format(basepath))
            if file_format == 'xml':
                out_filename = os.path.join(basepath, name_seed+'.xml')
            elif file_format == 'xlsx':
                out_filename = os.path.join(basepath, name_seed+'.xlsx')
            # elif file_format == 'csv':
            #     out_filename = os.path.join(basepath, name_seed+'.csv')
            else:
                out_filename = os.path.join(basepath, name_seed+'.{}'.format(file_format))
        else:
            out_filename = filename
        if file_format == 'xml':
            root = ET.Element('SiafEntries')
            # add generation info as comment to SIAFXML
            root.append(ET.Comment('Generated {} {}'.format(timestamp.isot, timestamp.scale)))
            root.append(ET.Comment('by {}'.format(username)))
            # try:
            #     repo = git.Repo(os.path.abspath(__file__), search_parent_directories=True)
            #     git_version = git.Git(repo.working_dir).describe()
            #     root.append(ET.Comment('pysiaf git-version {}'.format(git_version)))
            # except git.exc.InvalidGitRepositoryError:
            root.append(ET.Comment('pysiaf version {}'.format(__version__)))
            for aperture_name in aperture_names:
                aperture = aperture_collection.apertures[aperture_name]
                siaf_entry = ET.SubElement(root, 'SiafEntry')
                for attribute in PRD_REQUIRED_ATTRIBUTES_ORDERED:
                    attribute_value = getattr(aperture_collection.apertures[aperture_name],
                                              attribute)
                    if attribute_value is None:
                        attribute_text = None
                    # NIRSpec special case
                    elif (aperture.AperType in ['TRANSFORM']) and \
                            (attribute in 'XSciRef YSciRef XSciScale YSciScale V2Ref V3Ref'.
                             split()):
                        attribute_text = '{:{prec}}'.format(attribute_value,
                                                            prec='.15e').strip()
                    elif attribute in FLOAT_ATTRIBUTES:
                        attribute_text = '{:{prec}}'.format(
                            attribute_value, prec=xml_decimal_precision[attribute]).strip()
                    else:
                        attribute_text = str(attribute_value)
                    # NaN values are serialised as an empty element.
                    if (not isinstance(attribute_value, str)) and (attribute_text is not None):
                        if np.isnan(attribute_value):
                            attribute_text = None
                    ET.SubElement(siaf_entry, attribute).text = attribute_text
            doc = ET.ElementTree(root)
            doc.write(out_filename, pretty_print=True, xml_declaration=False)
            if verbose:
                print('Wrote Siaf to xml file {}'.format(out_filename))
        elif file_format == 'xlsx':
            siaf_workbook = Workbook()
            ws1 = siaf_workbook.active
            ws1.title = 'SIAF'
            # openpyxl rows are 1-based: row 1 carries section titles, row 2
            # the attribute names, data starts at row 3.
            header_row_description = 1
            header_row_attributes = 2
            # write descriptive header
            for j, attribute_name in enumerate(PRD_REQUIRED_ATTRIBUTES_ORDERED):
                col = j + 1
                # Section titles appear above the first attribute of each
                # logical group of columns.
                if attribute_name == 'InstrName':
                    text = 'Aperture Basic Info'
                elif attribute_name == 'XDetSize':
                    text = 'Detector Frame'
                elif attribute_name == 'XSciSize':
                    text = 'Science Frame'
                elif attribute_name == 'V2Ref':
                    text = 'V Frame'
                elif attribute_name == 'V2IdlYAngle':
                    text = 'Frame Relationships'
                elif attribute_name == 'XIdlVert1':
                    text = 'Vertices'
                elif attribute_name == 'Sci2IdlDeg':
                    text = 'Science to Ideal Polynomial'
                else:
                    text = ''
                cell = ws1.cell(column=col, row=header_row_description, value="{}".format(text))
                cell.font = Font(name='Courier', b=True, i=True, family=3.0, sz=14.0)
                # cell.font.color = Color(rgb='FF0000FF', type='rgb')
            # write aperture attributes
            for j, attribute_name in enumerate(PRD_REQUIRED_ATTRIBUTES_ORDERED):
                col = j + 1
                cell = ws1.cell(column=col, row=header_row_attributes, value="{}".
                                format(attribute_name))
                cell.font = Font(name='Calibri', b=True, family=2.0, sz=15.0)
                cell.alignment = Alignment(horizontal='center')
            # write aperture values
            for i, aper_name in enumerate(aperture_names):
                aperture = aperture_collection.apertures[aper_name]
                # aperture = siaf[aper_name]
                row = i + 1 + header_row_attributes
                for j, attribute_name in enumerate(PRD_REQUIRED_ATTRIBUTES_ORDERED):
                    col = j + 1
                    cell = ws1.cell(column=col, row=row, value="{}".
                                    format(getattr(aperture, attribute_name)))
                    # Right-align everything except the string-valued columns.
                    if attribute_name not in 'InstrName AperName DDCName AperType AperShape'.\
                            split():
                        cell.alignment = Alignment(horizontal='right')
            # adjust column width
            for column_cells in ws1.columns:
                length = max(len(cell.value or '') for cell in column_cells[1:])
                ws1.column_dimensions[column_cells[0].column].width = length * 1.5
            siaf_workbook.save(filename=out_filename)
            if verbose:
                print('Wrote Siaf to xlsx file {}'.format(out_filename))
        else:
            # Any other format goes through an astropy Table, one column per
            # required attribute.
            table = Table()
            for attribute_name in PRD_REQUIRED_ATTRIBUTES_ORDERED:
                data = [getattr(aperture_collection.apertures[aperture_name], attribute_name) for
                        aperture_name in aperture_names]
                table.add_column(Column(data=data, name=attribute_name))
            table.write(out_filename, format=file_format)
            if verbose:
                print('Wrote Siaf to {} file {}'.format(file_format, out_filename))
        filenames.append(out_filename)
    return filenames
| 41 | 99 | 0.583631 |
import os
import numpy as np
import lxml.etree as ET
from astropy.time import Time
from astropy.table import Table, Column
from openpyxl import Workbook
from openpyxl.styles import Font, Color
from openpyxl.styles import Alignment
from ..version import __version__
from ..constants import _JWST_TEMPORARY_ROOT
from ..aperture import PRD_REQUIRED_ATTRIBUTES_ORDERED, SIAF_XML_FIELD_FORMAT, FLOAT_ATTRIBUTES
# dictionary used to set field precision in SIAF.XML: maps each required
# aperture attribute to its format spec from the SIAF reference table
field_names = list(SIAF_XML_FIELD_FORMAT['field_name'])
xml_decimal_precision = {
    attribute: SIAF_XML_FIELD_FORMAT['pyformat'][field_names.index(attribute)]
    for attribute in PRD_REQUIRED_ATTRIBUTES_ORDERED
}
def write_jwst_siaf(aperture_collection, filename=None, basepath=None, label=None,
file_format='xml', verbose=True):
if type(file_format) == str:
file_format = [file_format]
aperture_names = np.array([key for key in aperture_collection.apertures.keys()])
instrument = aperture_collection.apertures[aperture_names[0]].InstrName
if instrument == 'NIRCAM':
instrument = 'NIRCam'
elif instrument == 'NIRSPEC':
instrument = 'NIRSpec'
if (filename is not None) and (len(list(file_format)) != 1):
raise RuntimeError('When filename is specified, only one output format is supported')
if label is not None:
name_seed = instrument + '_SIAF_{}'.format(label)
else:
name_seed = instrument + '_SIAF'
filenames = []
username = os.getlogin()
timestamp = Time.now()
for file_format in list(file_format):
if filename is None:
if basepath is None:
basepath = _JWST_TEMPORARY_ROOT
if not os.path.isdir(basepath):
raise RuntimeError("Could not write SIAF data "
"to {}. Directory does not exist.".format(basepath))
if file_format == 'xml':
out_filename = os.path.join(basepath, name_seed+'.xml')
elif file_format == 'xlsx':
out_filename = os.path.join(basepath, name_seed+'.xlsx')
else:
out_filename = os.path.join(basepath, name_seed+'.{}'.format(file_format))
else:
out_filename = filename
if file_format == 'xml':
root = ET.Element('SiafEntries')
root.append(ET.Comment('Generated {} {}'.format(timestamp.isot, timestamp.scale)))
root.append(ET.Comment('by {}'.format(username)))
root.append(ET.Comment('pysiaf version {}'.format(__version__)))
for aperture_name in aperture_names:
aperture = aperture_collection.apertures[aperture_name]
siaf_entry = ET.SubElement(root, 'SiafEntry')
for attribute in PRD_REQUIRED_ATTRIBUTES_ORDERED:
attribute_value = getattr(aperture_collection.apertures[aperture_name],
attribute)
if attribute_value is None:
attribute_text = None
elif (aperture.AperType in ['TRANSFORM']) and \
(attribute in 'XSciRef YSciRef XSciScale YSciScale V2Ref V3Ref'.
split()):
attribute_text = '{:{prec}}'.format(attribute_value,
prec='.15e').strip()
elif attribute in FLOAT_ATTRIBUTES:
attribute_text = '{:{prec}}'.format(
attribute_value, prec=xml_decimal_precision[attribute]).strip()
else:
attribute_text = str(attribute_value)
if (not isinstance(attribute_value, str)) and (attribute_text is not None):
if np.isnan(attribute_value):
attribute_text = None
ET.SubElement(siaf_entry, attribute).text = attribute_text
doc = ET.ElementTree(root)
doc.write(out_filename, pretty_print=True, xml_declaration=False)
if verbose:
print('Wrote Siaf to xml file {}'.format(out_filename))
elif file_format == 'xlsx':
siaf_workbook = Workbook()
ws1 = siaf_workbook.active
ws1.title = 'SIAF'
header_row_description = 1
header_row_attributes = 2
for j, attribute_name in enumerate(PRD_REQUIRED_ATTRIBUTES_ORDERED):
col = j + 1
if attribute_name == 'InstrName':
text = 'Aperture Basic Info'
elif attribute_name == 'XDetSize':
text = 'Detector Frame'
elif attribute_name == 'XSciSize':
text = 'Science Frame'
elif attribute_name == 'V2Ref':
text = 'V Frame'
elif attribute_name == 'V2IdlYAngle':
text = 'Frame Relationships'
elif attribute_name == 'XIdlVert1':
text = 'Vertices'
elif attribute_name == 'Sci2IdlDeg':
text = 'Science to Ideal Polynomial'
else:
text = ''
cell = ws1.cell(column=col, row=header_row_description, value="{}".format(text))
cell.font = Font(name='Courier', b=True, i=True, family=3.0, sz=14.0)
for j, attribute_name in enumerate(PRD_REQUIRED_ATTRIBUTES_ORDERED):
col = j + 1
cell = ws1.cell(column=col, row=header_row_attributes, value="{}".
format(attribute_name))
cell.font = Font(name='Calibri', b=True, family=2.0, sz=15.0)
cell.alignment = Alignment(horizontal='center')
for i, aper_name in enumerate(aperture_names):
aperture = aperture_collection.apertures[aper_name]
row = i + 1 + header_row_attributes
for j, attribute_name in enumerate(PRD_REQUIRED_ATTRIBUTES_ORDERED):
col = j + 1
cell = ws1.cell(column=col, row=row, value="{}".
format(getattr(aperture, attribute_name)))
if attribute_name not in 'InstrName AperName DDCName AperType AperShape'.\
split():
cell.alignment = Alignment(horizontal='right')
for column_cells in ws1.columns:
length = max(len(cell.value or '') for cell in column_cells[1:])
ws1.column_dimensions[column_cells[0].column].width = length * 1.5
siaf_workbook.save(filename=out_filename)
if verbose:
print('Wrote Siaf to xlsx file {}'.format(out_filename))
else:
table = Table()
for attribute_name in PRD_REQUIRED_ATTRIBUTES_ORDERED:
data = [getattr(aperture_collection.apertures[aperture_name], attribute_name) for
aperture_name in aperture_names]
table.add_column(Column(data=data, name=attribute_name))
table.write(out_filename, format=file_format)
if verbose:
print('Wrote Siaf to {} file {}'.format(file_format, out_filename))
filenames.append(out_filename)
return filenames
| true | true |
f736fb7dc6e2885672e4606d70e8c596b33b7932 | 10,770 | py | Python | featuretools/primitives/standard/aggregation_primitives.py | vnarayan13/featuretools | a86b6d8df246a13558d19915b15230c418ad27ab | [
"BSD-3-Clause"
] | null | null | null | featuretools/primitives/standard/aggregation_primitives.py | vnarayan13/featuretools | a86b6d8df246a13558d19915b15230c418ad27ab | [
"BSD-3-Clause"
] | null | null | null | featuretools/primitives/standard/aggregation_primitives.py | vnarayan13/featuretools | a86b6d8df246a13558d19915b15230c418ad27ab | [
"BSD-3-Clause"
] | null | null | null | from __future__ import division
from datetime import datetime, timedelta
import numpy as np
import pandas as pd
from ..base.aggregation_primitive_base import (
AggregationPrimitive,
make_agg_primitive
)
from featuretools.variable_types import (
Boolean,
DatetimeTimeIndex,
Discrete,
Index,
Numeric,
Variable
)
class Count(AggregationPrimitive):
    """Counts the number of non null values."""
    name = "count"
    # Doubly-nested list: Count applies to the child entity's index variable.
    input_types = [[Index]]
    return_type = Numeric
    # Stacking COUNT on another COUNT is never useful.
    stack_on_self = False
    default_value = 0
    def get_function(self):
        # String alias understood by the pandas groupby/agg backend.
        return 'count'
    def generate_name(self, base_feature_names, child_entity_id,
                      parent_entity_id, where_str, use_prev_str):
        # COUNT features are named after the child entity rather than a
        # base feature, e.g. "COUNT(log)".
        return u"COUNT(%s%s%s)" % (child_entity_id,
                                   where_str, use_prev_str)
class Sum(AggregationPrimitive):
    """Sums elements of a numeric or boolean feature."""
    name = "sum"
    input_types = [Numeric]
    return_type = Numeric
    stack_on_self = False
    # Do not allow SUM to be stacked on top of COUNT features.
    stack_on_exclude = [Count]
    default_value = 0
    def get_function(self):
        return np.sum
class Mean(AggregationPrimitive):
    """Computes the average value of a numeric feature.
    By default NaN values are skipped when computing the mean
    (``skipna=True``); pass ``skipna=False`` to propagate NaNs.
    """
    name = "mean"
    input_types = [Numeric]
    return_type = Numeric
    def __init__(self, skipna=True):
        # skipna: whether NaN values are excluded from the average.
        self.skipna = skipna
    def get_function(self):
        if self.skipna:
            # np.mean of series is functionally nanmean
            return np.mean
        def mean(series):
            # Operate on the raw ndarray so NaNs propagate into the result
            # instead of being skipped by pandas.
            return np.mean(series.values)
        return mean
    def generate_name(self, base_feature_names, child_entity_id,
                      parent_entity_id, where_str, use_prev_str):
        # Only advertise skipna in the feature name when it is non-default.
        skipna = ""
        if not self.skipna:
            skipna = ", skipna=False"
        base_features_str = ", ".join(base_feature_names)
        return u"%s(%s.%s%s%s%s)" % (self.name.upper(),
                                     child_entity_id,
                                     base_features_str,
                                     where_str,
                                     use_prev_str,
                                     skipna)
class Mode(AggregationPrimitive):
    """Finds the most common element in a categorical feature."""
    name = "mode"
    input_types = [Discrete]
    # return_type of None: no single fixed return type (presumably the
    # framework infers it from the input variable) -- confirm upstream.
    return_type = None
    def get_function(self):
        def pd_mode(s):
            # Series.mode() is empty for an all-NaN/empty input, in which
            # case .get(0, ...) falls back to NaN.
            return s.mode().get(0, np.nan)
        return pd_mode
# Min is generated from np.min rather than written as a class.
Min = make_agg_primitive(
    np.min,
    [Numeric],
    Numeric,
    name="Min",
    stack_on_self=False,
    description="Finds the minimum non-null value of a numeric feature.")
class Max(AggregationPrimitive):
    """Finds the maximum non-null value of a numeric feature."""
    name = "max"
    input_types = [Numeric]
    return_type = Numeric
    stack_on_self = False
    def get_function(self):
        return np.max
class NUnique(AggregationPrimitive):
    """Returns the number of unique categorical variables."""
    name = "num_unique"
    input_types = [Discrete]
    return_type = Numeric
    stack_on_self = False
    def get_function(self):
        # note: returning pd.Series.nunique errors for python2,
        # so using this branching code path while we support python2
        from sys import version_info
        if version_info.major < 3:
            def nunique(x):
                return pd.Series(x).nunique()
            return nunique
        else:
            return pd.Series.nunique
class NumTrue(AggregationPrimitive):
    """Finds the number of 'True' values in a boolean."""
    name = "num_true"
    input_types = [Boolean]
    return_type = Numeric
    default_value = 0
    stack_on = []
    stack_on_exclude = []
    def get_function(self):
        # Booleans sum as 1/0, so np.sum counts the True values.
        return np.sum
class PercentTrue(AggregationPrimitive):
    """Finds the percent of 'True' values in a boolean feature."""
    name = "percent_true"
    input_types = [Boolean]
    return_type = Numeric
    stack_on = []
    stack_on_exclude = []
    default_value = 0
    def get_function(self):
        def percent_true(s):
            # Treat missing values as False before averaging.
            return s.fillna(0).mean()
        return percent_true
class NMostCommon(AggregationPrimitive):
    """Finds the N most common elements in a categorical feature."""
    name = "n_most_common"
    input_types = [Discrete]
    return_type = Discrete
    def __init__(self, n=3):
        # One output feature per requested element.
        self.number_output_features = n
    def get_function(self):
        def n_most_common(x, n=self.number_output_features):
            # value_counts() is sorted by frequency; take the top-n labels.
            array = np.array(x.value_counts()[:n].index)
            if len(array) < n:
                # Pad with NaN so the output always has exactly n entries.
                filler = np.full(n - len(array), np.nan)
                array = np.append(array, filler)
            return array
        return n_most_common
class AvgTimeBetween(AggregationPrimitive):
    """Computes the average time between consecutive events.
    Note: equivalent to Mean(Diff(time_index)), but more performant
    """
    # Potentially unnecessary if we add a trans_feat that
    # calculates the difference between events. DFS
    # should then calculate the average of that trans_feat
    # which amounts to AvgTimeBetween
    name = "avg_time_between"
    input_types = [DatetimeTimeIndex]
    return_type = Numeric
    def get_function(self):
        def pd_avg_time_between(x):
            """Assumes time scales are closer to order
            of seconds than to nanoseconds
            if times are much closer to nanoseconds
            we could get some floating point errors
            this can be fixed with another function
            that calculates the mean before converting
            to seconds
            """
            x = x.dropna()
            if x.shape[0] < 2:
                # Need at least two events to have a gap between them.
                return np.nan
            if isinstance(x.iloc[0], (pd.Timestamp, datetime)):
                # Timestamps become nanoseconds since the epoch.
                x = x.astype('int64')
            # use len(x)-1 because we care about difference
            # between values, len(x)-1 = len(diff(x))
            avg = (x.max() - x.min()) / (len(x) - 1)
            # Convert nanoseconds to seconds.
            avg = avg * 1e-9
            # long form:
            # diff_in_ns = x.diff().iloc[1:].astype('int64')
            # diff_in_seconds = diff_in_ns * 1e-9
            # avg = diff_in_seconds.mean()
            return avg
        return pd_avg_time_between
class Median(AggregationPrimitive):
    """Finds the median value of any feature with well-ordered values."""
    name = "median"
    input_types = [Numeric]
    return_type = Numeric
    def get_function(self):
        # pandas Series.median skips NaN values.
        return lambda x: x.median()
class Skew(AggregationPrimitive):
    """Computes the skewness of a data set.
    For normally distributed data, the skewness should be about 0. A skewness
    value > 0 means that there is more weight in the right tail of the
    distribution.
    """
    name = "skew"
    input_types = [Numeric]
    return_type = Numeric
    stack_on = []
    stack_on_self = False
    def get_function(self):
        # String alias understood by the pandas groupby/agg backend.
        return 'skew'
class Std(AggregationPrimitive):
    """Finds the standard deviation of a numeric feature ignoring null values.
    """
    name = "std"
    input_types = [Numeric]
    return_type = Numeric
    stack_on_self = False
    def get_function(self):
        return np.std
class Last(AggregationPrimitive):
    """Returns the last value."""
    name = "last"
    input_types = [Variable]
    return_type = None
    stack_on_self = False
    def get_function(self):
        def pd_last(x):
            # Last element by position in the grouped values.
            return x.iloc[-1]
        return pd_last
class Any(AggregationPrimitive):
    """Test if any value is 'True'."""
    name = "any"
    input_types = [Boolean]
    return_type = Boolean
    stack_on_self = False
    def get_function(self):
        return np.any
class All(AggregationPrimitive):
    """Test if all values are 'True'."""
    name = "all"
    input_types = [Boolean]
    return_type = Boolean
    stack_on_self = False
    def get_function(self):
        return np.all
class TimeSinceLast(AggregationPrimitive):
    """Time since last related instance."""
    name = "time_since_last"
    input_types = [DatetimeTimeIndex]
    return_type = Numeric
    # The cutoff time is injected as the ``time`` keyword argument.
    uses_calc_time = True
    def get_function(self):
        def time_since_last(values, time=None):
            # Seconds between the cutoff time and the newest timestamp.
            time_since = time - values.iloc[-1]
            return time_since.total_seconds()
        return time_since_last
class TimeSinceFirst(AggregationPrimitive):
    """Time since first related instance."""
    name = "time_since_first"
    input_types = [DatetimeTimeIndex]
    return_type = Numeric
    uses_calc_time = True
    def get_function(self):
        def time_since_first(values, time=None):
            # Seconds between the cutoff time and the oldest timestamp.
            time_since = time - values.iloc[0]
            return time_since.total_seconds()
        return time_since_first
class Trend(AggregationPrimitive):
    """Calculates the slope of the linear trend of a variable over time."""
    name = "trend"
    input_types = [Numeric, DatetimeTimeIndex]
    return_type = Numeric
    def get_function(self):
        def pd_trend(y, x):
            # Drop rows where either value or time is missing.
            df = pd.DataFrame({"x": x, "y": y}).dropna()
            if df.shape[0] <= 2:
                return np.nan
            # Normalize times (and datetime/timedelta values) to floats in
            # a sensible unit before fitting.
            if isinstance(df['x'].iloc[0], (datetime, pd.Timestamp)):
                x = convert_datetime_to_floats(df['x'])
            else:
                x = df['x'].values
            if isinstance(df['y'].iloc[0], (datetime, pd.Timestamp)):
                y = convert_datetime_to_floats(df['y'])
            elif isinstance(df['y'].iloc[0], (timedelta, pd.Timedelta)):
                y = convert_timedelta_to_floats(df['y'])
            else:
                y = df['y'].values
            # Center both series before the least-squares fit.
            x = x - x.mean()
            y = y - y.mean()
            # prevent divide by zero error
            if len(np.unique(x)) == 1:
                return 0
            # consider scipy.stats.linregress for large n cases
            coefficients = np.polyfit(x, y, 1)
            # Slope of the degree-1 fit.
            return coefficients[0]
        return pd_trend
def convert_datetime_to_floats(x):
    """Convert a Series of datetimes to a float ndarray scaled to whole units.

    The unit (days/hours/minutes/seconds) is chosen from the first value via
    ``find_dividend_by_unit`` and applied to the whole array.
    """
    seconds_of_first = int(x.iloc[0].value * 1e-9)
    unit_divisor = find_dividend_by_unit(seconds_of_first)
    # Nanoseconds since the epoch, as float64.
    values = pd.to_numeric(x).astype(np.float64).values
    # Nanoseconds -> seconds -> chosen unit, in one scaling step.
    values *= (1e-9 / unit_divisor)
    return values
def convert_timedelta_to_floats(x):
    """Convert a Series of timedeltas to a float ndarray scaled to whole units.

    The unit (days/hours/minutes/seconds) is chosen from the first value via
    ``find_dividend_by_unit`` and applied to the whole array.
    """
    unit_divisor = find_dividend_by_unit(int(x.iloc[0].total_seconds()))
    total_seconds = pd.TimedeltaIndex(x).total_seconds().astype(np.float64)
    return total_seconds / unit_divisor
def find_dividend_by_unit(time):
    """Find the divisor that expresses ``time`` (in seconds) in whole units.

    Checks days (86400), hours (3600) and minutes (60) in that order and
    returns the first unit that divides ``time`` evenly; falls back to 1
    (seconds) when none do.

    :param time: duration in seconds (callers pass an int)
    :return: 86400, 3600, 60 or 1
    """
    for dividend in (86400, 3600, 60):
        # Integer modulo is exact; the previous ``round(time / dividend) ==
        # time / dividend`` check relied on float division and could
        # misclassify very large values due to floating-point rounding.
        if time % dividend == 0:
            return dividend
    return 1
| 27.128463 | 78 | 0.608821 | from __future__ import division
from datetime import datetime, timedelta
import numpy as np
import pandas as pd
from ..base.aggregation_primitive_base import (
AggregationPrimitive,
make_agg_primitive
)
from featuretools.variable_types import (
Boolean,
DatetimeTimeIndex,
Discrete,
Index,
Numeric,
Variable
)
class Count(AggregationPrimitive):
name = "count"
input_types = [[Index]]
return_type = Numeric
stack_on_self = False
default_value = 0
def get_function(self):
return 'count'
def generate_name(self, base_feature_names, child_entity_id,
parent_entity_id, where_str, use_prev_str):
return u"COUNT(%s%s%s)" % (child_entity_id,
where_str, use_prev_str)
class Sum(AggregationPrimitive):
name = "sum"
input_types = [Numeric]
return_type = Numeric
stack_on_self = False
stack_on_exclude = [Count]
default_value = 0
def get_function(self):
return np.sum
class Mean(AggregationPrimitive):
name = "mean"
input_types = [Numeric]
return_type = Numeric
def __init__(self, skipna=True):
self.skipna = skipna
def get_function(self):
if self.skipna:
return np.mean
def mean(series):
return np.mean(series.values)
return mean
def generate_name(self, base_feature_names, child_entity_id,
parent_entity_id, where_str, use_prev_str):
skipna = ""
if not self.skipna:
skipna = ", skipna=False"
base_features_str = ", ".join(base_feature_names)
return u"%s(%s.%s%s%s%s)" % (self.name.upper(),
child_entity_id,
base_features_str,
where_str,
use_prev_str,
skipna)
class Mode(AggregationPrimitive):
name = "mode"
input_types = [Discrete]
return_type = None
def get_function(self):
def pd_mode(s):
return s.mode().get(0, np.nan)
return pd_mode
Min = make_agg_primitive(
np.min,
[Numeric],
Numeric,
name="Min",
stack_on_self=False,
description="Finds the minimum non-null value of a numeric feature.")
class Max(AggregationPrimitive):
name = "max"
input_types = [Numeric]
return_type = Numeric
stack_on_self = False
def get_function(self):
return np.max
class NUnique(AggregationPrimitive):
name = "num_unique"
input_types = [Discrete]
return_type = Numeric
stack_on_self = False
def get_function(self):
from sys import version_info
if version_info.major < 3:
def nunique(x):
return pd.Series(x).nunique()
return nunique
else:
return pd.Series.nunique
class NumTrue(AggregationPrimitive):
name = "num_true"
input_types = [Boolean]
return_type = Numeric
default_value = 0
stack_on = []
stack_on_exclude = []
def get_function(self):
return np.sum
class PercentTrue(AggregationPrimitive):
name = "percent_true"
input_types = [Boolean]
return_type = Numeric
stack_on = []
stack_on_exclude = []
default_value = 0
def get_function(self):
def percent_true(s):
return s.fillna(0).mean()
return percent_true
class NMostCommon(AggregationPrimitive):
name = "n_most_common"
input_types = [Discrete]
return_type = Discrete
def __init__(self, n=3):
self.number_output_features = n
def get_function(self):
def n_most_common(x, n=self.number_output_features):
array = np.array(x.value_counts()[:n].index)
if len(array) < n:
filler = np.full(n - len(array), np.nan)
array = np.append(array, filler)
return array
return n_most_common
class AvgTimeBetween(AggregationPrimitive):
name = "avg_time_between"
input_types = [DatetimeTimeIndex]
return_type = Numeric
def get_function(self):
def pd_avg_time_between(x):
x = x.dropna()
if x.shape[0] < 2:
return np.nan
if isinstance(x.iloc[0], (pd.Timestamp, datetime)):
x = x.astype('int64')
avg = (x.max() - x.min()) / (len(x) - 1)
avg = avg * 1e-9
return avg
return pd_avg_time_between
class Median(AggregationPrimitive):
name = "median"
input_types = [Numeric]
return_type = Numeric
def get_function(self):
return lambda x: x.median()
class Skew(AggregationPrimitive):
name = "skew"
input_types = [Numeric]
return_type = Numeric
stack_on = []
stack_on_self = False
def get_function(self):
return 'skew'
class Std(AggregationPrimitive):
name = "std"
input_types = [Numeric]
return_type = Numeric
stack_on_self = False
def get_function(self):
return np.std
class Last(AggregationPrimitive):
name = "last"
input_types = [Variable]
return_type = None
stack_on_self = False
def get_function(self):
def pd_last(x):
return x.iloc[-1]
return pd_last
class Any(AggregationPrimitive):
name = "any"
input_types = [Boolean]
return_type = Boolean
stack_on_self = False
def get_function(self):
return np.any
class All(AggregationPrimitive):
name = "all"
input_types = [Boolean]
return_type = Boolean
stack_on_self = False
def get_function(self):
return np.all
class TimeSinceLast(AggregationPrimitive):
name = "time_since_last"
input_types = [DatetimeTimeIndex]
return_type = Numeric
uses_calc_time = True
def get_function(self):
def time_since_last(values, time=None):
time_since = time - values.iloc[-1]
return time_since.total_seconds()
return time_since_last
class TimeSinceFirst(AggregationPrimitive):
name = "time_since_first"
input_types = [DatetimeTimeIndex]
return_type = Numeric
uses_calc_time = True
def get_function(self):
def time_since_first(values, time=None):
time_since = time - values.iloc[0]
return time_since.total_seconds()
return time_since_first
class Trend(AggregationPrimitive):
name = "trend"
input_types = [Numeric, DatetimeTimeIndex]
return_type = Numeric
def get_function(self):
def pd_trend(y, x):
df = pd.DataFrame({"x": x, "y": y}).dropna()
if df.shape[0] <= 2:
return np.nan
if isinstance(df['x'].iloc[0], (datetime, pd.Timestamp)):
x = convert_datetime_to_floats(df['x'])
else:
x = df['x'].values
if isinstance(df['y'].iloc[0], (datetime, pd.Timestamp)):
y = convert_datetime_to_floats(df['y'])
elif isinstance(df['y'].iloc[0], (timedelta, pd.Timedelta)):
y = convert_timedelta_to_floats(df['y'])
else:
y = df['y'].values
x = x - x.mean()
y = y - y.mean()
if len(np.unique(x)) == 1:
return 0
coefficients = np.polyfit(x, y, 1)
return coefficients[0]
return pd_trend
def convert_datetime_to_floats(x):
first = int(x.iloc[0].value * 1e-9)
x = pd.to_numeric(x).astype(np.float64).values
dividend = find_dividend_by_unit(first)
x *= (1e-9 / dividend)
return x
def convert_timedelta_to_floats(x):
first = int(x.iloc[0].total_seconds())
dividend = find_dividend_by_unit(first)
x = pd.TimedeltaIndex(x).total_seconds().astype(np.float64) / dividend
return x
def find_dividend_by_unit(time):
for dividend in [86400, 3600, 60]:
div = time / dividend
if round(div) == div:
return dividend
return 1
| true | true |
f736fbdfc59a36543527bf23822bde24b59993dc | 1,447 | py | Python | functions.py | akhil-eppa/Multi-Source-Plagiarism | bf679bdb8ec7c4b719a54002a97c1032bbc37709 | [
"MIT"
] | null | null | null | functions.py | akhil-eppa/Multi-Source-Plagiarism | bf679bdb8ec7c4b719a54002a97c1032bbc37709 | [
"MIT"
] | null | null | null | functions.py | akhil-eppa/Multi-Source-Plagiarism | bf679bdb8ec7c4b719a54002a97c1032bbc37709 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Mon Sep 7 14:11:59 2020
@author: Akhil
"""
from __future__ import print_function
import re
from pathlib import Path
from pycparser import c_ast, c_generator, parse_file
from pycparser.c_ast import FuncDef
z = []
class FuncDefVisitor(c_ast.NodeVisitor):
    # AST visitor that records the source coordinate (file:line) of every
    # C function definition into the module-level list ``z``.
    def visit_FuncDef(self, node):
        # node.decl.coord is a pycparser Coord object; store its string form.
        z.append(str(node.decl.coord))
def show_func_defs(filename):
    """Yield (declaration, body) source strings for each function in a C file.

    Runs the file through cpp with the fake stdlib headers, then regenerates
    C source text from each function-definition AST node.
    """
    generator = c_generator.CGenerator()
    ast = parse_file(filename, use_cpp=True, cpp_args=[r"-Ifake_headers", r"-nostdinc"])
    for node in ast.ext:
        if not isinstance(node, FuncDef):
            continue
        yield generator.visit(node.decl), generator.visit(node.body)
def get_parsed_lines(filename):
    """Count the regenerated source lines of every function in a C file.

    Each function contributes its non-empty body lines plus one line for
    the declaration itself.
    """
    total = 0
    for _, body in show_func_defs(filename):
        non_empty = [line for line in body.split("\n") if line]
        total += len(non_empty) + 1
    return total
def get_functions(filename):
    """Map each function declaration in a C file to its body text.

    Copies ``filename`` into a local ``<stem>_preproc.c`` working file
    (``show_func_defs`` needs a file path to preprocess), parses it, and
    returns a dict mapping each function declaration string to its
    regenerated body string.

    :param filename: path to the C source file
    :return: dict of {declaration string: body string}
    """
    preproc_path = Path(f"{Path(Path(filename).name).stem}_preproc.c")
    preproc_path.write_text(Path(filename).read_text())
    try:
        return {decl: body for decl, body in show_func_defs(str(preproc_path))}
    finally:
        # Always remove the working copy, even if parsing raises; the
        # previous version leaked the file on error.
        preproc_path.unlink()
if __name__ == "__main__":
a = get_parsed_lines("test.c")
print(a)
| 25.385965 | 89 | 0.639254 |
from __future__ import print_function
import re
from pathlib import Path
from pycparser import c_ast, c_generator, parse_file
from pycparser.c_ast import FuncDef
z = []
class FuncDefVisitor(c_ast.NodeVisitor):
def visit_FuncDef(self, node):
z.append(str(node.decl.coord))
def show_func_defs(filename):
ast = parse_file(filename, use_cpp=True, cpp_args=[r"-Ifake_headers", r"-nostdinc"])
generator = c_generator.CGenerator()
for node in ast.ext:
if isinstance(node, FuncDef):
yield generator.visit(node.decl), generator.visit(node.body)
def get_parsed_lines(filename):
lines = 0
for _, body in show_func_defs(filename):
lines += len(list(filter(lambda x: x, body.split("\n")))) + 1
return lines
def get_functions(filename):
preproc_filename = f"{Path(Path(filename).name).stem}_preproc.c"
with open(filename) as file:
x = file.read()
with open(preproc_filename, "w") as preproc:
preproc.write(x)
decl_to_func = {}
for decl, body in show_func_defs(preproc_filename):
decl_to_func[decl] = body
Path(preproc_filename).unlink()
return decl_to_func
if __name__ == "__main__":
a = get_parsed_lines("test.c")
print(a)
| true | true |
f736fd3b37aa750829fbe390a72071cbcaf69ba9 | 17,966 | py | Python | airflow/models/dagrun.py | wileeam/airflow | f46be8152a4d89c57db4ca46f5b3339e4876b723 | [
"Apache-2.0"
] | 1 | 2020-11-16T15:43:52.000Z | 2020-11-16T15:43:52.000Z | airflow/models/dagrun.py | devlocalca/airflow | 58c3542ed25061320ce61dbe0adf451a44c738dd | [
"Apache-2.0"
] | null | null | null | airflow/models/dagrun.py | devlocalca/airflow | 58c3542ed25061320ce61dbe0adf451a44c738dd | [
"Apache-2.0"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Optional, cast
from sqlalchemy import (
Boolean, Column, DateTime, Index, Integer, PickleType, String, UniqueConstraint, and_, func, or_,
)
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import synonym
from sqlalchemy.orm.session import Session
from airflow.exceptions import AirflowException
from airflow.models.base import ID_LEN, Base
from airflow.stats import Stats
from airflow.ti_deps.dep_context import SCHEDULEABLE_STATES, DepContext
from airflow.utils import timezone
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.session import provide_session
from airflow.utils.sqlalchemy import UtcDateTime
from airflow.utils.state import State
class DagRun(Base, LoggingMixin):
"""
DagRun describes an instance of a Dag. It can be created
by the scheduler (for regular runs) or by an external trigger
"""
__tablename__ = "dag_run"
ID_PREFIX = 'scheduled__'
ID_FORMAT_PREFIX = ID_PREFIX + '{0}'
id = Column(Integer, primary_key=True)
dag_id = Column(String(ID_LEN))
execution_date = Column(UtcDateTime, default=timezone.utcnow)
start_date = Column(UtcDateTime, default=timezone.utcnow)
end_date = Column(UtcDateTime)
_state = Column('state', String(50), default=State.RUNNING)
run_id = Column(String(ID_LEN))
external_trigger = Column(Boolean, default=True)
conf = Column(PickleType)
dag = None
__table_args__ = (
Index('dag_id_state', dag_id, _state),
UniqueConstraint('dag_id', 'execution_date'),
UniqueConstraint('dag_id', 'run_id'),
)
    def __init__(self, dag_id=None, run_id=None, execution_date=None, start_date=None, external_trigger=None,
                 conf=None, state=None):
        """
        :param dag_id: id of the DAG this run belongs to
        :param run_id: human-readable identifier for this run
        :param execution_date: logical date/time this run covers
        :param start_date: when the run actually started
        :param external_trigger: True if triggered outside the scheduler
        :param conf: user-supplied configuration payload (pickled in the DB)
        :param state: initial state; assigning through the ``state`` synonym
            also maintains ``end_date`` (see ``set_state``)
        """
        self.dag_id = dag_id
        self.run_id = run_id
        self.execution_date = execution_date
        self.start_date = start_date
        self.external_trigger = external_trigger
        self.conf = conf
        self.state = state
        super().__init__()
def __repr__(self):
return (
'<DagRun {dag_id} @ {execution_date}: {run_id}, '
'externally triggered: {external_trigger}>'
).format(
dag_id=self.dag_id,
execution_date=self.execution_date,
run_id=self.run_id,
external_trigger=self.external_trigger)
    def get_state(self):
        # Raw accessor for the ``_state`` column; exposed through the
        # ``state`` synonym below.
        return self._state
    def set_state(self, state):
        if self._state != state:
            self._state = state
            # end_date is only meaningful once the run has finished; clear
            # it again if the run is moved back to a non-finished state.
            self.end_date = timezone.utcnow() if self._state in State.finished() else None
    @declared_attr
    def state(self):
        # Route attribute access on ``state`` through get_state/set_state so
        # the end_date bookkeeping above always runs.
        return synonym('_state',
                       descriptor=property(self.get_state, self.set_state))
@classmethod
def id_for_date(cls, date, prefix=ID_FORMAT_PREFIX):
return prefix.format(date.isoformat()[:19])
    @provide_session
    def refresh_from_db(self, session=None):
        """
        Reloads the current dagrun from the database
        :param session: database session
        """
        DR = DagRun
        # Both sides of the comparison are cast to DateTime, presumably to
        # normalize timezone/precision handling across DB backends -- confirm.
        exec_date = func.cast(self.execution_date, DateTime)
        dr = session.query(DR).filter(
            DR.dag_id == self.dag_id,
            func.cast(DR.execution_date, DateTime) == exec_date,
            DR.run_id == self.run_id
        ).one()
        # Only the primary key and state are refreshed here.
        self.id = dr.id
        self.state = dr.state
    @staticmethod
    @provide_session
    def find(dag_id=None, run_id=None, execution_date=None,
             state=None, external_trigger=None, no_backfills=False,
             session=None):
        """
        Returns a set of dag runs for the given search criteria.
        :param dag_id: the dag_id to find dag runs for
        :type dag_id: int, list
        :param run_id: defines the run id for this dag run
        :type run_id: str
        :param execution_date: the execution date; a single datetime or a
            list of datetimes to match any of
        :type execution_date: datetime.datetime
        :param state: the state of the dag run
        :type state: str
        :param external_trigger: whether this dag run is externally triggered
        :type external_trigger: bool
        :param no_backfills: return no backfills (True), return all (False).
            Defaults to False
        :type no_backfills: bool
        :param session: database session
        :type session: sqlalchemy.orm.session.Session
        """
        DR = DagRun
        # Each provided criterion narrows the query; omitted ones are ignored.
        qry = session.query(DR)
        if dag_id:
            qry = qry.filter(DR.dag_id == dag_id)
        if run_id:
            qry = qry.filter(DR.run_id == run_id)
        if execution_date:
            if isinstance(execution_date, list):
                qry = qry.filter(DR.execution_date.in_(execution_date))
            else:
                qry = qry.filter(DR.execution_date == execution_date)
        if state:
            qry = qry.filter(DR.state == state)
        # Checked against None (not truthiness) so external_trigger=False
        # still filters.
        if external_trigger is not None:
            qry = qry.filter(DR.external_trigger == external_trigger)
        if no_backfills:
            # in order to prevent a circular dependency
            from airflow.jobs import BackfillJob
            # Backfill runs are identified by their run_id prefix.
            qry = qry.filter(DR.run_id.notlike(BackfillJob.ID_PREFIX + '%'))
        dr = qry.order_by(DR.execution_date).all()
        return dr
    @provide_session
    def get_task_instances(self, state=None, session=None):
        """
        Returns the task instances for this dag run

        :param state: restrict to task instances in this state; either a
            single state string or a collection of states. ``None`` entries
            in a collection match task instances whose state is NULL.
        :param session: database session
        """
        from airflow.models.taskinstance import TaskInstance  # Avoid circular import
        tis = session.query(TaskInstance).filter(
            TaskInstance.dag_id == self.dag_id,
            TaskInstance.execution_date == self.execution_date,
        )
        if state:
            if isinstance(state, str):
                tis = tis.filter(TaskInstance.state == state)
            else:
                # this is required to deal with NULL values
                if None in state:
                    if all(x is None for x in state):
                        # Only NULL states requested.
                        tis = tis.filter(TaskInstance.state.is_(None))
                    else:
                        # Mixed: match the non-NULL states OR a NULL state,
                        # since SQL IN never matches NULL.
                        not_none_state = [s for s in state if s]
                        tis = tis.filter(
                            or_(TaskInstance.state.in_(not_none_state),
                                TaskInstance.state.is_(None))
                        )
                else:
                    tis = tis.filter(TaskInstance.state.in_(state))
        if self.dag and self.dag.partial:
            # For partial DAGs, only return instances of tasks the DAG
            # object actually contains.
            tis = tis.filter(TaskInstance.task_id.in_(self.dag.task_ids))
        return tis.all()
@provide_session
def get_task_instance(self, task_id, session=None):
"""
Returns the task instance specified by task_id for this dag run
:param task_id: the task id
"""
from airflow.models.taskinstance import TaskInstance # Avoid circular import
TI = TaskInstance
ti = session.query(TI).filter(
TI.dag_id == self.dag_id,
TI.execution_date == self.execution_date,
TI.task_id == task_id
).first()
return ti
def get_dag(self):
"""
Returns the Dag associated with this DagRun.
:return: DAG
"""
if not self.dag:
raise AirflowException("The DAG (.dag) for {} needs to be set"
.format(self))
return self.dag
@provide_session
def get_previous_dagrun(self, state: Optional[str] = None, session: Session = None) -> Optional['DagRun']:
"""The previous DagRun, if there is one"""
session = cast(Session, session) # mypy
filters = [
DagRun.dag_id == self.dag_id,
DagRun.execution_date < self.execution_date,
]
if state is not None:
filters.append(DagRun.state == state)
return session.query(DagRun).filter(
*filters
).order_by(
DagRun.execution_date.desc()
).first()
    @provide_session
    def get_previous_scheduled_dagrun(self, session=None):
        """The previous, SCHEDULED DagRun, if there is one"""
        dag = self.get_dag()
        # Uses the DAG's schedule to compute the exact previous execution
        # date, so any externally-triggered runs in between are skipped.
        return session.query(DagRun).filter(
            DagRun.dag_id == self.dag_id,
            DagRun.execution_date == dag.previous_schedule(self.execution_date)
        ).first()
    @provide_session
    def update_state(self, session=None):
        """
        Determines the overall state of the DagRun based on the state
        of its TaskInstances.
        :return: ready_tis: the tis that can be scheduled in the current loop
        :rtype ready_tis: list[airflow.models.TaskInstance]
        """
        dag = self.get_dag()
        ready_tis = []
        # Pull every TI of this run in an "active" state (incl. SHUTDOWN) so the
        # run's overall state can be derived from them.
        tis = [ti for ti in self.get_task_instances(session=session,
                                                    state=State.task_states + (State.SHUTDOWN,))]
        self.log.debug("number of tis tasks for %s: %s task(s)", self, len(tis))
        # Attach the DAG's task object to each TI; needed below for the
        # depends_on_past / task_concurrency checks.
        for ti in list(tis):
            ti.task = dag.get_task(ti.task_id)
        start_dttm = timezone.utcnow()
        unfinished_tasks = [t for t in tis if t.state in State.unfinished()]
        finished_tasks = [t for t in tis if t.state in State.finished() + [State.UPSTREAM_FAILED]]
        none_depends_on_past = all(not t.task.depends_on_past for t in unfinished_tasks)
        none_task_concurrency = all(t.task.task_concurrency is None
                                    for t in unfinished_tasks)
        # small speed up
        # Only evaluate dependencies when no unfinished task needs cross-run
        # ordering (depends_on_past) or a per-task concurrency limit.
        if unfinished_tasks and none_depends_on_past and none_task_concurrency:
            scheduleable_tasks = [ut for ut in unfinished_tasks if ut.state in SCHEDULEABLE_STATES]
            self.log.debug("number of scheduleable tasks for %s: %s task(s)", self, len(scheduleable_tasks))
            ready_tis, changed_tis = self._get_ready_tis(scheduleable_tasks, finished_tasks, session)
            self.log.debug("ready tis length for %s: %s task(s)", self, len(ready_tis))
            # The run is only deadlocked when nothing is ready, nothing is merely
            # waiting out a retry/reschedule period, and nothing changed state.
            are_runnable_tasks = ready_tis or self._are_premature_tis(
                unfinished_tasks, finished_tasks, session) or changed_tis
        duration = (timezone.utcnow() - start_dttm)
        Stats.timing("dagrun.dependency-check.{}".format(self.dag_id), duration)
        # Terminal state of the run is decided by its leaf task instances.
        leaf_tis = [ti for ti in tis if ti.task_id in {t.task_id for t in dag.leaves}]
        # if all roots finished and at least one failed, the run failed
        if not unfinished_tasks and any(
            leaf_ti.state in {State.FAILED, State.UPSTREAM_FAILED} for leaf_ti in leaf_tis
        ):
            self.log.info('Marking run %s failed', self)
            self.set_state(State.FAILED)
            dag.handle_callback(self, success=False, reason='task_failure',
                                session=session)
        # if all leafs succeeded and no unfinished tasks, the run succeeded
        elif not unfinished_tasks and all(
            leaf_ti.state in {State.SUCCESS, State.SKIPPED} for leaf_ti in leaf_tis
        ):
            self.log.info('Marking run %s successful', self)
            self.set_state(State.SUCCESS)
            dag.handle_callback(self, success=True, reason='success', session=session)
        # if *all tasks* are deadlocked, the run failed
        elif (unfinished_tasks and none_depends_on_past and
                none_task_concurrency and not are_runnable_tasks):
            self.log.info('Deadlock; marking run %s failed', self)
            self.set_state(State.FAILED)
            dag.handle_callback(self, success=False, reason='all_tasks_deadlocked',
                                session=session)
        # finally, if the roots aren't done, the dag is still running
        else:
            self.set_state(State.RUNNING)
        self._emit_duration_stats_for_finished_state()
        # todo: determine we want to use with_for_update to make sure to lock the run
        session.merge(self)
        session.commit()
        return ready_tis
def _get_ready_tis(self, scheduleable_tasks, finished_tasks, session):
ready_tis = []
changed_tis = False
for st in scheduleable_tasks:
st_old_state = st.state
if st.are_dependencies_met(
dep_context=DepContext(
flag_upstream_failed=True,
finished_tasks=finished_tasks),
session=session):
ready_tis.append(st)
elif st_old_state != st.current_state(session=session):
changed_tis = True
return ready_tis, changed_tis
def _are_premature_tis(self, unfinished_tasks, finished_tasks, session):
# there might be runnable tasks that are up for retry and from some reason(retry delay, etc) are
# not ready yet so we set the flags to count them in
for ut in unfinished_tasks:
if ut.are_dependencies_met(
dep_context=DepContext(
flag_upstream_failed=True,
ignore_in_retry_period=True,
ignore_in_reschedule_period=True,
finished_tasks=finished_tasks),
session=session):
return True
def _emit_duration_stats_for_finished_state(self):
if self.state == State.RUNNING:
return
duration = (self.end_date - self.start_date)
if self.state is State.SUCCESS:
Stats.timing('dagrun.duration.success.{}'.format(self.dag_id), duration)
elif self.state == State.FAILED:
Stats.timing('dagrun.duration.failed.{}'.format(self.dag_id), duration)
    @provide_session
    def verify_integrity(self, session=None):
        """
        Verifies the DagRun by checking for removed tasks or tasks that are not in the
        database yet. It will set state to removed or add the task if required.
        """
        from airflow.models.taskinstance import TaskInstance  # Avoid circular import
        dag = self.get_dag()
        tis = self.get_task_instances(session=session)
        # check for removed or restored tasks
        task_ids = []
        for ti in tis:
            task_ids.append(ti.task_id)
            task = None
            try:
                task = dag.get_task(ti.task_id)
            except AirflowException:
                # The TI exists in the DB but its task is gone from the DAG.
                if ti.state == State.REMOVED:
                    pass  # ti has already been removed, just ignore it
                elif self.state is not State.RUNNING and not dag.partial:
                    self.log.warning("Failed to get task '{}' for dag '{}'. "
                                     "Marking it as removed.".format(ti, dag))
                    Stats.incr(
                        "task_removed_from_dag.{}".format(dag.dag_id), 1, 1)
                    ti.state = State.REMOVED
            # A task that reappeared in the DAG after having been marked
            # REMOVED is resurrected with a clean (NONE) state.
            should_restore_task = (task is not None) and ti.state == State.REMOVED
            if should_restore_task:
                self.log.info("Restoring task '{}' which was previously "
                              "removed from DAG '{}'".format(ti, dag))
                Stats.incr("task_restored_to_dag.{}".format(dag.dag_id), 1, 1)
                ti.state = State.NONE
        # check for missing tasks
        for task in dag.task_dict.values():
            # Tasks whose start_date lies after this run are skipped, except
            # for backfill runs which may intentionally run in the past.
            if task.start_date > self.execution_date and not self.is_backfill:
                continue
            if task.task_id not in task_ids:
                Stats.incr(
                    "task_instance_created-{}".format(task.__class__.__name__),
                    1, 1)
                ti = TaskInstance(task, self.execution_date)
                session.add(ti)
        session.commit()
@staticmethod
def get_run(session, dag_id, execution_date):
"""
:param dag_id: DAG ID
:type dag_id: unicode
:param execution_date: execution date
:type execution_date: datetime
:return: DagRun corresponding to the given dag_id and execution date
if one exists. None otherwise.
:rtype: airflow.models.DagRun
"""
qry = session.query(DagRun).filter(
DagRun.dag_id == dag_id,
DagRun.external_trigger == False, # noqa pylint: disable=singleton-comparison
DagRun.execution_date == execution_date,
)
return qry.first()
@property
def is_backfill(self):
from airflow.jobs import BackfillJob
return (
self.run_id is not None and
self.run_id.startswith(BackfillJob.ID_PREFIX)
)
@classmethod
@provide_session
def get_latest_runs(cls, session):
"""Returns the latest DagRun for each DAG. """
subquery = (
session
.query(
cls.dag_id,
func.max(cls.execution_date).label('execution_date'))
.group_by(cls.dag_id)
.subquery()
)
dagruns = (
session
.query(cls)
.join(subquery,
and_(cls.dag_id == subquery.c.dag_id,
cls.execution_date == subquery.c.execution_date))
.all()
)
return dagruns
| 38.063559 | 110 | 0.608928 |
from typing import Optional, cast
from sqlalchemy import (
Boolean, Column, DateTime, Index, Integer, PickleType, String, UniqueConstraint, and_, func, or_,
)
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import synonym
from sqlalchemy.orm.session import Session
from airflow.exceptions import AirflowException
from airflow.models.base import ID_LEN, Base
from airflow.stats import Stats
from airflow.ti_deps.dep_context import SCHEDULEABLE_STATES, DepContext
from airflow.utils import timezone
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils.session import provide_session
from airflow.utils.sqlalchemy import UtcDateTime
from airflow.utils.state import State
class DagRun(Base, LoggingMixin):
__tablename__ = "dag_run"
ID_PREFIX = 'scheduled__'
ID_FORMAT_PREFIX = ID_PREFIX + '{0}'
id = Column(Integer, primary_key=True)
dag_id = Column(String(ID_LEN))
execution_date = Column(UtcDateTime, default=timezone.utcnow)
start_date = Column(UtcDateTime, default=timezone.utcnow)
end_date = Column(UtcDateTime)
_state = Column('state', String(50), default=State.RUNNING)
run_id = Column(String(ID_LEN))
external_trigger = Column(Boolean, default=True)
conf = Column(PickleType)
dag = None
__table_args__ = (
Index('dag_id_state', dag_id, _state),
UniqueConstraint('dag_id', 'execution_date'),
UniqueConstraint('dag_id', 'run_id'),
)
def __init__(self, dag_id=None, run_id=None, execution_date=None, start_date=None, external_trigger=None,
conf=None, state=None):
self.dag_id = dag_id
self.run_id = run_id
self.execution_date = execution_date
self.start_date = start_date
self.external_trigger = external_trigger
self.conf = conf
self.state = state
super().__init__()
def __repr__(self):
return (
'<DagRun {dag_id} @ {execution_date}: {run_id}, '
'externally triggered: {external_trigger}>'
).format(
dag_id=self.dag_id,
execution_date=self.execution_date,
run_id=self.run_id,
external_trigger=self.external_trigger)
def get_state(self):
return self._state
def set_state(self, state):
if self._state != state:
self._state = state
self.end_date = timezone.utcnow() if self._state in State.finished() else None
@declared_attr
def state(self):
return synonym('_state',
descriptor=property(self.get_state, self.set_state))
@classmethod
def id_for_date(cls, date, prefix=ID_FORMAT_PREFIX):
return prefix.format(date.isoformat()[:19])
@provide_session
def refresh_from_db(self, session=None):
DR = DagRun
exec_date = func.cast(self.execution_date, DateTime)
dr = session.query(DR).filter(
DR.dag_id == self.dag_id,
func.cast(DR.execution_date, DateTime) == exec_date,
DR.run_id == self.run_id
).one()
self.id = dr.id
self.state = dr.state
@staticmethod
@provide_session
def find(dag_id=None, run_id=None, execution_date=None,
state=None, external_trigger=None, no_backfills=False,
session=None):
DR = DagRun
qry = session.query(DR)
if dag_id:
qry = qry.filter(DR.dag_id == dag_id)
if run_id:
qry = qry.filter(DR.run_id == run_id)
if execution_date:
if isinstance(execution_date, list):
qry = qry.filter(DR.execution_date.in_(execution_date))
else:
qry = qry.filter(DR.execution_date == execution_date)
if state:
qry = qry.filter(DR.state == state)
if external_trigger is not None:
qry = qry.filter(DR.external_trigger == external_trigger)
if no_backfills:
from airflow.jobs import BackfillJob
qry = qry.filter(DR.run_id.notlike(BackfillJob.ID_PREFIX + '%'))
dr = qry.order_by(DR.execution_date).all()
return dr
@provide_session
def get_task_instances(self, state=None, session=None):
from airflow.models.taskinstance import TaskInstance
tis = session.query(TaskInstance).filter(
TaskInstance.dag_id == self.dag_id,
TaskInstance.execution_date == self.execution_date,
)
if state:
if isinstance(state, str):
tis = tis.filter(TaskInstance.state == state)
else:
if None in state:
if all(x is None for x in state):
tis = tis.filter(TaskInstance.state.is_(None))
else:
not_none_state = [s for s in state if s]
tis = tis.filter(
or_(TaskInstance.state.in_(not_none_state),
TaskInstance.state.is_(None))
)
else:
tis = tis.filter(TaskInstance.state.in_(state))
if self.dag and self.dag.partial:
tis = tis.filter(TaskInstance.task_id.in_(self.dag.task_ids))
return tis.all()
@provide_session
def get_task_instance(self, task_id, session=None):
from airflow.models.taskinstance import TaskInstance
TI = TaskInstance
ti = session.query(TI).filter(
TI.dag_id == self.dag_id,
TI.execution_date == self.execution_date,
TI.task_id == task_id
).first()
return ti
def get_dag(self):
if not self.dag:
raise AirflowException("The DAG (.dag) for {} needs to be set"
.format(self))
return self.dag
@provide_session
def get_previous_dagrun(self, state: Optional[str] = None, session: Session = None) -> Optional['DagRun']:
session = cast(Session, session)
filters = [
DagRun.dag_id == self.dag_id,
DagRun.execution_date < self.execution_date,
]
if state is not None:
filters.append(DagRun.state == state)
return session.query(DagRun).filter(
*filters
).order_by(
DagRun.execution_date.desc()
).first()
@provide_session
def get_previous_scheduled_dagrun(self, session=None):
dag = self.get_dag()
return session.query(DagRun).filter(
DagRun.dag_id == self.dag_id,
DagRun.execution_date == dag.previous_schedule(self.execution_date)
).first()
@provide_session
def update_state(self, session=None):
dag = self.get_dag()
ready_tis = []
tis = [ti for ti in self.get_task_instances(session=session,
state=State.task_states + (State.SHUTDOWN,))]
self.log.debug("number of tis tasks for %s: %s task(s)", self, len(tis))
for ti in list(tis):
ti.task = dag.get_task(ti.task_id)
start_dttm = timezone.utcnow()
unfinished_tasks = [t for t in tis if t.state in State.unfinished()]
finished_tasks = [t for t in tis if t.state in State.finished() + [State.UPSTREAM_FAILED]]
none_depends_on_past = all(not t.task.depends_on_past for t in unfinished_tasks)
none_task_concurrency = all(t.task.task_concurrency is None
for t in unfinished_tasks)
if unfinished_tasks and none_depends_on_past and none_task_concurrency:
scheduleable_tasks = [ut for ut in unfinished_tasks if ut.state in SCHEDULEABLE_STATES]
self.log.debug("number of scheduleable tasks for %s: %s task(s)", self, len(scheduleable_tasks))
ready_tis, changed_tis = self._get_ready_tis(scheduleable_tasks, finished_tasks, session)
self.log.debug("ready tis length for %s: %s task(s)", self, len(ready_tis))
are_runnable_tasks = ready_tis or self._are_premature_tis(
unfinished_tasks, finished_tasks, session) or changed_tis
duration = (timezone.utcnow() - start_dttm)
Stats.timing("dagrun.dependency-check.{}".format(self.dag_id), duration)
leaf_tis = [ti for ti in tis if ti.task_id in {t.task_id for t in dag.leaves}]
if not unfinished_tasks and any(
leaf_ti.state in {State.FAILED, State.UPSTREAM_FAILED} for leaf_ti in leaf_tis
):
self.log.info('Marking run %s failed', self)
self.set_state(State.FAILED)
dag.handle_callback(self, success=False, reason='task_failure',
session=session)
elif not unfinished_tasks and all(
leaf_ti.state in {State.SUCCESS, State.SKIPPED} for leaf_ti in leaf_tis
):
self.log.info('Marking run %s successful', self)
self.set_state(State.SUCCESS)
dag.handle_callback(self, success=True, reason='success', session=session)
elif (unfinished_tasks and none_depends_on_past and
none_task_concurrency and not are_runnable_tasks):
self.log.info('Deadlock; marking run %s failed', self)
self.set_state(State.FAILED)
dag.handle_callback(self, success=False, reason='all_tasks_deadlocked',
session=session)
else:
self.set_state(State.RUNNING)
self._emit_duration_stats_for_finished_state()
# todo: determine we want to use with_for_update to make sure to lock the run
session.merge(self)
session.commit()
return ready_tis
def _get_ready_tis(self, scheduleable_tasks, finished_tasks, session):
ready_tis = []
changed_tis = False
for st in scheduleable_tasks:
st_old_state = st.state
if st.are_dependencies_met(
dep_context=DepContext(
flag_upstream_failed=True,
finished_tasks=finished_tasks),
session=session):
ready_tis.append(st)
elif st_old_state != st.current_state(session=session):
changed_tis = True
return ready_tis, changed_tis
def _are_premature_tis(self, unfinished_tasks, finished_tasks, session):
# there might be runnable tasks that are up for retry and from some reason(retry delay, etc) are
# not ready yet so we set the flags to count them in
for ut in unfinished_tasks:
if ut.are_dependencies_met(
dep_context=DepContext(
flag_upstream_failed=True,
ignore_in_retry_period=True,
ignore_in_reschedule_period=True,
finished_tasks=finished_tasks),
session=session):
return True
def _emit_duration_stats_for_finished_state(self):
if self.state == State.RUNNING:
return
duration = (self.end_date - self.start_date)
if self.state is State.SUCCESS:
Stats.timing('dagrun.duration.success.{}'.format(self.dag_id), duration)
elif self.state == State.FAILED:
Stats.timing('dagrun.duration.failed.{}'.format(self.dag_id), duration)
@provide_session
def verify_integrity(self, session=None):
from airflow.models.taskinstance import TaskInstance # Avoid circular import
dag = self.get_dag()
tis = self.get_task_instances(session=session)
# check for removed or restored tasks
task_ids = []
for ti in tis:
task_ids.append(ti.task_id)
task = None
try:
task = dag.get_task(ti.task_id)
except AirflowException:
if ti.state == State.REMOVED:
pass # ti has already been removed, just ignore it
elif self.state is not State.RUNNING and not dag.partial:
self.log.warning("Failed to get task '{}' for dag '{}'. "
"Marking it as removed.".format(ti, dag))
Stats.incr(
"task_removed_from_dag.{}".format(dag.dag_id), 1, 1)
ti.state = State.REMOVED
should_restore_task = (task is not None) and ti.state == State.REMOVED
if should_restore_task:
self.log.info("Restoring task '{}' which was previously "
"removed from DAG '{}'".format(ti, dag))
Stats.incr("task_restored_to_dag.{}".format(dag.dag_id), 1, 1)
ti.state = State.NONE
# check for missing tasks
for task in dag.task_dict.values():
if task.start_date > self.execution_date and not self.is_backfill:
continue
if task.task_id not in task_ids:
Stats.incr(
"task_instance_created-{}".format(task.__class__.__name__),
1, 1)
ti = TaskInstance(task, self.execution_date)
session.add(ti)
session.commit()
@staticmethod
def get_run(session, dag_id, execution_date):
qry = session.query(DagRun).filter(
DagRun.dag_id == dag_id,
DagRun.external_trigger == False, # noqa pylint: disable=singleton-comparison
DagRun.execution_date == execution_date,
)
return qry.first()
@property
def is_backfill(self):
from airflow.jobs import BackfillJob
return (
self.run_id is not None and
self.run_id.startswith(BackfillJob.ID_PREFIX)
)
@classmethod
@provide_session
def get_latest_runs(cls, session):
subquery = (
session
.query(
cls.dag_id,
func.max(cls.execution_date).label('execution_date'))
.group_by(cls.dag_id)
.subquery()
)
dagruns = (
session
.query(cls)
.join(subquery,
and_(cls.dag_id == subquery.c.dag_id,
cls.execution_date == subquery.c.execution_date))
.all()
)
return dagruns
| true | true |
f736fd4520e17f470184be3dc033774e1f7dbd0f | 2,004 | py | Python | test/test_entity_data_api.py | docktermj/senzing-python-rest-client | 396c4842c72c93a4a9d7cf0cefc027f73892a518 | [
"Apache-2.0"
] | null | null | null | test/test_entity_data_api.py | docktermj/senzing-python-rest-client | 396c4842c72c93a4a9d7cf0cefc027f73892a518 | [
"Apache-2.0"
] | null | null | null | test/test_entity_data_api.py | docktermj/senzing-python-rest-client | 396c4842c72c93a4a9d7cf0cefc027f73892a518 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Senzing REST API
This is the Senzing REST API. It describes the REST interface to Senzing API functions available via REST. It leverages the Senzing native API which is documented at [https://docs.senzing.com](https://docs.senzing.com) # noqa: E501
OpenAPI spec version: 1.6.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from api.entity_data_api import EntityDataApi # noqa: E501
from swagger_client.rest import ApiException
class TestEntityDataApi(unittest.TestCase):
"""EntityDataApi unit test stubs"""
def setUp(self):
self.api = api.entity_data_api.EntityDataApi() # noqa: E501
def tearDown(self):
pass
def test_add_record(self):
"""Test case for add_record
Load a new record or replace a record in a data source with a specific record ID. # noqa: E501
"""
pass
def test_add_record_with_returned_record_id(self):
"""Test case for add_record_with_returned_record_id
Load a new record in # noqa: E501
"""
pass
def test_get_data_source_record(self):
"""Test case for get_data_source_record
Get an entity record by data source and record ID. # noqa: E501
"""
pass
def test_get_entity_by_entity_id(self):
"""Test case for get_entity_by_entity_id
Get a resolved entity by entity ID. # noqa: E501
"""
pass
def test_get_entity_by_record_id(self):
"""Test case for get_entity_by_record_id
Get a resolved entity by data source and record ID. # noqa: E501
"""
pass
def test_search_by_attributes(self):
"""Test case for search_by_attributes
Search for entities that would match or relate to the provided entity features. # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
| 26.368421 | 238 | 0.667166 |
from __future__ import absolute_import
import unittest
import swagger_client
from api.entity_data_api import EntityDataApi
from swagger_client.rest import ApiException
class TestEntityDataApi(unittest.TestCase):
def setUp(self):
self.api = api.entity_data_api.EntityDataApi()
def tearDown(self):
pass
def test_add_record(self):
pass
def test_add_record_with_returned_record_id(self):
pass
def test_get_data_source_record(self):
pass
def test_get_entity_by_entity_id(self):
pass
def test_get_entity_by_record_id(self):
pass
def test_search_by_attributes(self):
pass
if __name__ == '__main__':
unittest.main()
| true | true |
f736fde412857f25a4128b330b934401390ae46f | 12,877 | py | Python | aiokubernetes/models/v1beta1_stateful_set_status.py | tantioch/aiokubernetes | 2f332498598ece14d22f8e59ecb02665db6db68d | [
"Apache-2.0"
] | 24 | 2018-07-07T15:12:19.000Z | 2021-09-01T07:33:11.000Z | aiokubernetes/models/v1beta1_stateful_set_status.py | revoteon/aiokubernetes | 730eae03e4779563740f07ad3ecef180b511ac18 | [
"Apache-2.0"
] | 5 | 2018-07-11T00:09:17.000Z | 2018-10-22T16:41:54.000Z | aiokubernetes/models/v1beta1_stateful_set_status.py | revoteon/aiokubernetes | 730eae03e4779563740f07ad3ecef180b511ac18 | [
"Apache-2.0"
] | 3 | 2018-07-10T10:16:57.000Z | 2018-10-20T19:32:05.000Z | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.10.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
from aiokubernetes.models.v1beta1_stateful_set_condition import V1beta1StatefulSetCondition # noqa: F401,E501
class V1beta1StatefulSetStatus(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    # Maps pythonic attribute names to their declared swagger types.
    swagger_types = {
        'collision_count': 'int',
        'conditions': 'list[V1beta1StatefulSetCondition]',
        'current_replicas': 'int',
        'current_revision': 'str',
        'observed_generation': 'int',
        'ready_replicas': 'int',
        'replicas': 'int',
        'update_revision': 'str',
        'updated_replicas': 'int'
    }

    # Maps pythonic attribute names to the JSON keys used on the wire.
    attribute_map = {
        'collision_count': 'collisionCount',
        'conditions': 'conditions',
        'current_replicas': 'currentReplicas',
        'current_revision': 'currentRevision',
        'observed_generation': 'observedGeneration',
        'ready_replicas': 'readyReplicas',
        'replicas': 'replicas',
        'update_revision': 'updateRevision',
        'updated_replicas': 'updatedReplicas'
    }

    def __init__(self, collision_count=None, conditions=None, current_replicas=None, current_revision=None, observed_generation=None, ready_replicas=None, replicas=None, update_revision=None, updated_replicas=None):  # noqa: E501
        """V1beta1StatefulSetStatus - a model defined in Swagger"""  # noqa: E501
        # Backing fields; populated through the property setters below.
        self._collision_count = None
        self._conditions = None
        self._current_replicas = None
        self._current_revision = None
        self._observed_generation = None
        self._ready_replicas = None
        self._replicas = None
        self._update_revision = None
        self._updated_replicas = None
        self.discriminator = None
        # Optional fields are only assigned when explicitly provided.
        if collision_count is not None:
            self.collision_count = collision_count
        if conditions is not None:
            self.conditions = conditions
        if current_replicas is not None:
            self.current_replicas = current_replicas
        if current_revision is not None:
            self.current_revision = current_revision
        if observed_generation is not None:
            self.observed_generation = observed_generation
        if ready_replicas is not None:
            self.ready_replicas = ready_replicas
        # ``replicas`` is required; the setter rejects None.
        self.replicas = replicas
        if update_revision is not None:
            self.update_revision = update_revision
        if updated_replicas is not None:
            self.updated_replicas = updated_replicas

    @property
    def collision_count(self):
        """int: count of hash collisions for the StatefulSet, used by the
        controller as a collision-avoidance mechanism when naming the newest
        ControllerRevision."""
        return self._collision_count

    @collision_count.setter
    def collision_count(self, collision_count):
        """Set collision_count (int)."""
        self._collision_count = collision_count

    @property
    def conditions(self):
        """list[V1beta1StatefulSetCondition]: latest available observations of
        the statefulset's current state."""
        return self._conditions

    @conditions.setter
    def conditions(self, conditions):
        """Set conditions (list[V1beta1StatefulSetCondition])."""
        self._conditions = conditions

    @property
    def current_replicas(self):
        """int: number of Pods created from the StatefulSet version indicated
        by currentRevision."""
        return self._current_replicas

    @current_replicas.setter
    def current_replicas(self, current_replicas):
        """Set current_replicas (int)."""
        self._current_replicas = current_replicas

    @property
    def current_revision(self):
        """str: if not empty, the StatefulSet version used to generate Pods in
        the sequence [0, currentReplicas)."""
        return self._current_revision

    @current_revision.setter
    def current_revision(self, current_revision):
        """Set current_revision (str)."""
        self._current_revision = current_revision

    @property
    def observed_generation(self):
        """int: most recent generation observed for this StatefulSet; updated
        on mutation by the API Server."""
        return self._observed_generation

    @observed_generation.setter
    def observed_generation(self, observed_generation):
        """Set observed_generation (int)."""
        self._observed_generation = observed_generation

    @property
    def ready_replicas(self):
        """int: number of Pods created by the controller that have a Ready
        Condition."""
        return self._ready_replicas

    @ready_replicas.setter
    def ready_replicas(self, ready_replicas):
        """Set ready_replicas (int)."""
        self._ready_replicas = ready_replicas

    @property
    def replicas(self):
        """int: number of Pods created by the StatefulSet controller
        (required field)."""
        return self._replicas

    @replicas.setter
    def replicas(self, replicas):
        """Set replicas (int, required).

        :raises ValueError: if ``replicas`` is None — the field is mandatory.
        """
        if replicas is None:
            raise ValueError("Invalid value for `replicas`, must not be `None`")  # noqa: E501
        self._replicas = replicas

    @property
    def update_revision(self):
        """str: if not empty, the StatefulSet version used to generate Pods in
        the sequence [replicas-updatedReplicas, replicas)."""
        return self._update_revision

    @update_revision.setter
    def update_revision(self, update_revision):
        """Set update_revision (str)."""
        self._update_revision = update_revision

    @property
    def updated_replicas(self):
        """int: number of Pods created from the StatefulSet version indicated
        by updateRevision."""
        return self._updated_replicas

    @updated_replicas.setter
    def updated_replicas(self, updated_replicas):
        """Set updated_replicas (int)."""
        self._updated_replicas = updated_replicas

    def to_dict(self):
        """Return the model's properties as a dict (nested models converted)."""
        def _convert(value):
            # One level of model conversion, mirroring the generated behavior:
            # lists and dicts have their items converted via to_dict() when
            # available; plain values pass through unchanged.
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item
                        for item in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {key: val.to_dict() if hasattr(val, "to_dict") else val
                        for key, val in value.items()}
            return value

        return {attr: _convert(getattr(self, attr))
                for attr in self.swagger_types}

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Equal iff ``other`` is the same model type with equal attributes."""
        return (isinstance(other, V1beta1StatefulSetStatus) and
                self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Inverse of ``__eq__``."""
        return not self == other
| 37.762463 | 235 | 0.671041 |
import pprint
import re
from aiokubernetes.models.v1beta1_stateful_set_condition import V1beta1StatefulSetCondition
class V1beta1StatefulSetStatus(object):
swagger_types = {
'collision_count': 'int',
'conditions': 'list[V1beta1StatefulSetCondition]',
'current_replicas': 'int',
'current_revision': 'str',
'observed_generation': 'int',
'ready_replicas': 'int',
'replicas': 'int',
'update_revision': 'str',
'updated_replicas': 'int'
}
attribute_map = {
'collision_count': 'collisionCount',
'conditions': 'conditions',
'current_replicas': 'currentReplicas',
'current_revision': 'currentRevision',
'observed_generation': 'observedGeneration',
'ready_replicas': 'readyReplicas',
'replicas': 'replicas',
'update_revision': 'updateRevision',
'updated_replicas': 'updatedReplicas'
}
def __init__(self, collision_count=None, conditions=None, current_replicas=None, current_revision=None, observed_generation=None, ready_replicas=None, replicas=None, update_revision=None, updated_replicas=None):
self._collision_count = None
self._conditions = None
self._current_replicas = None
self._current_revision = None
self._observed_generation = None
self._ready_replicas = None
self._replicas = None
self._update_revision = None
self._updated_replicas = None
self.discriminator = None
if collision_count is not None:
self.collision_count = collision_count
if conditions is not None:
self.conditions = conditions
if current_replicas is not None:
self.current_replicas = current_replicas
if current_revision is not None:
self.current_revision = current_revision
if observed_generation is not None:
self.observed_generation = observed_generation
if ready_replicas is not None:
self.ready_replicas = ready_replicas
self.replicas = replicas
if update_revision is not None:
self.update_revision = update_revision
if updated_replicas is not None:
self.updated_replicas = updated_replicas
@property
def collision_count(self):
return self._collision_count
@collision_count.setter
def collision_count(self, collision_count):
self._collision_count = collision_count
@property
def conditions(self):
return self._conditions
@conditions.setter
def conditions(self, conditions):
self._conditions = conditions
@property
def current_replicas(self):
return self._current_replicas
@current_replicas.setter
def current_replicas(self, current_replicas):
self._current_replicas = current_replicas
@property
def current_revision(self):
return self._current_revision
@current_revision.setter
def current_revision(self, current_revision):
self._current_revision = current_revision
@property
def observed_generation(self):
return self._observed_generation
@observed_generation.setter
def observed_generation(self, observed_generation):
self._observed_generation = observed_generation
@property
def ready_replicas(self):
return self._ready_replicas
@ready_replicas.setter
def ready_replicas(self, ready_replicas):
self._ready_replicas = ready_replicas
@property
def replicas(self):
return self._replicas
@replicas.setter
def replicas(self, replicas):
if replicas is None:
raise ValueError("Invalid value for `replicas`, must not be `None`")
self._replicas = replicas
@property
def update_revision(self):
return self._update_revision
@update_revision.setter
def update_revision(self, update_revision):
self._update_revision = update_revision
@property
def updated_replicas(self):
return self._updated_replicas
@updated_replicas.setter
def updated_replicas(self, updated_replicas):
self._updated_replicas = updated_replicas
def to_dict(self):
result = {}
for attr, _ in self.swagger_types.items():
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, V1beta1StatefulSetStatus):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f736ff4d8a8dbdd9eb0a5b2c271becc5760e9358 | 2,761 | py | Python | app/mod_utils/intelix.py | 0xbennyv/intelix-livediscover-lookup | e028b7659ef3c291aa169d174d73cc2ea47b86a2 | [
"MIT"
] | null | null | null | app/mod_utils/intelix.py | 0xbennyv/intelix-livediscover-lookup | e028b7659ef3c291aa169d174d73cc2ea47b86a2 | [
"MIT"
] | null | null | null | app/mod_utils/intelix.py | 0xbennyv/intelix-livediscover-lookup | e028b7659ef3c291aa169d174d73cc2ea47b86a2 | [
"MIT"
] | null | null | null | import validators
import requests
import base64
from app import app
def intelixlookup(ioc):
#Get a token
token = get_token()
# use Validators to redirect the IOC to the correct Intelix endpoint
if validators.ipv4(ioc):
u = f"https://de.api.labs.sophos.com/lookup/ips/v1/{ioc}"
elif validators.md5(ioc):
u = f"https://de.api.labs.sophos.com/lookup/urls/v1/{ioc}"
elif validators.sha256(ioc):
u = f"https://de.api.labs.sophos.com/lookup/files/v1/{ioc}"
h = {"Authorization": f"{token}"}
r = requests.get(u, headers=h)
j = r.json()
response = {}
# File reponses
if validators.sha256(ioc) or validators.md5(ioc):
if 'reputationScore' in j:
response['reputationScore'] = j['reputationScore']
if j['reputationScore'] <= 19:
response['fileReputation'] = 'Malware'
elif j['reputationScore'] <= 29:
response['fileReputation'] = 'PUA (potentially unwanted application)'
elif j['reputationScore'] <= 69:
response['fileReputation'] = 'Unknown/suspicious'
elif j['reputationScore'] <= 100:
response['fileReputation'] = 'Known good'
if 'detectionName' in j:
response['detectionName'] = j['detectionName']
response['type'] = 'File Hash'
# IP reponses
if validators.ipv4(ioc):
if 'category' in j:
response['category'] = j['category']
else:
response['category'] = 'Unknown IP Address'
if 'ttl' in j:
response['ttl'] = j['ttl']
response['type'] = 'IP Address'
# Generic consistent repsponses
if 'correlationId' in j:
response['correlationId'] = j['correlationId']
if 'requestId' in j:
response['requestId'] = j['requestId']
# Generic Error Handling based on reponses
# https://api.labs.sophos.com/doc/lookup/ips.html
# https://api.labs.sophos.com/doc/lookup/files.html
if 'error' in j:
response['error'] = j['error']
if 'message' in j:
response['message'] = j['message']
# Return a dict, flask will return this as JSON to the browser
return response
def get_token():
# This is lazy, the token should be stored for quicker request times.
creds = f"{app.config['INTELIX_CLIENT_ID']}:{app.config['INTELIX_CLIENT_SECRET']}"
t = base64.b64encode(creds.encode("UTF-8")).decode("ascii")
d = {'grant_type': 'client_credentials'}
h = {'Authorization': f"Basic {t}",
'Content-Type': 'application/x-www-form-urlencoded'
}
r = requests.post('https://api.labs.sophos.com/oauth2/token', headers=h, data=d)
r = r.json()
return r['access_token'] | 36.813333 | 86 | 0.603042 | import validators
import requests
import base64
from app import app
def intelixlookup(ioc):
token = get_token()
if validators.ipv4(ioc):
u = f"https://de.api.labs.sophos.com/lookup/ips/v1/{ioc}"
elif validators.md5(ioc):
u = f"https://de.api.labs.sophos.com/lookup/urls/v1/{ioc}"
elif validators.sha256(ioc):
u = f"https://de.api.labs.sophos.com/lookup/files/v1/{ioc}"
h = {"Authorization": f"{token}"}
r = requests.get(u, headers=h)
j = r.json()
response = {}
if validators.sha256(ioc) or validators.md5(ioc):
if 'reputationScore' in j:
response['reputationScore'] = j['reputationScore']
if j['reputationScore'] <= 19:
response['fileReputation'] = 'Malware'
elif j['reputationScore'] <= 29:
response['fileReputation'] = 'PUA (potentially unwanted application)'
elif j['reputationScore'] <= 69:
response['fileReputation'] = 'Unknown/suspicious'
elif j['reputationScore'] <= 100:
response['fileReputation'] = 'Known good'
if 'detectionName' in j:
response['detectionName'] = j['detectionName']
response['type'] = 'File Hash'
if validators.ipv4(ioc):
if 'category' in j:
response['category'] = j['category']
else:
response['category'] = 'Unknown IP Address'
if 'ttl' in j:
response['ttl'] = j['ttl']
response['type'] = 'IP Address'
if 'correlationId' in j:
response['correlationId'] = j['correlationId']
if 'requestId' in j:
response['requestId'] = j['requestId']
if 'error' in j:
response['error'] = j['error']
if 'message' in j:
response['message'] = j['message']
return response
def get_token():
creds = f"{app.config['INTELIX_CLIENT_ID']}:{app.config['INTELIX_CLIENT_SECRET']}"
t = base64.b64encode(creds.encode("UTF-8")).decode("ascii")
d = {'grant_type': 'client_credentials'}
h = {'Authorization': f"Basic {t}",
'Content-Type': 'application/x-www-form-urlencoded'
}
r = requests.post('https://api.labs.sophos.com/oauth2/token', headers=h, data=d)
r = r.json()
return r['access_token'] | true | true |
f7370113674360f191e051a8e006f13f62d0e2ae | 3,727 | py | Python | test/libtiff.py | sdarwin/build | 2c4217ebb6bdeb5001b33a5d0d6718420aef988c | [
"BSL-1.0"
] | 106 | 2015-08-07T04:23:50.000Z | 2020-12-27T18:25:15.000Z | test/libtiff.py | sdarwin/build | 2c4217ebb6bdeb5001b33a5d0d6718420aef988c | [
"BSL-1.0"
] | 130 | 2016-06-22T22:11:25.000Z | 2020-11-29T20:24:09.000Z | test/libtiff.py | sdarwin/build | 2c4217ebb6bdeb5001b33a5d0d6718420aef988c | [
"BSL-1.0"
] | 41 | 2015-07-08T19:18:35.000Z | 2021-01-14T16:39:56.000Z | #!/usr/bin/python
# Copyright (C) 2013 Steven Watanabe
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE.txt or copy at
# https://www.bfgroup.xyz/b2/LICENSE.txt)
import BoostBuild
import MockToolset
t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
MockToolset.create(t)
# Build from source
t.write("libtiff/tiff.h", 'libtiff')
t.write("libtiff/tiff.c", 'tiff')
t.write("Jamroot.jam", """
path-constant here : . ;
using libtiff : : <source>$(here)/libtiff ;
alias libtiff : /libtiff//libtiff : : <link>static <link>shared ;
""")
MockToolset.set_expected(t, '''
source_file('tiff.c', 'tiff')
action('-c -x c -I./libtiff -o $tiff.o $tiff.c')
action('--dll $tiff.o -o $tiff.so')
action('--archive $tiff.o -o $tiff.a')
''')
t.run_build_system()
t.expect_addition('bin/standalone/libtiff/mock/debug/tiff.dll')
t.expect_addition('bin/standalone/libtiff/mock/debug/link-static/tiff.lib')
t.rm('libtiff')
# Generic definitions that aren't configuration specific
common_stuff = '''
source_file('test.cpp', 'test.cpp')
source_file('main.cpp', 'int main() {}')
source_file('tiff.h.cpp', '#include <tiff.h>')
action('-c -x c++ $main.cpp -o $main.o')
'''
t.write('test.cpp', 'test.cpp')
# Default initialization - static library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using libtiff ;
exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
""")
MockToolset.set_expected(t, common_stuff + '''
action('$main.o --static-lib=tiff -o $config.exe')
action('-c -x c++ $tiff.h.cpp -o $tiff.h.o')
action('-c -x c++ $test.cpp -o $test.o')
action('$test.o --static-lib=tiff -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
# Default initialization - shared library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using libtiff ;
exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
""")
MockToolset.set_expected(t, common_stuff + '''
action('$main.o --shared-lib=tiff -o $config.exe')
action('-c -x c++ $tiff.h.cpp -o $tiff.h.o')
action('-c -x c++ $test.cpp -o $test.o')
action('$test.o --shared-lib=tiff -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
# Initialization in explicit location - static library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using libtiff : : <name>mylibtiff <include>$(here)/libtiff <search>$(here)/libtiff ;
exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
""")
t.write('libtiff/tiff.h', 'libtiff')
MockToolset.set_expected(t, common_stuff + '''
action('$main.o -L./libtiff --static-lib=mylibtiff -o $config.exe')
action('-c -x c++ $test.cpp -I./libtiff -o $test.o')
action('$test.o -L./libtiff --static-lib=mylibtiff -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
# Initialization in explicit location - shared library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using libtiff : : <name>mylibtiff <include>$(here)/libtiff <search>$(here)/libtiff ;
exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
""")
MockToolset.set_expected(t, common_stuff + '''
action('$main.o -L./libtiff --shared-lib=mylibtiff -o $config.exe')
action('-c -x c++ $test.cpp -I./libtiff -o $test.o')
action('$test.o -L./libtiff --shared-lib=mylibtiff -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
t.cleanup()
| 31.058333 | 107 | 0.686611 |
import BoostBuild
import MockToolset
t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
MockToolset.create(t)
t.write("libtiff/tiff.h", 'libtiff')
t.write("libtiff/tiff.c", 'tiff')
t.write("Jamroot.jam", """
path-constant here : . ;
using libtiff : : <source>$(here)/libtiff ;
alias libtiff : /libtiff//libtiff : : <link>static <link>shared ;
""")
MockToolset.set_expected(t, '''
source_file('tiff.c', 'tiff')
action('-c -x c -I./libtiff -o $tiff.o $tiff.c')
action('--dll $tiff.o -o $tiff.so')
action('--archive $tiff.o -o $tiff.a')
''')
t.run_build_system()
t.expect_addition('bin/standalone/libtiff/mock/debug/tiff.dll')
t.expect_addition('bin/standalone/libtiff/mock/debug/link-static/tiff.lib')
t.rm('libtiff')
common_stuff = '''
source_file('test.cpp', 'test.cpp')
source_file('main.cpp', 'int main() {}')
source_file('tiff.h.cpp', '#include <tiff.h>')
action('-c -x c++ $main.cpp -o $main.o')
'''
t.write('test.cpp', 'test.cpp')
# Default initialization - static library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using libtiff ;
exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
""")
MockToolset.set_expected(t, common_stuff + '''
action('$main.o --static-lib=tiff -o $config.exe')
action('-c -x c++ $tiff.h.cpp -o $tiff.h.o')
action('-c -x c++ $test.cpp -o $test.o')
action('$test.o --static-lib=tiff -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
# Default initialization - shared library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using libtiff ;
exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
""")
MockToolset.set_expected(t, common_stuff + '''
action('$main.o --shared-lib=tiff -o $config.exe')
action('-c -x c++ $tiff.h.cpp -o $tiff.h.o')
action('-c -x c++ $test.cpp -o $test.o')
action('$test.o --shared-lib=tiff -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
# Initialization in explicit location - static library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using libtiff : : <name>mylibtiff <include>$(here)/libtiff <search>$(here)/libtiff ;
exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
""")
t.write('libtiff/tiff.h', 'libtiff')
MockToolset.set_expected(t, common_stuff + '''
action('$main.o -L./libtiff --static-lib=mylibtiff -o $config.exe')
action('-c -x c++ $test.cpp -I./libtiff -o $test.o')
action('$test.o -L./libtiff --static-lib=mylibtiff -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
# Initialization in explicit location - shared library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using libtiff : : <name>mylibtiff <include>$(here)/libtiff <search>$(here)/libtiff ;
exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
""")
MockToolset.set_expected(t, common_stuff + '''
action('$main.o -L./libtiff --shared-lib=mylibtiff -o $config.exe')
action('-c -x c++ $test.cpp -I./libtiff -o $test.o')
action('$test.o -L./libtiff --shared-lib=mylibtiff -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
t.cleanup()
| true | true |
f737018c98ae5f579916c5cbea87bfb14e79c2f3 | 1,055 | py | Python | src/libs/utils/config.py | rjarman/Bus-Mama-Python-Server | 35da2cd42c5affaa0e8d2f371bbe4735bab90082 | [
"MIT"
] | 6 | 2020-11-15T22:32:27.000Z | 2021-10-14T18:26:40.000Z | src/libs/utils/config.py | rjarman/Bus-Mama-Python-Server | 35da2cd42c5affaa0e8d2f371bbe4735bab90082 | [
"MIT"
] | null | null | null | src/libs/utils/config.py | rjarman/Bus-Mama-Python-Server | 35da2cd42c5affaa0e8d2f371bbe4735bab90082 | [
"MIT"
] | null | null | null | import os
class Config:
__dir = os.path.dirname(__file__)
SERVER = {
'host': 'localhost',
'port': 8080
}
PATH = {
'question_ans': __dir + '/../../data/bangla_questions_ans.pkl',
'question_domain': __dir + '/../../data/bangla_questions_domain.pkl',
'original_questions_ans': __dir + '/../../data/original_questions_ans.xlsx',
'original_questions_domains': __dir + '/../../data/original_questions_domains.xlsx',
'original_questions_ans_csv': __dir + '/../../data/original_questions_ans.csv',
'original_questions_domains_csv': __dir + '/../../data/original_questions_domains.csv',
'log': __dir + '/../../logs/server_log.csv'
}
SPLITTER = {
'test_size': 0.3,
'random_state': 42
}
TFIDF = {
'max_df': 1,
'min_df': 1,
'max_features': None,
'norm': 'l2',
'smooth_idf': True,
'sublinear_tf': True
}
PERCENTILE = {
'score_func': 'f_classif',
'percentile': 10
}
| 26.375 | 95 | 0.559242 | import os
class Config:
__dir = os.path.dirname(__file__)
SERVER = {
'host': 'localhost',
'port': 8080
}
PATH = {
'question_ans': __dir + '/../../data/bangla_questions_ans.pkl',
'question_domain': __dir + '/../../data/bangla_questions_domain.pkl',
'original_questions_ans': __dir + '/../../data/original_questions_ans.xlsx',
'original_questions_domains': __dir + '/../../data/original_questions_domains.xlsx',
'original_questions_ans_csv': __dir + '/../../data/original_questions_ans.csv',
'original_questions_domains_csv': __dir + '/../../data/original_questions_domains.csv',
'log': __dir + '/../../logs/server_log.csv'
}
SPLITTER = {
'test_size': 0.3,
'random_state': 42
}
TFIDF = {
'max_df': 1,
'min_df': 1,
'max_features': None,
'norm': 'l2',
'smooth_idf': True,
'sublinear_tf': True
}
PERCENTILE = {
'score_func': 'f_classif',
'percentile': 10
}
| true | true |
f73702cc6c0a3f85d13ce025224e1744ed193865 | 2,721 | py | Python | projects/sachinl0har/chatbots/helper.py | Dalekvim/Python | dacc73c319d835f7cda3177c2fdd7c4328828cb9 | [
"MIT"
] | null | null | null | projects/sachinl0har/chatbots/helper.py | Dalekvim/Python | dacc73c319d835f7cda3177c2fdd7c4328828cb9 | [
"MIT"
] | null | null | null | projects/sachinl0har/chatbots/helper.py | Dalekvim/Python | dacc73c319d835f7cda3177c2fdd7c4328828cb9 | [
"MIT"
] | null | null | null | from datetime import datetime
import pyttsx3
import speech_recognition as sr
engine = pyttsx3.init()
def say(sentence: str):
print(sentence)
engine.say(sentence)
engine.runAndWait()
def recognize_speech_from_mic(recognizer, microphone):
"""Transcribe speech from recorded from `microphone`.
Returns a dictionary with three keys:
"success": a boolean indicating whether or not the API request was
successful
"error": `None` if no error occured, otherwise a string containing
an error message if the API could not be reached or
speech was unrecognizable
"transcription": `None` if speech could not be transcribed,
otherwise a string containing the transcribed text
"""
# check that recognizer and microphone arguments are appropriate type
if not isinstance(recognizer, sr.Recognizer):
raise TypeError("`recognizer` must be `Recognizer` instance")
if not isinstance(microphone, sr.Microphone):
raise TypeError("`microphone` must be `Microphone` instance")
# adjust the recognizer sensitivity to ambient noise and record audio
# from the microphone
with microphone as source:
recognizer.adjust_for_ambient_noise(source)
audio = recognizer.listen(source)
# set up the response object
response = {
"success": True,
"error": None,
"transcription": None
}
# try recognizing the speech in the recording
# if a RequestError or UnknownValueError exception is caught,
# update the response object accordingly
try:
response["transcription"] = recognizer.recognize_google(audio)
except sr.RequestError:
# API was unreachable or unresponsive
response["success"] = False
response["error"] = "API unavailable"
except sr.UnknownValueError:
# speech was unintelligible
response["error"] = "Unable to recognize speech"
return response
def run(name: str, func):
# create recognizer and mic instances
recognizer = sr.Recognizer()
microphone = sr.Microphone()
wish_me()
say(f"{name} here, how may I help you?")
while True:
_input = recognize_speech_from_mic(recognizer, microphone)
if not _input["error"]:
print(_input["transcription"])
func(_input["transcription"], recognizer, microphone)
else:
say(_input["error"])
def wish_me():
now = datetime.now()
hour = now.hour
times = [("morning", (0 <= hour < 12)), ("afternoon", (12 <= hour < 18)), ("evening", (18 <= hour < 24))]
for time in times:
if time[1]:
say(f"Good {time[0]}!")
| 29.576087 | 109 | 0.652334 | from datetime import datetime
import pyttsx3
import speech_recognition as sr
engine = pyttsx3.init()
def say(sentence: str):
print(sentence)
engine.say(sentence)
engine.runAndWait()
def recognize_speech_from_mic(recognizer, microphone):
if not isinstance(recognizer, sr.Recognizer):
raise TypeError("`recognizer` must be `Recognizer` instance")
if not isinstance(microphone, sr.Microphone):
raise TypeError("`microphone` must be `Microphone` instance")
with microphone as source:
recognizer.adjust_for_ambient_noise(source)
audio = recognizer.listen(source)
response = {
"success": True,
"error": None,
"transcription": None
}
try:
response["transcription"] = recognizer.recognize_google(audio)
except sr.RequestError:
response["success"] = False
response["error"] = "API unavailable"
except sr.UnknownValueError:
response["error"] = "Unable to recognize speech"
return response
def run(name: str, func):
recognizer = sr.Recognizer()
microphone = sr.Microphone()
wish_me()
say(f"{name} here, how may I help you?")
while True:
_input = recognize_speech_from_mic(recognizer, microphone)
if not _input["error"]:
print(_input["transcription"])
func(_input["transcription"], recognizer, microphone)
else:
say(_input["error"])
def wish_me():
now = datetime.now()
hour = now.hour
times = [("morning", (0 <= hour < 12)), ("afternoon", (12 <= hour < 18)), ("evening", (18 <= hour < 24))]
for time in times:
if time[1]:
say(f"Good {time[0]}!")
| true | true |
f73702db17fa780e5ecc587ea13c83b999a843d2 | 7,239 | py | Python | opsgenie_swagger/models/action_categorized.py | Logicworks/opsgenie-python-sdk | 244c4c40ddcc25e70df5ba4425ab8d7c8da59c18 | [
"Apache-2.0"
] | null | null | null | opsgenie_swagger/models/action_categorized.py | Logicworks/opsgenie-python-sdk | 244c4c40ddcc25e70df5ba4425ab8d7c8da59c18 | [
"Apache-2.0"
] | null | null | null | opsgenie_swagger/models/action_categorized.py | Logicworks/opsgenie-python-sdk | 244c4c40ddcc25e70df5ba4425ab8d7c8da59c18 | [
"Apache-2.0"
] | 1 | 2020-11-07T11:27:13.000Z | 2020-11-07T11:27:13.000Z | # coding: utf-8
"""
OpsGenie REST API
OpsGenie OpenAPI Specification # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from opsgenie_swagger.models.ack_integration_action import AckIntegrationAction # noqa: F401,E501
from opsgenie_swagger.models.add_note_integration_action import AddNoteIntegrationAction # noqa: F401,E501
from opsgenie_swagger.models.close_integration_action import CloseIntegrationAction # noqa: F401,E501
from opsgenie_swagger.models.create_integration_action import CreateIntegrationAction # noqa: F401,E501
from opsgenie_swagger.models.ignore_integration_action import IgnoreIntegrationAction # noqa: F401,E501
from opsgenie_swagger.models.integration_meta import IntegrationMeta # noqa: F401,E501
class ActionCategorized(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'parent': 'IntegrationMeta',
'ignore': 'list[IgnoreIntegrationAction]',
'create': 'list[CreateIntegrationAction]',
'close': 'list[CloseIntegrationAction]',
'acknowledge': 'list[AckIntegrationAction]',
'add_note': 'list[AddNoteIntegrationAction]'
}
attribute_map = {
'parent': '_parent',
'ignore': 'ignore',
'create': 'create',
'close': 'close',
'acknowledge': 'acknowledge',
'add_note': 'addNote'
}
def __init__(self, parent=None, ignore=None, create=None, close=None, acknowledge=None, add_note=None): # noqa: E501
"""ActionCategorized - a model defined in Swagger""" # noqa: E501
self._parent = None
self._ignore = None
self._create = None
self._close = None
self._acknowledge = None
self._add_note = None
self.discriminator = None
if parent is not None:
self.parent = parent
if ignore is not None:
self.ignore = ignore
if create is not None:
self.create = create
if close is not None:
self.close = close
if acknowledge is not None:
self.acknowledge = acknowledge
if add_note is not None:
self.add_note = add_note
@property
def parent(self):
"""Gets the parent of this ActionCategorized. # noqa: E501
:return: The parent of this ActionCategorized. # noqa: E501
:rtype: IntegrationMeta
"""
return self._parent
@parent.setter
def parent(self, parent):
"""Sets the parent of this ActionCategorized.
:param parent: The parent of this ActionCategorized. # noqa: E501
:type: IntegrationMeta
"""
self._parent = parent
@property
def ignore(self):
"""Gets the ignore of this ActionCategorized. # noqa: E501
:return: The ignore of this ActionCategorized. # noqa: E501
:rtype: list[IgnoreIntegrationAction]
"""
return self._ignore
@ignore.setter
def ignore(self, ignore):
"""Sets the ignore of this ActionCategorized.
:param ignore: The ignore of this ActionCategorized. # noqa: E501
:type: list[IgnoreIntegrationAction]
"""
self._ignore = ignore
@property
def create(self):
"""Gets the create of this ActionCategorized. # noqa: E501
:return: The create of this ActionCategorized. # noqa: E501
:rtype: list[CreateIntegrationAction]
"""
return self._create
@create.setter
def create(self, create):
"""Sets the create of this ActionCategorized.
:param create: The create of this ActionCategorized. # noqa: E501
:type: list[CreateIntegrationAction]
"""
self._create = create
@property
def close(self):
"""Gets the close of this ActionCategorized. # noqa: E501
:return: The close of this ActionCategorized. # noqa: E501
:rtype: list[CloseIntegrationAction]
"""
return self._close
@close.setter
def close(self, close):
"""Sets the close of this ActionCategorized.
:param close: The close of this ActionCategorized. # noqa: E501
:type: list[CloseIntegrationAction]
"""
self._close = close
@property
def acknowledge(self):
"""Gets the acknowledge of this ActionCategorized. # noqa: E501
:return: The acknowledge of this ActionCategorized. # noqa: E501
:rtype: list[AckIntegrationAction]
"""
return self._acknowledge
@acknowledge.setter
def acknowledge(self, acknowledge):
"""Sets the acknowledge of this ActionCategorized.
:param acknowledge: The acknowledge of this ActionCategorized. # noqa: E501
:type: list[AckIntegrationAction]
"""
self._acknowledge = acknowledge
@property
def add_note(self):
"""Gets the add_note of this ActionCategorized. # noqa: E501
:return: The add_note of this ActionCategorized. # noqa: E501
:rtype: list[AddNoteIntegrationAction]
"""
return self._add_note
@add_note.setter
def add_note(self, add_note):
"""Sets the add_note of this ActionCategorized.
:param add_note: The add_note of this ActionCategorized. # noqa: E501
:type: list[AddNoteIntegrationAction]
"""
self._add_note = add_note
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ActionCategorized):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 28.956 | 121 | 0.611687 |
import pprint
import re
import six
from opsgenie_swagger.models.ack_integration_action import AckIntegrationAction
from opsgenie_swagger.models.add_note_integration_action import AddNoteIntegrationAction
from opsgenie_swagger.models.close_integration_action import CloseIntegrationAction
from opsgenie_swagger.models.create_integration_action import CreateIntegrationAction
from opsgenie_swagger.models.ignore_integration_action import IgnoreIntegrationAction
from opsgenie_swagger.models.integration_meta import IntegrationMeta
class ActionCategorized(object):
    """Auto-generated OpenAPI model grouping integration actions by category."""

    # Attribute name -> declared swagger type; to_dict() iterates this mapping.
    swagger_types = {
        'parent': 'IntegrationMeta',
        'ignore': 'list[IgnoreIntegrationAction]',
        'create': 'list[CreateIntegrationAction]',
        'close': 'list[CloseIntegrationAction]',
        'acknowledge': 'list[AckIntegrationAction]',
        'add_note': 'list[AddNoteIntegrationAction]'
    }

    # Attribute name -> JSON key used on the wire.
    attribute_map = {
        'parent': '_parent',
        'ignore': 'ignore',
        'create': 'create',
        'close': 'close',
        'acknowledge': 'acknowledge',
        'add_note': 'addNote'
    }

    def __init__(self, parent=None, ignore=None, create=None, close=None, acknowledge=None, add_note=None):
        """Initialize the model; only explicitly supplied fields are set."""
        self._parent = None
        self._ignore = None
        self._create = None
        self._close = None
        self._acknowledge = None
        self._add_note = None
        self.discriminator = None

        # Route supplied values through the property setters, exactly as the
        # generated per-field assignments did.
        supplied = (
            ('parent', parent),
            ('ignore', ignore),
            ('create', create),
            ('close', close),
            ('acknowledge', acknowledge),
            ('add_note', add_note),
        )
        for field, value in supplied:
            if value is not None:
                setattr(self, field, value)

    @property
    def parent(self):
        """Parent integration metadata of this ActionCategorized."""
        return self._parent

    @parent.setter
    def parent(self, parent):
        self._parent = parent

    @property
    def ignore(self):
        """Actions in the 'ignore' category."""
        return self._ignore

    @ignore.setter
    def ignore(self, ignore):
        self._ignore = ignore

    @property
    def create(self):
        """Actions in the 'create' category."""
        return self._create

    @create.setter
    def create(self, create):
        self._create = create

    @property
    def close(self):
        """Actions in the 'close' category."""
        return self._close

    @close.setter
    def close(self, close):
        self._close = close

    @property
    def acknowledge(self):
        """Actions in the 'acknowledge' category."""
        return self._acknowledge

    @acknowledge.setter
    def acknowledge(self, acknowledge):
        self._acknowledge = acknowledge

    @property
    def add_note(self):
        """Actions in the 'addNote' category."""
        return self._add_note

    @add_note.setter
    def add_note(self, add_note):
        self._add_note = add_note

    def to_dict(self):
        """Return the model's properties as a plain dict, recursing into
        nested models and containers."""
        output = {}
        for name, _unused in six.iteritems(self.swagger_types):
            attr_value = getattr(self, name)
            if isinstance(attr_value, list):
                output[name] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in attr_value
                ]
            elif hasattr(attr_value, "to_dict"):
                output[name] = attr_value.to_dict()
            elif isinstance(attr_value, dict):
                output[name] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in attr_value.items()
                }
            else:
                output[name] = attr_value
        return output

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two models are equal when both are ActionCategorized and all
        attributes match."""
        return isinstance(other, ActionCategorized) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
| true | true |
f73702ffad0e52858fde93f47f5e9dda3b76f954 | 6,651 | py | Python | aat/engine/dispatch/utils.py | mthomascarcamo/aat | fd86f513ccf79625516d2236be655498b24ec742 | [
"Apache-2.0"
] | 305 | 2020-02-24T02:25:43.000Z | 2022-03-26T22:53:43.000Z | aat/engine/dispatch/utils.py | mthomascarcamo/aat | fd86f513ccf79625516d2236be655498b24ec742 | [
"Apache-2.0"
] | 79 | 2020-02-20T21:00:58.000Z | 2022-03-27T14:06:26.000Z | aat/engine/dispatch/utils.py | mthomascarcamo/aat | fd86f513ccf79625516d2236be655498b24ec742 | [
"Apache-2.0"
] | 71 | 2020-05-10T11:52:25.000Z | 2022-03-29T07:51:48.000Z | import asyncio
from datetime import datetime
from typing import Any, Callable, List, Optional, Union, TYPE_CHECKING
from aat import AATException
from aat.config import ExitRoutine, InstrumentType, TradingType
from aat.core import Instrument, ExchangeType, Event, Order, Trade, OrderBook
from aat.exchange import Exchange
from .periodic import Periodic
if TYPE_CHECKING:
from aat.engine import TradingEngine
from aat.strategy import Strategy
class StrategyManagerUtilsMixin(object):
    """Utility helpers mixed into the strategy manager.

    Wraps access to the trading engine (clock, event loop, exchanges),
    tracks per-strategy market-data subscriptions, and schedules periodic
    tasks on the engine's event loop.
    """

    _engine: "TradingEngine"
    _exchanges: List[Exchange]
    _periodics: List[Periodic]
    # strategy -> list of subscribed instruments; empty means "not tracked",
    # which dataSubscriptions() treats as subscribed-to-everything.
    _data_subscriptions = {}  # type: ignore

    #################
    # Other Methods #
    #################
    def tradingType(self) -> TradingType:
        """Return the engine's trading type (e.g. live/simulation/backtest)."""
        return self._engine.trading_type

    def loop(self) -> asyncio.AbstractEventLoop:
        """Return the engine's asyncio event loop."""
        return self._engine.event_loop

    def now(self) -> datetime:
        """Return the current datetime. Useful to avoid code changes between
        live trading and backtesting. Defaults to `datetime.now`"""
        return self._engine.now()

    def instruments(
        self, type: Optional[InstrumentType] = None, exchange: Optional[ExchangeType] = None
    ) -> List[Instrument]:
        """Return list of all available instruments, optionally filtered by
        instrument type and/or exchange."""
        return Instrument._instrumentdb.instruments(type=type, exchange=exchange)

    def exchanges(self, type: Optional[InstrumentType] = None) -> List[ExchangeType]:
        """Return list of all available exchanges.

        Filtering by instrument type is not supported yet and raises
        NotImplementedError.
        """
        if type:
            raise NotImplementedError()
        return [exc.exchange() for exc in self._exchanges]

    async def subscribe(self, instrument: Instrument, strategy: "Strategy") -> None:
        """Subscribe `strategy` to market data for the given instrument.

        Raises:
            AATException: if the instrument's exchange is not installed.
        """
        if strategy not in self._data_subscriptions:
            self._data_subscriptions[strategy] = []

        self._data_subscriptions[strategy].append(instrument)

        if instrument.exchange not in self.exchanges():
            # Fixed: error message was missing its closing parenthesis.
            raise AATException(
                "Exchange not installed: {} (Installed are [{}])".format(
                    instrument.exchange, self.exchanges()
                )
            )

        # Forward the subscription to the exchange that owns the instrument.
        for exc in self._exchanges:
            if instrument and instrument.exchange == exc.exchange():
                await exc.subscribe(instrument)

    def dataSubscriptions(self, handler: Callable, event: Event) -> bool:
        """Return True if `handler` is subscribed to the data in `event`."""
        if handler not in self._data_subscriptions:
            # subscribe all by default
            return True
        target: Union[Order, Trade] = event.target  # type: ignore
        return target.instrument in self._data_subscriptions[handler]

    async def lookup(
        self, instrument: Optional[Instrument], exchange: ExchangeType = None
    ) -> List[Instrument]:
        """Return list of all available instruments that match the instrument
        given, querying a single exchange when `exchange` is provided or all
        installed exchanges otherwise."""
        if exchange:
            for exchange_inst in self._exchanges:
                if exchange == exchange_inst.exchange():
                    if instrument:
                        return await exchange_inst.lookup(instrument)
            # Requested exchange not installed (or instrument missing).
            return []

        elif exchange is None:
            ret = []
            for exchange_inst in self._exchanges:
                if instrument:
                    ret.extend(await exchange_inst.lookup(instrument))
            return ret

        # `exchange` was falsy but not None -- unsupported.
        raise NotImplementedError()

    async def book(self, instrument: Instrument) -> Optional[OrderBook]:
        """Return the order book for `instrument` from its exchange.

        Raises:
            AATException: if the instrument's exchange is not installed.
        """
        if instrument.exchange not in self.exchanges():
            raise AATException("")

        for exchange_inst in self._exchanges:
            if instrument.exchange == exchange_inst.exchange():
                return await exchange_inst.book(instrument)
        return None

    def _make_async(self, function: Callable) -> Callable:
        """Wrap a synchronous callable so it can be awaited.

        The callable is offloaded to the engine's executor via
        ``loop.run_in_executor(executor, func)``.
        """
        async def _wrapper() -> Any:
            # Fix: run_in_executor takes (executor, func, *args); previously
            # the engine itself was passed as the executor by mistake.
            return await self.loop().run_in_executor(
                self._engine.executor, function
            )

        return _wrapper

    def periodic(
        self,
        function: Callable,
        second: Union[int, str] = 0,
        minute: Union[int, str] = "*",
        hour: Union[int, str] = "*",
    ) -> Periodic:
        """periodically run a given async function. NOTE: precise timing
        is NOT guaranteed due to event loop scheduling.

        Args:
            function: callable to run; wrapped via _make_async if not a
                coroutine function.
            second/minute/hour: int for an exact value, or "*" for "every".

        Raises:
            Exception: if the loop is not running yet or an argument is
                out of range / of the wrong type.
        """
        if not self.loop().is_running():
            raise Exception("Install periodics after engine start (e.g. in `onStart`)")

        # Validation
        if not asyncio.iscoroutinefunction(function):
            function = self._make_async(function)

        if not isinstance(second, (int, str)):
            raise Exception("`second` arg must be int or str")

        if not isinstance(minute, (int, str)):
            raise Exception("`minute` arg must be int or str")

        if not isinstance(hour, (int, str)):
            raise Exception("`hour` arg must be int or str")

        # "*" is normalized to None (meaning "every") for Periodic.
        if isinstance(second, str) and second != "*":
            raise Exception('Only "*" or int allowed for argument `second`')
        elif isinstance(second, str):
            second = None  # type: ignore
        elif second < 0 or second > 60:
            raise Exception("`second` must be between 0 and 60")

        if isinstance(minute, str) and minute != "*":
            raise Exception('Only "*" or int allowed for argument `minute`')
        elif isinstance(minute, str):
            minute = None  # type: ignore
        elif minute < 0 or minute > 60:
            raise Exception("`minute` must be between 0 and 60")

        if isinstance(hour, str) and hour != "*":
            raise Exception('Only "*" or int allowed for argument `hour`')
        elif isinstance(hour, str):
            hour = None  # type: ignore
        elif hour < 0 or hour > 24:
            raise Exception("`hour` must be between 0 and 24")
        # End Validation

        periodic = Periodic(
            self.loop(), self._engine._latest, function, second, minute, hour  # type: ignore
        )
        self._periodics.append(periodic)
        return periodic

    def restrictTradingHours(
        self,
        strategy: "Strategy",
        start_second: Optional[int] = None,
        start_minute: Optional[int] = None,
        start_hour: Optional[int] = None,
        end_second: Optional[int] = None,
        end_minute: Optional[int] = None,
        end_hour: Optional[int] = None,
        on_end_of_day: ExitRoutine = ExitRoutine.NONE,
    ) -> None:
        """Restrict a strategy's trading hours. Placeholder: not implemented."""
        pass
| 36.745856 | 93 | 0.615547 | import asyncio
from datetime import datetime
from typing import Any, Callable, List, Optional, Union, TYPE_CHECKING
from aat import AATException
from aat.config import ExitRoutine, InstrumentType, TradingType
from aat.core import Instrument, ExchangeType, Event, Order, Trade, OrderBook
from aat.exchange import Exchange
from .periodic import Periodic
if TYPE_CHECKING:
from aat.engine import TradingEngine
from aat.strategy import Strategy
class StrategyManagerUtilsMixin(object):
_engine: "TradingEngine"
_exchanges: List[Exchange]
_periodics: List[Periodic]
_data_subscriptions = {}
self, type: InstrumentType = None, exchange: Optional[ExchangeType] = None
) -> List[Instrument]:
return Instrument._instrumentdb.instruments(type=type, exchange=exchange)
def exchanges(self, type: InstrumentType = None) -> List[ExchangeType]:
if type:
raise NotImplementedError()
return [exc.exchange() for exc in self._exchanges]
async def subscribe(self, instrument: Instrument, strategy: "Strategy") -> None:
if strategy not in self._data_subscriptions:
self._data_subscriptions[strategy] = []
self._data_subscriptions[strategy].append(instrument)
if instrument.exchange not in self.exchanges():
raise AATException(
"Exchange not installed: {} (Installed are [{}]".format(
instrument.exchange, self.exchanges()
)
)
for exc in self._exchanges:
if instrument and instrument.exchange == exc.exchange():
await exc.subscribe(instrument)
def dataSubscriptions(self, handler: Callable, event: Event) -> bool:
if handler not in self._data_subscriptions:
return True
target: Union[Order, Trade] = event.target
return target.instrument in self._data_subscriptions[handler]
async def lookup(
self, instrument: Optional[Instrument], exchange: ExchangeType = None
) -> List[Instrument]:
if exchange:
for exchange_inst in self._exchanges:
if exchange == exchange_inst.exchange():
if instrument:
return await exchange_inst.lookup(instrument)
return []
elif exchange is None:
ret = []
for exchange_inst in self._exchanges:
if instrument:
ret.extend(await exchange_inst.lookup(instrument))
return ret
raise NotImplementedError()
async def book(self, instrument: Instrument) -> Optional[OrderBook]:
if instrument.exchange not in self.exchanges():
raise AATException("")
for exchange_inst in self._exchanges:
if instrument.exchange == exchange_inst.exchange():
return await exchange_inst.book(instrument)
return None
def _make_async(self, function: Callable) -> Callable:
async def _wrapper() -> Any:
return await self.loop().run_in_executor(
self._engine, self._engine.executor, function
)
return _wrapper
def periodic(
self,
function: Callable,
second: Union[int, str] = 0,
minute: Union[int, str] = "*",
hour: Union[int, str] = "*",
) -> Periodic:
if not self.loop().is_running():
raise Exception("Install periodics after engine start (e.g. in `onStart`)")
if not asyncio.iscoroutinefunction(function):
function = self._make_async(function)
if not isinstance(second, (int, str)):
raise Exception("`second` arg must be int or str")
if not isinstance(minute, (int, str)):
raise Exception("`minute` arg must be int or str")
if not isinstance(hour, (int, str)):
raise Exception("`hour` arg must be int or str")
if isinstance(second, str) and second != "*":
raise Exception('Only "*" or int allowed for argument `second`')
elif isinstance(second, str):
second = None
elif second < 0 or second > 60:
raise Exception("`second` must be between 0 and 60")
if isinstance(minute, str) and minute != "*":
raise Exception('Only "*" or int allowed for argument `minute`')
elif isinstance(minute, str):
minute = None
elif minute < 0 or minute > 60:
raise Exception("`minute` must be between 0 and 60")
if isinstance(hour, str) and hour != "*":
raise Exception('Only "*" or int allowed for argument `hour`')
elif isinstance(hour, str):
hour = None
elif hour < 0 or hour > 24:
raise Exception("`hour` must be between 0 and 24")
periodic = Periodic(
self.loop(), self._engine._latest, function, second, minute, hour
)
self._periodics.append(periodic)
return periodic
def restrictTradingHours(
self,
strategy: "Strategy",
start_second: Optional[int] = None,
start_minute: Optional[int] = None,
start_hour: Optional[int] = None,
end_second: Optional[int] = None,
end_minute: Optional[int] = None,
end_hour: Optional[int] = None,
on_end_of_day: ExitRoutine = ExitRoutine.NONE,
) -> None:
pass
| true | true |
f73703158d8c71193bd5ca312a83857462b4fddb | 296 | py | Python | casier_closing/casier_closing/doctype/cashier_closing_payments/cashier_closing_payments.py | Lewinta/Cashier-Closing | 9ad639c9d743465f028335d2d4a59cdea7dfbc85 | [
"MIT"
] | 1 | 2021-04-29T14:55:29.000Z | 2021-04-29T14:55:29.000Z | casier_closing/casier_closing/doctype/cashier_closing_payments/cashier_closing_payments.py | Lewinta/Cashier-Closing | 9ad639c9d743465f028335d2d4a59cdea7dfbc85 | [
"MIT"
] | null | null | null | casier_closing/casier_closing/doctype/cashier_closing_payments/cashier_closing_payments.py | Lewinta/Cashier-Closing | 9ad639c9d743465f028335d2d4a59cdea7dfbc85 | [
"MIT"
] | 1 | 2021-04-29T14:39:01.000Z | 2021-04-29T14:39:01.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class CashierClosingPayments(Document):
	"""Child-table DocType row for payment entries of a cashier closing.

	All fields and behavior come from the DocType definition and the
	frappe Document base class; no custom logic is needed here.
	"""
	pass
| 26.909091 | 68 | 0.783784 |
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class CashierClosingPayments(Document):
pass
| true | true |
f7370323b15752e3f6b19164f6d5184566958a80 | 318 | py | Python | listtocommaseparated.py | serhii73/PyTricks | cae22a0960a7bf9a39c7fb3227e283c3b86bd05b | [
"MIT"
] | 3,271 | 2015-03-19T11:27:39.000Z | 2022-03-19T03:00:37.000Z | listtocommaseparated.py | serhii73/PyTricks | cae22a0960a7bf9a39c7fb3227e283c3b86bd05b | [
"MIT"
] | 78 | 2015-03-19T09:45:09.000Z | 2020-12-24T08:47:15.000Z | listtocommaseparated.py | serhii73/PyTricks | cae22a0960a7bf9a39c7fb3227e283c3b86bd05b | [
"MIT"
] | 531 | 2015-03-19T11:12:52.000Z | 2022-03-23T15:07:07.000Z | #! /usr/bin/env python3
"""converts list to comma separated string"""
items = ['foo', 'bar', 'xyz']
print (','.join(items))
"""list of numbers to comma separated"""
numbers = [2, 3, 5, 10]
print (','.join(map(str, numbers)))
"""list of mix data"""
data = [2, 'hello', 3, 3.4]
print (','.join(map(str, data)))
| 16.736842 | 45 | 0.584906 |
items = ['foo', 'bar', 'xyz']
print (','.join(items))
numbers = [2, 3, 5, 10]
print (','.join(map(str, numbers)))
data = [2, 'hello', 3, 3.4]
print (','.join(map(str, data)))
| true | true |
f7370371c94b70b03a4200ec6d26067a36ac96a6 | 587 | py | Python | setup.py | napyk/foamfile | d5b575c35f3461b776b91b5640d20f7eae8a30de | [
"MIT"
] | 1 | 2020-10-02T09:27:23.000Z | 2020-10-02T09:27:23.000Z | setup.py | napyk/foamfile | d5b575c35f3461b776b91b5640d20f7eae8a30de | [
"MIT"
] | 1 | 2020-08-19T12:54:53.000Z | 2020-08-19T15:42:32.000Z | setup.py | napyk/foamfile | d5b575c35f3461b776b91b5640d20f7eae8a30de | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
with open("README.md", 'r') as f:
long_description = f.read()
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(name="foamfile",
version="0.11",
description='OpenFOAM config file parser',
long_description=long_description,
long_description_content_type='text/markdown',
author="Nikolas Pfister",
author_email="pfister.nikolas@gmail.com",
packages=find_packages(),
license="MIT",
url="https://github.com/napyk/foamfile",
install_requires=required)
| 29.35 | 52 | 0.684838 | from setuptools import setup, find_packages
with open("README.md", 'r') as f:
long_description = f.read()
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(name="foamfile",
version="0.11",
description='OpenFOAM config file parser',
long_description=long_description,
long_description_content_type='text/markdown',
author="Nikolas Pfister",
author_email="pfister.nikolas@gmail.com",
packages=find_packages(),
license="MIT",
url="https://github.com/napyk/foamfile",
install_requires=required)
| true | true |
f73703bc54cf082a12df11580aba39b827d10c7e | 3,594 | py | Python | app/api/resources/user.py | quanpower/sitp | 082f244dd35c5e881b332a624d4808f3e9e81a96 | [
"Apache-2.0"
] | null | null | null | app/api/resources/user.py | quanpower/sitp | 082f244dd35c5e881b332a624d4808f3e9e81a96 | [
"Apache-2.0"
] | 4 | 2020-03-24T15:46:19.000Z | 2022-03-08T21:09:16.000Z | app/api/resources/user.py | quanpower/sitp | 082f244dd35c5e881b332a624d4808f3e9e81a96 | [
"Apache-2.0"
] | null | null | null | from flask import request, jsonify, make_response
from flask_restful import Resource, reqparse, abort
import time
import datetime
import json
from app.models import User
from app import db
class Register(Resource):
    """User registration endpoint.

    NOTE(review): account creation is currently stubbed out -- POST always
    reports success without persisting anything. (The dead, commented-out
    persistence code that used to live here has been removed.)
    """

    def get(self):
        pass

    def post(self):
        # Registration is stubbed: unconditionally report success and grant
        # the default 'user' authority.
        return jsonify({ 'status': 'ok', 'currentAuthority': 'user' })

    def delete(self):
        pass

    def put(self):
        pass
class Login(Resource):
    """Login endpoint: checks credentials and returns the caller's authority."""

    def get(self):
        pass

    def post(self):
        """Authenticate the posted userName/password.

        Returns a JSON payload with:
          - status: 'ok' on success, 'error' otherwise
          - type: echoed back from the request
          - currentAuthority: 'admin' for the admin account, 'user' for any
            other authenticated user, 'guest' on failure
        """
        parser = reqparse.RequestParser()
        parser.add_argument('userName', type=str)
        parser.add_argument('password', type=str)
        parser.add_argument('type', type=str)
        args = parser.parse_args()
        username = args['userName']
        password = args['password']
        post_type = args['type']

        user = User.query.filter_by(username=username).first()
        if user is not None and user.verify_password(password):
            # Removed unused `user_id` local and debug prints; collapsed the
            # duplicated success responses into one.
            authority = 'admin' if username == 'admin' else 'user'
            return jsonify({
                'status': 'ok',
                'type': post_type,
                'currentAuthority': authority,
            })
        return jsonify({
            'status': 'error',
            'type': post_type,
            'currentAuthority': 'guest',
        })

    def delete(self):
        pass

    def put(self):
        pass
class Logout(Resource):
    """Logout endpoint: clears the auth token cookie."""

    def get(self):
        response = make_response("Delete cookie")
        response.delete_cookie('token')
        return response

    def post(self):
        pass

    def delete(self):
        pass

    def put(self):
        pass
class Users(Resource):
    """Collection endpoint listing every registered user."""

    def get(self):
        # Serialize each user into the row shape the frontend table expects;
        # 'key' is the row key.
        # NOTE(review): 'age' is filled from user.name and 'address' from
        # user.location -- this looks like a field mismatch; confirm against
        # the consuming UI before changing.
        rows = [
            {
                'key': user.id,
                'name': user.username,
                'age': user.name,
                'address': user.location,
            }
            for user in User.query.all()
        ]
        return jsonify(rows)

    def post(self):
        pass

    def delete(self):
        pass

    def put(self):
        pass
class GetUser(Resource):
    """Return the profile of the current user (hard-coded placeholder data)."""

    def get(self):
        profile = {
            'name': 'Jiajia Wen',
            'avatar': 'https://gw.alipayobjects.com/zos/rmsportal/BiazfanxmamNRoxxVxka.png',
            'userid': '00000001',
            'notifyCount': 12,
        }
        return jsonify(profile)

    def post(self):
        pass

    def delete(self):
        pass

    def put(self):
        pass
| 22.89172 | 102 | 0.484975 | from flask import request, jsonify, make_response
from flask_restful import Resource, reqparse, abort
import time
import datetime
import json
from app.models import User
from app import db
class Register(Resource):
def get(self):
pass
def post(self):
return jsonify({ 'status': 'ok', 'currentAuthority': 'user' })
def delete(self):
pass
def put(self):
pass
class Login(Resource):
def get(self):
pass
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('userName', type=str)
parser.add_argument('password', type=str)
parser.add_argument('type', type=str)
args = parser.parse_args()
username = args['userName']
password = args['password']
post_type = args['type']
user = User.query.filter_by(username=username).first()
if user is not None and user.verify_password(password):
print('-----verify success!-----')
user_id = user.id
print(user_id)
if username == 'admin':
return jsonify({
'status': 'ok',
'type': post_type,
'currentAuthority': 'admin',
})
else:
return jsonify({
'status': 'ok',
'type': post_type,
'currentAuthority': 'user',
})
else:
return jsonify({
'status': 'error',
'type': post_type,
'currentAuthority': 'guest',
})
def delete(self):
pass
def put(self):
pass
class Logout(Resource):
def get(self):
resp = make_response("Delete cookie")
resp.delete_cookie('token')
return resp
def post(self):
pass
def delete(self):
pass
def put(self):
pass
class Users(Resource):
def get(self):
users = User.query.all()
resp = []
for user in users:
user_dict = {
'key':user.id,
'name':user.username,
'age':user.name,
'address':user.location,
}
resp.append(user_dict)
return jsonify(resp)
def post(self):
pass
def delete(self):
pass
def put(self):
pass
class GetUser(Resource):
def get(self):
return jsonify({
'name': 'Jiajia Wen',
'avatar': 'https://gw.alipayobjects.com/zos/rmsportal/BiazfanxmamNRoxxVxka.png',
'userid': '00000001',
'notifyCount': 12,
})
def post(self):
pass
def delete(self):
pass
def put(self):
pass
| true | true |
f73703c2202b96b3c3d11b95265204f9ab4eb572 | 5,248 | py | Python | neutron/debug/commands.py | gampel/neutron | 51a6260266dc59c066072ca890ad9c40b1aad6cf | [
"Apache-2.0"
] | 10 | 2015-09-22T10:22:53.000Z | 2016-02-25T06:12:05.000Z | neutron/debug/commands.py | gampel/neutron | 51a6260266dc59c066072ca890ad9c40b1aad6cf | [
"Apache-2.0"
] | 8 | 2015-05-06T07:51:19.000Z | 2015-07-09T08:15:09.000Z | neutron/debug/commands.py | gampel/neutron | 51a6260266dc59c066072ca890ad9c40b1aad6cf | [
"Apache-2.0"
] | 3 | 2015-10-23T15:22:18.000Z | 2022-03-16T16:32:54.000Z | # Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cliff import lister
from neutronclient.common import utils
from neutronclient.neutron import v2_0 as client
from neutronclient.neutron.v2_0 import port
from neutron.i18n import _LI
from neutron.openstack.common import log as logging
class ProbeCommand(client.NeutronCommand):
    """Base class for probe commands; provides access to the debug agent."""

    log = logging.getLogger(__name__ + '.ProbeCommand')

    def get_debug_agent(self):
        """Return the debug agent attached to the running application."""
        return self.app.debug_agent

    def run(self, parsed_args):
        self.log.debug('run(%s)', parsed_args)
        # Consistency fix: log translations use the _LI marker (oslo.i18n
        # convention), matching ClearProbe in this module.
        self.log.info(_LI('Unimplemented commands'))
class CreateProbe(ProbeCommand):
    """Create probe port and interface, then plug it in."""

    log = logging.getLogger(__name__ + '.CreateProbe')

    def get_parser(self, prog_name):
        parser = super(CreateProbe, self).get_parser(prog_name)
        parser.add_argument(
            'id', metavar='network_id',
            help=_('ID of network to probe'))
        parser.add_argument(
            '--device-owner',
            default='network', choices=['network', 'compute'],
            help=_('Owner type of the device: network/compute'))
        return parser

    def run(self, parsed_args):
        self.log.debug('run(%s)', parsed_args)
        debug_agent = self.get_debug_agent()
        probe_port = debug_agent.create_probe(parsed_args.id,
                                              parsed_args.device_owner)
        # Consistency fix: _LI marks translatable log messages (oslo.i18n
        # convention), matching ClearProbe in this module.
        self.log.info(_LI('Probe created : %s '), probe_port.id)
class DeleteProbe(ProbeCommand):
    """Delete probe - delete port then uplug."""

    log = logging.getLogger(__name__ + '.DeleteProbe')

    def get_parser(self, prog_name):
        parser = super(DeleteProbe, self).get_parser(prog_name)
        parser.add_argument(
            'id', metavar='port_id',
            help=_('ID of probe port to delete'))
        return parser

    def run(self, parsed_args):
        self.log.debug('run(%s)', parsed_args)
        debug_agent = self.get_debug_agent()
        debug_agent.delete_probe(parsed_args.id)
        # Consistency fix: _LI marks translatable log messages (oslo.i18n
        # convention), matching ClearProbe in this module.
        self.log.info(_LI('Probe %s deleted'), parsed_args.id)
class ListProbe(client.NeutronCommand, lister.Lister):
    """List probes."""

    log = logging.getLogger(__name__ + '.ListProbe')
    _formatters = {'fixed_ips': port._format_fixed_ips, }

    def get_debug_agent(self):
        """Return the debug agent attached to the running application."""
        return self.app.debug_agent

    def get_data(self, parsed_args):
        """Return (columns, rows) for the lister to render."""
        agent = self.get_debug_agent()
        probes = agent.list_probes()
        # Column set comes from the first probe's keys, sorted for stability.
        columns = sorted(probes[0].keys()) if probes else []
        rows = (
            utils.get_item_properties(probe, columns, formatters=self._formatters)
            for probe in probes
        )
        return (columns, rows)
class ClearProbe(ProbeCommand):
    """Clear All probes."""

    log = logging.getLogger(__name__ + '.ClearProbe')

    def run(self, parsed_args):
        self.log.debug('run(%s)', parsed_args)
        agent = self.get_debug_agent()
        deleted = agent.clear_probes()
        self.log.info(_LI('%d probe(s) deleted'), deleted)
class ExecProbe(ProbeCommand):
    """Exec commands on the namespace of the probe."""

    log = logging.getLogger(__name__ + '.ExecProbe')

    def get_parser(self, prog_name):
        """Add the probe id and the (optional) command to run."""
        parser = super(ExecProbe, self).get_parser(prog_name)
        parser.add_argument(
            'id', metavar='port_id',
            help=_('ID of probe port to execute command'))
        parser.add_argument(
            'command', metavar='command',
            nargs='?',
            default=None,
            help=_('Command to execute'))
        return parser

    def run(self, parsed_args):
        """Run the command inside the probe's namespace and print its output."""
        self.log.debug('run(%s)', parsed_args)
        agent = self.get_debug_agent()
        output = agent.exec_command(parsed_args.id, parsed_args.command)
        self.app.stdout.write(output + '\n')
class PingAll(ProbeCommand):
    """Ping all fixed_ip."""

    # Fix: the logger name was copy-pasted from ExecProbe ('.ExecProbe'),
    # misattributing this command's log records; use this class's own name.
    log = logging.getLogger(__name__ + '.PingAll')

    def get_parser(self, prog_name):
        parser = super(PingAll, self).get_parser(prog_name)
        parser.add_argument(
            '--timeout', metavar='<timeout>',
            default=10,
            help=_('Ping timeout'))
        parser.add_argument(
            '--id', metavar='network_id',
            default=None,
            help=_('ID of network'))
        return parser

    def run(self, parsed_args):
        self.log.debug('run(%s)', parsed_args)
        debug_agent = self.get_debug_agent()
        result = debug_agent.ping_all(parsed_args.id,
                                      timeout=parsed_args.timeout)
        self.app.stdout.write(result + '\n')
| 33.426752 | 79 | 0.638338 |
from cliff import lister
from neutronclient.common import utils
from neutronclient.neutron import v2_0 as client
from neutronclient.neutron.v2_0 import port
from neutron.i18n import _LI
from neutron.openstack.common import log as logging
class ProbeCommand(client.NeutronCommand):
log = logging.getLogger(__name__ + '.ProbeCommand')
def get_debug_agent(self):
return self.app.debug_agent
def run(self, parsed_args):
self.log.debug('run(%s)', parsed_args)
self.log.info(_('Unimplemented commands'))
class CreateProbe(ProbeCommand):
log = logging.getLogger(__name__ + '.CreateProbe')
def get_parser(self, prog_name):
parser = super(CreateProbe, self).get_parser(prog_name)
parser.add_argument(
'id', metavar='network_id',
help=_('ID of network to probe'))
parser.add_argument(
'--device-owner',
default='network', choices=['network', 'compute'],
help=_('Owner type of the device: network/compute'))
return parser
def run(self, parsed_args):
self.log.debug('run(%s)', parsed_args)
debug_agent = self.get_debug_agent()
probe_port = debug_agent.create_probe(parsed_args.id,
parsed_args.device_owner)
self.log.info(_('Probe created : %s '), probe_port.id)
class DeleteProbe(ProbeCommand):
log = logging.getLogger(__name__ + '.DeleteProbe')
def get_parser(self, prog_name):
parser = super(DeleteProbe, self).get_parser(prog_name)
parser.add_argument(
'id', metavar='port_id',
help=_('ID of probe port to delete'))
return parser
def run(self, parsed_args):
self.log.debug('run(%s)', parsed_args)
debug_agent = self.get_debug_agent()
debug_agent.delete_probe(parsed_args.id)
self.log.info(_('Probe %s deleted'), parsed_args.id)
class ListProbe(client.NeutronCommand, lister.Lister):
log = logging.getLogger(__name__ + '.ListProbe')
_formatters = {'fixed_ips': port._format_fixed_ips, }
def get_debug_agent(self):
return self.app.debug_agent
def get_data(self, parsed_args):
debug_agent = self.get_debug_agent()
info = debug_agent.list_probes()
columns = sorted(info[0].keys()) if info else []
return (columns, (utils.get_item_properties(
s, columns, formatters=self._formatters, )
for s in info), )
class ClearProbe(ProbeCommand):
log = logging.getLogger(__name__ + '.ClearProbe')
def run(self, parsed_args):
self.log.debug('run(%s)', parsed_args)
debug_agent = self.get_debug_agent()
cleared_probes_count = debug_agent.clear_probes()
self.log.info(_LI('%d probe(s) deleted'), cleared_probes_count)
class ExecProbe(ProbeCommand):
log = logging.getLogger(__name__ + '.ExecProbe')
def get_parser(self, prog_name):
parser = super(ExecProbe, self).get_parser(prog_name)
parser.add_argument(
'id', metavar='port_id',
help=_('ID of probe port to execute command'))
parser.add_argument(
'command', metavar='command',
nargs='?',
default=None,
help=_('Command to execute'))
return parser
def run(self, parsed_args):
self.log.debug('run(%s)', parsed_args)
debug_agent = self.get_debug_agent()
result = debug_agent.exec_command(parsed_args.id, parsed_args.command)
self.app.stdout.write(result + '\n')
class PingAll(ProbeCommand):
log = logging.getLogger(__name__ + '.ExecProbe')
def get_parser(self, prog_name):
parser = super(PingAll, self).get_parser(prog_name)
parser.add_argument(
'--timeout', metavar='<timeout>',
default=10,
help=_('Ping timeout'))
parser.add_argument(
'--id', metavar='network_id',
default=None,
help=_('ID of network'))
return parser
def run(self, parsed_args):
self.log.debug('run(%s)', parsed_args)
debug_agent = self.get_debug_agent()
result = debug_agent.ping_all(parsed_args.id,
timeout=parsed_args.timeout)
self.app.stdout.write(result + '\n')
| true | true |
f73704698efd7b42a3f6e3afa5f4bb3127779a69 | 2,847 | py | Python | opensanctions/crawlers/pl_mswia_sanctions.py | opensanctions/opennames | 39675797b0e70e71f54edff2b8e623e23aef9c15 | [
"MIT"
] | 3 | 2017-04-14T21:27:07.000Z | 2017-04-25T14:57:22.000Z | opensanctions/crawlers/pl_mswia_sanctions.py | opensanctions/opennames | 39675797b0e70e71f54edff2b8e623e23aef9c15 | [
"MIT"
] | null | null | null | opensanctions/crawlers/pl_mswia_sanctions.py | opensanctions/opennames | 39675797b0e70e71f54edff2b8e623e23aef9c15 | [
"MIT"
] | null | null | null | from lxml import html
from normality import collapse_spaces, slugify
from pantomime.types import HTML
from opensanctions.core import Context
from opensanctions import helpers as h
# Maps the section headings of the source tables to entity schema names
# ("OSOBY" = persons, "PODMIOTY" = entities/companies).
TYPES = {"OSOBY": "Person", "PODMIOTY": "Company"}

# (marker, property) pairs used by crawl() to chop identification details off
# the *end* of the free-text details cell via rsplit. Order matters: the
# longer "Nr ..." labels must be tried before their bare "NIP"/"KRS" forms.
CHOPSKA = [
    ("Nr NIP", "taxNumber"),
    ("NIP", "taxNumber"),
    ("Nr KRS", "registrationNumber"),
    ("KRS", "registrationNumber"),
    ("siedziba:", "address"),
]
def crawl(context: Context) -> None:
    """Crawl the Polish MSWiA sanctions page and emit entities + sanctions.

    The page contains HTML tables whose first row is the header, followed by
    a single-cell section row naming the category ("OSOBY"/"PODMIOTY"), then
    one data row per sanctioned person/company.
    """
    path = context.fetch_resource("source.html", context.dataset.data.url)
    context.export_resource(path, HTML, title=context.SOURCE_TITLE)
    with open(path, "r", encoding="utf-8") as fh:
        doc = html.fromstring(fh.read())

    for table in doc.findall('.//div[@class="editor-content"]//table'):
        headers = None
        schema = None
        for row in table.findall(".//tr"):
            cells = [collapse_spaces(c.text_content()) for c in row.findall("./td")]
            if headers is None:
                # First row of each table holds the (slugified) column names.
                headers = [slugify(c, sep="_") for c in cells]
                continue
            if len(cells) == 1:
                # Single-cell rows are section markers choosing the schema
                # for the rows that follow.
                schema = TYPES[cells[0]]
                continue
            # NOTE: `row` is rebound here from the <tr> element to the
            # header->cell dict; fields are pop()ed off it so the leftover
            # keys can be audited at the end.
            row = dict(zip(headers, cells))
            entity = context.make(schema)
            name = row.pop("imie_i_nazwisko_nazwa_podmiotu")
            entity.id = context.make_slug(name)
            # Parenthesized parts of the name cell are treated as aliases.
            names = name.split("(")
            entity.add("name", names[0])
            for alias in names[1:]:
                entity.add("alias", alias.split(")")[0])
            notes = row.pop("uzasadnienie_wpisu_na_liste")
            entity.add("notes", notes)

            details = row.pop("dane_identyfikacyjne_osoby_podmiotu")
            # Repeatedly chop labeled fragments (tax number, registration
            # number, address) off the end of the details string; CHOPSKA
            # order matters, and rsplit(..., 1) keeps earlier text intact.
            for (chop, prop) in CHOPSKA:
                parts = details.rsplit(chop, 1)
                details = parts[0]
                if len(parts) > 1:
                    if prop == "address":
                        addr = h.make_address(context, full=parts[1])
                        h.apply_address(context, entity, addr)
                    else:
                        entity.add(prop, parts[1])

            if len(details.strip()):
                # Anything left over goes through a manual lookup table;
                # unmapped remainders are logged for dataset maintenance.
                result = context.lookup("details", details)
                if result is None:
                    context.log.warning("Unhandled details", details=details)
                else:
                    for prop, value in result.props.items():
                        entity.add(prop, value)

            sanction = h.make_sanction(context, entity)
            provisions = row.pop("zastosowane_srodki_sankcyjne")
            sanction.add("provisions", provisions)
            start_date = row.pop("data_umieszczenia_na_liscie")
            # Source dates are day.month.year, e.g. 26.04.2022.
            sanction.add("startDate", h.parse_date(start_date, ["%d.%m.%Y"]))

            # Warn about any columns we did not consume.
            h.audit_data(row)
            context.emit(entity, target=True)
            context.emit(sanction)
| 38.472973 | 84 | 0.551458 | from lxml import html
from normality import collapse_spaces, slugify
from pantomime.types import HTML
from opensanctions.core import Context
from opensanctions import helpers as h
TYPES = {"OSOBY": "Person", "PODMIOTY": "Company"}
CHOPSKA = [
("Nr NIP", "taxNumber"),
("NIP", "taxNumber"),
("Nr KRS", "registrationNumber"),
("KRS", "registrationNumber"),
("siedziba:", "address"),
]
def crawl(context: Context):
path = context.fetch_resource("source.html", context.dataset.data.url)
context.export_resource(path, HTML, title=context.SOURCE_TITLE)
with open(path, "r", encoding="utf-8") as fh:
doc = html.fromstring(fh.read())
for table in doc.findall('.//div[@class="editor-content"]//table'):
headers = None
schema = None
for row in table.findall(".//tr"):
cells = [collapse_spaces(c.text_content()) for c in row.findall("./td")]
if headers is None:
headers = [slugify(c, sep="_") for c in cells]
continue
if len(cells) == 1:
schema = TYPES[cells[0]]
continue
row = dict(zip(headers, cells))
entity = context.make(schema)
name = row.pop("imie_i_nazwisko_nazwa_podmiotu")
entity.id = context.make_slug(name)
names = name.split("(")
entity.add("name", names[0])
for alias in names[1:]:
entity.add("alias", alias.split(")")[0])
notes = row.pop("uzasadnienie_wpisu_na_liste")
entity.add("notes", notes)
details = row.pop("dane_identyfikacyjne_osoby_podmiotu")
for (chop, prop) in CHOPSKA:
parts = details.rsplit(chop, 1)
details = parts[0]
if len(parts) > 1:
if prop == "address":
addr = h.make_address(context, full=parts[1])
h.apply_address(context, entity, addr)
else:
entity.add(prop, parts[1])
if len(details.strip()):
result = context.lookup("details", details)
if result is None:
context.log.warning("Unhandled details", details=details)
else:
for prop, value in result.props.items():
entity.add(prop, value)
sanction = h.make_sanction(context, entity)
provisions = row.pop("zastosowane_srodki_sankcyjne")
sanction.add("provisions", provisions)
start_date = row.pop("data_umieszczenia_na_liscie")
sanction.add("startDate", h.parse_date(start_date, ["%d.%m.%Y"]))
h.audit_data(row)
context.emit(entity, target=True)
context.emit(sanction)
| true | true |
f73704a096295e412e1e98c15724f6c1ea370176 | 639 | py | Python | test/tests/python-imports/container.py | Vogtinator/official-images | 632a6171cc8a5d59bfb45ca58f12905dcf351efd | [
"Apache-2.0"
] | 45 | 2016-06-03T01:02:19.000Z | 2022-03-25T16:24:46.000Z | test/tests/python-imports/container.py | skar10/official-images | 3f7c529df214db013379b7ec78e00b736f527c3a | [
"Apache-2.0"
] | 6 | 2015-08-16T20:16:40.000Z | 2019-03-11T22:17:02.000Z | test/tests/python-imports/container.py | skar10/official-images | 3f7c529df214db013379b7ec78e00b736f527c3a | [
"Apache-2.0"
] | 7 | 2015-03-20T20:26:18.000Z | 2021-04-22T02:57:30.000Z | import curses
import readline
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import platform
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
# PyPy and Python 2 don't support lzma
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
| 26.625 | 110 | 0.70892 | import curses
import readline
import bz2
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import platform
isNotPypy = platform.python_implementation() != 'PyPy'
isCaveman = platform.python_version_tuple()[0] == '2'
if isCaveman:
import gdbm
else:
import dbm.gnu
if isNotPypy:
import lzma
assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import zlib
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
| true | true |
f73704a61b3e5af1dc00be48aae8a0f7a2220c52 | 2,046 | py | Python | tensorflow_federated/tools/runtime/remote_executor_service.py | iahsanujunda/federated | 109a5653a305dc9d4bcbafc259257add4dc70365 | [
"Apache-2.0"
] | 5 | 2020-06-04T20:10:25.000Z | 2020-07-22T02:15:38.000Z | tensorflow_federated/tools/runtime/remote_executor_service.py | iahsanujunda/federated | 109a5653a305dc9d4bcbafc259257add4dc70365 | [
"Apache-2.0"
] | 5 | 2020-07-20T13:39:12.000Z | 2020-08-27T18:00:56.000Z | tensorflow_federated/tools/runtime/remote_executor_service.py | iahsanujunda/federated | 109a5653a305dc9d4bcbafc259257add4dc70365 | [
"Apache-2.0"
] | 1 | 2022-02-27T18:31:12.000Z | 2022-02-27T18:31:12.000Z | # Copyright 2019, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A generic worker binary for deployment, e.g., on GCP."""
from absl import app
from absl import flags
import grpc
import tensorflow_federated as tff
FLAGS = flags.FLAGS
flags.DEFINE_integer('port', '8000', 'port to listen on')
flags.DEFINE_integer('threads', '10', 'number of worker threads in thread pool')
flags.DEFINE_string('private_key', '', 'the private key for SSL/TLS setup')
flags.DEFINE_string('certificate_chain', '', 'the cert for SSL/TLS setup')
flags.DEFINE_integer('clients', '1', 'number of clients to host on this worker')
flags.DEFINE_integer('fanout', '100',
'max fanout in the hierarchy of local executors')
def main(argv):
del argv
executor_factory = tff.framework.local_executor_factory(
num_clients=FLAGS.clients, max_fanout=FLAGS.fanout)
if FLAGS.private_key:
if FLAGS.certificate_chain:
with open(FLAGS.private_key, 'rb') as f:
private_key = f.read()
with open(FLAGS.certificate_chain, 'rb') as f:
certificate_chain = f.read()
credentials = grpc.ssl_server_credentials(((
private_key,
certificate_chain,
),))
else:
raise ValueError(
'Private key has been specified, but the certificate chain missing.')
else:
credentials = None
tff.simulation.run_server(
executor_factory.create_executor({}), FLAGS.threads, FLAGS.port,
credentials)
if __name__ == '__main__':
app.run(main)
| 35.275862 | 80 | 0.713099 |
from absl import app
from absl import flags
import grpc
import tensorflow_federated as tff
FLAGS = flags.FLAGS
flags.DEFINE_integer('port', '8000', 'port to listen on')
flags.DEFINE_integer('threads', '10', 'number of worker threads in thread pool')
flags.DEFINE_string('private_key', '', 'the private key for SSL/TLS setup')
flags.DEFINE_string('certificate_chain', '', 'the cert for SSL/TLS setup')
flags.DEFINE_integer('clients', '1', 'number of clients to host on this worker')
flags.DEFINE_integer('fanout', '100',
'max fanout in the hierarchy of local executors')
def main(argv):
del argv
executor_factory = tff.framework.local_executor_factory(
num_clients=FLAGS.clients, max_fanout=FLAGS.fanout)
if FLAGS.private_key:
if FLAGS.certificate_chain:
with open(FLAGS.private_key, 'rb') as f:
private_key = f.read()
with open(FLAGS.certificate_chain, 'rb') as f:
certificate_chain = f.read()
credentials = grpc.ssl_server_credentials(((
private_key,
certificate_chain,
),))
else:
raise ValueError(
'Private key has been specified, but the certificate chain missing.')
else:
credentials = None
tff.simulation.run_server(
executor_factory.create_executor({}), FLAGS.threads, FLAGS.port,
credentials)
if __name__ == '__main__':
app.run(main)
| true | true |
f73704e43035b72c9407e81489cebe9f36aaa5ee | 4,901 | py | Python | omniglot_dataset.py | yardenas/meta-learning-tutorial | c5154eae85f6255f58fe6028ab630e3499238b3a | [
"MIT"
] | null | null | null | omniglot_dataset.py | yardenas/meta-learning-tutorial | c5154eae85f6255f58fe6028ab630e3499238b3a | [
"MIT"
] | null | null | null | omniglot_dataset.py | yardenas/meta-learning-tutorial | c5154eae85f6255f58fe6028ab630e3499238b3a | [
"MIT"
] | null | null | null | from typing import Iterator, List, Tuple
import os
import random
import numpy as np
from tensorflow import data as tfd
from tensorflow import image as tfi
from tensorflow import io as tfio
from tensorflow import dtypes
import tensorflow as tf
from google_drive_downloader import GoogleDriveDownloader
class Omniglot:
def __init__(self,
meta_batch_size: int,
num_classes: int,
num_samples_per_class: int,
seed: int = 666):
self.meta_batch_size = meta_batch_size
self.num_samples_per_class = num_samples_per_class
self.num_classes = num_classes
self.seed = seed
if not os.path.isdir('./omniglot_resized'):
GoogleDriveDownloader.download_file_from_google_drive(
file_id='1iaSFXIYC3AB8q9K_M-oVMa4pmB7yKMtI',
dest_path='./omniglot_resized.zip',
unzip=True)
data_folder = './omniglot_resized'
self.img_size = 28, 28
character_folders = [
os.path.join(data_folder, family, character)
for family in os.listdir(data_folder)
if os.path.isdir(os.path.join(data_folder, family))
for character in os.listdir(os.path.join(data_folder, family))
if os.path.isdir(os.path.join(data_folder, family, character))
]
random.seed(1)
random.shuffle(character_folders)
num_val = 100
num_train = 1100
self.metatrain = self._make_dataset(character_folders[:num_train])
self.metaval = self._make_dataset(character_folders[num_train:num_train +
num_val])
self.metatest = self._make_dataset(character_folders[num_train + num_val:])
@property
def train_set(
self
) -> Iterator[Tuple[Tuple[np.ndarray, np.ndarray], Tuple[np.ndarray,
np.ndarray]]]:
yield from self.metatrain.as_numpy_iterator()
@property
def eval_set(
self
) -> Iterator[Tuple[Tuple[np.ndarray, np.ndarray], Tuple[np.ndarray,
np.ndarray]]]:
yield from self.metaval.as_numpy_iterator()
@property
def test_set(
self
) -> Iterator[Tuple[Tuple[np.ndarray, np.ndarray], Tuple[np.ndarray,
np.ndarray]]]:
yield from self.metatest.as_numpy_iterator()
def _make_dataset(self, folders: List[str]) -> tfd.Dataset:
characters = tfd.Dataset.from_tensor_slices(folders).shuffle(
1100, seed=self.seed, reshuffle_each_iteration=True)
def get_images_filenames(char):
all_images = tfio.matching_files(char + '/*.png')
return tfd.Dataset.from_tensor_slices(
tf.random.shuffle(all_images,
seed=self.seed)[:self.num_samples_per_class + 1])
# Use interleave to read the relevant .png files as we iterate through the
# 1100 different chars. Set block_length to num_samples_per_class so that
# we can next batch images from same char together.
image_filenames = characters.interleave(
get_images_filenames,
num_parallel_calls=tfd.AUTOTUNE,
block_length=self.num_samples_per_class + 1).repeat()
def load_image(image_filename):
img = tfio.read_file(image_filename)
img = tfio.decode_png(img, channels=1)
img = tfi.resize(img, self.img_size)
img = tf.cast(img, dtypes.float32) / 255.0
img = 1.0 - img
return img
# Unbatch map and batch to allow tf to read images concurrently. Class
# grouping is maintained.
shots = image_filenames.map(
load_image,
num_parallel_calls=tfd.AUTOTUNE).batch(self.num_samples_per_class + 1)
ways = shots.batch(self.num_classes)
tasks = ways.batch(self.meta_batch_size)
def to_support_and_query_sets(batch):
support_x, query_x = tf.split(
tf.transpose(batch, (0, 2, 1, 3, 4, 5)),
(self.num_samples_per_class, 1),
axis=1)
support_y, query_y = tf.split(
tf.eye(
self.num_classes,
batch_shape=(self.meta_batch_size,
self.num_samples_per_class + 1)),
(self.num_samples_per_class, 1),
axis=1)
ids = tf.range(0, self.num_classes, dtype=dtypes.int32)
ids = tf.random.shuffle(ids, seed=self.seed)
query_x = tf.gather(query_x, ids, axis=2)
query_y = tf.gather(query_y, ids, axis=2)
new_shape = lambda x: tf.concat([(self.meta_batch_size, -1),
tf.shape(x)[3:]], 0)
reshape = lambda x: tf.reshape(x, new_shape(x))
return (reshape(support_x), reshape(support_y)), (reshape(query_x),
reshape(query_y))
return tasks.map(
to_support_and_query_sets,
num_parallel_calls=tfd.AUTOTUNE).prefetch(tfd.AUTOTUNE)
| 36.574627 | 79 | 0.63232 | from typing import Iterator, List, Tuple
import os
import random
import numpy as np
from tensorflow import data as tfd
from tensorflow import image as tfi
from tensorflow import io as tfio
from tensorflow import dtypes
import tensorflow as tf
from google_drive_downloader import GoogleDriveDownloader
class Omniglot:
def __init__(self,
meta_batch_size: int,
num_classes: int,
num_samples_per_class: int,
seed: int = 666):
self.meta_batch_size = meta_batch_size
self.num_samples_per_class = num_samples_per_class
self.num_classes = num_classes
self.seed = seed
if not os.path.isdir('./omniglot_resized'):
GoogleDriveDownloader.download_file_from_google_drive(
file_id='1iaSFXIYC3AB8q9K_M-oVMa4pmB7yKMtI',
dest_path='./omniglot_resized.zip',
unzip=True)
data_folder = './omniglot_resized'
self.img_size = 28, 28
character_folders = [
os.path.join(data_folder, family, character)
for family in os.listdir(data_folder)
if os.path.isdir(os.path.join(data_folder, family))
for character in os.listdir(os.path.join(data_folder, family))
if os.path.isdir(os.path.join(data_folder, family, character))
]
random.seed(1)
random.shuffle(character_folders)
num_val = 100
num_train = 1100
self.metatrain = self._make_dataset(character_folders[:num_train])
self.metaval = self._make_dataset(character_folders[num_train:num_train +
num_val])
self.metatest = self._make_dataset(character_folders[num_train + num_val:])
@property
def train_set(
self
) -> Iterator[Tuple[Tuple[np.ndarray, np.ndarray], Tuple[np.ndarray,
np.ndarray]]]:
yield from self.metatrain.as_numpy_iterator()
@property
def eval_set(
self
) -> Iterator[Tuple[Tuple[np.ndarray, np.ndarray], Tuple[np.ndarray,
np.ndarray]]]:
yield from self.metaval.as_numpy_iterator()
@property
def test_set(
self
) -> Iterator[Tuple[Tuple[np.ndarray, np.ndarray], Tuple[np.ndarray,
np.ndarray]]]:
yield from self.metatest.as_numpy_iterator()
def _make_dataset(self, folders: List[str]) -> tfd.Dataset:
characters = tfd.Dataset.from_tensor_slices(folders).shuffle(
1100, seed=self.seed, reshuffle_each_iteration=True)
def get_images_filenames(char):
all_images = tfio.matching_files(char + '/*.png')
return tfd.Dataset.from_tensor_slices(
tf.random.shuffle(all_images,
seed=self.seed)[:self.num_samples_per_class + 1])
image_filenames = characters.interleave(
get_images_filenames,
num_parallel_calls=tfd.AUTOTUNE,
block_length=self.num_samples_per_class + 1).repeat()
def load_image(image_filename):
img = tfio.read_file(image_filename)
img = tfio.decode_png(img, channels=1)
img = tfi.resize(img, self.img_size)
img = tf.cast(img, dtypes.float32) / 255.0
img = 1.0 - img
return img
shots = image_filenames.map(
load_image,
num_parallel_calls=tfd.AUTOTUNE).batch(self.num_samples_per_class + 1)
ways = shots.batch(self.num_classes)
tasks = ways.batch(self.meta_batch_size)
def to_support_and_query_sets(batch):
support_x, query_x = tf.split(
tf.transpose(batch, (0, 2, 1, 3, 4, 5)),
(self.num_samples_per_class, 1),
axis=1)
support_y, query_y = tf.split(
tf.eye(
self.num_classes,
batch_shape=(self.meta_batch_size,
self.num_samples_per_class + 1)),
(self.num_samples_per_class, 1),
axis=1)
ids = tf.range(0, self.num_classes, dtype=dtypes.int32)
ids = tf.random.shuffle(ids, seed=self.seed)
query_x = tf.gather(query_x, ids, axis=2)
query_y = tf.gather(query_y, ids, axis=2)
new_shape = lambda x: tf.concat([(self.meta_batch_size, -1),
tf.shape(x)[3:]], 0)
reshape = lambda x: tf.reshape(x, new_shape(x))
return (reshape(support_x), reshape(support_y)), (reshape(query_x),
reshape(query_y))
return tasks.map(
to_support_and_query_sets,
num_parallel_calls=tfd.AUTOTUNE).prefetch(tfd.AUTOTUNE)
| true | true |
f737051d6a59d19696cc9c1e64c8fb827a9600d5 | 13,274 | py | Python | modeling/backbones/senet.py | zyxwvu321/Classifer_SSL_Longtail | e6c09414c49e695b0f4221a3c6245ae3929a1788 | [
"MIT"
] | null | null | null | modeling/backbones/senet.py | zyxwvu321/Classifer_SSL_Longtail | e6c09414c49e695b0f4221a3c6245ae3929a1788 | [
"MIT"
] | null | null | null | modeling/backbones/senet.py | zyxwvu321/Classifer_SSL_Longtail | e6c09414c49e695b0f4221a3c6245ae3929a1788 | [
"MIT"
] | null | null | null | """
ResNet code gently borrowed from
https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
"""
from __future__ import print_function, division, absolute_import
from collections import OrderedDict
import math
import torch
import torch.nn as nn
from torch.utils import model_zoo
__all__ = ['SENet', 'senet154', 'se_resnet50', 'se_resnet101', 'se_resnet152',
'se_resnext50_32x4d', 'se_resnext101_32x4d']
pretrained_settings = {
'senet154': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/senet154-c7b49a05.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnet50': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnet50-ce0d4300.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnet101': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnet101-7e38fcc6.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnet152': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnet152-d17c99b7.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnext50_32x4d': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnext50_32x4d-a260b3a4.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnext101_32x4d': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnext101_32x4d-3b2fe3d8.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
}
class SEModule(nn.Module):
def __init__(self, channels, reduction):
super(SEModule, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc1 = nn.Conv2d(channels, channels // reduction, kernel_size=1,
padding=0)
self.relu = nn.ReLU(inplace=True)
self.fc2 = nn.Conv2d(channels // reduction, channels, kernel_size=1,
padding=0)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
module_input = x
x = self.avg_pool(x)
x = self.fc1(x)
x = self.relu(x)
x = self.fc2(x)
x = self.sigmoid(x)
return module_input * x
class Bottleneck(nn.Module):
"""
Base class for bottlenecks that implements `forward()` method.
"""
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out = self.se_module(out) + residual
out = self.relu(out)
return out
class SEBottleneck(Bottleneck):
"""
Bottleneck for SENet154.
"""
expansion = 4
def __init__(self, inplanes, planes, groups, reduction, stride=1,
downsample=None):
super(SEBottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes * 2, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes * 2)
self.conv2 = nn.Conv2d(planes * 2, planes * 4, kernel_size=3,
stride=stride, padding=1, groups=groups,
bias=False)
self.bn2 = nn.BatchNorm2d(planes * 4)
self.conv3 = nn.Conv2d(planes * 4, planes * 4, kernel_size=1,
bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.se_module = SEModule(planes * 4, reduction=reduction)
self.downsample = downsample
self.stride = stride
class SEResNetBottleneck(Bottleneck):
"""
ResNet bottleneck with a Squeeze-and-Excitation module. It follows Caffe
implementation and uses `stride=stride` in `conv1` and not in `conv2`
(the latter is used in the torchvision implementation of ResNet).
"""
expansion = 4
def __init__(self, inplanes, planes, groups, reduction, stride=1,
downsample=None):
super(SEResNetBottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False,
stride=stride)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1,
groups=groups, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.se_module = SEModule(planes * 4, reduction=reduction)
self.downsample = downsample
self.stride = stride
class SEResNeXtBottleneck(Bottleneck):
"""
ResNeXt bottleneck type C with a Squeeze-and-Excitation module.
"""
expansion = 4
def __init__(self, inplanes, planes, groups, reduction, stride=1,
downsample=None, base_width=4):
super(SEResNeXtBottleneck, self).__init__()
width = math.floor(planes * (base_width / 64)) * groups
self.conv1 = nn.Conv2d(inplanes, width, kernel_size=1, bias=False,
stride=1)
self.bn1 = nn.BatchNorm2d(width)
self.conv2 = nn.Conv2d(width, width, kernel_size=3, stride=stride,
padding=1, groups=groups, bias=False)
self.bn2 = nn.BatchNorm2d(width)
self.conv3 = nn.Conv2d(width, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.se_module = SEModule(planes * 4, reduction=reduction)
self.downsample = downsample
self.stride = stride
class SENet(nn.Module):
def __init__(self, block, layers, groups, reduction, dropout_p=0.2,
inplanes=128, input_3x3=True, downsample_kernel_size=3,
downsample_padding=1, last_stride=2, last2_stride = 2):
"""
Parameters
----------
block (nn.Module): Bottleneck class.
- For SENet154: SEBottleneck
- For SE-ResNet models: SEResNetBottleneck
- For SE-ResNeXt models: SEResNeXtBottleneck
layers (list of ints): Number of residual blocks for 4 layers of the
network (layer1...layer4).
groups (int): Number of groups for the 3x3 convolution in each
bottleneck block.
- For SENet154: 64
- For SE-ResNet models: 1
- For SE-ResNeXt models: 32
reduction (int): Reduction ratio for Squeeze-and-Excitation modules.
- For all models: 16
dropout_p (float or None): Drop probability for the Dropout layer.
If `None` the Dropout layer is not used.
- For SENet154: 0.2
- For SE-ResNet models: None
- For SE-ResNeXt models: None
inplanes (int): Number of input channels for layer1.
- For SENet154: 128
- For SE-ResNet models: 64
- For SE-ResNeXt models: 64
input_3x3 (bool): If `True`, use three 3x3 convolutions instead of
a single 7x7 convolution in layer0.
- For SENet154: True
- For SE-ResNet models: False
- For SE-ResNeXt models: False
downsample_kernel_size (int): Kernel size for downsampling convolutions
in layer2, layer3 and layer4.
- For SENet154: 3
- For SE-ResNet models: 1
- For SE-ResNeXt models: 1
downsample_padding (int): Padding for downsampling convolutions in
layer2, layer3 and layer4.
- For SENet154: 1
- For SE-ResNet models: 0
- For SE-ResNeXt models: 0
num_classes (int): Number of outputs in `last_linear` layer.
- For all models: 1000
"""
super(SENet, self).__init__()
self.inplanes = inplanes
if input_3x3:
layer0_modules = [
('conv1', nn.Conv2d(3, 64, 3, stride=2, padding=1,
bias=False)),
('bn1', nn.BatchNorm2d(64)),
('relu1', nn.ReLU(inplace=True)),
('conv2', nn.Conv2d(64, 64, 3, stride=1, padding=1,
bias=False)),
('bn2', nn.BatchNorm2d(64)),
('relu2', nn.ReLU(inplace=True)),
('conv3', nn.Conv2d(64, inplanes, 3, stride=1, padding=1,
bias=False)),
('bn3', nn.BatchNorm2d(inplanes)),
('relu3', nn.ReLU(inplace=True)),
]
else:
layer0_modules = [
('conv1', nn.Conv2d(3, inplanes, kernel_size=7, stride=2,
padding=3, bias=False)),
('bn1', nn.BatchNorm2d(inplanes)),
('relu1', nn.ReLU(inplace=True)),
]
# To preserve compatibility with Caffe weights `ceil_mode=True`
# is used instead of `padding=1`.
layer0_modules.append(('pool', nn.MaxPool2d(3, stride=2,
ceil_mode=True)))
self.layer0 = nn.Sequential(OrderedDict(layer0_modules))
self.layer1 = self._make_layer(
block,
planes=64,
blocks=layers[0],
groups=groups,
reduction=reduction,
downsample_kernel_size=1,
downsample_padding=0
)
self.layer2 = self._make_layer(
block,
planes=128,
blocks=layers[1],
stride=2,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.layer3 = self._make_layer(
block,
planes=256,
blocks=layers[2],
stride=last2_stride,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.layer4 = self._make_layer(
block,
planes=512,
blocks=layers[3],
stride=last_stride,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.avg_pool = nn.AvgPool2d(7, stride=1)
self.dropout = nn.Dropout(dropout_p) if dropout_p is not None else None
def _make_layer(self, block, planes, blocks, groups, reduction, stride=1,
downsample_kernel_size=1, downsample_padding=0):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=downsample_kernel_size, stride=stride,
padding=downsample_padding, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, groups, reduction, stride,
downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes, groups, reduction))
return nn.Sequential(*layers)
def load_param(self, model_path):
param_dict = torch.load(model_path)
for i in param_dict:
if 'last_linear' in i:
continue
self.state_dict()[i].copy_(param_dict[i])
def forward(self, x):
x = self.layer0(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
return x | 36.97493 | 98 | 0.547989 | from __future__ import print_function, division, absolute_import
from collections import OrderedDict
import math
import torch
import torch.nn as nn
from torch.utils import model_zoo
__all__ = ['SENet', 'senet154', 'se_resnet50', 'se_resnet101', 'se_resnet152',
'se_resnext50_32x4d', 'se_resnext101_32x4d']
pretrained_settings = {
'senet154': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/senet154-c7b49a05.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnet50': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnet50-ce0d4300.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnet101': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnet101-7e38fcc6.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnet152': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnet152-d17c99b7.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnext50_32x4d': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnext50_32x4d-a260b3a4.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnext101_32x4d': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnext101_32x4d-3b2fe3d8.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
}
class SEModule(nn.Module):
def __init__(self, channels, reduction):
super(SEModule, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc1 = nn.Conv2d(channels, channels // reduction, kernel_size=1,
padding=0)
self.relu = nn.ReLU(inplace=True)
self.fc2 = nn.Conv2d(channels // reduction, channels, kernel_size=1,
padding=0)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
module_input = x
x = self.avg_pool(x)
x = self.fc1(x)
x = self.relu(x)
x = self.fc2(x)
x = self.sigmoid(x)
return module_input * x
class Bottleneck(nn.Module):
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out = self.se_module(out) + residual
out = self.relu(out)
return out
class SEBottleneck(Bottleneck):
expansion = 4
def __init__(self, inplanes, planes, groups, reduction, stride=1,
downsample=None):
super(SEBottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes * 2, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes * 2)
self.conv2 = nn.Conv2d(planes * 2, planes * 4, kernel_size=3,
stride=stride, padding=1, groups=groups,
bias=False)
self.bn2 = nn.BatchNorm2d(planes * 4)
self.conv3 = nn.Conv2d(planes * 4, planes * 4, kernel_size=1,
bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.se_module = SEModule(planes * 4, reduction=reduction)
self.downsample = downsample
self.stride = stride
class SEResNetBottleneck(Bottleneck):
expansion = 4
def __init__(self, inplanes, planes, groups, reduction, stride=1,
downsample=None):
super(SEResNetBottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False,
stride=stride)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1,
groups=groups, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.se_module = SEModule(planes * 4, reduction=reduction)
self.downsample = downsample
self.stride = stride
class SEResNeXtBottleneck(Bottleneck):
expansion = 4
def __init__(self, inplanes, planes, groups, reduction, stride=1,
downsample=None, base_width=4):
super(SEResNeXtBottleneck, self).__init__()
width = math.floor(planes * (base_width / 64)) * groups
self.conv1 = nn.Conv2d(inplanes, width, kernel_size=1, bias=False,
stride=1)
self.bn1 = nn.BatchNorm2d(width)
self.conv2 = nn.Conv2d(width, width, kernel_size=3, stride=stride,
padding=1, groups=groups, bias=False)
self.bn2 = nn.BatchNorm2d(width)
self.conv3 = nn.Conv2d(width, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.se_module = SEModule(planes * 4, reduction=reduction)
self.downsample = downsample
self.stride = stride
class SENet(nn.Module):
def __init__(self, block, layers, groups, reduction, dropout_p=0.2,
inplanes=128, input_3x3=True, downsample_kernel_size=3,
downsample_padding=1, last_stride=2, last2_stride = 2):
super(SENet, self).__init__()
self.inplanes = inplanes
if input_3x3:
layer0_modules = [
('conv1', nn.Conv2d(3, 64, 3, stride=2, padding=1,
bias=False)),
('bn1', nn.BatchNorm2d(64)),
('relu1', nn.ReLU(inplace=True)),
('conv2', nn.Conv2d(64, 64, 3, stride=1, padding=1,
bias=False)),
('bn2', nn.BatchNorm2d(64)),
('relu2', nn.ReLU(inplace=True)),
('conv3', nn.Conv2d(64, inplanes, 3, stride=1, padding=1,
bias=False)),
('bn3', nn.BatchNorm2d(inplanes)),
('relu3', nn.ReLU(inplace=True)),
]
else:
layer0_modules = [
('conv1', nn.Conv2d(3, inplanes, kernel_size=7, stride=2,
padding=3, bias=False)),
('bn1', nn.BatchNorm2d(inplanes)),
('relu1', nn.ReLU(inplace=True)),
]
layer0_modules.append(('pool', nn.MaxPool2d(3, stride=2,
ceil_mode=True)))
self.layer0 = nn.Sequential(OrderedDict(layer0_modules))
self.layer1 = self._make_layer(
block,
planes=64,
blocks=layers[0],
groups=groups,
reduction=reduction,
downsample_kernel_size=1,
downsample_padding=0
)
self.layer2 = self._make_layer(
block,
planes=128,
blocks=layers[1],
stride=2,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.layer3 = self._make_layer(
block,
planes=256,
blocks=layers[2],
stride=last2_stride,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.layer4 = self._make_layer(
block,
planes=512,
blocks=layers[3],
stride=last_stride,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.avg_pool = nn.AvgPool2d(7, stride=1)
self.dropout = nn.Dropout(dropout_p) if dropout_p is not None else None
def _make_layer(self, block, planes, blocks, groups, reduction, stride=1,
downsample_kernel_size=1, downsample_padding=0):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=downsample_kernel_size, stride=stride,
padding=downsample_padding, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, groups, reduction, stride,
downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes, groups, reduction))
return nn.Sequential(*layers)
def load_param(self, model_path):
param_dict = torch.load(model_path)
for i in param_dict:
if 'last_linear' in i:
continue
self.state_dict()[i].copy_(param_dict[i])
def forward(self, x):
x = self.layer0(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
return x | true | true |
f73705d5d78315808f4d74a40540e894170a1775 | 476 | py | Python | main.py | ram-nad/autodiff | a4ea49d15ae730ddfa79c3615807285006d5e7d7 | [
"MIT"
] | 2 | 2020-09-21T04:02:37.000Z | 2020-09-21T04:02:38.000Z | main.py | gayatri-a-b/autodiff | 98bdfea087cb67dd6e2a1a399e90bbd7ac4eb326 | [
"MIT"
] | 1 | 2020-06-22T07:15:49.000Z | 2020-06-22T07:15:49.000Z | main.py | gayatri-a-b/autodiff | 98bdfea087cb67dd6e2a1a399e90bbd7ac4eb326 | [
"MIT"
] | 3 | 2020-02-26T13:14:42.000Z | 2020-05-25T03:52:45.000Z | def declare_variables(variables, macro):
"""
This is the hook for the functions
- variables: the dictionary that contains the variables
- macro: a decorator function, to declare a macro.
"""
@macro
def inputcode(filename, language):
f = open(filename, 'r')
text = f.read()
textblock = f'```{language}\n{text}\n```'
return textblock
@macro
def inputcpp(filename):
return inputcode(filename, 'cpp')
| 25.052632 | 59 | 0.611345 | def declare_variables(variables, macro):
@macro
def inputcode(filename, language):
f = open(filename, 'r')
text = f.read()
textblock = f'```{language}\n{text}\n```'
return textblock
@macro
def inputcpp(filename):
return inputcode(filename, 'cpp')
| true | true |
f73705f5af8b25d3b769ff5aa372f92b61fbe44e | 951 | py | Python | app/celery/broadcast_message_tasks.py | caduvieira/notifications-api | ae3bf624e67be25fcab97aa0427749e5671bba3d | [
"MIT"
] | null | null | null | app/celery/broadcast_message_tasks.py | caduvieira/notifications-api | ae3bf624e67be25fcab97aa0427749e5671bba3d | [
"MIT"
] | null | null | null | app/celery/broadcast_message_tasks.py | caduvieira/notifications-api | ae3bf624e67be25fcab97aa0427749e5671bba3d | [
"MIT"
] | null | null | null | import requests
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.broadcast_message_dao import dao_get_broadcast_event_by_id
@notify_celery.task(name="send-broadcast-event")
@statsd(namespace="tasks")
def send_broadcast_event(broadcast_event_id, provider='stub-1'):
broadcast_event = dao_get_broadcast_event_by_id(broadcast_event_id)
current_app.logger.info(
f'sending broadcast_event {broadcast_event.reference} '
f'msgType {broadcast_event.message_type} to {provider}'
)
payload = broadcast_event.serialize()
resp = requests.post(
f'{current_app.config["CBC_PROXY_URL"]}/broadcasts/events/{provider}',
json=payload
)
resp.raise_for_status()
current_app.logger.info(
f'broadcast_event {broadcast_event.reference} '
f'msgType {broadcast_event.message_type} sent to {provider}'
)
| 29.71875 | 78 | 0.75184 | import requests
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.broadcast_message_dao import dao_get_broadcast_event_by_id
@notify_celery.task(name="send-broadcast-event")
@statsd(namespace="tasks")
def send_broadcast_event(broadcast_event_id, provider='stub-1'):
broadcast_event = dao_get_broadcast_event_by_id(broadcast_event_id)
current_app.logger.info(
f'sending broadcast_event {broadcast_event.reference} '
f'msgType {broadcast_event.message_type} to {provider}'
)
payload = broadcast_event.serialize()
resp = requests.post(
f'{current_app.config["CBC_PROXY_URL"]}/broadcasts/events/{provider}',
json=payload
)
resp.raise_for_status()
current_app.logger.info(
f'broadcast_event {broadcast_event.reference} '
f'msgType {broadcast_event.message_type} sent to {provider}'
)
| true | true |
f737062a090b27a72610975252e5a63c44b8cc09 | 4,549 | py | Python | Machine Learning Prediction Software Based on Classification and Regression Based on Processor [CPU] Specifications.py | emirhanai/Machine-Learning-Prediction-Software-Based-on-Classification-and-Regression-Based-on-Processor-CPU- | 051be998eb9195dccf28c2e7607ead0812c79cf1 | [
"MIT"
] | 5 | 2021-09-08T20:47:10.000Z | 2021-11-24T08:51:45.000Z | Machine Learning Prediction Software Based on Classification and Regression Based on Processor [CPU] Specifications.py | emirhanai/Machine-Learning-Prediction-Software-Based-on-Classification-and-Regression-Based-on-Processor-CPU | 051be998eb9195dccf28c2e7607ead0812c79cf1 | [
"MIT"
] | null | null | null | Machine Learning Prediction Software Based on Classification and Regression Based on Processor [CPU] Specifications.py | emirhanai/Machine-Learning-Prediction-Software-Based-on-Classification-and-Regression-Based-on-Processor-CPU | 051be998eb9195dccf28c2e7607ead0812c79cf1 | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
from sklearn.tree import *
from sklearn.ensemble import *
from sklearn.preprocessing import *
from sklearn.model_selection import *
from sklearn.metrics import *
data = pd.read_csv('data.csv')
X = data.drop(['Company','Processor Name'],axis='columns')
y = data.drop(['Turbo Speed (GHz)','Processor Name','Processor Cores','Processor Threads','Typical TDP (W)','Average CPU Mark'],axis='columns')
#load of change function for columns changing.
y_data = LabelEncoder()
#print(y)
y['Company_Change'] = y_data.fit_transform(y['Company'])
y_update_data = y.drop(['Company'],axis='columns')
float_y_update_data = np.float64(y_update_data)
#print(float_y_update_data)
#for i in np.arange(0,1,1):
#X_train,X_test,y_train and y_test files of creating (with suitable parameters).
X_train, X_test, y_train, y_test = train_test_split(X, y_update_data, test_size=0.2, random_state=15, shuffle=True,
stratify=None)
# model - processor classifier
model_processor = ExtraTreeClassifier(criterion="gini", splitter="random")
# model - processor regression
model_processor_regression = ExtraTreesRegressor(n_estimators=1)
# model - processor fit
model_processor_regression.fit(X_train, y_train)
# model - processor classifier fit
model_processor.fit(X_train, y_train)
# ""CLASSIFIER OF SCORE AND RESULT""
# model - processor classifier y_pred
y_pred_of_model = model_processor.predict(X_test)
# model classifier score of result
# print("Select of X {} ".format(i))
print("Classifier Accuracy Score: {} ".format(accuracy_score(y_test,y_pred_of_model)))
print("Classifier Precision Score: {} ".format(precision_score(y_test,y_pred_of_model)))
print("Classifier Recall Score: {} ".format(recall_score(y_test,y_pred_of_model)))
print("Classifier F1 Score: {} ".format(f1_score(y_test,y_pred_of_model)))
a,b,_ = roc_curve(y_test,y_pred_of_model)
print("Classifier AUC Score: {} ".format(auc(a,b)))
print("Classifier Confision Matrix: {} ".format(confusion_matrix(y_test,y_pred_of_model)))
# ""REGRESSION OF SCORE AND RESULT""
y_pred_of_regression_in_model = model_processor_regression.predict(X_test)
# print("Select of X {} ".format(i))
print("Regression Accuracy Score: {} ".format(accuracy_score(y_test, y_pred_of_regression_in_model)))
print("Regression Precision Score: {} ".format(precision_score(y_test, y_pred_of_regression_in_model)))
print("Regression Recall Score: {} ".format(recall_score(y_test, y_pred_of_regression_in_model)))
print("Regression F1 Score: {} ".format(f1_score(y_test, y_pred_of_regression_in_model)))
a, b, _ = roc_curve(y_test, y_pred_of_regression_in_model)
print("Regression AUC Score: {} ".format(auc(a, b)))
print("Regression Confision Matrix: {} ".format(confusion_matrix(y_test, y_pred_of_regression_in_model)))
# Enter you random value for Features :)
Processor_Cores = int(input("Enter, Processor Cores: "))
Processor_Threads = int(input("Enter, Processor Threads: "))
Turbo_Speed_GHz = float(input("Enter, Turbo Speed (GHz): "))
Typical_TDP_W = int(input("Enter, Typical TDP (W): "))
Average_CPU_Mark = int(input("Enter, Average CPU Mark: "))
# prediction, random value of Company!
prediction_of_company_random_value = model_processor_regression.predict(
[[Processor_Cores, Processor_Threads, Turbo_Speed_GHz, Typical_TDP_W, Average_CPU_Mark]])
# I create of algorithm :)
data_class = pd.read_csv('class.csv', index_col=None, na_values=None)
class_value_detect = data_class.columns.values[int(prediction_of_company_random_value)]
print('Prediction company: {} '.format(class_value_detect))
# model classifier save of format to .dot file :)
from graphviz import Source
dotfile = open("emirhan_project.dot",'w')
graph_of_data_dot = Source(export_graphviz(model_processor,
filled=True,
rounded=True,
out_file=dotfile,
feature_names=X.columns,
class_names=['AMD = 0','INTEL = 1']))
dotfile.close()
#CLASSIFICATION RESULT
#Classifier Accuracy Score: 1.0
#Classifier Precision Score: 1.0
#Classifier Recall Score: 1.0
#Classifier F1 Score: 1.0
#Classifier AUC Score: 1.0
#Classifier Confision Matrix: [[5 0]
#[0 2]]
#REGRESSION RESULT
#Regression Accuracy Score: 1.0
#Regression Precision Score: 1.0
#Regression Recall Score: 1.0
#Regression F1 Score: 1.0
#Regression AUC Score: 1.0
#Regression Confision Matrix: [[5 0]
#[0 2]]
| 37.595041 | 144 | 0.729171 | import pandas as pd
import numpy as np
from sklearn.tree import *
from sklearn.ensemble import *
from sklearn.preprocessing import *
from sklearn.model_selection import *
from sklearn.metrics import *
data = pd.read_csv('data.csv')
X = data.drop(['Company','Processor Name'],axis='columns')
y = data.drop(['Turbo Speed (GHz)','Processor Name','Processor Cores','Processor Threads','Typical TDP (W)','Average CPU Mark'],axis='columns')
y_data = LabelEncoder()
y['Company_Change'] = y_data.fit_transform(y['Company'])
y_update_data = y.drop(['Company'],axis='columns')
float_y_update_data = np.float64(y_update_data)
X_train, X_test, y_train, y_test = train_test_split(X, y_update_data, test_size=0.2, random_state=15, shuffle=True,
stratify=None)
model_processor = ExtraTreeClassifier(criterion="gini", splitter="random")
model_processor_regression = ExtraTreesRegressor(n_estimators=1)
model_processor_regression.fit(X_train, y_train)
model_processor.fit(X_train, y_train)
y_pred_of_model = model_processor.predict(X_test)
print("Classifier Accuracy Score: {} ".format(accuracy_score(y_test,y_pred_of_model)))
print("Classifier Precision Score: {} ".format(precision_score(y_test,y_pred_of_model)))
print("Classifier Recall Score: {} ".format(recall_score(y_test,y_pred_of_model)))
print("Classifier F1 Score: {} ".format(f1_score(y_test,y_pred_of_model)))
a,b,_ = roc_curve(y_test,y_pred_of_model)
print("Classifier AUC Score: {} ".format(auc(a,b)))
print("Classifier Confision Matrix: {} ".format(confusion_matrix(y_test,y_pred_of_model)))
y_pred_of_regression_in_model = model_processor_regression.predict(X_test)
print("Regression Accuracy Score: {} ".format(accuracy_score(y_test, y_pred_of_regression_in_model)))
print("Regression Precision Score: {} ".format(precision_score(y_test, y_pred_of_regression_in_model)))
print("Regression Recall Score: {} ".format(recall_score(y_test, y_pred_of_regression_in_model)))
print("Regression F1 Score: {} ".format(f1_score(y_test, y_pred_of_regression_in_model)))
a, b, _ = roc_curve(y_test, y_pred_of_regression_in_model)
print("Regression AUC Score: {} ".format(auc(a, b)))
print("Regression Confision Matrix: {} ".format(confusion_matrix(y_test, y_pred_of_regression_in_model)))
Processor_Cores = int(input("Enter, Processor Cores: "))
Processor_Threads = int(input("Enter, Processor Threads: "))
Turbo_Speed_GHz = float(input("Enter, Turbo Speed (GHz): "))
Typical_TDP_W = int(input("Enter, Typical TDP (W): "))
Average_CPU_Mark = int(input("Enter, Average CPU Mark: "))
prediction_of_company_random_value = model_processor_regression.predict(
[[Processor_Cores, Processor_Threads, Turbo_Speed_GHz, Typical_TDP_W, Average_CPU_Mark]])
data_class = pd.read_csv('class.csv', index_col=None, na_values=None)
class_value_detect = data_class.columns.values[int(prediction_of_company_random_value)]
print('Prediction company: {} '.format(class_value_detect))
from graphviz import Source
dotfile = open("emirhan_project.dot",'w')
graph_of_data_dot = Source(export_graphviz(model_processor,
filled=True,
rounded=True,
out_file=dotfile,
feature_names=X.columns,
class_names=['AMD = 0','INTEL = 1']))
dotfile.close()
| true | true |
f737068faf59526a08f6a390982dde3033bd0fc4 | 3,907 | py | Python | src/core/tests/frontend/paddlepaddle/test_models/gen_scripts/generate_yolo_box.py | ytorzuk-altran/openvino | 68d460a3bb578a738ba0e4d0e1f2e321afa73ab0 | [
"Apache-2.0"
] | 2,406 | 2020-04-22T15:47:54.000Z | 2022-03-31T10:27:37.000Z | ngraph/test/frontend/paddlepaddle/test_models/gen_scripts/generate_yolo_box.py | thomas-yanxin/openvino | 031e998a15ec738c64cc2379d7f30fb73087c272 | [
"Apache-2.0"
] | 4,948 | 2020-04-22T15:12:39.000Z | 2022-03-31T18:45:42.000Z | ngraph/test/frontend/paddlepaddle/test_models/gen_scripts/generate_yolo_box.py | thomas-yanxin/openvino | 031e998a15ec738c64cc2379d7f30fb73087c272 | [
"Apache-2.0"
] | 991 | 2020-04-23T18:21:09.000Z | 2022-03-31T18:40:57.000Z | #
# pool2d paddle model generator
#
import numpy as np
from save_model import saveModel
import sys
def yolo_box(name : str, x, img_size, attrs : dict):
import paddle as pdpd
pdpd.enable_static()
with pdpd.static.program_guard(pdpd.static.Program(), pdpd.static.Program()):
node_x = pdpd.static.data(name='x', shape=x.shape, dtype=x.dtype)
node_img_size = pdpd.static.data(name='img_size', shape=img_size.shape, dtype=img_size.dtype)
boxes, scores = pdpd.vision.ops.yolo_box(node_x,
node_img_size,
anchors=attrs['anchors'],
class_num=attrs['class_num'],
conf_thresh=attrs['conf_thresh'],
downsample_ratio=attrs['downsample_ratio'],
clip_bbox=attrs['clip_bbox'],
name=None,
scale_x_y=attrs['scale_x_y'])
cpu = pdpd.static.cpu_places(1)
exe = pdpd.static.Executor(cpu[0])
# startup program will call initializer to initialize the parameters.
exe.run(pdpd.static.default_startup_program())
outs = exe.run(
feed={'x': x, 'img_size': img_size},
fetch_list=[boxes, scores])
# Save inputs in order of ngraph function, to facilite Fuzzy test,
# which accepts inputs and outputs in this order as well.
saveModel(name, exe, feedkeys=['x', 'img_size'], fetchlist=[boxes, scores],
inputs=[x, img_size], outputs=outs, target_dir=sys.argv[1])
return outs
def TEST1():
# yolo_box
pdpd_attrs = {
'name': "yolo_box_default",
'anchors': [10, 13, 16, 30, 33, 23],
'class_num': 2,
'conf_thresh': 0.5,
'downsample_ratio': 32,
'clip_bbox': False,
'scale_x_y': 1.0
}
pdpd_attrs_clip_box = {
'name': "yolo_box_clip_box",
'anchors': [10, 13, 16, 30, 33, 23],
'class_num': 2,
'conf_thresh': 0.5,
'downsample_ratio': 32,
'clip_bbox': True,
'scale_x_y': 1.0
}
pdpd_attrs_scale_xy = {
'name': "yolo_box_scale_xy",
'anchors': [10, 13, 16, 30, 33, 23],
'class_num': 2,
'conf_thresh': 0.5,
'downsample_ratio': 32,
'clip_bbox': True,
'scale_x_y': 1.2
}
pdpd_attrs_list = [pdpd_attrs, pdpd_attrs_clip_box, pdpd_attrs_scale_xy]
N = 32
num_anchors = int(len(pdpd_attrs['anchors'])//2)
x_shape = (N, num_anchors * (5 + pdpd_attrs['class_num']), 13, 13)
imgsize_shape = (N, 2)
data = np.random.random(x_shape).astype('float32')
data_ImSize = np.random.randint(10, 20, imgsize_shape).astype('int32')
for item in pdpd_attrs_list:
pred_pdpd = yolo_box(item['name'], data, data_ImSize, item)
def TEST2():
# yolo_box uneven spatial width and height
pdpd_attrs = {
'name': "yolo_box_uneven_wh",
'anchors': [10, 13, 16, 30, 33, 23],
'class_num': 2,
'conf_thresh': 0.5,
'downsample_ratio': 32,
'clip_bbox': False,
'scale_x_y': 1.0
}
N = 16
SPATIAL_WIDTH = 13
SPATIAL_HEIGHT = 9
num_anchors = int(len(pdpd_attrs['anchors'])//2)
x_shape = (N, num_anchors * (5 + pdpd_attrs['class_num']), SPATIAL_HEIGHT, SPATIAL_WIDTH)
imgsize_shape = (N, 2)
data = np.random.random(x_shape).astype('float32')
data_ImSize = np.random.randint(10, 20, imgsize_shape).astype('int32')
pred_pdpd = yolo_box(pdpd_attrs['name'], data, data_ImSize, pdpd_attrs)
if __name__ == "__main__":
TEST1()
TEST2() | 34.27193 | 101 | 0.545175 |
import numpy as np
from save_model import saveModel
import sys
def yolo_box(name : str, x, img_size, attrs : dict):
import paddle as pdpd
pdpd.enable_static()
with pdpd.static.program_guard(pdpd.static.Program(), pdpd.static.Program()):
node_x = pdpd.static.data(name='x', shape=x.shape, dtype=x.dtype)
node_img_size = pdpd.static.data(name='img_size', shape=img_size.shape, dtype=img_size.dtype)
boxes, scores = pdpd.vision.ops.yolo_box(node_x,
node_img_size,
anchors=attrs['anchors'],
class_num=attrs['class_num'],
conf_thresh=attrs['conf_thresh'],
downsample_ratio=attrs['downsample_ratio'],
clip_bbox=attrs['clip_bbox'],
name=None,
scale_x_y=attrs['scale_x_y'])
cpu = pdpd.static.cpu_places(1)
exe = pdpd.static.Executor(cpu[0])
exe.run(pdpd.static.default_startup_program())
outs = exe.run(
feed={'x': x, 'img_size': img_size},
fetch_list=[boxes, scores])
saveModel(name, exe, feedkeys=['x', 'img_size'], fetchlist=[boxes, scores],
inputs=[x, img_size], outputs=outs, target_dir=sys.argv[1])
return outs
def TEST1():
pdpd_attrs = {
'name': "yolo_box_default",
'anchors': [10, 13, 16, 30, 33, 23],
'class_num': 2,
'conf_thresh': 0.5,
'downsample_ratio': 32,
'clip_bbox': False,
'scale_x_y': 1.0
}
pdpd_attrs_clip_box = {
'name': "yolo_box_clip_box",
'anchors': [10, 13, 16, 30, 33, 23],
'class_num': 2,
'conf_thresh': 0.5,
'downsample_ratio': 32,
'clip_bbox': True,
'scale_x_y': 1.0
}
pdpd_attrs_scale_xy = {
'name': "yolo_box_scale_xy",
'anchors': [10, 13, 16, 30, 33, 23],
'class_num': 2,
'conf_thresh': 0.5,
'downsample_ratio': 32,
'clip_bbox': True,
'scale_x_y': 1.2
}
pdpd_attrs_list = [pdpd_attrs, pdpd_attrs_clip_box, pdpd_attrs_scale_xy]
N = 32
num_anchors = int(len(pdpd_attrs['anchors'])//2)
x_shape = (N, num_anchors * (5 + pdpd_attrs['class_num']), 13, 13)
imgsize_shape = (N, 2)
data = np.random.random(x_shape).astype('float32')
data_ImSize = np.random.randint(10, 20, imgsize_shape).astype('int32')
for item in pdpd_attrs_list:
pred_pdpd = yolo_box(item['name'], data, data_ImSize, item)
def TEST2():
pdpd_attrs = {
'name': "yolo_box_uneven_wh",
'anchors': [10, 13, 16, 30, 33, 23],
'class_num': 2,
'conf_thresh': 0.5,
'downsample_ratio': 32,
'clip_bbox': False,
'scale_x_y': 1.0
}
N = 16
SPATIAL_WIDTH = 13
SPATIAL_HEIGHT = 9
num_anchors = int(len(pdpd_attrs['anchors'])//2)
x_shape = (N, num_anchors * (5 + pdpd_attrs['class_num']), SPATIAL_HEIGHT, SPATIAL_WIDTH)
imgsize_shape = (N, 2)
data = np.random.random(x_shape).astype('float32')
data_ImSize = np.random.randint(10, 20, imgsize_shape).astype('int32')
pred_pdpd = yolo_box(pdpd_attrs['name'], data, data_ImSize, pdpd_attrs)
if __name__ == "__main__":
TEST1()
TEST2() | true | true |
f73706a6fea89b94319fbeef53c5b69ad611d050 | 4,078 | py | Python | examples/DeepQNetwork/common.py | Sangyeob-Kim/tensorpack_rev | bf4020892edc123a09e08de784da0448464529b2 | [
"Apache-2.0"
] | 5 | 2018-05-04T02:04:15.000Z | 2020-04-02T05:38:48.000Z | examples/DeepQNetwork/common.py | Sangyeob-Kim/tensorpack_rev | bf4020892edc123a09e08de784da0448464529b2 | [
"Apache-2.0"
] | null | null | null | examples/DeepQNetwork/common.py | Sangyeob-Kim/tensorpack_rev | bf4020892edc123a09e08de784da0448464529b2 | [
"Apache-2.0"
] | 2 | 2018-04-23T13:43:10.000Z | 2019-10-30T09:56:54.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: common.py
# Author: Yuxin Wu <ppwwyyxxc@gmail.com>
import random
import time
import multiprocessing
from tqdm import tqdm
from six.moves import queue
from tensorpack.utils.concurrency import StoppableThread, ShareSessionThread
from tensorpack.callbacks import Callback
from tensorpack.utils import logger
from tensorpack.utils.stats import StatCounter
from tensorpack.utils.utils import get_tqdm_kwargs
def play_one_episode(env, func, render=False):
def predict(s):
"""
Map from observation to action, with 0.01 greedy.
"""
act = func(s[None, :, :, :])[0][0].argmax()
if random.random() < 0.01:
spc = env.action_space
act = spc.sample()
return act
ob = env.reset()
sum_r = 0
while True:
act = predict(ob)
ob, r, isOver, info = env.step(act)
if render:
env.render()
sum_r += r
if isOver:
return sum_r
def play_n_episodes(player, predfunc, nr, render=False):
logger.info("Start Playing ... ")
for k in range(nr):
score = play_one_episode(player, predfunc, render=render)
print("{}/{}, score={}".format(k, nr, score))
def eval_with_funcs(predictors, nr_eval, get_player_fn, verbose=False):
"""
Args:
predictors ([PredictorBase])
"""
class Worker(StoppableThread, ShareSessionThread):
def __init__(self, func, queue):
super(Worker, self).__init__()
self._func = func
self.q = queue
def func(self, *args, **kwargs):
if self.stopped():
raise RuntimeError("stopped!")
return self._func(*args, **kwargs)
def run(self):
with self.default_sess():
player = get_player_fn(train=False)
while not self.stopped():
try:
score = play_one_episode(player, self.func)
except RuntimeError:
return
self.queue_put_stoppable(self.q, score)
q = queue.Queue()
threads = [Worker(f, q) for f in predictors]
for k in threads:
k.start()
time.sleep(0.1) # avoid simulator bugs
stat = StatCounter()
def fetch():
r = q.get()
stat.feed(r)
if verbose:
logger.info("Score: {}".format(r))
for _ in tqdm(range(nr_eval), **get_tqdm_kwargs()):
fetch()
logger.info("Waiting for all the workers to finish the last run...")
for k in threads:
k.stop()
for k in threads:
k.join()
while q.qsize():
fetch()
if stat.count > 0:
return (stat.average, stat.max)
return (0, 0)
def eval_model_multithread(pred, nr_eval, get_player_fn):
"""
Args:
pred (OfflinePredictor): state -> [#action]
"""
NR_PROC = min(multiprocessing.cpu_count() // 2, 8)
with pred.sess.as_default():
mean, max = eval_with_funcs(
[pred] * NR_PROC, nr_eval,
get_player_fn, verbose=True)
logger.info("Average Score: {}; Max Score: {}".format(mean, max))
class Evaluator(Callback):
def __init__(self, nr_eval, input_names, output_names, get_player_fn):
self.eval_episode = nr_eval
self.input_names = input_names
self.output_names = output_names
self.get_player_fn = get_player_fn
def _setup_graph(self):
NR_PROC = min(multiprocessing.cpu_count() // 2, 20)
self.pred_funcs = [self.trainer.get_predictor(
self.input_names, self.output_names)] * NR_PROC
def _trigger(self):
t = time.time()
mean, max = eval_with_funcs(
self.pred_funcs, self.eval_episode, self.get_player_fn)
t = time.time() - t
if t > 10 * 60: # eval takes too long
self.eval_episode = int(self.eval_episode * 0.94)
self.trainer.monitors.put_scalar('mean_score', mean)
self.trainer.monitors.put_scalar('max_score', max)
| 29.766423 | 76 | 0.590976 |
import random
import time
import multiprocessing
from tqdm import tqdm
from six.moves import queue
from tensorpack.utils.concurrency import StoppableThread, ShareSessionThread
from tensorpack.callbacks import Callback
from tensorpack.utils import logger
from tensorpack.utils.stats import StatCounter
from tensorpack.utils.utils import get_tqdm_kwargs
def play_one_episode(env, func, render=False):
def predict(s):
act = func(s[None, :, :, :])[0][0].argmax()
if random.random() < 0.01:
spc = env.action_space
act = spc.sample()
return act
ob = env.reset()
sum_r = 0
while True:
act = predict(ob)
ob, r, isOver, info = env.step(act)
if render:
env.render()
sum_r += r
if isOver:
return sum_r
def play_n_episodes(player, predfunc, nr, render=False):
logger.info("Start Playing ... ")
for k in range(nr):
score = play_one_episode(player, predfunc, render=render)
print("{}/{}, score={}".format(k, nr, score))
def eval_with_funcs(predictors, nr_eval, get_player_fn, verbose=False):
class Worker(StoppableThread, ShareSessionThread):
def __init__(self, func, queue):
super(Worker, self).__init__()
self._func = func
self.q = queue
def func(self, *args, **kwargs):
if self.stopped():
raise RuntimeError("stopped!")
return self._func(*args, **kwargs)
def run(self):
with self.default_sess():
player = get_player_fn(train=False)
while not self.stopped():
try:
score = play_one_episode(player, self.func)
except RuntimeError:
return
self.queue_put_stoppable(self.q, score)
q = queue.Queue()
threads = [Worker(f, q) for f in predictors]
for k in threads:
k.start()
time.sleep(0.1)
stat = StatCounter()
def fetch():
r = q.get()
stat.feed(r)
if verbose:
logger.info("Score: {}".format(r))
for _ in tqdm(range(nr_eval), **get_tqdm_kwargs()):
fetch()
logger.info("Waiting for all the workers to finish the last run...")
for k in threads:
k.stop()
for k in threads:
k.join()
while q.qsize():
fetch()
if stat.count > 0:
return (stat.average, stat.max)
return (0, 0)
def eval_model_multithread(pred, nr_eval, get_player_fn):
NR_PROC = min(multiprocessing.cpu_count() // 2, 8)
with pred.sess.as_default():
mean, max = eval_with_funcs(
[pred] * NR_PROC, nr_eval,
get_player_fn, verbose=True)
logger.info("Average Score: {}; Max Score: {}".format(mean, max))
class Evaluator(Callback):
def __init__(self, nr_eval, input_names, output_names, get_player_fn):
self.eval_episode = nr_eval
self.input_names = input_names
self.output_names = output_names
self.get_player_fn = get_player_fn
def _setup_graph(self):
NR_PROC = min(multiprocessing.cpu_count() // 2, 20)
self.pred_funcs = [self.trainer.get_predictor(
self.input_names, self.output_names)] * NR_PROC
def _trigger(self):
t = time.time()
mean, max = eval_with_funcs(
self.pred_funcs, self.eval_episode, self.get_player_fn)
t = time.time() - t
if t > 10 * 60:
self.eval_episode = int(self.eval_episode * 0.94)
self.trainer.monitors.put_scalar('mean_score', mean)
self.trainer.monitors.put_scalar('max_score', max)
| true | true |
f737072d2a3c2c849c28edc97c05c1834c6a7b1f | 1,016 | py | Python | ava_connect_clouds.py | TotallyAProgrammer/ava | e1e0f332f6278cd41d68d48a12e88190c73cd8f6 | [
"Apache-2.0"
] | null | null | null | ava_connect_clouds.py | TotallyAProgrammer/ava | e1e0f332f6278cd41d68d48a12e88190c73cd8f6 | [
"Apache-2.0"
] | 2 | 2021-04-06T18:23:11.000Z | 2021-06-02T03:38:46.000Z | ava_connect_clouds.py | TotallyAProgrammer/ava | e1e0f332f6278cd41d68d48a12e88190c73cd8f6 | [
"Apache-2.0"
] | 1 | 2020-12-12T04:51:56.000Z | 2020-12-12T04:51:56.000Z | import socket
from cmd_functions import is_ip_valid, is_port_valid
'''
A library that allows AVA to connect to various cloud services
'''
def send_to_cloud(socket, data):
"""
Send data over the specified socket to the associated cloud
socket = any socket object
data = a string or int to be sent over the specified socket
"""
try:
data = data.encode()
socket.send((str(data) +"\n").encode())
return True
except Exception as exp:
print("Exception: " + str(exp))
return False
def connect_ava_cloud(ip, port=25680):
"""
Connect to AVA Cloud and return prepared socket to the caller
ip = AVA Cloud's IP Address
port = AVA Cloud's Port (Optional, default is 25680)
"""
if is_ip_valid(ip) and is_port_valid(port):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((ip, int(port)))
data = s.recv(1024)
print("Received: ", repr(data))
return s
else:
return False
| 28.222222 | 65 | 0.637795 | import socket
from cmd_functions import is_ip_valid, is_port_valid
def send_to_cloud(socket, data):
try:
data = data.encode()
socket.send((str(data) +"\n").encode())
return True
except Exception as exp:
print("Exception: " + str(exp))
return False
def connect_ava_cloud(ip, port=25680):
if is_ip_valid(ip) and is_port_valid(port):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((ip, int(port)))
data = s.recv(1024)
print("Received: ", repr(data))
return s
else:
return False
| true | true |
f737075279a0091da91a94dafc37152943b3a225 | 16,846 | py | Python | blackbox/test_docs.py | infoverload/crate | 31233d1a4a65cfaa50f319e1ca707a9e643e6072 | [
"Apache-2.0"
] | null | null | null | blackbox/test_docs.py | infoverload/crate | 31233d1a4a65cfaa50f319e1ca707a9e643e6072 | [
"Apache-2.0"
] | null | null | null | blackbox/test_docs.py | infoverload/crate | 31233d1a4a65cfaa50f319e1ca707a9e643e6072 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
import doctest
import zc.customdoctests
from cr8.run_crate import CrateNode
import os
import time
import shutil
import json
import re
import random
import tempfile
import subprocess
import unittest
from functools import partial
from testutils.paths import crate_path, project_path
from crate.crash.command import CrateShell
from crate.crash.printer import PrintWrapper, ColorPrinter
from crate.client import connect
ITEST_FILE_NAME_FILTER = os.environ.get('ITEST_FILE_NAME_FILTER')
if ITEST_FILE_NAME_FILTER:
print("Applying file name filter: {}".format(ITEST_FILE_NAME_FILTER))
def is_target_file_name(item):
return not ITEST_FILE_NAME_FILTER or (item and ITEST_FILE_NAME_FILTER in item)
CRATE_SETTINGS = {
'psql.port': 0,
'transport.tcp.port': 0,
'node.name': 'crate',
'cluster.name': 'Testing-CrateDB'
}
class CrateTestShell(CrateShell):
def __init__(self):
super(CrateTestShell, self).__init__(is_tty=False)
self.logger = ColorPrinter(False, stream=PrintWrapper(), line_end='\n')
cmd = CrateTestShell()
def pretty_print(s):
try:
d = json.loads(s)
print(json.dumps(d, indent=2))
except json.decoder.JSONDecodeError:
print(s)
class ConnectingCrateLayer(CrateNode):
def __init__(self, *args, **kwargs):
kwargs.setdefault('settings', {})
self.repo_path = kwargs['settings']['path.repo'] = tempfile.mkdtemp()
super().__init__(*args, **kwargs)
def start(self):
super().start()
cmd._connect(self.http_url)
def stop(self):
print('')
print('ConnectingCrateLayer.stop()')
shutil.rmtree(self.repo_path, ignore_errors=True)
super().stop()
crate = ConnectingCrateLayer(
crate_dir=crate_path(),
env={
'JAVA_HOME': os.environ.get('JAVA_HOME', ''),
'CRATE_JAVA_OPTS': '-Dio.netty.leakDetection.level=paranoid',
},
settings=CRATE_SETTINGS,
version=(4, 0, 0)
)
def crash_transform(s):
# The examples in the docs show the real port '4200' to a reader.
# Our test suite requires the port to be '44200' to avoid conflicts.
# Therefore, we need to replace the ports before a test is being run.
if s.startswith('_'):
return s[1:]
if hasattr(crate, 'addresses'):
s = s.replace(':4200', ':{0}'.format(crate.addresses.http.port))
return u'cmd.process({0})'.format(repr(s.strip().rstrip(';')))
def bash_transform(s):
    """Turn a ``sh$`` doctest line into executable Python.

    ``crash -c "..."`` invocations are unwrapped and routed through the
    in-process shell; anything else becomes a real subprocess whose
    output is pretty-printed.  The canonical port 4200 shown in the docs
    is first replaced with the test node's random port.
    """
    if hasattr(crate, 'addresses'):
        actual_port = crate.addresses.http.port
        s = s.replace(':4200', ':{0}'.format(actual_port))
    if s.startswith("crash"):
        inner = re.search(r"crash\s+-c\s+\"(.*?)\"", s).group(1)
        return u'cmd.process({0})'.format(repr(inner.strip().rstrip(';')))
    return (r'pretty_print(sh(r"""%s""").stdout.decode("utf-8"))' % s) + '\n'
# Parsers mapping the documentation prompts (``sh$`` / ``cr>``) onto the
# transform functions above.
bash_parser = zc.customdoctests.DocTestParser(
    ps1='sh\\$', comment_prefix='#', transform=bash_transform)
crash_parser = zc.customdoctests.DocTestParser(
    ps1='cr>', comment_prefix='#', transform=crash_transform)
def _execute_sql(stmt):
    """Run a single SQL statement against the test node.

    A fresh HTTP connection is opened per call and closed automatically
    when the ``with`` block exits.
    """
    with connect(crate.http_url) as conn:
        cursor = conn.cursor()
        cursor.execute(stmt)
def wait_for_schema_update(schema, table, column):
    """Block until *column* is visible in information_schema for the table.

    Schema changes become visible asynchronously, so poll the catalog
    until the column is reported.
    """
    with connect(crate.http_url) as conn:
        c = conn.cursor()
        count = 0
        while count == 0:
            c.execute(
                ('select count(*) from information_schema.columns '
                 'where table_schema = ? and table_name = ? '
                 'and column_name = ?'),
                (schema, table, column)
            )
            count = c.fetchone()[0]
            if count == 0:
                # Back off briefly instead of busy-polling the node
                # (same 0.1s cadence as wait_for_function).
                time.sleep(0.1)
def wait_for_function(signature):
    """Poll until the user-defined function *signature* is selectable.

    Retries in 0.1s steps for up to ~2 seconds and re-raises the last
    error if the function never becomes available.
    """
    with connect(crate.http_url) as conn:
        c = conn.cursor()
        wait = 0.0
        while True:
            try:
                c.execute('SELECT ' + signature)
            except Exception as e:
                wait += 0.1
                if wait >= 2.0:
                    raise e
                else:
                    time.sleep(0.1)
            else:
                break
def setUpLocations(test):
    """Create and load the ``locations`` table, and stage extra fixture
    files under /tmp for the COPY FROM examples that read from disk."""
    setUp(test)
    _execute_sql("""
        create table locations (
          id integer primary key,
          name string,
          "date" timestamp with time zone,
          kind string,
          position integer,
          description string,
          inhabitants object(dynamic) as (
            interests array(string),
            description string,
            name string
          ),
          information array(object as (
              population long,
              evolution_level short
            )
          ),
          landmarks array(string),
          index name_description_ft using fulltext(name, description) with (analyzer='english')
        ) clustered by(id) into 2 shards with (number_of_replicas=0)""".strip())
    _execute_sql("delete from locations")
    locations_file = get_abspath("locations.json")
    _execute_sql("""copy locations from '{0}'""".format(locations_file))
    _execute_sql("""refresh table locations""")
    # Fixtures consumed by the COPY FROM ... summary examples.
    import_failures_dir = '/tmp/import_data/locations_with_failure'
    os.makedirs(import_failures_dir, exist_ok=True)
    shutil.copy(
        get_abspath("locations_import_summary1.json"),
        os.path.join(import_failures_dir, "locations1.json")
    )
    shutil.copy(
        get_abspath("locations_import_summary2.json"),
        os.path.join(import_failures_dir, "locations2.json")
    )
def setUpUserVisits(test):
    """Create and populate the ``uservisits`` table from its JSON fixture."""
    setUp(test)
    _execute_sql("""
        create table uservisits (
          id integer primary key,
          name string,
          visits integer,
          last_visit timestamp with time zone
        )
    """.strip())
    uservisits_file = get_abspath("uservisits.json")
    _execute_sql("""copy uservisits from '{0}'""".format(uservisits_file))
    _execute_sql("""refresh table uservisits""")
def setUpArticles(test):
    """Create and populate the ``articles`` table from its JSON fixture."""
    setUp(test)
    _execute_sql("""
        create table articles (
          id integer primary key,
          name string,
          price float
        ) clustered by(id) into 2 shards with (number_of_replicas=0)""".strip())
    articles_file = get_abspath("articles.json")
    _execute_sql("""copy articles from '{0}'""".format(articles_file))
    _execute_sql("""refresh table articles""")
def setUpColors(test):
    """Create and populate the ``colors`` table from its JSON fixture."""
    setUp(test)
    _execute_sql("""
        create table colors (
          id integer primary key,
          name string,
          rgb string,
          coolness float
        ) with (number_of_replicas=0)""".strip())
    colors_file = get_abspath("colors.json")
    _execute_sql("""copy colors from '{0}'""".format(colors_file))
    _execute_sql("""refresh table colors""")
def setUpEmployees(test):
    """Create and populate the ``employees`` table from its JSON fixture."""
    setUp(test)
    _execute_sql("""
        create table employees (
          id integer primary key,
          name string,
          surname string,
          dept_id integer,
          sex string
        ) with (number_of_replicas=0)""".strip())
    emp_file = get_abspath("employees.json")
    _execute_sql("""copy employees from '{0}'""".format(emp_file))
    _execute_sql("""refresh table employees""")
def setUpDepartments(test):
    """Create and populate the ``departments`` table from its JSON fixture."""
    setUp(test)
    _execute_sql("""
        create table departments (
          id integer primary key,
          name string,
          manager_id integer,
          location integer
        ) with (number_of_replicas=0)""".strip())
    dept_file = get_abspath("departments.json")
    _execute_sql("""copy departments from '{0}'""".format(dept_file))
    _execute_sql("""refresh table departments""")
def setUpQuotes(test):
    """Create the ``quotes`` table and stage its COPY FROM fixture in /tmp."""
    setUp(test)
    _execute_sql("""
        create table quotes (
          id integer primary key,
          quote string,
          index quote_ft using fulltext (quote)
        ) clustered by(id) into 2 shards with(number_of_replicas=0)""")
    import_dir = '/tmp/import_data'
    # exist_ok avoids the isdir/mkdir race; same idiom as setUpLocations.
    os.makedirs(import_dir, exist_ok=True)
    shutil.copy(
        project_path('server/src/test/resources/essetup/data/copy',
                     'test_copy_from.json'),
        os.path.join(import_dir, "quotes.json")
    )
def setUpPhotos(test):
    """Create and populate the ``photos`` table from its JSON fixture."""
    setUp(test)
    _execute_sql("""
        create table photos (
          name string,
          location geo_point
        ) with(number_of_replicas=0)""".strip())
    # Renamed from 'dept_file' -- a copy/paste leftover from setUpDepartments.
    photos_file = get_abspath("photos.json")
    _execute_sql("""copy photos from '{0}'""".format(photos_file))
    _execute_sql("""refresh table photos""")
def setUpCountries(test):
    """Create and populate the ``countries`` table from its JSON fixture."""
    setUp(test)
    _execute_sql("""
        create table countries (
          name string,
          "geo" geo_shape INDEX using GEOHASH with (precision='1km'),
          population long
        ) with(number_of_replicas=0)""".strip())
    # Renamed from 'dept_file' -- a copy/paste leftover from setUpDepartments.
    countries_file = get_abspath("countries.json")
    _execute_sql("""copy countries from '{0}'""".format(countries_file))
    _execute_sql("""refresh table countries""")
def setUpLocationsAndQuotes(test):
    """Fixture combo: locations + quotes."""
    setUpLocations(test)
    setUpQuotes(test)
def setUpColorsAndArticles(test):
    """Fixture combo: colors + articles."""
    setUpColors(test)
    setUpArticles(test)
def setUpLocationsQuotesAndUserVisits(test):
    """Fixture combo: locations + quotes + uservisits."""
    setUpLocationsAndQuotes(test)
    setUpUserVisits(test)
def setUpEmployeesAndDepartments(test):
    """Fixture combo: employees + departments."""
    setUpEmployees(test)
    setUpDepartments(test)
def setUpPhotosAndCountries(test):
    """Fixture combo: photos + countries."""
    setUpPhotos(test)
    setUpCountries(test)
def setUpEmpDeptAndColourArticlesAndGeo(test):
    """Fixture combo: employees/departments + colors/articles + photos/countries."""
    setUpEmployeesAndDepartments(test)
    setUpColorsAndArticles(test)
    setUpPhotosAndCountries(test)
def setUpTutorials(test):
    """Stage the best-practice tutorial fixtures under /tmp/best_practice_data."""
    setUp(test)
    import_dir = '/tmp/best_practice_data'
    source_dir = 'sql/src/test/resources/essetup/data/best_practice'
    # exist_ok avoids the isdir/mkdir race; same idiom as setUpLocations.
    os.makedirs(import_dir, exist_ok=True)
    shutil.copy(project_path(source_dir, 'data_import.json'),
                os.path.join(import_dir, "users.json"))
    shutil.copy(project_path(source_dir, 'data_import.json.gz'),
                os.path.join(import_dir, "users.json.gz"))
    shutil.copy(project_path(source_dir, 'data_import_1408312800.json'),
                os.path.join(import_dir, "users_1408312800.json"))
def setUp(test):
    """Expose the shared shell and the wait helpers to the doctest globals."""
    test.globs['cmd'] = cmd
    test.globs['wait_for_schema_update'] = wait_for_schema_update
    test.globs['wait_for_function'] = wait_for_function
def tearDown(test):
    """Drop every user-created table so the next doctest starts clean."""
    # drop leftover tables after each test
    with connect(crate.http_url) as conn:
        c = conn.cursor()
        c.execute("""
            SELECT table_schema, table_name
            FROM information_schema.tables
            WHERE table_schema NOT IN ('blob', 'sys', 'information_schema', 'pg_catalog')
        """)
        for schema, table in c.fetchall():
            try:
                c.execute("""
                    DROP TABLE IF EXISTS "{}"."{}"
                """.format(schema, table))
            except Exception as e:
                # Best effort: report the failure but keep dropping the rest.
                print('Failed to drop table {}.{}: {}'.format(schema, table, e))
# Shared DocFileSuite factory: crash parser by default, whitespace/ellipsis
# tolerant comparisons, and the table-dropping tearDown after every file.
docsuite = partial(doctest.DocFileSuite,
                   tearDown=tearDown,
                   parser=crash_parser,
                   optionflags=doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS,
                   encoding='utf-8')
# All doctested files live below the 'docs' directory.
doctest_file = partial(os.path.join, 'docs')
def doctest_files(*items):
    """Lazily yield doc paths for *items* that pass the optional name filter."""
    for item in items:
        if is_target_file_name(item):
            yield doctest_file(item)
def get_abspath(name):
    """Absolute path of fixture *name* inside the ``testdata`` directory
    that lives next to this file."""
    here = os.path.dirname(__file__)
    return os.path.abspath(os.path.join(here, 'testdata', name))
class DocTests(unittest.TestSuite):
    """Suite that boots the CrateDB node before the doctests run and
    tears node and shell down afterwards, even on failure."""
    def run(self, result, debug=False):
        crate.start()
        try:
            super().run(result, debug)
        finally:
            crate.stop()
            cmd.close()
def load_tests(loader, suite, ignore):
    """unittest ``load_tests`` hook: build one DocTests suite from the docs.

    Each group of files is paired with the fixture setUp it needs; the
    blob/http docs additionally go through the bash parser with a real
    subprocess helper in their doctest globals.
    """
    tests = []
    for fn in doctest_files('general/blobs.rst', 'interfaces/http.rst',):
        tests.append(
            docsuite(
                fn,
                parser=bash_parser,
                setUp=setUpLocations,
                globs={
                    'sh': partial(
                        subprocess.run,
                        stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT,
                        timeout=60,
                        shell=True
                    ),
                    'pretty_print': pretty_print
                }
            )
        )
    for fn in doctest_files('general/ddl/create-table.rst',
                            'general/ddl/generated-columns.rst',
                            'general/ddl/constraints.rst',
                            'general/ddl/sharding.rst',
                            'general/ddl/replication.rst',
                            'general/ddl/column-policy.rst',
                            'general/ddl/system-columns.rst',
                            'general/ddl/alter-table.rst',
                            'general/ddl/storage.rst',
                            'general/ddl/fulltext-indices.rst',
                            'admin/runtime-config.rst',
                            'general/ddl/show-create-table.rst',
                            'admin/snapshots.rst',
                            'general/dql/index.rst',
                            'general/dql/refresh.rst',
                            'admin/optimization.rst',
                            'general/dql/fulltext.rst',
                            'general/ddl/data-types.rst',
                            'general/ddl/partitioned-tables.rst',
                            'general/builtins/arithmetic.rst',
                            'general/builtins/table-functions.rst',
                            'general/builtins/array-comparisons.rst',
                            'general/dql/selects.rst',
                            'interfaces/postgres.rst',
                            'general/ddl/views.rst',
                            'sql/general/value-expressions.rst',
                            'sql/general/lexical-structure.rst',
                            'general/user-defined-functions.rst',
                            'general/information-schema.rst',
                            'general/builtins/aggregation.rst',
                            'general/builtins/scalar.rst',
                            'admin/user-management.rst',
                            'admin/system-information.rst',
                            'admin/privileges.rst',
                            'sql/statements/values.rst'):
        tests.append(docsuite(fn, setUp=setUpLocationsAndQuotes))
    for fn in doctest_files('general/occ.rst', 'sql/statements/refresh.rst'):
        tests.append(docsuite(fn, setUp=setUp))
    for fn in doctest_files('general/dql/geo.rst',):
        tests.append(docsuite(fn, setUp=setUpCountries))
    for fn in doctest_files('general/builtins/window-functions.rst',
                            'general/dql/joins.rst',
                            'general/builtins/subquery-expressions.rst'):
        tests.append(docsuite(fn, setUp=setUpEmpDeptAndColourArticlesAndGeo))
    for fn in doctest_files('general/dml.rst',):
        tests.append(docsuite(fn, setUp=setUpLocationsQuotesAndUserVisits))
    for fn in doctest_files('general/dql/union.rst',):
        tests.append(docsuite(fn, setUp=setUpPhotosAndCountries))
    if not tests:
        raise ValueError("ITEST_FILE_NAME_FILTER, no matches for: {}".format(ITEST_FILE_NAME_FILTER))
    # randomize order of tests to make sure they don't depend on each other
    random.shuffle(tests)
    return DocTests(tests)
| 32.774319 | 101 | 0.604595 |
import doctest
import zc.customdoctests
from cr8.run_crate import CrateNode
import os
import time
import shutil
import json
import re
import random
import tempfile
import subprocess
import unittest
from functools import partial
from testutils.paths import crate_path, project_path
from crate.crash.command import CrateShell
from crate.crash.printer import PrintWrapper, ColorPrinter
from crate.client import connect
ITEST_FILE_NAME_FILTER = os.environ.get('ITEST_FILE_NAME_FILTER')
if ITEST_FILE_NAME_FILTER:
print("Applying file name filter: {}".format(ITEST_FILE_NAME_FILTER))
def is_target_file_name(item):
return not ITEST_FILE_NAME_FILTER or (item and ITEST_FILE_NAME_FILTER in item)
CRATE_SETTINGS = {
'psql.port': 0,
'transport.tcp.port': 0,
'node.name': 'crate',
'cluster.name': 'Testing-CrateDB'
}
class CrateTestShell(CrateShell):
def __init__(self):
super(CrateTestShell, self).__init__(is_tty=False)
self.logger = ColorPrinter(False, stream=PrintWrapper(), line_end='\n')
cmd = CrateTestShell()
def pretty_print(s):
try:
d = json.loads(s)
print(json.dumps(d, indent=2))
except json.decoder.JSONDecodeError:
print(s)
class ConnectingCrateLayer(CrateNode):
def __init__(self, *args, **kwargs):
kwargs.setdefault('settings', {})
self.repo_path = kwargs['settings']['path.repo'] = tempfile.mkdtemp()
super().__init__(*args, **kwargs)
def start(self):
super().start()
cmd._connect(self.http_url)
def stop(self):
print('')
print('ConnectingCrateLayer.stop()')
shutil.rmtree(self.repo_path, ignore_errors=True)
super().stop()
crate = ConnectingCrateLayer(
crate_dir=crate_path(),
env={
'JAVA_HOME': os.environ.get('JAVA_HOME', ''),
'CRATE_JAVA_OPTS': '-Dio.netty.leakDetection.level=paranoid',
},
settings=CRATE_SETTINGS,
version=(4, 0, 0)
)
def crash_transform(s):
if s.startswith('_'):
return s[1:]
if hasattr(crate, 'addresses'):
s = s.replace(':4200', ':{0}'.format(crate.addresses.http.port))
return u'cmd.process({0})'.format(repr(s.strip().rstrip(';')))
def bash_transform(s):
if hasattr(crate, 'addresses'):
s = s.replace(':4200', ':{0}'.format(crate.addresses.http.port))
if s.startswith("crash"):
s = re.search(r"crash\s+-c\s+\"(.*?)\"", s).group(1)
return u'cmd.process({0})'.format(repr(s.strip().rstrip(';')))
return (r'pretty_print(sh(r"""%s""").stdout.decode("utf-8"))' % s) + '\n'
bash_parser = zc.customdoctests.DocTestParser(
ps1='sh\\$', comment_prefix='#', transform=bash_transform)
crash_parser = zc.customdoctests.DocTestParser(
ps1='cr>', comment_prefix='#', transform=crash_transform)
def _execute_sql(stmt):
with connect(crate.http_url) as conn:
c = conn.cursor()
c.execute(stmt)
def wait_for_schema_update(schema, table, column):
with connect(crate.http_url) as conn:
c = conn.cursor()
count = 0
while count == 0:
c.execute(
('select count(*) from information_schema.columns '
'where table_schema = ? and table_name = ? '
'and column_name = ?'),
(schema, table, column)
)
count = c.fetchone()[0]
def wait_for_function(signature):
with connect(crate.http_url) as conn:
c = conn.cursor()
wait = 0.0
while True:
try:
c.execute('SELECT ' + signature)
except Exception as e:
wait += 0.1
if wait >= 2.0:
raise e
else:
time.sleep(0.1)
else:
break
def setUpLocations(test):
setUp(test)
_execute_sql("""
create table locations (
id integer primary key,
name string,
"date" timestamp with time zone,
kind string,
position integer,
description string,
inhabitants object(dynamic) as (
interests array(string),
description string,
name string
),
information array(object as (
population long,
evolution_level short
)
),
landmarks array(string),
index name_description_ft using fulltext(name, description) with (analyzer='english')
) clustered by(id) into 2 shards with (number_of_replicas=0)""".strip())
_execute_sql("delete from locations")
locations_file = get_abspath("locations.json")
_execute_sql("""copy locations from '{0}'""".format(locations_file))
_execute_sql("""refresh table locations""")
import_failures_dir = '/tmp/import_data/locations_with_failure'
os.makedirs(import_failures_dir, exist_ok=True)
shutil.copy(
get_abspath("locations_import_summary1.json"),
os.path.join(import_failures_dir, "locations1.json")
)
shutil.copy(
get_abspath("locations_import_summary2.json"),
os.path.join(import_failures_dir, "locations2.json")
)
def setUpUserVisits(test):
setUp(test)
_execute_sql("""
create table uservisits (
id integer primary key,
name string,
visits integer,
last_visit timestamp with time zone
)
""".strip())
uservisits_file = get_abspath("uservisits.json")
_execute_sql("""copy uservisits from '{0}'""".format(uservisits_file))
_execute_sql("""refresh table uservisits""")
def setUpArticles(test):
setUp(test)
_execute_sql("""
create table articles (
id integer primary key,
name string,
price float
) clustered by(id) into 2 shards with (number_of_replicas=0)""".strip())
articles_file = get_abspath("articles.json")
_execute_sql("""copy articles from '{0}'""".format(articles_file))
_execute_sql("""refresh table articles""")
def setUpColors(test):
setUp(test)
_execute_sql("""
create table colors (
id integer primary key,
name string,
rgb string,
coolness float
) with (number_of_replicas=0)""".strip())
colors_file = get_abspath("colors.json")
_execute_sql("""copy colors from '{0}'""".format(colors_file))
_execute_sql("""refresh table colors""")
def setUpEmployees(test):
setUp(test)
_execute_sql("""
create table employees (
id integer primary key,
name string,
surname string,
dept_id integer,
sex string
) with (number_of_replicas=0)""".strip())
emp_file = get_abspath("employees.json")
_execute_sql("""copy employees from '{0}'""".format(emp_file))
_execute_sql("""refresh table employees""")
def setUpDepartments(test):
setUp(test)
_execute_sql("""
create table departments (
id integer primary key,
name string,
manager_id integer,
location integer
) with (number_of_replicas=0)""".strip())
dept_file = get_abspath("departments.json")
_execute_sql("""copy departments from '{0}'""".format(dept_file))
_execute_sql("""refresh table departments""")
def setUpQuotes(test):
setUp(test)
_execute_sql("""
create table quotes (
id integer primary key,
quote string,
index quote_ft using fulltext (quote)
) clustered by(id) into 2 shards with(number_of_replicas=0)""")
import_dir = '/tmp/import_data'
if not os.path.isdir(import_dir):
os.mkdir(import_dir)
shutil.copy(
project_path('server/src/test/resources/essetup/data/copy',
'test_copy_from.json'),
os.path.join(import_dir, "quotes.json")
)
def setUpPhotos(test):
setUp(test)
_execute_sql("""
create table photos (
name string,
location geo_point
) with(number_of_replicas=0)""".strip())
dept_file = get_abspath("photos.json")
_execute_sql("""copy photos from '{0}'""".format(dept_file))
_execute_sql("""refresh table photos""")
def setUpCountries(test):
setUp(test)
_execute_sql("""
create table countries (
name string,
"geo" geo_shape INDEX using GEOHASH with (precision='1km'),
population long
) with(number_of_replicas=0)""".strip())
dept_file = get_abspath("countries.json")
_execute_sql("""copy countries from '{0}'""".format(dept_file))
_execute_sql("""refresh table countries""")
def setUpLocationsAndQuotes(test):
setUpLocations(test)
setUpQuotes(test)
def setUpColorsAndArticles(test):
setUpColors(test)
setUpArticles(test)
def setUpLocationsQuotesAndUserVisits(test):
setUpLocationsAndQuotes(test)
setUpUserVisits(test)
def setUpEmployeesAndDepartments(test):
setUpEmployees(test)
setUpDepartments(test)
def setUpPhotosAndCountries(test):
setUpPhotos(test)
setUpCountries(test)
def setUpEmpDeptAndColourArticlesAndGeo(test):
setUpEmployeesAndDepartments(test)
setUpColorsAndArticles(test)
setUpPhotosAndCountries(test)
def setUpTutorials(test):
setUp(test)
import_dir = '/tmp/best_practice_data'
source_dir = 'sql/src/test/resources/essetup/data/best_practice'
if not os.path.isdir(import_dir):
os.mkdir(import_dir)
shutil.copy(project_path(source_dir, 'data_import.json'),
os.path.join(import_dir, "users.json"))
shutil.copy(project_path(source_dir, 'data_import.json.gz'),
os.path.join(import_dir, "users.json.gz"))
shutil.copy(project_path(source_dir, 'data_import_1408312800.json'),
os.path.join(import_dir, "users_1408312800.json"))
def setUp(test):
test.globs['cmd'] = cmd
test.globs['wait_for_schema_update'] = wait_for_schema_update
test.globs['wait_for_function'] = wait_for_function
def tearDown(test):
with connect(crate.http_url) as conn:
c = conn.cursor()
c.execute("""
SELECT table_schema, table_name
FROM information_schema.tables
WHERE table_schema NOT IN ('blob', 'sys', 'information_schema', 'pg_catalog')
""")
for schema, table in c.fetchall():
try:
c.execute("""
DROP TABLE IF EXISTS "{}"."{}"
""".format(schema, table))
except Exception as e:
print('Failed to drop table {}.{}: {}'.format(schema, table, e))
docsuite = partial(doctest.DocFileSuite,
tearDown=tearDown,
parser=crash_parser,
optionflags=doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS,
encoding='utf-8')
doctest_file = partial(os.path.join, 'docs')
def doctest_files(*items):
return (doctest_file(item) for item in items if is_target_file_name(item))
def get_abspath(name):
return os.path.abspath(
os.path.join(os.path.dirname(__file__), 'testdata', name)
)
class DocTests(unittest.TestSuite):
def run(self, result, debug=False):
crate.start()
try:
super().run(result, debug)
finally:
crate.stop()
cmd.close()
def load_tests(loader, suite, ignore):
tests = []
for fn in doctest_files('general/blobs.rst', 'interfaces/http.rst',):
tests.append(
docsuite(
fn,
parser=bash_parser,
setUp=setUpLocations,
globs={
'sh': partial(
subprocess.run,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
timeout=60,
shell=True
),
'pretty_print': pretty_print
}
)
)
for fn in doctest_files('general/ddl/create-table.rst',
'general/ddl/generated-columns.rst',
'general/ddl/constraints.rst',
'general/ddl/sharding.rst',
'general/ddl/replication.rst',
'general/ddl/column-policy.rst',
'general/ddl/system-columns.rst',
'general/ddl/alter-table.rst',
'general/ddl/storage.rst',
'general/ddl/fulltext-indices.rst',
'admin/runtime-config.rst',
'general/ddl/show-create-table.rst',
'admin/snapshots.rst',
'general/dql/index.rst',
'general/dql/refresh.rst',
'admin/optimization.rst',
'general/dql/fulltext.rst',
'general/ddl/data-types.rst',
'general/ddl/partitioned-tables.rst',
'general/builtins/arithmetic.rst',
'general/builtins/table-functions.rst',
'general/builtins/array-comparisons.rst',
'general/dql/selects.rst',
'interfaces/postgres.rst',
'general/ddl/views.rst',
'sql/general/value-expressions.rst',
'sql/general/lexical-structure.rst',
'general/user-defined-functions.rst',
'general/information-schema.rst',
'general/builtins/aggregation.rst',
'general/builtins/scalar.rst',
'admin/user-management.rst',
'admin/system-information.rst',
'admin/privileges.rst',
'sql/statements/values.rst'):
tests.append(docsuite(fn, setUp=setUpLocationsAndQuotes))
for fn in doctest_files('general/occ.rst', 'sql/statements/refresh.rst'):
tests.append(docsuite(fn, setUp=setUp))
for fn in doctest_files('general/dql/geo.rst',):
tests.append(docsuite(fn, setUp=setUpCountries))
for fn in doctest_files('general/builtins/window-functions.rst',
'general/dql/joins.rst',
'general/builtins/subquery-expressions.rst'):
tests.append(docsuite(fn, setUp=setUpEmpDeptAndColourArticlesAndGeo))
for fn in doctest_files('general/dml.rst',):
tests.append(docsuite(fn, setUp=setUpLocationsQuotesAndUserVisits))
for fn in doctest_files('general/dql/union.rst',):
tests.append(docsuite(fn, setUp=setUpPhotosAndCountries))
if not tests:
raise ValueError("ITEST_FILE_NAME_FILTER, no matches for: {}".format(ITEST_FILE_NAME_FILTER))
random.shuffle(tests)
return DocTests(tests)
| true | true |
f7370875a57284055f7aabb5f9047486b7ba55a3 | 8,303 | py | Python | tests/test_assignment.py | PennState/canvasapi | 077cbd51516484a5c44834c8aa3d0c4425e4ffcf | [
"MIT"
] | 1 | 2019-07-08T14:04:45.000Z | 2019-07-08T14:04:45.000Z | tests/test_assignment.py | PennState/canvasapi | 077cbd51516484a5c44834c8aa3d0c4425e4ffcf | [
"MIT"
] | null | null | null | tests/test_assignment.py | PennState/canvasapi | 077cbd51516484a5c44834c8aa3d0c4425e4ffcf | [
"MIT"
] | null | null | null | from __future__ import absolute_import, division, print_function, unicode_literals
import unittest
import uuid
import requests_mock
from canvasapi import Canvas
from canvasapi.assignment import Assignment, AssignmentGroup
from canvasapi.exceptions import CanvasException, RequiredFieldMissing
from canvasapi.progress import Progress
from canvasapi.submission import Submission
from canvasapi.user import UserDisplay
from tests import settings
from tests.util import register_uris, cleanup_file
@requests_mock.Mocker()
class TestAssignment(unittest.TestCase):
    """Tests for canvasapi Assignment: CRUD, submissions, file uploads.

    The class-level mocker injects ``m`` into every test; setUp uses its
    own mocker to build the course/assignment fixtures.
    """
    def setUp(self):
        self.canvas = Canvas(settings.BASE_URL, settings.API_KEY)
        with requests_mock.Mocker() as m:
            register_uris({'course': ['get_by_id', 'get_assignment_by_id']}, m)
            self.course = self.canvas.get_course(1)
            self.assignment = self.course.get_assignment(1)
    # delete()
    def test_delete_assignments(self, m):
        register_uris({'assignment': ['delete_assignment']}, m)
        deleted_assignment = self.assignment.delete()
        self.assertIsInstance(deleted_assignment, Assignment)
    # edit()
    def test_edit_assignment(self, m):
        register_uris({'assignment': ['edit_assignment']}, m)
        name = 'New Name'
        edited_assignment = self.assignment.edit(assignment={'name': name})
        self.assertIsInstance(edited_assignment, Assignment)
        self.assertTrue(hasattr(edited_assignment, 'name'))
        self.assertEqual(edited_assignment.name, name)
    # get_gradeable_students()
    def test_get_gradeable_students(self, m):
        register_uris({'course': ['list_gradeable_students']}, m)
        students = self.assignment.get_gradeable_students()
        student_list = [student for student in students]
        self.assertEqual(len(student_list), 2)
        self.assertIsInstance(student_list[0], UserDisplay)
    # get_submission()
    def test_get_submission(self, m):
        register_uris({
            'submission': ['get_by_id_course'],
            'user': ['get_by_id']
        }, m)
        user_id = 1
        # get_submission() accepts either a raw user id or a User object.
        submission_by_id = self.assignment.get_submission(user_id)
        self.assertIsInstance(submission_by_id, Submission)
        self.assertTrue(hasattr(submission_by_id, 'submission_type'))
        user = self.canvas.get_user(user_id)
        submission_by_obj = self.assignment.get_submission(user)
        self.assertIsInstance(submission_by_obj, Submission)
        self.assertTrue(hasattr(submission_by_obj, 'submission_type'))
    # get_submissions()
    def test_get_submissions(self, m):
        register_uris({'submission': ['list_submissions']}, m)
        submissions = self.assignment.get_submissions()
        submission_list_by_id = [submission for submission in submissions]
        self.assertEqual(len(submission_list_by_id), 2)
        self.assertIsInstance(submission_list_by_id[0], Submission)
    # submit()
    def test_submit(self, m):
        register_uris({'assignment': ['submit']}, m)
        sub_type = "online_upload"
        sub_dict = {'submission_type': sub_type}
        submission = self.assignment.submit(sub_dict)
        self.assertIsInstance(submission, Submission)
        self.assertTrue(hasattr(submission, 'submission_type'))
        self.assertEqual(submission.submission_type, sub_type)
    def test_submit_fail(self, m):
        # submission_type is required; an empty dict must raise.
        with self.assertRaises(RequiredFieldMissing):
            self.assignment.submit({})
    def test_submit_file(self, m):
        register_uris({'assignment': ['submit', 'upload', 'upload_final']}, m)
        # Unique temp name so leftover files from other runs cannot collide.
        filename = 'testfile_assignment_{}'.format(uuid.uuid4().hex)
        try:
            with open(filename, 'w+') as file:
                sub_type = "online_upload"
                sub_dict = {'submission_type': sub_type}
                submission = self.assignment.submit(sub_dict, file)
                self.assertIsInstance(submission, Submission)
                self.assertTrue(hasattr(submission, 'submission_type'))
                self.assertEqual(submission.submission_type, sub_type)
        finally:
            cleanup_file(filename)
    def test_submit_file_wrong_type(self, m):
        filename = 'testfile_assignment_{}'.format(uuid.uuid4().hex)
        # Passing a file is rejected for non-upload submission types.
        sub_type = "online_text_entry"
        sub_dict = {'submission_type': sub_type}
        with self.assertRaises(ValueError):
            self.assignment.submit(sub_dict, filename)
    def test_submit_file_upload_failure(self, m):
        register_uris({'assignment': ['submit', 'upload', 'upload_fail']}, m)
        filename = 'testfile_assignment_{}'.format(uuid.uuid4().hex)
        try:
            with open(filename, 'w+') as file:
                sub_type = "online_upload"
                sub_dict = {'submission_type': sub_type}
                with self.assertRaises(CanvasException):
                    self.assignment.submit(sub_dict, file)
        finally:
            cleanup_file(filename)
    # __str__()
    def test__str__(self, m):
        string = str(self.assignment)
        self.assertIsInstance(string, str)
    # submissions_bulk_update()
    def test_submissions_bulk_update(self, m):
        register_uris({'assignment': ['update_submissions']}, m)
        register_uris({'progress': ['course_progress']}, m)
        progress = self.assignment.submissions_bulk_update(grade_data={
            '1': {
                'posted_grade': 97
            },
            '2': {
                'posted_grade': 98
            }
        })
        self.assertIsInstance(progress, Progress)
        self.assertTrue(progress.context_type == "Course")
        # query() re-fetches the Progress object from the API.
        progress = progress.query()
        self.assertTrue(progress.context_type == "Course")
    # upload_to_submission()
    def test_upload_to_submission_self(self, m):
        register_uris({'assignment': ['upload', 'upload_final']}, m)
        filename = 'testfile_assignment_{}'.format(uuid.uuid4().hex)
        try:
            with open(filename, 'w+') as file:
                response = self.assignment.upload_to_submission(file)
                self.assertTrue(response[0])
                self.assertIsInstance(response[1], dict)
                self.assertIn('url', response[1])
        finally:
            cleanup_file(filename)
    def test_upload_to_submission_user(self, m):
        register_uris({'assignment': ['upload_by_id', 'upload_final']}, m)
        filename = 'testfile_assignment_{}'.format(uuid.uuid4().hex)
        user_id = 1
        try:
            with open(filename, 'w+') as file:
                response = self.assignment.upload_to_submission(file, user_id)
                self.assertTrue(response[0])
                self.assertIsInstance(response[1], dict)
                self.assertIn('url', response[1])
        finally:
            cleanup_file(filename)
@requests_mock.Mocker()
class TestAssignmentGroup(unittest.TestCase):
    """Tests for canvasapi AssignmentGroup: edit, delete, __str__."""
    def setUp(self):
        self.canvas = Canvas(settings.BASE_URL, settings.API_KEY)
        with requests_mock.Mocker() as m:
            register_uris({
                'course': ['get_by_id'],
                'assignment': ['get_assignment_group']
            }, m)
            self.course = self.canvas.get_course(1)
            self.assignment_group = self.course.get_assignment_group(5)
    # edit()
    def test_edit_assignment_group(self, m):
        register_uris({'assignment': ['edit_assignment_group']}, m)
        name = 'New Name'
        edited_assignment_group = self.assignment_group.edit(
            assignment_group={'name': name}
        )
        self.assertIsInstance(edited_assignment_group, AssignmentGroup)
        self.assertTrue(hasattr(edited_assignment_group, 'name'))
        self.assertEqual(edited_assignment_group.name, name)
    # delete()
    def test_delete_assignment_group(self, m):
        register_uris({'assignment': ['delete_assignment_group']}, m)
        deleted_assignment_group = self.assignment_group.delete()
        self.assertIsInstance(deleted_assignment_group, AssignmentGroup)
        self.assertTrue(hasattr(deleted_assignment_group, 'name'))
        self.assertEqual(deleted_assignment_group.name, 'Assignment Group 5')
    # __str__()
    def test__str__(self, m):
        string = str(self.assignment_group)
        self.assertIsInstance(string, str)
| 34.740586 | 82 | 0.65386 | from __future__ import absolute_import, division, print_function, unicode_literals
import unittest
import uuid
import requests_mock
from canvasapi import Canvas
from canvasapi.assignment import Assignment, AssignmentGroup
from canvasapi.exceptions import CanvasException, RequiredFieldMissing
from canvasapi.progress import Progress
from canvasapi.submission import Submission
from canvasapi.user import UserDisplay
from tests import settings
from tests.util import register_uris, cleanup_file
@requests_mock.Mocker()
class TestAssignment(unittest.TestCase):
def setUp(self):
self.canvas = Canvas(settings.BASE_URL, settings.API_KEY)
with requests_mock.Mocker() as m:
register_uris({'course': ['get_by_id', 'get_assignment_by_id']}, m)
self.course = self.canvas.get_course(1)
self.assignment = self.course.get_assignment(1)
def test_delete_assignments(self, m):
register_uris({'assignment': ['delete_assignment']}, m)
deleted_assignment = self.assignment.delete()
self.assertIsInstance(deleted_assignment, Assignment)
def test_edit_assignment(self, m):
register_uris({'assignment': ['edit_assignment']}, m)
name = 'New Name'
edited_assignment = self.assignment.edit(assignment={'name': name})
self.assertIsInstance(edited_assignment, Assignment)
self.assertTrue(hasattr(edited_assignment, 'name'))
self.assertEqual(edited_assignment.name, name)
def test_get_gradeable_students(self, m):
register_uris({'course': ['list_gradeable_students']}, m)
students = self.assignment.get_gradeable_students()
student_list = [student for student in students]
self.assertEqual(len(student_list), 2)
self.assertIsInstance(student_list[0], UserDisplay)
def test_get_submission(self, m):
register_uris({
'submission': ['get_by_id_course'],
'user': ['get_by_id']
}, m)
user_id = 1
submission_by_id = self.assignment.get_submission(user_id)
self.assertIsInstance(submission_by_id, Submission)
self.assertTrue(hasattr(submission_by_id, 'submission_type'))
user = self.canvas.get_user(user_id)
submission_by_obj = self.assignment.get_submission(user)
self.assertIsInstance(submission_by_obj, Submission)
self.assertTrue(hasattr(submission_by_obj, 'submission_type'))
def test_get_submissions(self, m):
register_uris({'submission': ['list_submissions']}, m)
submissions = self.assignment.get_submissions()
submission_list_by_id = [submission for submission in submissions]
self.assertEqual(len(submission_list_by_id), 2)
self.assertIsInstance(submission_list_by_id[0], Submission)
def test_submit(self, m):
register_uris({'assignment': ['submit']}, m)
sub_type = "online_upload"
sub_dict = {'submission_type': sub_type}
submission = self.assignment.submit(sub_dict)
self.assertIsInstance(submission, Submission)
self.assertTrue(hasattr(submission, 'submission_type'))
self.assertEqual(submission.submission_type, sub_type)
def test_submit_fail(self, m):
with self.assertRaises(RequiredFieldMissing):
self.assignment.submit({})
def test_submit_file(self, m):
register_uris({'assignment': ['submit', 'upload', 'upload_final']}, m)
filename = 'testfile_assignment_{}'.format(uuid.uuid4().hex)
try:
with open(filename, 'w+') as file:
sub_type = "online_upload"
sub_dict = {'submission_type': sub_type}
submission = self.assignment.submit(sub_dict, file)
self.assertIsInstance(submission, Submission)
self.assertTrue(hasattr(submission, 'submission_type'))
self.assertEqual(submission.submission_type, sub_type)
finally:
cleanup_file(filename)
def test_submit_file_wrong_type(self, m):
filename = 'testfile_assignment_{}'.format(uuid.uuid4().hex)
sub_type = "online_text_entry"
sub_dict = {'submission_type': sub_type}
with self.assertRaises(ValueError):
self.assignment.submit(sub_dict, filename)
def test_submit_file_upload_failure(self, m):
register_uris({'assignment': ['submit', 'upload', 'upload_fail']}, m)
filename = 'testfile_assignment_{}'.format(uuid.uuid4().hex)
try:
with open(filename, 'w+') as file:
sub_type = "online_upload"
sub_dict = {'submission_type': sub_type}
with self.assertRaises(CanvasException):
self.assignment.submit(sub_dict, file)
finally:
cleanup_file(filename)
def test__str__(self, m):
string = str(self.assignment)
self.assertIsInstance(string, str)
def test_submissions_bulk_update(self, m):
register_uris({'assignment': ['update_submissions']}, m)
register_uris({'progress': ['course_progress']}, m)
progress = self.assignment.submissions_bulk_update(grade_data={
'1': {
'posted_grade': 97
},
'2': {
'posted_grade': 98
}
})
self.assertIsInstance(progress, Progress)
self.assertTrue(progress.context_type == "Course")
progress = progress.query()
self.assertTrue(progress.context_type == "Course")
def test_upload_to_submission_self(self, m):
register_uris({'assignment': ['upload', 'upload_final']}, m)
filename = 'testfile_assignment_{}'.format(uuid.uuid4().hex)
try:
with open(filename, 'w+') as file:
response = self.assignment.upload_to_submission(file)
self.assertTrue(response[0])
self.assertIsInstance(response[1], dict)
self.assertIn('url', response[1])
finally:
cleanup_file(filename)
def test_upload_to_submission_user(self, m):
register_uris({'assignment': ['upload_by_id', 'upload_final']}, m)
filename = 'testfile_assignment_{}'.format(uuid.uuid4().hex)
user_id = 1
try:
with open(filename, 'w+') as file:
response = self.assignment.upload_to_submission(file, user_id)
self.assertTrue(response[0])
self.assertIsInstance(response[1], dict)
self.assertIn('url', response[1])
finally:
cleanup_file(filename)
@requests_mock.Mocker()
class TestAssignmentGroup(unittest.TestCase):
def setUp(self):
self.canvas = Canvas(settings.BASE_URL, settings.API_KEY)
with requests_mock.Mocker() as m:
register_uris({
'course': ['get_by_id'],
'assignment': ['get_assignment_group']
}, m)
self.course = self.canvas.get_course(1)
self.assignment_group = self.course.get_assignment_group(5)
def test_edit_assignment_group(self, m):
register_uris({'assignment': ['edit_assignment_group']}, m)
name = 'New Name'
edited_assignment_group = self.assignment_group.edit(
assignment_group={'name': name}
)
self.assertIsInstance(edited_assignment_group, AssignmentGroup)
self.assertTrue(hasattr(edited_assignment_group, 'name'))
self.assertEqual(edited_assignment_group.name, name)
def test_delete_assignment_group(self, m):
register_uris({'assignment': ['delete_assignment_group']}, m)
deleted_assignment_group = self.assignment_group.delete()
self.assertIsInstance(deleted_assignment_group, AssignmentGroup)
self.assertTrue(hasattr(deleted_assignment_group, 'name'))
self.assertEqual(deleted_assignment_group.name, 'Assignment Group 5')
def test__str__(self, m):
string = str(self.assignment_group)
self.assertIsInstance(string, str)
| true | true |
f7370ac2fcd22d48c14bbb5915a279a965212500 | 9,348 | py | Python | make_tests.py | guidefloripa/kerasify | cbb2ea6cae61ccd551b0f5327433d23e8e8050ee | [
"MIT"
] | 185 | 2016-11-08T11:45:35.000Z | 2022-03-30T12:53:30.000Z | make_tests.py | guidefloripa/kerasify | cbb2ea6cae61ccd551b0f5327433d23e8e8050ee | [
"MIT"
] | 12 | 2016-12-25T00:25:52.000Z | 2021-05-07T04:02:26.000Z | make_tests.py | guidefloripa/kerasify | cbb2ea6cae61ccd551b0f5327433d23e8e8050ee | [
"MIT"
] | 61 | 2016-11-16T19:15:34.000Z | 2021-11-05T19:33:59.000Z | import numpy as np
import pprint
from keras.models import Sequential
from keras.layers import Convolution2D, Dense, Flatten, Activation, MaxPooling2D, Dropout
from keras.layers.recurrent import LSTM
from keras.layers.advanced_activations import ELU
from keras.layers.embeddings import Embedding
from kerasify import export_model
np.set_printoptions(precision=25, threshold=np.nan)
def c_array(a):
s = pprint.pformat(a.flatten())
s = s.replace('[', '{').replace(']', '}').replace('array(', '').replace(')', '').replace(', dtype=float32', '')
shape = ''
if a.shape == ():
s = '{%s}' % s
shape = '(1)'
else:
shape = repr(a.shape).replace(',)', ')')
return shape, s
TEST_CASE = '''
bool test_%s(double* load_time, double* apply_time)
{
printf("TEST %s\\n");
KASSERT(load_time, "Invalid double");
KASSERT(apply_time, "Invalid double");
Tensor in%s;
in.data_ = %s;
Tensor out%s;
out.data_ = %s;
KerasTimer load_timer;
load_timer.Start();
KerasModel model;
KASSERT(model.LoadModel("test_%s.model"), "Failed to load model");
*load_time = load_timer.Stop();
KerasTimer apply_timer;
apply_timer.Start();
Tensor predict = out;
KASSERT(model.Apply(&in, &out), "Failed to apply");
*apply_time = apply_timer.Stop();
for (int i = 0; i < out.dims_[0]; i++)
{
KASSERT_EQ(out(i), predict(i), %s);
}
return true;
}
'''
def output_testcase(model, test_x, test_y, name, eps):
print("Processing %s" % name)
model.compile(loss='mean_squared_error', optimizer='adamax')
model.fit(test_x, test_y, nb_epoch=1, verbose=False)
predict_y = model.predict(test_x).astype('f')
print(model.summary())
export_model(model, 'test_%s.model' % name)
with open('test_%s.h' % name, 'w') as f:
x_shape, x_data = c_array(test_x[0])
y_shape, y_data = c_array(predict_y[0])
f.write(TEST_CASE % (name, name, x_shape, x_data, y_shape, y_data, name, eps))
''' Dense 1x1 '''
test_x = np.arange(10)
test_y = test_x * 10 + 1
model = Sequential()
model.add(Dense(1, input_dim=1))
output_testcase(model, test_x, test_y, 'dense_1x1', '1e-6')
''' Dense 10x1 '''
test_x = np.random.rand(10, 10).astype('f')
test_y = np.random.rand(10).astype('f')
model = Sequential()
model.add(Dense(1, input_dim=10))
output_testcase(model, test_x, test_y, 'dense_10x1', '1e-6')
''' Dense 2x2 '''
test_x = np.random.rand(10, 2).astype('f')
test_y = np.random.rand(10).astype('f')
model = Sequential()
model.add(Dense(2, input_dim=2))
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'dense_2x2', '1e-6')
''' Dense 10x10 '''
test_x = np.random.rand(10, 10).astype('f')
test_y = np.random.rand(10).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10))
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'dense_10x10', '1e-6')
''' Dense 10x10x10 '''
test_x = np.random.rand(10, 10).astype('f')
test_y = np.random.rand(10, 10).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10))
model.add(Dense(10))
output_testcase(model, test_x, test_y, 'dense_10x10x10', '1e-6')
''' Conv 2x2 '''
test_x = np.random.rand(10, 1, 2, 2).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(1, 2, 2, input_shape=(1, 2, 2)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_2x2', '1e-6')
''' Conv 3x3 '''
test_x = np.random.rand(10, 1, 3, 3).astype('f').astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(1, 3, 3, input_shape=(1, 3, 3)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_3x3', '1e-6')
''' Conv 3x3x3 '''
test_x = np.random.rand(10, 3, 10, 10).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(3, 3, 3, input_shape=(3, 10, 10)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_3x3x3', '1e-6')
''' Activation ELU '''
test_x = np.random.rand(1, 10).astype('f')
test_y = np.random.rand(1, 1).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10))
model.add(ELU(alpha=0.5))
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'elu_10', '1e-6')
''' Activation relu '''
test_x = np.random.rand(1, 10).astype('f')
test_y = np.random.rand(1, 10).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10))
model.add(Activation('relu'))
output_testcase(model, test_x, test_y, 'relu_10', '1e-6')
''' Dense relu '''
test_x = np.random.rand(1, 10).astype('f')
test_y = np.random.rand(1, 10).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10, activation='relu'))
model.add(Dense(10, input_dim=10, activation='relu'))
model.add(Dense(10, input_dim=10, activation='relu'))
output_testcase(model, test_x, test_y, 'dense_relu_10', '1e-6')
''' Dense relu '''
test_x = np.random.rand(1, 10).astype('f')
test_y = np.random.rand(1, 10).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10, activation='tanh'))
model.add(Dense(10, input_dim=10, activation='tanh'))
model.add(Dense(10, input_dim=10, activation='tanh'))
output_testcase(model, test_x, test_y, 'dense_tanh_10', '1e-6')
''' Conv softplus '''
test_x = np.random.rand(10, 1, 2, 2).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(1, 2, 2, input_shape=(1, 2, 2), activation='softplus'))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_softplus_2x2', '1e-6')
''' Conv hardsigmoid '''
test_x = np.random.rand(10, 1, 2, 2).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(1, 2, 2, input_shape=(1, 2, 2), activation='hard_sigmoid'))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_hard_sigmoid_2x2', '1e-6')
''' Conv sigmoid '''
test_x = np.random.rand(10, 1, 2, 2).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(1, 2, 2, input_shape=(1, 2, 2), activation='sigmoid'))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_sigmoid_2x2', '1e-6')
''' Maxpooling2D 1x1'''
test_x = np.random.rand(10, 1, 10, 10).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(MaxPooling2D(pool_size=(1, 1), input_shape=(1, 10, 10)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'maxpool2d_1x1', '1e-6')
''' Maxpooling2D 2x2'''
test_x = np.random.rand(10, 1, 10, 10).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(MaxPooling2D(pool_size=(2, 2), input_shape=(1, 10, 10)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'maxpool2d_2x2', '1e-6')
''' Maxpooling2D 3x2x2'''
test_x = np.random.rand(10, 3, 10, 10).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(MaxPooling2D(pool_size=(2, 2), input_shape=(3, 10, 10)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'maxpool2d_3x2x2', '1e-6')
''' Maxpooling2D 3x3x3'''
test_x = np.random.rand(10, 3, 10, 10).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(MaxPooling2D(pool_size=(3, 3), input_shape=(3, 10, 10)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'maxpool2d_3x3x3', '1e-6')
''' LSTM simple 7x20 '''
test_x = np.random.rand(10, 7, 20).astype('f')
test_y = np.random.rand(10, 3).astype('f')
model = Sequential()
model.add(LSTM(3, return_sequences=False, input_shape=(7, 20)))
output_testcase(model, test_x, test_y, 'lstm_simple_7x20', '1e-6')
''' LSTM simple stacked 20x9 '''
test_x = np.random.rand(10, 20, 9).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(LSTM(32, return_sequences=False, input_shape=(20, 9)))
model.add(Dense(3, input_dim=32, activation='tanh'))
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'lstm_simple_stacked20x9', '1e-6')
''' LSTM stacked 150x83 '''
test_x = np.random.rand(10, 150, 83).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(LSTM(32, return_sequences=True, input_shape=(150, 83)))
model.add(LSTM(32, return_sequences=False))
model.add(Dense(1, activation='sigmoid'))
output_testcase(model, test_x, test_y, 'lstm_stacked150x83', '1e-6')
''' Embedding 64 '''
np.random.seed(10)
test_x = np.random.randint(100, size=(32, 10)).astype('f')
test_y = np.random.rand(32, 20).astype('f')
model = Sequential()
model.add(Embedding(100, 64, input_length=10))
model.add(Flatten())
#model.add(Dropout(0.5))
model.add(Dense(20, activation='sigmoid'))
output_testcase(model, test_x, test_y, 'embedding64', '1e-6')
''' Benchmark '''
test_x = np.random.rand(1, 3, 128, 128).astype('f')
test_y = np.random.rand(1, 10).astype('f')
model = Sequential()
model.add(Convolution2D(16, 7, 7, input_shape=(3, 128, 128), activation='relu'))
model.add(MaxPooling2D(pool_size=(3, 3)))
model.add(ELU())
model.add(Convolution2D(8, 3, 3))
model.add(Flatten())
model.add(Dense(1000, activation='relu'))
model.add(Dense(10))
output_testcase(model, test_x, test_y, 'benchmark', '1e-3')
| 28.587156 | 115 | 0.674476 | import numpy as np
import pprint
from keras.models import Sequential
from keras.layers import Convolution2D, Dense, Flatten, Activation, MaxPooling2D, Dropout
from keras.layers.recurrent import LSTM
from keras.layers.advanced_activations import ELU
from keras.layers.embeddings import Embedding
from kerasify import export_model
np.set_printoptions(precision=25, threshold=np.nan)
def c_array(a):
s = pprint.pformat(a.flatten())
s = s.replace('[', '{').replace(']', '}').replace('array(', '').replace(')', '').replace(', dtype=float32', '')
shape = ''
if a.shape == ():
s = '{%s}' % s
shape = '(1)'
else:
shape = repr(a.shape).replace(',)', ')')
return shape, s
TEST_CASE = '''
bool test_%s(double* load_time, double* apply_time)
{
printf("TEST %s\\n");
KASSERT(load_time, "Invalid double");
KASSERT(apply_time, "Invalid double");
Tensor in%s;
in.data_ = %s;
Tensor out%s;
out.data_ = %s;
KerasTimer load_timer;
load_timer.Start();
KerasModel model;
KASSERT(model.LoadModel("test_%s.model"), "Failed to load model");
*load_time = load_timer.Stop();
KerasTimer apply_timer;
apply_timer.Start();
Tensor predict = out;
KASSERT(model.Apply(&in, &out), "Failed to apply");
*apply_time = apply_timer.Stop();
for (int i = 0; i < out.dims_[0]; i++)
{
KASSERT_EQ(out(i), predict(i), %s);
}
return true;
}
'''
def output_testcase(model, test_x, test_y, name, eps):
print("Processing %s" % name)
model.compile(loss='mean_squared_error', optimizer='adamax')
model.fit(test_x, test_y, nb_epoch=1, verbose=False)
predict_y = model.predict(test_x).astype('f')
print(model.summary())
export_model(model, 'test_%s.model' % name)
with open('test_%s.h' % name, 'w') as f:
x_shape, x_data = c_array(test_x[0])
y_shape, y_data = c_array(predict_y[0])
f.write(TEST_CASE % (name, name, x_shape, x_data, y_shape, y_data, name, eps))
test_x = np.arange(10)
test_y = test_x * 10 + 1
model = Sequential()
model.add(Dense(1, input_dim=1))
output_testcase(model, test_x, test_y, 'dense_1x1', '1e-6')
test_x = np.random.rand(10, 10).astype('f')
test_y = np.random.rand(10).astype('f')
model = Sequential()
model.add(Dense(1, input_dim=10))
output_testcase(model, test_x, test_y, 'dense_10x1', '1e-6')
test_x = np.random.rand(10, 2).astype('f')
test_y = np.random.rand(10).astype('f')
model = Sequential()
model.add(Dense(2, input_dim=2))
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'dense_2x2', '1e-6')
test_x = np.random.rand(10, 10).astype('f')
test_y = np.random.rand(10).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10))
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'dense_10x10', '1e-6')
test_x = np.random.rand(10, 10).astype('f')
test_y = np.random.rand(10, 10).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10))
model.add(Dense(10))
output_testcase(model, test_x, test_y, 'dense_10x10x10', '1e-6')
test_x = np.random.rand(10, 1, 2, 2).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(1, 2, 2, input_shape=(1, 2, 2)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_2x2', '1e-6')
test_x = np.random.rand(10, 1, 3, 3).astype('f').astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(1, 3, 3, input_shape=(1, 3, 3)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_3x3', '1e-6')
test_x = np.random.rand(10, 3, 10, 10).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(3, 3, 3, input_shape=(3, 10, 10)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_3x3x3', '1e-6')
test_x = np.random.rand(1, 10).astype('f')
test_y = np.random.rand(1, 1).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10))
model.add(ELU(alpha=0.5))
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'elu_10', '1e-6')
test_x = np.random.rand(1, 10).astype('f')
test_y = np.random.rand(1, 10).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10))
model.add(Activation('relu'))
output_testcase(model, test_x, test_y, 'relu_10', '1e-6')
test_x = np.random.rand(1, 10).astype('f')
test_y = np.random.rand(1, 10).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10, activation='relu'))
model.add(Dense(10, input_dim=10, activation='relu'))
model.add(Dense(10, input_dim=10, activation='relu'))
output_testcase(model, test_x, test_y, 'dense_relu_10', '1e-6')
test_x = np.random.rand(1, 10).astype('f')
test_y = np.random.rand(1, 10).astype('f')
model = Sequential()
model.add(Dense(10, input_dim=10, activation='tanh'))
model.add(Dense(10, input_dim=10, activation='tanh'))
model.add(Dense(10, input_dim=10, activation='tanh'))
output_testcase(model, test_x, test_y, 'dense_tanh_10', '1e-6')
test_x = np.random.rand(10, 1, 2, 2).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(1, 2, 2, input_shape=(1, 2, 2), activation='softplus'))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_softplus_2x2', '1e-6')
test_x = np.random.rand(10, 1, 2, 2).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(1, 2, 2, input_shape=(1, 2, 2), activation='hard_sigmoid'))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_hard_sigmoid_2x2', '1e-6')
test_x = np.random.rand(10, 1, 2, 2).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(Convolution2D(1, 2, 2, input_shape=(1, 2, 2), activation='sigmoid'))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'conv_sigmoid_2x2', '1e-6')
test_x = np.random.rand(10, 1, 10, 10).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(MaxPooling2D(pool_size=(1, 1), input_shape=(1, 10, 10)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'maxpool2d_1x1', '1e-6')
test_x = np.random.rand(10, 1, 10, 10).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(MaxPooling2D(pool_size=(2, 2), input_shape=(1, 10, 10)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'maxpool2d_2x2', '1e-6')
test_x = np.random.rand(10, 3, 10, 10).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(MaxPooling2D(pool_size=(2, 2), input_shape=(3, 10, 10)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'maxpool2d_3x2x2', '1e-6')
test_x = np.random.rand(10, 3, 10, 10).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(MaxPooling2D(pool_size=(3, 3), input_shape=(3, 10, 10)))
model.add(Flatten())
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'maxpool2d_3x3x3', '1e-6')
test_x = np.random.rand(10, 7, 20).astype('f')
test_y = np.random.rand(10, 3).astype('f')
model = Sequential()
model.add(LSTM(3, return_sequences=False, input_shape=(7, 20)))
output_testcase(model, test_x, test_y, 'lstm_simple_7x20', '1e-6')
test_x = np.random.rand(10, 20, 9).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(LSTM(32, return_sequences=False, input_shape=(20, 9)))
model.add(Dense(3, input_dim=32, activation='tanh'))
model.add(Dense(1))
output_testcase(model, test_x, test_y, 'lstm_simple_stacked20x9', '1e-6')
test_x = np.random.rand(10, 150, 83).astype('f')
test_y = np.random.rand(10, 1).astype('f')
model = Sequential()
model.add(LSTM(32, return_sequences=True, input_shape=(150, 83)))
model.add(LSTM(32, return_sequences=False))
model.add(Dense(1, activation='sigmoid'))
output_testcase(model, test_x, test_y, 'lstm_stacked150x83', '1e-6')
np.random.seed(10)
test_x = np.random.randint(100, size=(32, 10)).astype('f')
test_y = np.random.rand(32, 20).astype('f')
model = Sequential()
model.add(Embedding(100, 64, input_length=10))
model.add(Flatten())
model.add(Dense(20, activation='sigmoid'))
output_testcase(model, test_x, test_y, 'embedding64', '1e-6')
test_x = np.random.rand(1, 3, 128, 128).astype('f')
test_y = np.random.rand(1, 10).astype('f')
model = Sequential()
model.add(Convolution2D(16, 7, 7, input_shape=(3, 128, 128), activation='relu'))
model.add(MaxPooling2D(pool_size=(3, 3)))
model.add(ELU())
model.add(Convolution2D(8, 3, 3))
model.add(Flatten())
model.add(Dense(1000, activation='relu'))
model.add(Dense(10))
output_testcase(model, test_x, test_y, 'benchmark', '1e-3')
| true | true |
f7370d3a7ba6b398302266319fded87169772eec | 1,227 | py | Python | marqeta/response_models/address_verification_model.py | marqeta/marqeta-python | 66fa690eb910825c510a391720b0fe717fac0234 | [
"MIT"
] | 21 | 2019-04-12T09:02:17.000Z | 2022-02-18T11:39:06.000Z | marqeta/response_models/address_verification_model.py | marqeta/marqeta-python | 66fa690eb910825c510a391720b0fe717fac0234 | [
"MIT"
] | 1 | 2020-07-22T21:27:40.000Z | 2020-07-23T17:38:43.000Z | marqeta/response_models/address_verification_model.py | marqeta/marqeta-python | 66fa690eb910825c510a391720b0fe717fac0234 | [
"MIT"
] | 10 | 2019-05-08T14:20:37.000Z | 2021-09-20T18:09:26.000Z | from datetime import datetime, date
from marqeta.response_models.avs_information import AvsInformation
from marqeta.response_models.avs_information import AvsInformation
from marqeta.response_models.response import Response
from marqeta.response_models import datetime_object
import json
import re
class AddressVerificationModel(object):
def __init__(self, json_response):
self.json_response = json_response
def __str__(self):
return json.dumps(self.json_response, default=self.json_serial)
@staticmethod
def json_serial(o):
if isinstance(o, datetime) or isinstance(o, date):
return o.__str__()
@property
def request(self):
if 'request' in self.json_response:
return AvsInformation(self.json_response['request'])
@property
def on_file(self):
if 'on_file' in self.json_response:
return AvsInformation(self.json_response['on_file'])
@property
def response(self):
if 'response' in self.json_response:
return Response(self.json_response['response'])
def __repr__(self):
return '<Marqeta.response_models.address_verification_model.AddressVerificationModel>' + self.__str__()
| 31.461538 | 112 | 0.723716 | from datetime import datetime, date
from marqeta.response_models.avs_information import AvsInformation
from marqeta.response_models.avs_information import AvsInformation
from marqeta.response_models.response import Response
from marqeta.response_models import datetime_object
import json
import re
class AddressVerificationModel(object):
def __init__(self, json_response):
self.json_response = json_response
def __str__(self):
return json.dumps(self.json_response, default=self.json_serial)
@staticmethod
def json_serial(o):
if isinstance(o, datetime) or isinstance(o, date):
return o.__str__()
@property
def request(self):
if 'request' in self.json_response:
return AvsInformation(self.json_response['request'])
@property
def on_file(self):
if 'on_file' in self.json_response:
return AvsInformation(self.json_response['on_file'])
@property
def response(self):
if 'response' in self.json_response:
return Response(self.json_response['response'])
def __repr__(self):
return '<Marqeta.response_models.address_verification_model.AddressVerificationModel>' + self.__str__()
| true | true |
f7370e0352afbfdab72ada882dad548f334521bd | 6,015 | py | Python | sanic_thumbnails/thumbnail.py | q8977452/sanic-thumbnails | 75a9e9aab4918a47162b80af8eb41fe30ad64742 | [
"MIT"
] | null | null | null | sanic_thumbnails/thumbnail.py | q8977452/sanic-thumbnails | 75a9e9aab4918a47162b80af8eb41fe30ad64742 | [
"MIT"
] | null | null | null | sanic_thumbnails/thumbnail.py | q8977452/sanic-thumbnails | 75a9e9aab4918a47162b80af8eb41fe30ad64742 | [
"MIT"
] | null | null | null | from __future__ import unicode_literals
import os
from io import BytesIO
try:
from PIL import Image, ImageOps
except ImportError:
raise RuntimeError('Get Pillow at https://pypi.python.org/pypi/Pillow '
'or run command "pip install Pillow".')
from .utils import import_from_string, generate_filename, parse_size, aspect_to_string
class Thumbnail(object):
def __init__(self, app=None, configure_jinja=True):
self.app = app
self._configure_jinja = configure_jinja
self._default_root_directory = 'media'
self._default_thumbnail_directory = 'media'
self._default_root_url = '/'
self._default_thumbnail_root_url = '/'
self._default_format = 'JPEG'
self._default_storage_backend = 'sanic_thumbnails.storage_backends.FilesystemStorageBackend'
if app is not None:
self.init_app(app)
async def init_app(self, app):
if self.app is None:
self.app = app
app.thumbnail_instance = self
if not hasattr(app, 'extensions'):
app.extensions = {}
if 'thumbnail' in app.extensions:
raise RuntimeError('Sanic-thumbnail extension already initialized')
app.extensions['thumbnail'] = self
app.config.setdefault('THUMBNAIL_MEDIA_ROOT', self._default_root_directory)
app.config.setdefault('THUMBNAIL_MEDIA_THUMBNAIL_ROOT', self._default_thumbnail_directory)
app.config.setdefault('THUMBNAIL_MEDIA_URL', self._default_root_url)
app.config.setdefault('THUMBNAIL_MEDIA_THUMBNAIL_URL', self._default_thumbnail_root_url)
app.config.setdefault('THUMBNAIL_STORAGE_BACKEND', self._default_storage_backend)
app.config.setdefault('THUMBNAIL_DEFAUL_FORMAT', self._default_format)
if self._configure_jinja:
app.jinja_env.filters.update(
thumbnail=await self.get_thumbnail,
)
@property
def root_directory(self):
path = self.app.config['THUMBNAIL_MEDIA_ROOT']
if os.path.isabs(path):
return path
else:
return os.path.join(self.app.root_path, path)
@property
def thumbnail_directory(self):
path = self.app.config['THUMBNAIL_MEDIA_THUMBNAIL_ROOT']
if os.path.isabs(path):
return path
else:
return os.path.join(self.app.root_path, path)
@property
def root_url(self):
return self.app.config['THUMBNAIL_MEDIA_URL']
@property
def thumbnail_url(self):
return self.app.config['THUMBNAIL_MEDIA_THUMBNAIL_URL']
@property
def storage_backend(self):
return self.app.config['THUMBNAIL_STORAGE_BACKEND']
async def get_storage_backend(self):
backend_class = import_from_string(self.storage_backend)
return backend_class(app=self.app)
async def get_thumbnail(self, original, size, **options):
storage = self.get_storage_backend()
crop = options.get('crop', 'fit')
background = options.get('background')
quality = options.get('quality', 90)
thumbnail_size = parse_size(size)
original_path, original_filename = os.path.split(original)
thumbnail_filename = generate_filename(original_filename, aspect_to_string(size), crop, background, quality)
original_filepath = os.path.join(self.root_directory, original_path, original_filename)
thumbnail_filepath = os.path.join(self.thumbnail_directory, original_path, thumbnail_filename)
thumbnail_url = os.path.join(self.thumbnail_url, original_path, thumbnail_filename)
if storage.exists(thumbnail_filepath):
return thumbnail_url
image = Image.open(BytesIO(storage.read(original_filepath)))
try:
image.load()
except (IOError, OSError):
self.app.logger.warning('Thumbnail not load image: %s', original_filepath)
return thumbnail_url
# get original image format
options['format'] = options.get('format', image.format)
image = await self._create_thumbnail(image, thumbnail_size, crop,
background=background)
raw_data = await self.get_raw_data(image, **options)
storage.save(thumbnail_filepath, raw_data)
return thumbnail_url
async def get_raw_data(self, image, **options):
data = {
'format': self._get_format(image, **options),
'quality': options.get('quality', 90),
}
_file = BytesIO()
image.save(_file, **data)
return _file.getvalue()
@staticmethod
def colormode(image, colormode='RGB'):
if colormode == 'RGB' or colormode == 'RGBA':
if image.mode == 'RGBA':
return image
if image.mode == 'LA':
return image.convert('RGBA')
return image.convert(colormode)
if colormode == 'GRAY':
return image.convert('L')
return image.convert(colormode)
@staticmethod
def background(original_image, color=0xff):
size = (max(original_image.size),) * 2
image = Image.new('L', size, color)
image.paste(original_image, tuple(map(lambda x: (x[0] - x[1]) / 2, zip(size, original_image.size))))
return image
async def _get_format(self, image, **options):
if options.get('format'):
return options.get('format')
if image.format:
return image.format
return self.app.config['THUMBNAIL_DEFAUL_FORMAT']
async def _create_thumbnail(self, image, size, crop='fit', background=None):
if crop == 'fit':
image = ImageOps.fit(image, size, Image.ANTIALIAS)
else:
image = image.copy()
image.thumbnail(size, resample=Image.ANTIALIAS)
if background is not None:
image = self.background(image)
image = self.colormode(image)
return image
| 33.983051 | 116 | 0.644888 | from __future__ import unicode_literals
import os
from io import BytesIO
try:
from PIL import Image, ImageOps
except ImportError:
raise RuntimeError('Get Pillow at https://pypi.python.org/pypi/Pillow '
'or run command "pip install Pillow".')
from .utils import import_from_string, generate_filename, parse_size, aspect_to_string
class Thumbnail(object):
    """Sanic extension that generates and caches image thumbnails on demand.

    Thumbnails are produced with Pillow, persisted through a pluggable
    storage backend (dotted-path configurable), and optionally exposed to
    Jinja templates as a ``thumbnail`` filter.
    """

    def __init__(self, app=None, configure_jinja=True):
        self.app = app
        self._configure_jinja = configure_jinja

        # Fallback configuration values; applied via app.config.setdefault()
        # in init_app(), so explicit app config always wins.
        self._default_root_directory = 'media'
        self._default_thumbnail_directory = 'media'
        self._default_root_url = '/'
        self._default_thumbnail_root_url = '/'
        self._default_format = 'JPEG'
        self._default_storage_backend = 'sanic_thumbnails.storage_backends.FilesystemStorageBackend'

        if app is not None:
            # NOTE(review): init_app is a coroutine; calling it from a sync
            # __init__ only creates the coroutine object. Callers that pass
            # ``app`` here must ensure it is actually awaited/scheduled.
            self.init_app(app)

    async def init_app(self, app):
        """Bind the extension to *app*: register it under app.extensions,
        seed config defaults and (optionally) install the Jinja filter.

        Raises RuntimeError if the extension was already initialised on
        this app.
        """
        if self.app is None:
            self.app = app

        app.thumbnail_instance = self

        if not hasattr(app, 'extensions'):
            app.extensions = {}

        if 'thumbnail' in app.extensions:
            raise RuntimeError('Sanic-thumbnail extension already initialized')

        app.extensions['thumbnail'] = self

        app.config.setdefault('THUMBNAIL_MEDIA_ROOT', self._default_root_directory)
        app.config.setdefault('THUMBNAIL_MEDIA_THUMBNAIL_ROOT', self._default_thumbnail_directory)
        app.config.setdefault('THUMBNAIL_MEDIA_URL', self._default_root_url)
        app.config.setdefault('THUMBNAIL_MEDIA_THUMBNAIL_URL', self._default_thumbnail_root_url)
        app.config.setdefault('THUMBNAIL_STORAGE_BACKEND', self._default_storage_backend)
        # NOTE: 'DEFAUL' (sic) is the key actually read by _get_format();
        # kept misspelled for backwards compatibility with existing configs.
        app.config.setdefault('THUMBNAIL_DEFAUL_FORMAT', self._default_format)

        if self._configure_jinja:
            # BUGFIX: the original did ``thumbnail=await self.get_thumbnail``,
            # which awaits the bound-method object itself and raises
            # TypeError. The filter must receive the coroutine function.
            app.jinja_env.filters.update(
                thumbnail=self.get_thumbnail,
            )

    @property
    def root_directory(self):
        """Absolute directory holding the original media files."""
        path = self.app.config['THUMBNAIL_MEDIA_ROOT']

        if os.path.isabs(path):
            return path
        else:
            return os.path.join(self.app.root_path, path)

    @property
    def thumbnail_directory(self):
        """Absolute directory where generated thumbnails are written."""
        path = self.app.config['THUMBNAIL_MEDIA_THUMBNAIL_ROOT']

        if os.path.isabs(path):
            return path
        else:
            return os.path.join(self.app.root_path, path)

    @property
    def root_url(self):
        """Public URL prefix for original media."""
        return self.app.config['THUMBNAIL_MEDIA_URL']

    @property
    def thumbnail_url(self):
        """Public URL prefix for generated thumbnails."""
        return self.app.config['THUMBNAIL_MEDIA_THUMBNAIL_URL']

    @property
    def storage_backend(self):
        """Dotted import path of the configured storage backend class."""
        return self.app.config['THUMBNAIL_STORAGE_BACKEND']

    async def get_storage_backend(self):
        """Instantiate and return the configured storage backend."""
        backend_class = import_from_string(self.storage_backend)
        return backend_class(app=self.app)

    async def get_thumbnail(self, original, size, **options):
        """Return the URL of a thumbnail for *original*, creating it if needed.

        :param original: path of the source image, relative to root_directory.
        :param size: target size specification (parsed by parse_size).
        :param options: ``crop`` ('fit' to crop-to-fill, anything else for a
            plain bounded resize), ``background``, ``quality`` (default 90)
            and ``format``.
        """
        # BUGFIX: get_storage_backend is a coroutine and must be awaited;
        # the original assigned the coroutine object to ``storage``.
        storage = await self.get_storage_backend()

        crop = options.get('crop', 'fit')
        background = options.get('background')
        quality = options.get('quality', 90)

        thumbnail_size = parse_size(size)

        original_path, original_filename = os.path.split(original)
        thumbnail_filename = generate_filename(original_filename, aspect_to_string(size), crop, background, quality)

        original_filepath = os.path.join(self.root_directory, original_path, original_filename)
        thumbnail_filepath = os.path.join(self.thumbnail_directory, original_path, thumbnail_filename)
        thumbnail_url = os.path.join(self.thumbnail_url, original_path, thumbnail_filename)

        # Cache hit: the thumbnail file already exists for this exact
        # (size, crop, background, quality) combination.
        if storage.exists(thumbnail_filepath):
            return thumbnail_url

        image = Image.open(BytesIO(storage.read(original_filepath)))

        try:
            image.load()
        except (IOError, OSError):
            # Unreadable/corrupt source: log and return the (dangling)
            # thumbnail URL rather than crash the template render.
            self.app.logger.warning('Thumbnail not load image: %s', original_filepath)
            return thumbnail_url

        options['format'] = options.get('format', image.format)

        image = await self._create_thumbnail(image, thumbnail_size, crop,
                                             background=background)

        raw_data = await self.get_raw_data(image, **options)
        storage.save(thumbnail_filepath, raw_data)

        return thumbnail_url

    async def get_raw_data(self, image, **options):
        """Serialise *image* to bytes using the requested format/quality."""
        data = {
            # BUGFIX: _get_format is a coroutine; the original passed the
            # un-awaited coroutine object as the Pillow save format.
            'format': await self._get_format(image, **options),
            'quality': options.get('quality', 90),
        }
        _file = BytesIO()
        image.save(_file, **data)
        return _file.getvalue()

    @staticmethod
    def colormode(image, colormode='RGB'):
        """Convert *image* to *colormode*, preserving alpha where sensible."""
        if colormode == 'RGB' or colormode == 'RGBA':
            if image.mode == 'RGBA':
                return image
            if image.mode == 'LA':
                return image.convert('RGBA')
            return image.convert(colormode)
        if colormode == 'GRAY':
            return image.convert('L')
        return image.convert(colormode)

    @staticmethod
    def background(original_image, color=0xff):
        """Paste *original_image* centred onto a square canvas of *color*."""
        size = (max(original_image.size),) * 2
        image = Image.new('L', size, color)
        # BUGFIX: paste offsets must be integers; the original used '/',
        # which yields floats on Python 3 and makes Image.paste raise.
        image.paste(original_image, tuple(map(lambda x: (x[0] - x[1]) // 2, zip(size, original_image.size))))
        return image

    async def _get_format(self, image, **options):
        """Pick the output format: explicit option, then the source image's
        own format, then the configured app default."""
        if options.get('format'):
            return options.get('format')
        if image.format:
            return image.format
        return self.app.config['THUMBNAIL_DEFAUL_FORMAT']

    async def _create_thumbnail(self, image, size, crop='fit', background=None):
        """Resize *image* to *size*; 'fit' crops to fill, otherwise a plain
        bounded thumbnail is made. Applies background/colormode fixups."""
        if crop == 'fit':
            image = ImageOps.fit(image, size, Image.ANTIALIAS)
        else:
            image = image.copy()
            image.thumbnail(size, resample=Image.ANTIALIAS)
        if background is not None:
            image = self.background(image)
        image = self.colormode(image)
        return image
| true | true |
f7370e1afaa6e24e00c83149ce63506f03dc9fa0 | 222,734 | py | Python | hydrus/client/gui/ClientGUIScrolledPanelsManagement.py | SiNevesh/hydrus | 39f3a0589e5d9fe665babb7dc7535cf69d4bc582 | [
"WTFPL"
] | null | null | null | hydrus/client/gui/ClientGUIScrolledPanelsManagement.py | SiNevesh/hydrus | 39f3a0589e5d9fe665babb7dc7535cf69d4bc582 | [
"WTFPL"
] | null | null | null | hydrus/client/gui/ClientGUIScrolledPanelsManagement.py | SiNevesh/hydrus | 39f3a0589e5d9fe665babb7dc7535cf69d4bc582 | [
"WTFPL"
] | null | null | null | import collections
import os
import random
import re
import traceback
from qtpy import QtCore as QC
from qtpy import QtWidgets as QW
from qtpy import QtGui as QG
from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusExceptions
from hydrus.core import HydrusGlobals as HG
from hydrus.core import HydrusPaths
from hydrus.core import HydrusSerialisable
from hydrus.core import HydrusTags
from hydrus.core import HydrusText
from hydrus.client import ClientApplicationCommand as CAC
from hydrus.client import ClientConstants as CC
from hydrus.client.gui import ClientGUIDialogs
from hydrus.client.gui import ClientGUIDialogsQuick
from hydrus.client.gui import ClientGUIFunctions
from hydrus.client.gui import ClientGUIImport
from hydrus.client.gui import ClientGUIScrolledPanels
from hydrus.client.gui import ClientGUIScrolledPanelsEdit
from hydrus.client.gui import ClientGUIShortcuts
from hydrus.client.gui import ClientGUIStyle
from hydrus.client.gui import ClientGUITags
from hydrus.client.gui import ClientGUITagSorting
from hydrus.client.gui import ClientGUITime
from hydrus.client.gui import ClientGUITopLevelWindowsPanels
from hydrus.client.gui import QtPorting as QP
from hydrus.client.gui.lists import ClientGUIListBoxes
from hydrus.client.gui.lists import ClientGUIListConstants as CGLC
from hydrus.client.gui.lists import ClientGUIListCtrl
from hydrus.client.gui.pages import ClientGUIResultsSortCollect
from hydrus.client.gui.search import ClientGUIACDropdown
from hydrus.client.gui.search import ClientGUISearch
from hydrus.client.gui.widgets import ClientGUICommon
from hydrus.client.gui.widgets import ClientGUIControls
from hydrus.client.media import ClientMedia
from hydrus.client.metadata import ClientTags
from hydrus.client.networking import ClientNetworkingSessions
class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
def __init__( self, parent ):
    """Build the options dialog: one listbook with a page per settings area."""
    
    ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
    
    self._new_options = HG.client_controller.new_options
    
    self._listbook = ClientGUICommon.ListBook( self )
    
    # ( page name, panel class, whether the panel constructor takes new_options ).
    # 'gui' stays first so it is the default page.
    page_specs = [
        ( 'gui', self._GUIPanel, False ),
        ( 'gui pages', self._GUIPagesPanel, True ),
        ( 'connection', self._ConnectionPanel, False ),
        ( 'external programs', self._ExternalProgramsPanel, False ),
        ( 'files and trash', self._FilesAndTrashPanel, False ),
        ( 'file viewing statistics', self._FileViewingStatisticsPanel, False ),
        ( 'speed and memory', self._SpeedAndMemoryPanel, True ),
        ( 'maintenance and processing', self._MaintenanceAndProcessingPanel, False ),
        ( 'media', self._MediaPanel, False ),
        ( 'audio', self._AudioPanel, True ),
        ( 'system tray', self._SystemTrayPanel, True ),
        ( 'search', self._SearchPanel, True ),
        ( 'colours', self._ColoursPanel, False ),
        ( 'popups', self._PopupPanel, True ),
        ( 'regex favourites', self._RegexPanel, False ),
        ( 'sort/collect', self._SortCollectPanel, False ),
        ( 'downloading', self._DownloadingPanel, True ),
        ( 'duplicates', self._DuplicatesPanel, True ),
        ( 'importing', self._ImportingPanel, True ),
        ( 'style', self._StylePanel, True ),
        ( 'tag presentation', self._TagPresentationPanel, True ),
        ( 'tag suggestions', self._TagSuggestionsPanel, True ),
        ( 'tags', self._TagsPanel, True ),
        ( 'thumbnails', self._ThumbnailsPanel, True ),
        ( 'system', self._SystemPanel, True )
    ]
    
    for ( page_name, panel_class, wants_new_options ) in page_specs:
        
        if wants_new_options:
            
            panel = panel_class( self._listbook, self._new_options )
            
        else:
            
            panel = panel_class( self._listbook )
            
        
        self._listbook.AddPage( page_name, page_name, panel )
        
    
    #
    
    dialog_layout = QP.VBoxLayout()
    
    QP.AddToLayout( dialog_layout, self._listbook, CC.FLAGS_EXPAND_BOTH_WAYS )
    
    self.widget().setLayout( dialog_layout )
class _AudioPanel( QW.QWidget ):
    """Options page for audio-related settings."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        # widgets
        
        self._preview_uses_its_own_audio_volume = QW.QCheckBox( self )
        self._has_audio_label = QW.QLineEdit( self )
        
        # initial values from the stored options
        
        self._preview_uses_its_own_audio_volume.setChecked( self._new_options.GetBoolean( 'preview_uses_its_own_audio_volume' ) )
        self._has_audio_label.setText( self._new_options.GetString( 'has_audio_label' ) )
        
        tooltip_paragraphs = (
            'If unchecked, this media canvas will use the \'global\' audio volume slider. If checked, this media canvas will have its own separate one.',
            'Keep this on if you would like the preview viewer to be quieter than the main media viewer.'
        )
        
        self._preview_uses_its_own_audio_volume.setToolTip( ( os.linesep * 2 ).join( tooltip_paragraphs ) )
        
        # layout
        
        row_definitions = [
            ( 'The preview window has its own volume: ', self._preview_uses_its_own_audio_volume ),
            ( 'Label for files with audio: ', self._has_audio_label )
        ]
        
        grid = ClientGUICommon.WrapInGrid( self, row_definitions )
        
        panel_layout = QP.VBoxLayout()
        
        QP.AddToLayout( panel_layout, grid, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        panel_layout.addStretch( 1 )
        
        self.setLayout( panel_layout )
        
    
    def UpdateOptions( self ):
        """Write the current widget state back into the options object."""
        
        self._new_options.SetBoolean( 'preview_uses_its_own_audio_volume', self._preview_uses_its_own_audio_volume.isChecked() )
        
        self._new_options.SetString( 'has_audio_label', self._has_audio_label.text() )
class _ColoursPanel( QW.QWidget ):
    # Options page for colour settings: one tab of colour pickers per
    # colourset ('default' and 'darkmode'), plus a choice of which
    # colourset is currently active.
    
    def __init__( self, parent ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = HG.client_controller.new_options
        
        coloursets_panel = ClientGUICommon.StaticBox( self, 'coloursets' )
        
        # which colourset the client actually uses right now
        self._current_colourset = ClientGUICommon.BetterChoice( coloursets_panel )
        
        self._current_colourset.addItem( 'default', 'default' )
        self._current_colourset.addItem( 'darkmode', 'darkmode' )
        
        self._current_colourset.SetValue( self._new_options.GetString( 'current_colourset' ) )
        
        self._notebook = QW.QTabWidget( coloursets_panel )
        
        # maps colourset name -> { colour_type -> colour picker control }
        self._gui_colours = {}
        
        for colourset in ( 'default', 'darkmode' ):
            
            self._gui_colours[ colourset ] = {}
            
            colour_panel = QW.QWidget( self._notebook )
            
            # one picker per colour type, in this fixed order
            colour_types = []
            
            colour_types.append( CC.COLOUR_THUMB_BACKGROUND )
            colour_types.append( CC.COLOUR_THUMB_BACKGROUND_SELECTED )
            colour_types.append( CC.COLOUR_THUMB_BACKGROUND_REMOTE )
            colour_types.append( CC.COLOUR_THUMB_BACKGROUND_REMOTE_SELECTED )
            colour_types.append( CC.COLOUR_THUMB_BORDER )
            colour_types.append( CC.COLOUR_THUMB_BORDER_SELECTED )
            colour_types.append( CC.COLOUR_THUMB_BORDER_REMOTE )
            colour_types.append( CC.COLOUR_THUMB_BORDER_REMOTE_SELECTED )
            colour_types.append( CC.COLOUR_THUMBGRID_BACKGROUND )
            colour_types.append( CC.COLOUR_AUTOCOMPLETE_BACKGROUND )
            colour_types.append( CC.COLOUR_MEDIA_BACKGROUND )
            colour_types.append( CC.COLOUR_MEDIA_TEXT )
            colour_types.append( CC.COLOUR_TAGS_BOX )
            
            for colour_type in colour_types:
                
                ctrl = ClientGUICommon.BetterColourControl( colour_panel )
                
                ctrl.setMaximumWidth( 20 )
                
                # initialise each picker from the stored colour for this set
                ctrl.SetColour( self._new_options.GetColour( colour_type, colourset ) )
                
                self._gui_colours[ colourset ][ colour_type ] = ctrl
                
            
            #
            
            rows = []
            
            # the four thumbnail background pickers share one row
            hbox = QP.HBoxLayout()
            
            QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BACKGROUND], CC.FLAGS_CENTER_PERPENDICULAR )
            QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BACKGROUND_SELECTED], CC.FLAGS_CENTER_PERPENDICULAR )
            QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BACKGROUND_REMOTE], CC.FLAGS_CENTER_PERPENDICULAR )
            QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BACKGROUND_REMOTE_SELECTED], CC.FLAGS_CENTER_PERPENDICULAR )
            
            rows.append( ( 'thumbnail background (local: normal/selected, remote: normal/selected): ', hbox ) )
            
            # likewise for the four thumbnail border pickers
            hbox = QP.HBoxLayout()
            
            QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BORDER], CC.FLAGS_CENTER_PERPENDICULAR )
            QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BORDER_SELECTED], CC.FLAGS_CENTER_PERPENDICULAR )
            QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BORDER_REMOTE], CC.FLAGS_CENTER_PERPENDICULAR )
            QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BORDER_REMOTE_SELECTED], CC.FLAGS_CENTER_PERPENDICULAR )
            
            rows.append( ( 'thumbnail border (local: normal/selected, remote: normal/selected): ', hbox ) )
            
            rows.append( ( 'thumbnail grid background: ', self._gui_colours[ colourset ][ CC.COLOUR_THUMBGRID_BACKGROUND ] ) )
            rows.append( ( 'autocomplete background: ', self._gui_colours[ colourset ][ CC.COLOUR_AUTOCOMPLETE_BACKGROUND ] ) )
            rows.append( ( 'media viewer background: ', self._gui_colours[ colourset ][ CC.COLOUR_MEDIA_BACKGROUND ] ) )
            rows.append( ( 'media viewer text: ', self._gui_colours[ colourset ][ CC.COLOUR_MEDIA_TEXT ] ) )
            rows.append( ( 'tags box background: ', self._gui_colours[ colourset ][ CC.COLOUR_TAGS_BOX ] ) )
            
            gridbox = ClientGUICommon.WrapInGrid( colour_panel, rows )
            
            colour_panel.setLayout( gridbox )
            
            # the 'default' tab starts selected
            select = colourset == 'default'
            
            self._notebook.addTab( colour_panel, colourset )
            if select: self._notebook.setCurrentWidget( colour_panel )
            
        
        #
        
        coloursets_panel.Add( ClientGUICommon.WrapInText( self._current_colourset, coloursets_panel, 'current colourset: ' ), CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        coloursets_panel.Add( self._notebook, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, coloursets_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        # Write every picker's colour back into the options, for every
        # colourset, then save which colourset is active.
        
        for colourset in self._gui_colours:
            
            for ( colour_type, ctrl ) in list(self._gui_colours[ colourset ].items()):
                
                colour = ctrl.GetColour()
                
                self._new_options.SetColour( colour_type, colourset, colour )
                
            
        
        self._new_options.SetString( 'current_colourset', self._current_colourset.GetValue() )
class _ConnectionPanel( QW.QWidget ):
    # Options page for network settings: timeouts, retry waits, concurrent
    # job limits, and http/https/no_proxy proxy strings.
    
    def __init__( self, parent ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = HG.client_controller.new_options
        
        general = ClientGUICommon.StaticBox( self, 'general' )
        
        self._verify_regular_https = QW.QCheckBox( general )
        
        # advanced mode unlocks much wider (and more dangerous) spinbox ranges
        if self._new_options.GetBoolean( 'advanced_mode' ):
            
            network_timeout_min = 1
            network_timeout_max = 86400 * 30
            
            error_wait_time_min = 1
            error_wait_time_max = 86400 * 30
            
            max_network_jobs_max = 1000
            max_network_jobs_per_domain_max = 100
            
        else:
            
            network_timeout_min = 3
            network_timeout_max = 600
            
            error_wait_time_min = 3
            error_wait_time_max = 1800
            
            max_network_jobs_max = 30
            max_network_jobs_per_domain_max = 5
            
        
        self._network_timeout = QP.MakeQSpinBox( general, min = network_timeout_min, max = network_timeout_max )
        self._network_timeout.setToolTip( 'If a network connection cannot be made in this duration or, if once started, it experiences uninterrupted inactivity for six times this duration, it will be abandoned.' )
        
        self._connection_error_wait_time = QP.MakeQSpinBox( general, min = error_wait_time_min, max = error_wait_time_max )
        self._connection_error_wait_time.setToolTip( 'If a network connection times out as above, it will wait increasing multiples of this base time before retrying.' )
        
        self._serverside_bandwidth_wait_time = QP.MakeQSpinBox( general, min = error_wait_time_min, max = error_wait_time_max )
        self._serverside_bandwidth_wait_time.setToolTip( 'If a server returns a failure status code indicating it is short on bandwidth, the network job will wait increasing multiples of this base time before retrying.' )
        
        # 'n errors within t seconds' control; 0 errors means never halt
        self._domain_network_infrastructure_error_velocity = ClientGUITime.VelocityCtrl( general, 0, 100, 30, hours = True, minutes = True, seconds = True, per_phrase = 'within', unit = 'errors' )
        
        self._max_network_jobs = QP.MakeQSpinBox( general, min = 1, max = max_network_jobs_max )
        self._max_network_jobs_per_domain = QP.MakeQSpinBox( general, min = 1, max = max_network_jobs_per_domain_max )
        
        #
        
        proxy_panel = ClientGUICommon.StaticBox( self, 'proxy settings' )
        
        # each proxy control can be None (unset) or a proxy URL string
        self._http_proxy = ClientGUICommon.NoneableTextCtrl( proxy_panel )
        self._https_proxy = ClientGUICommon.NoneableTextCtrl( proxy_panel )
        self._no_proxy = ClientGUICommon.NoneableTextCtrl( proxy_panel )
        
        # initialise every control from the stored options
        
        self._verify_regular_https.setChecked( self._new_options.GetBoolean( 'verify_regular_https' ) )
        
        self._http_proxy.SetValue( self._new_options.GetNoneableString( 'http_proxy' ) )
        self._https_proxy.SetValue( self._new_options.GetNoneableString( 'https_proxy' ) )
        self._no_proxy.SetValue( self._new_options.GetNoneableString( 'no_proxy' ) )
        
        self._network_timeout.setValue( self._new_options.GetInteger( 'network_timeout' ) )
        
        self._connection_error_wait_time.setValue( self._new_options.GetInteger( 'connection_error_wait_time' ) )
        
        self._serverside_bandwidth_wait_time.setValue( self._new_options.GetInteger( 'serverside_bandwidth_wait_time' ) )
        
        number = self._new_options.GetInteger( 'domain_network_infrastructure_error_number' )
        time_delta = self._new_options.GetInteger( 'domain_network_infrastructure_error_time_delta' )
        
        self._domain_network_infrastructure_error_velocity.SetValue( ( number, time_delta ) )
        
        self._max_network_jobs.setValue( self._new_options.GetInteger( 'max_network_jobs' ) )
        self._max_network_jobs_per_domain.setValue( self._new_options.GetInteger( 'max_network_jobs_per_domain' ) )
        
        # layout
        
        if self._new_options.GetBoolean( 'advanced_mode' ):
            
            # warn advanced users about the widened limits above
            label = 'As you are in advanced mode, these options have very low and high limits. Be very careful about lowering delay time or raising max number of connections too far, as things will break.'
            
            st = ClientGUICommon.BetterStaticText( general, label = label )
            st.setObjectName( 'HydrusWarning' )
            
            st.setWordWrap( True )
            
            general.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
            
        
        rows = []
        
        rows.append( ( 'network timeout (seconds): ', self._network_timeout ) )
        rows.append( ( 'connection error retry wait (seconds): ', self._connection_error_wait_time ) )
        rows.append( ( 'serverside bandwidth retry wait (seconds): ', self._serverside_bandwidth_wait_time ) )
        rows.append( ( 'Halt new jobs as long as this many network infrastructure errors on their domain (0 for never wait): ', self._domain_network_infrastructure_error_velocity ) )
        rows.append( ( 'max number of simultaneous active network jobs: ', self._max_network_jobs ) )
        rows.append( ( 'max number of simultaneous active network jobs per domain: ', self._max_network_jobs_per_domain ) )
        rows.append( ( 'BUGFIX: verify regular https traffic:', self._verify_regular_https ) )
        
        gridbox = ClientGUICommon.WrapInGrid( general, rows )
        
        general.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        # explanatory text for the proxy section, adjusted for socks support
        text = 'Enter strings such as "http://ip:port" or "http://user:pass@ip:port" to use for http and https traffic. It should take effect immediately on dialog ok.'
        text += os.linesep * 2
        text += 'NO PROXY DOES NOT WORK UNLESS YOU HAVE A CUSTOM BUILD OF REQUESTS, SORRY! no_proxy takes the form of comma-separated hosts/domains, just as in curl or the NO_PROXY environment variable. When http and/or https proxies are set, they will not be used for these.'
        text += os.linesep * 2
        
        if ClientNetworkingSessions.SOCKS_PROXY_OK:
            
            text += 'It looks like you have socks support! You should also be able to enter (socks4 or) "socks5://ip:port".'
            text += os.linesep
            text += 'Use socks4a or socks5h to force remote DNS resolution, on the proxy server.'
            
        else:
            
            text += 'It does not look like you have socks support! If you want it, try adding "pysocks" (or "requests[socks]")!'
            
        
        st = ClientGUICommon.BetterStaticText( proxy_panel, text )
        
        st.setWordWrap( True )
        
        proxy_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        rows = []
        
        rows.append( ( 'http: ', self._http_proxy ) )
        rows.append( ( 'https: ', self._https_proxy ) )
        rows.append( ( 'no_proxy: ', self._no_proxy ) )
        
        gridbox = ClientGUICommon.WrapInGrid( proxy_panel, rows )
        
        proxy_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        #
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, general, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, proxy_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        # Write the current widget state back into the options object.
        
        self._new_options.SetBoolean( 'verify_regular_https', self._verify_regular_https.isChecked() )
        
        self._new_options.SetNoneableString( 'http_proxy', self._http_proxy.GetValue() )
        self._new_options.SetNoneableString( 'https_proxy', self._https_proxy.GetValue() )
        self._new_options.SetNoneableString( 'no_proxy', self._no_proxy.GetValue() )
        
        self._new_options.SetInteger( 'network_timeout', self._network_timeout.value() )
        
        self._new_options.SetInteger( 'connection_error_wait_time', self._connection_error_wait_time.value() )
        
        self._new_options.SetInteger( 'serverside_bandwidth_wait_time', self._serverside_bandwidth_wait_time.value() )
        
        self._new_options.SetInteger( 'max_network_jobs', self._max_network_jobs.value() )
        
        self._new_options.SetInteger( 'max_network_jobs_per_domain', self._max_network_jobs_per_domain.value() )
        
        ( number, time_delta ) = self._domain_network_infrastructure_error_velocity.GetValue()
        
        self._new_options.SetInteger( 'domain_network_infrastructure_error_number', number )
        self._new_options.SetInteger( 'domain_network_infrastructure_error_time_delta', time_delta )
class _DownloadingPanel( QW.QWidget ):
    # Options page for downloader settings: gallery downloader defaults,
    # subscription pacing, watcher pacing, and misc downloader behaviour.
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        #
        
        gallery_downloader = ClientGUICommon.StaticBox( self, 'gallery downloader' )
        
        # the Gallery URL Generator used when the user enters a bare query
        gug_key_and_name = HG.client_controller.network_engine.domain_manager.GetDefaultGUGKeyAndName()
        
        self._default_gug = ClientGUIImport.GUGKeyAndNameSelector( gallery_downloader, gug_key_and_name )
        
        self._gallery_page_wait_period_pages = QP.MakeQSpinBox( gallery_downloader, min=1, max=120 )
        self._gallery_file_limit = ClientGUICommon.NoneableSpinCtrl( gallery_downloader, none_phrase = 'no limit', min = 1, max = 1000000 )
        
        self._highlight_new_query = QW.QCheckBox( gallery_downloader )
        
        #
        
        subscriptions = ClientGUICommon.StaticBox( self, 'subscriptions' )
        
        self._gallery_page_wait_period_subscriptions = QP.MakeQSpinBox( subscriptions, min=1, max=30 )
        self._max_simultaneous_subscriptions = QP.MakeQSpinBox( subscriptions, min=1, max=100 )
        
        self._subscription_file_error_cancel_threshold = ClientGUICommon.NoneableSpinCtrl( subscriptions, min = 1, max = 1000000, unit = 'errors' )
        self._subscription_file_error_cancel_threshold.setToolTip( 'This is a simple patch and will be replaced with a better "retry network errors later" system at some point, but is useful to increase if you have subs to unreliable websites.' )
        
        self._process_subs_in_random_order = QW.QCheckBox( subscriptions )
        self._process_subs_in_random_order.setToolTip( 'Processing in random order is useful whenever bandwidth is tight, as it stops an \'aardvark\' subscription from always getting first whack at what is available. Otherwise, they will be processed in alphabetical order.' )
        
        checker_options = self._new_options.GetDefaultSubscriptionCheckerOptions()
        
        self._subscription_checker_options = ClientGUIImport.CheckerOptionsButton( subscriptions, checker_options )
        
        #
        
        watchers = ClientGUICommon.StaticBox( self, 'watchers' )
        
        self._watcher_page_wait_period = QP.MakeQSpinBox( watchers, min=1, max=120 )
        self._highlight_new_watcher = QW.QCheckBox( watchers )
        
        checker_options = self._new_options.GetDefaultWatcherCheckerOptions()
        
        self._watcher_checker_options = ClientGUIImport.CheckerOptionsButton( watchers, checker_options )
        
        #
        
        misc = ClientGUICommon.StaticBox( self, 'misc' )
        
        self._pause_character = QW.QLineEdit( misc )
        self._stop_character = QW.QLineEdit( misc )
        self._show_new_on_file_seed_short_summary = QW.QCheckBox( misc )
        self._show_deleted_on_file_seed_short_summary = QW.QCheckBox( misc )
        
        # advanced mode permits much shorter error-delay floors
        if self._new_options.GetBoolean( 'advanced_mode' ):
            
            delay_min = 1
            
        else:
            
            delay_min = 600
            
        
        self._subscription_network_error_delay = ClientGUITime.TimeDeltaButton( misc, min = delay_min, days = True, hours = True, minutes = True, seconds = True )
        self._subscription_other_error_delay = ClientGUITime.TimeDeltaButton( misc, min = delay_min, days = True, hours = True, minutes = True, seconds = True )
        self._downloader_network_error_delay = ClientGUITime.TimeDeltaButton( misc, min = delay_min, days = True, hours = True, minutes = True, seconds = True )
        
        # shared tooltip explaining why gallery page fetches are throttled
        
        gallery_page_tt = 'Gallery page fetches are heavy requests with unusual fetch-time requirements. It is important they not wait too long, but it is also useful to throttle them:'
        gallery_page_tt += os.linesep * 2
        gallery_page_tt += '- So they do not compete with file downloads for bandwidth, leading to very unbalanced 20/4400-type queues.'
        gallery_page_tt += os.linesep
        gallery_page_tt += '- So you do not get 1000 items in your queue before realising you did not like that tag anyway.'
        gallery_page_tt += os.linesep
        gallery_page_tt += '- To give servers a break (some gallery pages can be CPU-expensive to generate).'
        gallery_page_tt += os.linesep * 2
        gallery_page_tt += 'These delays/lots are per-domain.'
        gallery_page_tt += os.linesep * 2
        gallery_page_tt += 'If you do not understand this stuff, you can just leave it alone.'
        
        # initialise every control from the stored options
        
        self._gallery_page_wait_period_pages.setValue( self._new_options.GetInteger( 'gallery_page_wait_period_pages' ) )
        self._gallery_page_wait_period_pages.setToolTip( gallery_page_tt )
        # gallery_file_limit still lives in the old-style HC.options dict
        self._gallery_file_limit.SetValue( HC.options['gallery_file_limit'] )
        
        self._highlight_new_query.setChecked( self._new_options.GetBoolean( 'highlight_new_query' ) )
        
        self._gallery_page_wait_period_subscriptions.setValue( self._new_options.GetInteger( 'gallery_page_wait_period_subscriptions' ) )
        self._gallery_page_wait_period_subscriptions.setToolTip( gallery_page_tt )
        self._max_simultaneous_subscriptions.setValue( self._new_options.GetInteger( 'max_simultaneous_subscriptions' ) )
        
        self._subscription_file_error_cancel_threshold.SetValue( self._new_options.GetNoneableInteger( 'subscription_file_error_cancel_threshold' ) )
        
        self._process_subs_in_random_order.setChecked( self._new_options.GetBoolean( 'process_subs_in_random_order' ) )
        
        self._pause_character.setText( self._new_options.GetString( 'pause_character' ) )
        self._stop_character.setText( self._new_options.GetString( 'stop_character' ) )
        self._show_new_on_file_seed_short_summary.setChecked( self._new_options.GetBoolean( 'show_new_on_file_seed_short_summary' ) )
        self._show_deleted_on_file_seed_short_summary.setChecked( self._new_options.GetBoolean( 'show_deleted_on_file_seed_short_summary' ) )
        
        self._watcher_page_wait_period.setValue( self._new_options.GetInteger( 'watcher_page_wait_period' ) )
        self._watcher_page_wait_period.setToolTip( gallery_page_tt )
        self._highlight_new_watcher.setChecked( self._new_options.GetBoolean( 'highlight_new_watcher' ) )
        
        self._subscription_network_error_delay.SetValue( self._new_options.GetInteger( 'subscription_network_error_delay' ) )
        self._subscription_other_error_delay.SetValue( self._new_options.GetInteger( 'subscription_other_error_delay' ) )
        self._downloader_network_error_delay.SetValue( self._new_options.GetInteger( 'downloader_network_error_delay' ) )
        
        # layout: gallery downloader box
        
        rows = []
        
        rows.append( ( 'Default download source:', self._default_gug ) )
        rows.append( ( 'If new query entered and no current highlight, highlight the new query:', self._highlight_new_query ) )
        rows.append( ( 'Additional fixed time (in seconds) to wait between gallery page fetches:', self._gallery_page_wait_period_pages ) )
        rows.append( ( 'By default, stop searching once this many files are found:', self._gallery_file_limit ) )
        
        gridbox = ClientGUICommon.WrapInGrid( gallery_downloader, rows )
        
        gallery_downloader.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        # layout: subscriptions box
        
        rows = []
        
        rows.append( ( 'Additional fixed time (in seconds) to wait between gallery page fetches:', self._gallery_page_wait_period_subscriptions ) )
        rows.append( ( 'Maximum number of subscriptions that can sync simultaneously:', self._max_simultaneous_subscriptions ) )
        rows.append( ( 'If a subscription has this many failed file imports, stop and continue later:', self._subscription_file_error_cancel_threshold ) )
        rows.append( ( 'Sync subscriptions in random order:', self._process_subs_in_random_order ) )
        
        gridbox = ClientGUICommon.WrapInGrid( subscriptions, rows )
        
        subscriptions.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        subscriptions.Add( self._subscription_checker_options, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        # layout: watchers box
        
        rows = []
        
        rows.append( ( 'Additional fixed time (in seconds) to wait between watcher checks:', self._watcher_page_wait_period ) )
        rows.append( ( 'If new watcher entered and no current highlight, highlight the new watcher:', self._highlight_new_watcher ) )
        
        gridbox = ClientGUICommon.WrapInGrid( watchers, rows )
        
        watchers.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        watchers.Add( self._watcher_checker_options, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        # layout: misc box
        
        rows = []
        
        rows.append( ( 'Pause character:', self._pause_character ) )
        rows.append( ( 'Stop character:', self._stop_character ) )
        rows.append( ( 'Show a \'N\' (for \'new\') count on short file import summaries:', self._show_new_on_file_seed_short_summary ) )
        rows.append( ( 'Show a \'D\' (for \'deleted\') count on short file import summaries:', self._show_deleted_on_file_seed_short_summary ) )
        rows.append( ( 'Delay time on a gallery/watcher network error:', self._downloader_network_error_delay ) )
        rows.append( ( 'Delay time on a subscription network error:', self._subscription_network_error_delay ) )
        rows.append( ( 'Delay time on a subscription other error:', self._subscription_other_error_delay ) )
        
        gridbox = ClientGUICommon.WrapInGrid( misc, rows )
        
        misc.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        #
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, gallery_downloader, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, subscriptions, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, watchers, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, misc, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        # Write the current widget state back into the options objects
        # (both new_options and the legacy HC.options dict, plus the
        # domain manager's default GUG).
        
        HG.client_controller.network_engine.domain_manager.SetDefaultGUGKeyAndName( self._default_gug.GetValue() )
        
        self._new_options.SetInteger( 'gallery_page_wait_period_pages', self._gallery_page_wait_period_pages.value() )
        HC.options[ 'gallery_file_limit' ] = self._gallery_file_limit.GetValue()
        
        self._new_options.SetBoolean( 'highlight_new_query', self._highlight_new_query.isChecked() )
        
        self._new_options.SetInteger( 'gallery_page_wait_period_subscriptions', self._gallery_page_wait_period_subscriptions.value() )
        self._new_options.SetInteger( 'max_simultaneous_subscriptions', self._max_simultaneous_subscriptions.value() )
        
        self._new_options.SetNoneableInteger( 'subscription_file_error_cancel_threshold', self._subscription_file_error_cancel_threshold.GetValue() )
        
        self._new_options.SetBoolean( 'process_subs_in_random_order', self._process_subs_in_random_order.isChecked() )
        
        self._new_options.SetInteger( 'watcher_page_wait_period', self._watcher_page_wait_period.value() )
        self._new_options.SetBoolean( 'highlight_new_watcher', self._highlight_new_watcher.isChecked() )
        
        self._new_options.SetDefaultWatcherCheckerOptions( self._watcher_checker_options.GetValue() )
        self._new_options.SetDefaultSubscriptionCheckerOptions( self._subscription_checker_options.GetValue() )
        
        self._new_options.SetString( 'pause_character', self._pause_character.text() )
        self._new_options.SetString( 'stop_character', self._stop_character.text() )
        self._new_options.SetBoolean( 'show_new_on_file_seed_short_summary', self._show_new_on_file_seed_short_summary.isChecked() )
        self._new_options.SetBoolean( 'show_deleted_on_file_seed_short_summary', self._show_deleted_on_file_seed_short_summary.isChecked() )
        
        self._new_options.SetInteger( 'subscription_network_error_delay', self._subscription_network_error_delay.GetValue() )
        self._new_options.SetInteger( 'subscription_other_error_delay', self._subscription_other_error_delay.GetValue() )
        self._new_options.SetInteger( 'downloader_network_error_delay', self._downloader_network_error_delay.GetValue() )
class _DuplicatesPanel( QW.QWidget ):
    """Options page for the duplicate filter: the comparison score weights used to
    decide which file of a potential pair is shown first, and the maximum pair
    batch size for one filter session."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        #
        
        weights_panel = ClientGUICommon.StaticBox( self, 'duplicate filter comparison score weights' )
        
        # every weight shares the same -100..100 range; a negative value penalises the attribute
        self._duplicate_comparison_score_higher_jpeg_quality = QP.MakeQSpinBox( weights_panel, min=-100, max=100 )
        self._duplicate_comparison_score_much_higher_jpeg_quality = QP.MakeQSpinBox( weights_panel, min=-100, max=100 )
        self._duplicate_comparison_score_higher_filesize = QP.MakeQSpinBox( weights_panel, min=-100, max=100 )
        self._duplicate_comparison_score_much_higher_filesize = QP.MakeQSpinBox( weights_panel, min=-100, max=100 )
        self._duplicate_comparison_score_higher_resolution = QP.MakeQSpinBox( weights_panel, min=-100, max=100 )
        self._duplicate_comparison_score_much_higher_resolution = QP.MakeQSpinBox( weights_panel, min=-100, max=100 )
        self._duplicate_comparison_score_more_tags = QP.MakeQSpinBox( weights_panel, min=-100, max=100 )
        self._duplicate_comparison_score_older = QP.MakeQSpinBox( weights_panel, min=-100, max=100 )
        self._duplicate_comparison_score_nicer_ratio = QP.MakeQSpinBox( weights_panel, min=-100, max=100 )
        
        self._duplicate_comparison_score_nicer_ratio.setToolTip( 'For instance, 16:9 vs 640:357.')
        
        # this control lives on the panel itself, not inside the weights box
        self._duplicate_filter_max_batch_size = QP.MakeQSpinBox( self, min = 10, max = 1024 )
        
        #
        
        # populate every control from the stored option values
        self._duplicate_comparison_score_higher_jpeg_quality.setValue( self._new_options.GetInteger( 'duplicate_comparison_score_higher_jpeg_quality' ) )
        self._duplicate_comparison_score_much_higher_jpeg_quality.setValue( self._new_options.GetInteger( 'duplicate_comparison_score_much_higher_jpeg_quality' ) )
        self._duplicate_comparison_score_higher_filesize.setValue( self._new_options.GetInteger( 'duplicate_comparison_score_higher_filesize' ) )
        self._duplicate_comparison_score_much_higher_filesize.setValue( self._new_options.GetInteger( 'duplicate_comparison_score_much_higher_filesize' ) )
        self._duplicate_comparison_score_higher_resolution.setValue( self._new_options.GetInteger( 'duplicate_comparison_score_higher_resolution' ) )
        self._duplicate_comparison_score_much_higher_resolution.setValue( self._new_options.GetInteger( 'duplicate_comparison_score_much_higher_resolution' ) )
        self._duplicate_comparison_score_more_tags.setValue( self._new_options.GetInteger( 'duplicate_comparison_score_more_tags' ) )
        self._duplicate_comparison_score_older.setValue( self._new_options.GetInteger( 'duplicate_comparison_score_older' ) )
        self._duplicate_comparison_score_nicer_ratio.setValue( self._new_options.GetInteger( 'duplicate_comparison_score_nicer_ratio' ) )
        
        self._duplicate_filter_max_batch_size.setValue( self._new_options.GetInteger( 'duplicate_filter_max_batch_size' ) )
        
        #
        
        # lay out the weight spinboxes as labelled grid rows inside the weights box
        rows = []
        
        rows.append( ( 'Score for jpeg with non-trivially higher jpeg quality:', self._duplicate_comparison_score_higher_jpeg_quality ) )
        rows.append( ( 'Score for jpeg with significantly higher jpeg quality:', self._duplicate_comparison_score_much_higher_jpeg_quality ) )
        rows.append( ( 'Score for file with non-trivially higher filesize:', self._duplicate_comparison_score_higher_filesize ) )
        rows.append( ( 'Score for file with significantly higher filesize:', self._duplicate_comparison_score_much_higher_filesize ) )
        rows.append( ( 'Score for file with higher resolution (as num pixels):', self._duplicate_comparison_score_higher_resolution ) )
        rows.append( ( 'Score for file with significantly higher resolution (as num pixels):', self._duplicate_comparison_score_much_higher_resolution ) )
        rows.append( ( 'Score for file with more tags:', self._duplicate_comparison_score_more_tags ) )
        rows.append( ( 'Score for file with non-trivially earlier import time:', self._duplicate_comparison_score_older ) )
        rows.append( ( 'Score for file with \'nicer\' resolution ratio:', self._duplicate_comparison_score_nicer_ratio ) )
        
        gridbox = ClientGUICommon.WrapInGrid( weights_panel, rows )
        
        label = 'When processing potential duplicate pairs in the duplicate filter, the client tries to present the \'best\' file first. It judges the two files on a variety of potential differences, each with a score. The file with the greatest total score is presented first. Here you can tinker with these scores.'
        label += os.linesep * 2
        label += 'I recommend you leave all these as positive numbers, but if you wish, you can set a negative number to reduce the score.'
        
        st = ClientGUICommon.BetterStaticText( weights_panel, label )
        st.setWordWrap( True )
        
        weights_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
        weights_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        #
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, weights_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        # the batch size row sits below the weights box, directly on the panel
        rows = []
        
        rows.append( ( 'Max size of duplicate filter pair batches:', self._duplicate_filter_max_batch_size ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        """Write the current control values back into the options object.
        Called by the parent dialog when the user commits the options."""
        
        self._new_options.SetInteger( 'duplicate_comparison_score_higher_jpeg_quality', self._duplicate_comparison_score_higher_jpeg_quality.value() )
        self._new_options.SetInteger( 'duplicate_comparison_score_much_higher_jpeg_quality', self._duplicate_comparison_score_much_higher_jpeg_quality.value() )
        self._new_options.SetInteger( 'duplicate_comparison_score_higher_filesize', self._duplicate_comparison_score_higher_filesize.value() )
        self._new_options.SetInteger( 'duplicate_comparison_score_much_higher_filesize', self._duplicate_comparison_score_much_higher_filesize.value() )
        self._new_options.SetInteger( 'duplicate_comparison_score_higher_resolution', self._duplicate_comparison_score_higher_resolution.value() )
        self._new_options.SetInteger( 'duplicate_comparison_score_much_higher_resolution', self._duplicate_comparison_score_much_higher_resolution.value() )
        self._new_options.SetInteger( 'duplicate_comparison_score_more_tags', self._duplicate_comparison_score_more_tags.value() )
        self._new_options.SetInteger( 'duplicate_comparison_score_older', self._duplicate_comparison_score_older.value() )
        self._new_options.SetInteger( 'duplicate_comparison_score_nicer_ratio', self._duplicate_comparison_score_nicer_ratio.value() )
        
        self._new_options.SetInteger( 'duplicate_filter_max_batch_size', self._duplicate_filter_max_batch_size.value() )
        
class _ExternalProgramsPanel( QW.QWidget ):
    """Options page for external program launch paths: a manual web browser
    launch command and a per-mimetype list of 'open externally' commands."""
    
    def __init__( self, parent ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = HG.client_controller.new_options
        
        mime_panel = ClientGUICommon.StaticBox( self, '\'open externally\' launch paths' )
        
        self._web_browser_path = QW.QLineEdit( mime_panel )
        
        # list rows are ( mime, launch_path ) tuples; launch_path of None means 'use OS default'
        self._mime_launch_listctrl = ClientGUIListCtrl.BetterListCtrl( mime_panel, CGLC.COLUMN_LIST_EXTERNAL_PROGRAMS.ID, 15, self._ConvertMimeToListCtrlTuples, activation_callback = self._EditMimeLaunch )
        
        #
        
        web_browser_path = self._new_options.GetNoneableString( 'web_browser_path' )
        
        # None means 'unset' -- leave the text box empty in that case
        if web_browser_path is not None:
            
            self._web_browser_path.setText( web_browser_path )
            
        
        for mime in HC.SEARCHABLE_MIMES:
            
            launch_path = self._new_options.GetMimeLaunch( mime )
            
            self._mime_launch_listctrl.AddDatas( [ ( mime, launch_path ) ] )
            
        
        self._mime_launch_listctrl.Sort()
        
        #
        
        vbox = QP.VBoxLayout()
        
        text = 'Setting a specific web browser path here--like \'C:\\program files\\firefox\\firefox.exe "%path%"\'--can help with the \'share->open->in web browser\' command, which is buggy working with OS defaults, particularly on Windows. It also fixes #anchors, which are dropped in some OSes using default means. Use the same %path% format for the \'open externally\' commands below.'
        
        st = ClientGUICommon.BetterStaticText( mime_panel, text )
        
        st.setWordWrap( True )
        
        mime_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        rows = []
        
        rows.append( ( 'Manual web browser launch path: ', self._web_browser_path ) )
        
        gridbox = ClientGUICommon.WrapInGrid( mime_panel, rows )
        
        mime_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        mime_panel.Add( self._mime_launch_listctrl, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        QP.AddToLayout( vbox, mime_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.setLayout( vbox )
        
    
    def _ConvertMimeToListCtrlTuples( self, data ):
        """Convert a ( mime, launch_path ) row into ( display_tuple, sort_tuple ) for the list control."""
        
        ( mime, launch_path ) = data
        
        pretty_mime = HC.mime_string_lookup[ mime ]
        
        if launch_path is None:
            
            # no custom path set -- show what the OS default resolves to
            pretty_launch_path = 'default: {}'.format( HydrusPaths.GetDefaultLaunchPath() )
            
        else:
            
            pretty_launch_path = launch_path
            
        
        display_tuple = ( pretty_mime, pretty_launch_path )
        sort_tuple = display_tuple
        
        return ( display_tuple, sort_tuple )
        
    
    def _EditMimeLaunch( self ):
        """Prompt the user for a new launch path for each selected mime row.
        Cancelling the dialog stops the whole run of edits."""
        
        for ( mime, launch_path ) in self._mime_launch_listctrl.GetData( only_selected = True ):
            
            message = 'Enter the new launch path for {}'.format( HC.mime_string_lookup[ mime ] )
            message += os.linesep * 2
            message += 'Hydrus will insert the file\'s full path wherever you put %path%, even multiple times!'
            message += os.linesep * 2
            message += 'Set as blank to reset to default.'
            
            if launch_path is None:
                
                # offer a template when there is no current custom path
                default = 'program "%path%"'
                
            else:
                
                default = launch_path
                
            
            with ClientGUIDialogs.DialogTextEntry( self, message, default = default, allow_blank = True ) as dlg:
                
                if dlg.exec() == QW.QDialog.Accepted:
                    
                    new_launch_path = dlg.GetValue()
                    
                    # blank entry resets this mime to the OS default
                    if new_launch_path == '':
                        
                        new_launch_path = None
                        
                    
                    # only touch the row if the value actually changed
                    # NOTE(review): when launch_path is None and the user accepts the suggested
                    # template unchanged, it also counts as 'no change' -- presumably intended
                    if new_launch_path not in ( launch_path, default ):
                        
                        self._mime_launch_listctrl.DeleteDatas( [ ( mime, launch_path ) ] )
                        self._mime_launch_listctrl.AddDatas( [ ( mime, new_launch_path ) ] )
                        
                    
                else:
                    
                    # user cancelled -- abandon any remaining selected rows
                    break
                    
                
            
        
        self._mime_launch_listctrl.Sort()
        
    
    def UpdateOptions( self ):
        """Write the current control values back into the options object."""
        
        web_browser_path = self._web_browser_path.text()
        
        # empty text box is stored as None ( 'unset' )
        if web_browser_path == '':
            
            web_browser_path = None
            
        
        self._new_options.SetNoneableString( 'web_browser_path', web_browser_path )
        
        for ( mime, launch_path ) in self._mime_launch_listctrl.GetData():
            
            self._new_options.SetMimeLaunch( mime, launch_path )
            
        
class _FilesAndTrashPanel( QW.QWidget ):
    """Options page for file/trash behaviour: export directory, trash limits,
    recycle bin use, the archived-file delete lock, and the advanced file
    deletion dialog with its custom reasons."""
    
    def __init__( self, parent ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = HG.client_controller.new_options
        
        self._export_location = QP.DirPickerCtrl( self )
        
        self._prefix_hash_when_copying = QW.QCheckBox( self )
        self._prefix_hash_when_copying.setToolTip( 'If you often paste hashes into boorus, check this to automatically prefix with the type, like "md5:2496dabcbd69e3c56a5d8caabb7acde5".' )
        
        self._delete_to_recycle_bin = QW.QCheckBox( self )
        
        self._confirm_trash = QW.QCheckBox( self )
        self._confirm_archive = QW.QCheckBox( self )
        
        self._remove_filtered_files = QW.QCheckBox( self )
        self._remove_trashed_files = QW.QCheckBox( self )
        
        # trash limits: age in hours, size in MB; None disables the respective limit
        self._trash_max_age = ClientGUICommon.NoneableSpinCtrl( self, '', none_phrase = 'no age limit', min = 0, max = 8640 )
        self._trash_max_size = ClientGUICommon.NoneableSpinCtrl( self, '', none_phrase = 'no size limit', min = 0, max = 20480 )
        
        delete_lock_panel = ClientGUICommon.StaticBox( self, 'delete lock' )
        
        self._delete_lock_for_archived_files = QW.QCheckBox( delete_lock_panel )
        
        advanced_file_deletion_panel = ClientGUICommon.StaticBox( self, 'advanced file deletion and custom reasons' )
        
        self._use_advanced_file_deletion_dialog = QW.QCheckBox( advanced_file_deletion_panel )
        self._use_advanced_file_deletion_dialog.setToolTip( 'If this is set, the client will present a more complicated file deletion confirmation dialog that will permit you to set your own deletion reason and perform \'clean\' deletes that leave no deletion record (making later re-import easier).' )
        
        self._remember_last_advanced_file_deletion_special_action = QW.QCheckBox( advanced_file_deletion_panel )
        self._remember_last_advanced_file_deletion_special_action.setToolTip( 'This will try to remember and restore the last action you set, whether that was trash, physical delete, or physical delete and clear history.')
        
        self._remember_last_advanced_file_deletion_reason = QW.QCheckBox( advanced_file_deletion_panel )
        self._remember_last_advanced_file_deletion_reason.setToolTip( 'This will remember and restore the last reason you set for a delete.' )
        
        # editable, orderable list of custom deletion reason strings
        self._advanced_file_deletion_reasons = ClientGUIListBoxes.QueueListBox( advanced_file_deletion_panel, 5, str, add_callable = self._AddAFDR, edit_callable = self._EditAFDR )
        
        #
        
        # export_path is stored as a portable path; convert for display
        if HC.options[ 'export_path' ] is not None:
            
            abs_path = HydrusPaths.ConvertPortablePathToAbsPath( HC.options[ 'export_path' ] )
            
            if abs_path is not None:
                
                self._export_location.SetPath( abs_path )
                
            
        
        self._prefix_hash_when_copying.setChecked( self._new_options.GetBoolean( 'prefix_hash_when_copying' ) )
        
        self._delete_to_recycle_bin.setChecked( HC.options[ 'delete_to_recycle_bin' ] )
        
        self._confirm_trash.setChecked( HC.options[ 'confirm_trash' ] )
        self._confirm_archive.setChecked( HC.options[ 'confirm_archive' ] )
        
        self._remove_filtered_files.setChecked( HC.options[ 'remove_filtered_files' ] )
        self._remove_trashed_files.setChecked( HC.options[ 'remove_trashed_files' ] )
        
        self._trash_max_age.SetValue( HC.options[ 'trash_max_age' ] )
        self._trash_max_size.SetValue( HC.options[ 'trash_max_size' ] )
        
        self._delete_lock_for_archived_files.setChecked( self._new_options.GetBoolean( 'delete_lock_for_archived_files' ) )
        
        self._use_advanced_file_deletion_dialog.setChecked( self._new_options.GetBoolean( 'use_advanced_file_deletion_dialog' ) )
        
        # the advanced sub-controls enable/disable live with the master checkbox
        self._use_advanced_file_deletion_dialog.clicked.connect( self._UpdateAdvancedControls )
        
        self._remember_last_advanced_file_deletion_special_action.setChecked( HG.client_controller.new_options.GetBoolean( 'remember_last_advanced_file_deletion_special_action' ) )
        self._remember_last_advanced_file_deletion_reason.setChecked( HG.client_controller.new_options.GetBoolean( 'remember_last_advanced_file_deletion_reason' ) )
        
        self._advanced_file_deletion_reasons.AddDatas( self._new_options.GetStringList( 'advanced_file_deletion_reasons' ) )
        
        self._UpdateAdvancedControls()
        
        #
        
        vbox = QP.VBoxLayout()
        
        text = 'If you set the default export directory blank, the client will use \'hydrus_export\' under the current user\'s home directory.'
        
        QP.AddToLayout( vbox, ClientGUICommon.BetterStaticText(self,text), CC.FLAGS_CENTER )
        
        rows = []
        
        rows.append( ( 'When copying a file hashes, prefix with booru-friendly hash type: ', self._prefix_hash_when_copying ) )
        rows.append( ( 'Confirm sending files to trash: ', self._confirm_trash ) )
        rows.append( ( 'Confirm sending more than one file to archive or inbox: ', self._confirm_archive ) )
        rows.append( ( 'When deleting files or folders, send them to the OS\'s recycle bin: ', self._delete_to_recycle_bin ) )
        rows.append( ( 'Remove files from view when they are filtered: ', self._remove_filtered_files ) )
        rows.append( ( 'Remove files from view when they are sent to the trash: ', self._remove_trashed_files ) )
        rows.append( ( 'Number of hours a file can be in the trash before being deleted: ', self._trash_max_age ) )
        rows.append( ( 'Maximum size of trash (MB): ', self._trash_max_size ) )
        rows.append( ( 'Default export directory: ', self._export_location ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        #
        
        rows = []
        
        rows.append( ( 'Do not permit archived files to be trashed or deleted: ', self._delete_lock_for_archived_files ) )
        
        gridbox = ClientGUICommon.WrapInGrid( delete_lock_panel, rows )
        
        delete_lock_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        QP.AddToLayout( vbox, delete_lock_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        #
        
        rows = []
        
        rows.append( ( 'Use the advanced file deletion dialog: ', self._use_advanced_file_deletion_dialog ) )
        rows.append( ( 'Remember the last action: ', self._remember_last_advanced_file_deletion_special_action ) )
        rows.append( ( 'Remember the last reason: ', self._remember_last_advanced_file_deletion_reason ) )
        
        gridbox = ClientGUICommon.WrapInGrid( advanced_file_deletion_panel, rows )
        
        advanced_file_deletion_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        advanced_file_deletion_panel.Add( self._advanced_file_deletion_reasons, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        #
        
        QP.AddToLayout( vbox, advanced_file_deletion_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.setLayout( vbox )
        
    
    def _AddAFDR( self ):
        """Add-callable for the reasons listbox: start the edit dialog with a stock reason."""
        
        reason = 'I do not like the file.'
        
        return self._EditAFDR( reason )
        
    
    def _EditAFDR( self, reason ):
        """Edit-callable for the reasons listbox: prompt for a reason string.
        Raises VetoException on cancel, which signals the listbox to abort the edit."""
        
        with ClientGUIDialogs.DialogTextEntry( self, 'enter the reason', default = reason, allow_blank = False ) as dlg:
            
            if dlg.exec() == QW.QDialog.Accepted:
                
                reason = dlg.GetValue()
                
                return reason
                
            else:
                
                raise HydrusExceptions.VetoException()
                
            
        
    
    def _UpdateAdvancedControls( self ):
        """Enable/disable the advanced-deletion sub-controls to match the master checkbox."""
        
        advanced_enabled = self._use_advanced_file_deletion_dialog.isChecked()
        
        self._remember_last_advanced_file_deletion_special_action.setEnabled( advanced_enabled )
        self._remember_last_advanced_file_deletion_reason.setEnabled( advanced_enabled )
        self._advanced_file_deletion_reasons.setEnabled( advanced_enabled )
        
    
    def UpdateOptions( self ):
        """Write the current control values back into HC.options and the new options object."""
        
        # store the export path back in portable form
        HC.options[ 'export_path' ] = HydrusPaths.ConvertAbsPathToPortablePath( self._export_location.GetPath() )
        
        self._new_options.SetBoolean( 'prefix_hash_when_copying', self._prefix_hash_when_copying.isChecked() )
        
        HC.options[ 'delete_to_recycle_bin' ] = self._delete_to_recycle_bin.isChecked()
        HC.options[ 'confirm_trash' ] = self._confirm_trash.isChecked()
        HC.options[ 'confirm_archive' ] = self._confirm_archive.isChecked()
        HC.options[ 'remove_filtered_files' ] = self._remove_filtered_files.isChecked()
        HC.options[ 'remove_trashed_files' ] = self._remove_trashed_files.isChecked()
        HC.options[ 'trash_max_age' ] = self._trash_max_age.GetValue()
        HC.options[ 'trash_max_size' ] = self._trash_max_size.GetValue()
        
        self._new_options.SetBoolean( 'delete_lock_for_archived_files', self._delete_lock_for_archived_files.isChecked() )
        
        self._new_options.SetBoolean( 'use_advanced_file_deletion_dialog', self._use_advanced_file_deletion_dialog.isChecked() )
        
        self._new_options.SetStringList( 'advanced_file_deletion_reasons', self._advanced_file_deletion_reasons.GetData() )
        
        # NOTE(review): these two use HG.client_controller.new_options while the rest use
        # self._new_options -- presumably the same object (see __init__); confirm and unify
        HG.client_controller.new_options.SetBoolean( 'remember_last_advanced_file_deletion_special_action', self._remember_last_advanced_file_deletion_special_action.isChecked() )
        HG.client_controller.new_options.SetBoolean( 'remember_last_advanced_file_deletion_reason', self._remember_last_advanced_file_deletion_reason.isChecked() )
        
class _FileViewingStatisticsPanel( QW.QWidget ):
    """Options page for file viewing statistics: whether tracking is on, the
    min/max view-time thresholds for media and preview viewers, and how the
    stats show on media right-click menus."""
    
    def __init__( self, parent ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = HG.client_controller.new_options
        
        self._file_viewing_statistics_active = QW.QCheckBox( self )
        self._file_viewing_statistics_active_on_dupe_filter = QW.QCheckBox( self )
        
        # time thresholds in seconds; None disables the respective threshold
        self._file_viewing_statistics_media_min_time = ClientGUICommon.NoneableSpinCtrl( self )
        self._file_viewing_statistics_media_max_time = ClientGUICommon.NoneableSpinCtrl( self )
        self._file_viewing_statistics_preview_min_time = ClientGUICommon.NoneableSpinCtrl( self )
        self._file_viewing_statistics_preview_max_time = ClientGUICommon.NoneableSpinCtrl( self )
        
        # dropdown mapping a human label to a FILE_VIEWING_STATS_MENU_DISPLAY_* constant
        self._file_viewing_stats_menu_display = ClientGUICommon.BetterChoice( self )
        
        self._file_viewing_stats_menu_display.addItem( 'do not show', CC.FILE_VIEWING_STATS_MENU_DISPLAY_NONE )
        self._file_viewing_stats_menu_display.addItem( 'show media', CC.FILE_VIEWING_STATS_MENU_DISPLAY_MEDIA_ONLY )
        self._file_viewing_stats_menu_display.addItem( 'show media, and put preview in a submenu', CC.FILE_VIEWING_STATS_MENU_DISPLAY_MEDIA_AND_PREVIEW_IN_SUBMENU )
        self._file_viewing_stats_menu_display.addItem( 'show media and preview in two lines', CC.FILE_VIEWING_STATS_MENU_DISPLAY_MEDIA_AND_PREVIEW_STACKED )
        self._file_viewing_stats_menu_display.addItem( 'show media and preview combined', CC.FILE_VIEWING_STATS_MENU_DISPLAY_MEDIA_AND_PREVIEW_SUMMED )
        
        #
        
        # populate the controls from the current option values
        self._file_viewing_statistics_active.setChecked( self._new_options.GetBoolean( 'file_viewing_statistics_active' ) )
        self._file_viewing_statistics_active_on_dupe_filter.setChecked( self._new_options.GetBoolean( 'file_viewing_statistics_active_on_dupe_filter' ) )
        self._file_viewing_statistics_media_min_time.SetValue( self._new_options.GetNoneableInteger( 'file_viewing_statistics_media_min_time' ) )
        self._file_viewing_statistics_media_max_time.SetValue( self._new_options.GetNoneableInteger( 'file_viewing_statistics_media_max_time' ) )
        self._file_viewing_statistics_preview_min_time.SetValue( self._new_options.GetNoneableInteger( 'file_viewing_statistics_preview_min_time' ) )
        self._file_viewing_statistics_preview_max_time.SetValue( self._new_options.GetNoneableInteger( 'file_viewing_statistics_preview_max_time' ) )
        
        self._file_viewing_stats_menu_display.SetValue( self._new_options.GetInteger( 'file_viewing_stats_menu_display' ) )
        
        #
        
        vbox = QP.VBoxLayout()
        
        rows = []
        
        rows.append( ( 'Enable file viewing statistics tracking?:', self._file_viewing_statistics_active ) )
        rows.append( ( 'Enable file viewing statistics tracking on the duplicate filter?:', self._file_viewing_statistics_active_on_dupe_filter ) )
        rows.append( ( 'Min time to view on media viewer to count as a view (seconds):', self._file_viewing_statistics_media_min_time ) )
        rows.append( ( 'Cap any view on the media viewer to this maximum time (seconds):', self._file_viewing_statistics_media_max_time ) )
        rows.append( ( 'Min time to view on preview viewer to count as a view (seconds):', self._file_viewing_statistics_preview_min_time ) )
        rows.append( ( 'Cap any view on the preview viewer to this maximum time (seconds):', self._file_viewing_statistics_preview_max_time ) )
        rows.append( ( 'Show media/preview viewing stats on media right-click menus?:', self._file_viewing_stats_menu_display ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        """Write the current control values back into the options object."""
        
        self._new_options.SetBoolean( 'file_viewing_statistics_active', self._file_viewing_statistics_active.isChecked() )
        self._new_options.SetBoolean( 'file_viewing_statistics_active_on_dupe_filter', self._file_viewing_statistics_active_on_dupe_filter.isChecked() )
        self._new_options.SetNoneableInteger( 'file_viewing_statistics_media_min_time', self._file_viewing_statistics_media_min_time.GetValue() )
        self._new_options.SetNoneableInteger( 'file_viewing_statistics_media_max_time', self._file_viewing_statistics_media_max_time.GetValue() )
        self._new_options.SetNoneableInteger( 'file_viewing_statistics_preview_min_time', self._file_viewing_statistics_preview_min_time.GetValue() )
        self._new_options.SetNoneableInteger( 'file_viewing_statistics_preview_max_time', self._file_viewing_statistics_preview_max_time.GetValue() )
        
        self._new_options.SetInteger( 'file_viewing_stats_menu_display', self._file_viewing_stats_menu_display.GetValue() )
        
class _GUIPanel( QW.QWidget ):
    """Options page for general GUI behaviour: main window settings, misc
    fixes (discord drag-and-drop, macOS/Qt dialog workarounds), and the
    editable table of saved frame/dialog size-and-position records."""
    
    def __init__( self, parent ):
        
        QW.QWidget.__init__( self, parent )
        
        self._main_gui_panel = ClientGUICommon.StaticBox( self, 'main window' )
        
        self._app_display_name = QW.QLineEdit( self._main_gui_panel )
        self._app_display_name.setToolTip( 'This is placed in every window title, with current version name. Rename if you want to personalise or differentiate.' )
        
        self._confirm_client_exit = QW.QCheckBox( self._main_gui_panel )
        
        self._activate_window_on_tag_search_page_activation = QW.QCheckBox( self._main_gui_panel )
        
        tt = 'Middle-clicking one or more tags in a taglist will cause the creation of a new search page for those tags. If you do this from the media viewer or a child manage tags dialog, do you want to switch immediately to the main gui?'
        
        self._activate_window_on_tag_search_page_activation.setToolTip( tt )
        
        #
        
        self._misc_panel = ClientGUICommon.StaticBox( self, 'misc' )
        
        self._always_show_iso_time = QW.QCheckBox( self._misc_panel )
        tt = 'In many places across the program (typically import status lists), the client will state a timestamp as "5 days ago". If you would prefer a standard ISO string, like "2018-03-01 12:40:23", check this.'
        self._always_show_iso_time.setToolTip( tt )
        
        self._human_bytes_sig_figs = QP.MakeQSpinBox( self._misc_panel, min = 1, max = 6 )
        self._human_bytes_sig_figs.setToolTip( 'When the program presents a bytes size above 1KB, like 21.3KB or 4.11GB, how many total digits do we want in the number? 2 or 3 is best.')
        
        self._discord_dnd_fix = QW.QCheckBox( self._misc_panel )
        self._discord_dnd_fix.setToolTip( 'This makes small file drag-and-drops a little laggier in exchange for discord support.' )
        
        self._discord_dnd_filename_pattern = QW.QLineEdit( self._misc_panel )
        self._discord_dnd_filename_pattern.setToolTip( 'When discord DnD is enabled, this will use this export phrase to rename your files. If no filename can be generated, hash will be used instead.' )
        
        self._secret_discord_dnd_fix = QW.QCheckBox( self._misc_panel )
        self._secret_discord_dnd_fix.setToolTip( 'This saves the lag but is potentially dangerous, as it (may) treat the from-db-files-drag as a move rather than a copy and hence only works when the drop destination will not consume the files. It requires an additional secret Alternate key to unlock.' )
        
        self._do_macos_debug_dialog_menus = QW.QCheckBox( self._misc_panel )
        self._do_macos_debug_dialog_menus.setToolTip( 'There is a bug in Big Sur Qt regarding interacting with some menus in dialogs. The menus show but cannot be clicked. This shows the menu items in a debug dialog instead.' )
        
        self._use_qt_file_dialogs = QW.QCheckBox( self._misc_panel )
        self._use_qt_file_dialogs.setToolTip( 'If you get crashes opening file/directory dialogs, try this.' )
        
        #
        
        frame_locations_panel = ClientGUICommon.StaticBox( self, 'frame locations' )
        
        # the same pretty tuple serves as both display and sort key for each row
        self._frame_locations = ClientGUIListCtrl.BetterListCtrl( frame_locations_panel, CGLC.COLUMN_LIST_FRAME_LOCATIONS.ID, 15, data_to_tuples_func = lambda x: (self._GetPrettyFrameLocationInfo( x ), self._GetPrettyFrameLocationInfo( x )), activation_callback = self.EditFrameLocations )
        
        self._frame_locations_edit_button = QW.QPushButton( 'edit', frame_locations_panel )
        self._frame_locations_edit_button.clicked.connect( self.EditFrameLocations )
        
        #
        
        self._new_options = HG.client_controller.new_options
        
        self._app_display_name.setText( self._new_options.GetString( 'app_display_name' ) )
        
        self._confirm_client_exit.setChecked( HC.options[ 'confirm_client_exit' ] )
        
        self._activate_window_on_tag_search_page_activation.setChecked( self._new_options.GetBoolean( 'activate_window_on_tag_search_page_activation' ) )
        
        self._always_show_iso_time.setChecked( self._new_options.GetBoolean( 'always_show_iso_time' ) )
        
        self._human_bytes_sig_figs.setValue( self._new_options.GetInteger( 'human_bytes_sig_figs' ) )
        
        self._discord_dnd_fix.setChecked( self._new_options.GetBoolean( 'discord_dnd_fix' ) )
        
        self._discord_dnd_filename_pattern.setText( self._new_options.GetString( 'discord_dnd_filename_pattern' ) )
        
        self._secret_discord_dnd_fix.setChecked( self._new_options.GetBoolean( 'secret_discord_dnd_fix' ) )
        
        self._do_macos_debug_dialog_menus.setChecked( self._new_options.GetBoolean( 'do_macos_debug_dialog_menus' ) )
        
        self._use_qt_file_dialogs.setChecked( self._new_options.GetBoolean( 'use_qt_file_dialogs' ) )
        
        # each frame location row is ( name, *info ) flattened into one tuple
        for ( name, info ) in self._new_options.GetFrameLocations():
            
            listctrl_list = QP.ListsToTuples( [ name ] + list( info ) )
            
            self._frame_locations.AddDatas( ( listctrl_list, ) )
            
        
        #self._frame_locations.SortListItems( col = 0 )
        
        #
        
        rows = []
        
        rows.append( ( 'Application display name: ', self._app_display_name ) )
        rows.append( ( 'Confirm client exit: ', self._confirm_client_exit ) )
        rows.append( ( 'Switch to main window when opening tag search page from media viewer: ', self._activate_window_on_tag_search_page_activation ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self._main_gui_panel, rows )
        
        self._main_gui_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        rows = []
        
        rows.append( ( 'Prefer ISO time ("2018-03-01 12:40:23") to "5 days ago": ', self._always_show_iso_time ) )
        rows.append( ( 'BUGFIX: Discord file drag-and-drop fix (works for <=25, <200MB file DnDs): ', self._discord_dnd_fix ) )
        rows.append( ( 'Discord drag-and-drop filename pattern: ', self._discord_dnd_filename_pattern ) )
        rows.append( ( 'Export pattern shortcuts: ', ClientGUICommon.ExportPatternButton( self ) ) )
        rows.append( ( 'EXPERIMENTAL: Bytes strings >1KB pseudo significant figures: ', self._human_bytes_sig_figs ) )
        rows.append( ( 'EXPERIMENTAL BUGFIX: Secret discord file drag-and-drop fix: ', self._secret_discord_dnd_fix ) )
        rows.append( ( 'BUGFIX: If on macOS, show dialog menus in a debug menu: ', self._do_macos_debug_dialog_menus ) )
        rows.append( ( 'ANTI-CRASH BUGFIX: Use Qt file/directory selection dialogs, rather than OS native: ', self._use_qt_file_dialogs ) )
        
        # NOTE(review): this grid is wrapped with parent 'self' but added to self._misc_panel,
        # unlike the other grids which pass the destination box -- confirm intended
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        self._misc_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        text = 'Here you can override the current and default values for many frame and dialog sizing and positioning variables.'
        text += os.linesep
        text += 'This is an advanced control. If you aren\'t confident of what you are doing here, come back later!'
        
        frame_locations_panel.Add( QW.QLabel( text, frame_locations_panel ), CC.FLAGS_EXPAND_PERPENDICULAR )
        frame_locations_panel.Add( self._frame_locations, CC.FLAGS_EXPAND_BOTH_WAYS )
        frame_locations_panel.Add( self._frame_locations_edit_button, CC.FLAGS_ON_RIGHT )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, self._main_gui_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, self._misc_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, frame_locations_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.setLayout( vbox )
        
    
    def _GetPrettyFrameLocationInfo( self, listctrl_list ):
        """Convert every item of a frame location tuple to its str() form for display."""
        
        pretty_listctrl_list = []
        
        for item in listctrl_list:
            
            pretty_listctrl_list.append( str( item ) )
            
        
        return pretty_listctrl_list
        
    
    def EditFrameLocations( self ):
        """Open an edit dialog for each selected frame location row, replacing the
        row with the edited value when the dialog is accepted."""
        
        for listctrl_list in self._frame_locations.GetData( only_selected = True ):
            
            title = 'set frame location information'
            
            with ClientGUITopLevelWindowsPanels.DialogEdit( self, title ) as dlg:
                
                panel = ClientGUIScrolledPanelsEdit.EditFrameLocationPanel( dlg, listctrl_list )
                
                dlg.SetPanel( panel )
                
                if dlg.exec() == QW.QDialog.Accepted:
                    
                    new_listctrl_list = panel.GetValue()
                    
                    self._frame_locations.ReplaceData( listctrl_list, new_listctrl_list )
                    
                
            
        
    
    def UpdateOptions( self ):
        """Write the current control values back into HC.options and the new options object."""
        
        HC.options[ 'confirm_client_exit' ] = self._confirm_client_exit.isChecked()
        
        self._new_options.SetBoolean( 'always_show_iso_time', self._always_show_iso_time.isChecked() )
        
        self._new_options.SetInteger( 'human_bytes_sig_figs', self._human_bytes_sig_figs.value() )
        
        self._new_options.SetBoolean( 'activate_window_on_tag_search_page_activation', self._activate_window_on_tag_search_page_activation.isChecked() )
        
        app_display_name = self._app_display_name.text()
        
        # never allow an empty display name -- fall back to the stock name
        if app_display_name == '':
            
            app_display_name = 'hydrus client'
            
        
        self._new_options.SetString( 'app_display_name', app_display_name )
        
        self._new_options.SetBoolean( 'discord_dnd_fix', self._discord_dnd_fix.isChecked() )
        
        self._new_options.SetString( 'discord_dnd_filename_pattern', self._discord_dnd_filename_pattern.text() )
        
        self._new_options.SetBoolean( 'secret_discord_dnd_fix', self._secret_discord_dnd_fix.isChecked() )
        
        self._new_options.SetBoolean( 'do_macos_debug_dialog_menus', self._do_macos_debug_dialog_menus.isChecked() )
        
        self._new_options.SetBoolean( 'use_qt_file_dialogs', self._use_qt_file_dialogs.isChecked() )
        
        # unpack each row tuple back into the individual frame location fields
        for listctrl_list in self._frame_locations.GetData():
            
            ( name, remember_size, remember_position, last_size, last_position, default_gravity, default_position, maximised, fullscreen ) = listctrl_list
            
            self._new_options.SetFrameLocation( name, remember_size, remember_position, last_size, last_position, default_gravity, default_position, maximised, fullscreen )
            
        
    class _GUIPagesPanel( QW.QWidget ):
        """Options panel covering gui sessions, page tabs, and page-related controls."""
        
        def __init__( self, parent, new_options ):
            """Build the widgets, populate them from current options, and lay everything out.
            
            The order here matters: widgets are created first, then filled with
            current values, then assembled into static boxes and the final vbox.
            
            :param parent: Qt parent widget.
            :param new_options: the client's options object backing these settings.
            """
            QW.QWidget.__init__( self, parent )
            self._new_options = new_options
            # sessions box: which session loads on boot and how 'last session' autosaves
            self._sessions_panel = ClientGUICommon.StaticBox( self, 'sessions' )
            self._default_gui_session = QW.QComboBox( self._sessions_panel )
            self._last_session_save_period_minutes = QP.MakeQSpinBox( self._sessions_panel, min = 1, max = 1440 )
            self._only_save_last_session_during_idle = QW.QCheckBox( self._sessions_panel )
            self._only_save_last_session_during_idle.setToolTip( 'This is useful if you usually have a very large session (200,000+ files/import items open) and a client that is always on.' )
            self._number_of_gui_session_backups = QP.MakeQSpinBox( self._sessions_panel, min = 1, max = 32 )
            self._number_of_gui_session_backups.setToolTip( 'The client keeps multiple rolling backups of your gui sessions. If you have very large sessions, you might like to reduce this number.' )
            self._show_session_size_warnings = QW.QCheckBox( self._sessions_panel )
            self._show_session_size_warnings.setToolTip( 'This will give you a once-per-boot warning popup if your active session contains more than 10M weight.' )
            # pages box: where new page tabs go and how the tab bar is aligned
            self._pages_panel = ClientGUICommon.StaticBox( self, 'pages' )
            self._default_new_page_goes = ClientGUICommon.BetterChoice( self._pages_panel )
            for value in [ CC.NEW_PAGE_GOES_FAR_LEFT, CC.NEW_PAGE_GOES_LEFT_OF_CURRENT, CC.NEW_PAGE_GOES_RIGHT_OF_CURRENT, CC.NEW_PAGE_GOES_FAR_RIGHT ]:
                self._default_new_page_goes.addItem( CC.new_page_goes_string_lookup[ value], value )
            self._notebook_tab_alignment = ClientGUICommon.BetterChoice( self._pages_panel )
            for value in [ CC.DIRECTION_UP, CC.DIRECTION_LEFT, CC.DIRECTION_RIGHT, CC.DIRECTION_DOWN ]:
                self._notebook_tab_alignment.addItem( CC.directions_alignment_string_lookup[ value ], value )
            self._total_pages_warning = QP.MakeQSpinBox( self._pages_panel, min=5, max=500 )
            self._reverse_page_shift_drag_behaviour = QW.QCheckBox( self._pages_panel )
            self._reverse_page_shift_drag_behaviour.setToolTip( 'By default, holding down shift when you drop off a page tab means the client will not \'chase\' the page tab. This makes this behaviour default, with shift-drop meaning to chase.' )
            # page tab names sub-box, nested inside the pages box
            self._page_names_panel = ClientGUICommon.StaticBox( self._pages_panel, 'page tab names' )
            self._max_page_name_chars = QP.MakeQSpinBox( self._page_names_panel, min=1, max=256 )
            self._elide_page_tab_names = QW.QCheckBox( self._page_names_panel )
            self._page_file_count_display = ClientGUICommon.BetterChoice( self._page_names_panel )
            for display_type in ( CC.PAGE_FILE_COUNT_DISPLAY_ALL, CC.PAGE_FILE_COUNT_DISPLAY_ONLY_IMPORTERS, CC.PAGE_FILE_COUNT_DISPLAY_NONE ):
                self._page_file_count_display.addItem( CC.page_file_count_display_string_lookup[ display_type], display_type )
            self._import_page_progress_display = QW.QCheckBox( self._page_names_panel )
            # controls box: focus/preview behaviour
            self._controls_panel = ClientGUICommon.StaticBox( self, 'controls' )
            self._set_search_focus_on_page_change = QW.QCheckBox( self._controls_panel )
            self._hide_preview = QW.QCheckBox( self._controls_panel )
            # populate the session dropdown: blank page option, then all saved session names
            gui_session_names = HG.client_controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_GUI_SESSION_CONTAINER )
            if CC.LAST_SESSION_SESSION_NAME not in gui_session_names:
                gui_session_names.insert( 0, CC.LAST_SESSION_SESSION_NAME )
            self._default_gui_session.addItem( 'just a blank page', None )
            for name in gui_session_names:
                self._default_gui_session.addItem( name, name )
            try:
                QP.SetStringSelection( self._default_gui_session, HC.options['default_gui_session'] )
            except:
                # saved session name no longer exists (or selection failed) -- fall back to 'just a blank page'
                self._default_gui_session.setCurrentIndex( 0 )
            # load the remaining current option values into the widgets
            self._last_session_save_period_minutes.setValue( self._new_options.GetInteger( 'last_session_save_period_minutes' ) )
            self._only_save_last_session_during_idle.setChecked( self._new_options.GetBoolean( 'only_save_last_session_during_idle' ) )
            self._number_of_gui_session_backups.setValue( self._new_options.GetInteger( 'number_of_gui_session_backups' ) )
            self._show_session_size_warnings.setChecked( self._new_options.GetBoolean( 'show_session_size_warnings' ) )
            self._default_new_page_goes.SetValue( self._new_options.GetInteger( 'default_new_page_goes' ) )
            self._notebook_tab_alignment.SetValue( self._new_options.GetInteger( 'notebook_tab_alignment' ) )
            self._max_page_name_chars.setValue( self._new_options.GetInteger( 'max_page_name_chars' ) )
            self._elide_page_tab_names.setChecked( self._new_options.GetBoolean( 'elide_page_tab_names' ) )
            self._page_file_count_display.SetValue( self._new_options.GetInteger( 'page_file_count_display' ) )
            self._import_page_progress_display.setChecked( self._new_options.GetBoolean( 'import_page_progress_display' ) )
            self._total_pages_warning.setValue( self._new_options.GetInteger( 'total_pages_warning' ) )
            self._reverse_page_shift_drag_behaviour.setChecked( self._new_options.GetBoolean( 'reverse_page_shift_drag_behaviour' ) )
            self._set_search_focus_on_page_change.setChecked( self._new_options.GetBoolean( 'set_search_focus_on_page_change' ) )
            self._hide_preview.setChecked( HC.options[ 'hide_preview' ] )
            # layout: sessions box
            rows = []
            rows.append( ( 'Default session on startup: ', self._default_gui_session ) )
            rows.append( ( 'If \'last session\' above, autosave it how often (minutes)?', self._last_session_save_period_minutes ) )
            rows.append( ( 'If \'last session\' above, only autosave during idle time?', self._only_save_last_session_during_idle ) )
            rows.append( ( 'Number of session backups to keep: ', self._number_of_gui_session_backups ) )
            rows.append( ( 'Show warning popup if session size exceeds 10,000,000: ', self._show_session_size_warnings ) )
            sessions_gridbox = ClientGUICommon.WrapInGrid( self._sessions_panel, rows )
            self._sessions_panel.Add( sessions_gridbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
            # layout: pages box
            rows = []
            rows.append( ( 'By default, put new page tabs on: ', self._default_new_page_goes ) )
            rows.append( ( 'Notebook tab alignment: ', self._notebook_tab_alignment ) )
            rows.append( ( 'Reverse page tab shift-drag behaviour: ', self._reverse_page_shift_drag_behaviour ) )
            rows.append( ( 'Warn at this many total pages: ', self._total_pages_warning ) )
            gridbox = ClientGUICommon.WrapInGrid( self._pages_panel, rows )
            # layout: page tab names sub-box
            rows = []
            rows.append( ( 'Max characters to display in a page name: ', self._max_page_name_chars ) )
            rows.append( ( 'When there are too many tabs to fit, \'...\' elide their names so they fit: ', self._elide_page_tab_names ) )
            rows.append( ( 'Show page file count after its name: ', self._page_file_count_display ) )
            rows.append( ( 'Show import page x/y progress after its name: ', self._import_page_progress_display ) )
            page_names_gridbox = ClientGUICommon.WrapInGrid( self._page_names_panel, rows )
            label = 'If you have enough pages in a row, left/right arrows will appear to navigate them back and forth.'
            label += os.linesep
            label += 'Due to an unfortunate Qt issue, the tab bar will scroll so the current tab is right-most visible whenever a page is renamed.'
            label += os.linesep
            label += 'Therefore, if you set pages to have current file count or import progress in their name (which will update from time to time), do not put import pages in a long row of tabs, as it will reset scroll position on every progress update.'
            label += os.linesep
            label += 'Just make some nested \'page of pages\' so they are not all in the same row.'
            st = ClientGUICommon.BetterStaticText( self._page_names_panel, label )
            st.setWordWrap( True )
            self._page_names_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
            self._page_names_panel.Add( page_names_gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            self._pages_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            self._pages_panel.Add( self._page_names_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            # layout: controls box
            rows = []
            rows.append( ( 'When switching to a page, focus its text input field (if any): ', self._set_search_focus_on_page_change ) )
            rows.append( ( 'Hide the bottom-left preview window: ', self._hide_preview ) )
            gridbox = ClientGUICommon.WrapInGrid( self._controls_panel, rows )
            self._controls_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
            # final assembly of the three boxes
            vbox = QP.VBoxLayout()
            QP.AddToLayout( vbox, self._sessions_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            QP.AddToLayout( vbox, self._pages_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, self._controls_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
            vbox.addStretch( 1 )
            self.setLayout( vbox )
        
        def UpdateOptions( self ):
            """Write this panel's widget states back into HC.options and the new_options object."""
            HC.options[ 'default_gui_session' ] = self._default_gui_session.currentText()
            self._new_options.SetInteger( 'notebook_tab_alignment', self._notebook_tab_alignment.GetValue() )
            self._new_options.SetInteger( 'last_session_save_period_minutes', self._last_session_save_period_minutes.value() )
            self._new_options.SetInteger( 'number_of_gui_session_backups', self._number_of_gui_session_backups.value() )
            self._new_options.SetBoolean( 'show_session_size_warnings', self._show_session_size_warnings.isChecked() )
            self._new_options.SetBoolean( 'only_save_last_session_during_idle', self._only_save_last_session_during_idle.isChecked() )
            self._new_options.SetInteger( 'default_new_page_goes', self._default_new_page_goes.GetValue() )
            self._new_options.SetInteger( 'max_page_name_chars', self._max_page_name_chars.value() )
            self._new_options.SetBoolean( 'elide_page_tab_names', self._elide_page_tab_names.isChecked() )
            self._new_options.SetInteger( 'page_file_count_display', self._page_file_count_display.GetValue() )
            self._new_options.SetBoolean( 'import_page_progress_display', self._import_page_progress_display.isChecked() )
            self._new_options.SetInteger( 'total_pages_warning', self._total_pages_warning.value() )
            self._new_options.SetBoolean( 'reverse_page_shift_drag_behaviour', self._reverse_page_shift_drag_behaviour.isChecked() )
            self._new_options.SetBoolean( 'set_search_focus_on_page_change', self._set_search_focus_on_page_change.isChecked() )
            HC.options[ 'hide_preview' ] = self._hide_preview.isChecked()
class _ImportingPanel( QW.QWidget ):
def __init__( self, parent, new_options ):
QW.QWidget.__init__( self, parent )
self._new_options = new_options
#
default_fios = ClientGUICommon.StaticBox( self, 'default file import options' )
show_downloader_options = True
quiet_file_import_options = self._new_options.GetDefaultFileImportOptions( 'quiet' )
self._quiet_fios = ClientGUIImport.FileImportOptionsButton( default_fios, quiet_file_import_options, show_downloader_options )
loud_file_import_options = self._new_options.GetDefaultFileImportOptions( 'loud' )
self._loud_fios = ClientGUIImport.FileImportOptionsButton( default_fios, loud_file_import_options, show_downloader_options )
#
rows = []
rows.append( ( 'For \'quiet\' import contexts like import folders and subscriptions:', self._quiet_fios ) )
rows.append( ( 'For import contexts that work on pages:', self._loud_fios ) )
gridbox = ClientGUICommon.WrapInGrid( default_fios, rows )
default_fios.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
#
vbox = QP.VBoxLayout()
QP.AddToLayout( vbox, default_fios, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.addStretch( 1 )
self.setLayout( vbox )
def UpdateOptions( self ):
self._new_options.SetDefaultFileImportOptions( 'quiet', self._quiet_fios.GetValue() )
self._new_options.SetDefaultFileImportOptions( 'loud', self._loud_fios.GetValue() )
    class _MaintenanceAndProcessingPanel( QW.QWidget ):
        """Options panel controlling when high-cpu maintenance jobs run (idle/shutdown) and file maintenance throttles."""
        
        def __init__( self, parent ):
            """Build the widgets, populate them from current options, wire up the enable/disable signals, and lay out.
            
            :param parent: Qt parent widget.
            """
            QW.QWidget.__init__( self, parent )
            self._new_options = HG.client_controller.new_options
            self._jobs_panel = ClientGUICommon.StaticBox( self, 'when to run high cpu jobs' )
            self._file_maintenance_panel = ClientGUICommon.StaticBox( self, 'file maintenance' )
            self._idle_panel = ClientGUICommon.StaticBox( self._jobs_panel, 'idle' )
            self._shutdown_panel = ClientGUICommon.StaticBox( self._jobs_panel, 'shutdown' )
            # idle-mode widgets; the idle_normal checkbox gates the others via _EnableDisableIdleNormal
            self._idle_normal = QW.QCheckBox( self._idle_panel )
            self._idle_normal.clicked.connect( self._EnableDisableIdleNormal )
            self._idle_period = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, '', min = 1, max = 1000, multiplier = 60, unit = 'minutes', none_phrase = 'ignore normal browsing' )
            self._idle_mouse_period = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, '', min = 1, max = 1000, multiplier = 60, unit = 'minutes', none_phrase = 'ignore mouse movements' )
            self._idle_mode_client_api_timeout = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, '', min = 1, max = 1000, multiplier = 60, unit = 'minutes', none_phrase = 'ignore client api' )
            self._system_busy_cpu_percent = QP.MakeQSpinBox( self._idle_panel, min = 5, max = 99 )
            self._system_busy_cpu_count = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, min = 1, max = 64, unit = 'cores', none_phrase = 'ignore cpu usage' )
            # shutdown-mode widgets; the dropdown gates the others via _EnableDisableIdleShutdown
            self._idle_shutdown = ClientGUICommon.BetterChoice( self._shutdown_panel )
            for idle_id in ( CC.IDLE_NOT_ON_SHUTDOWN, CC.IDLE_ON_SHUTDOWN, CC.IDLE_ON_SHUTDOWN_ASK_FIRST ):
                self._idle_shutdown.addItem( CC.idle_string_lookup[ idle_id], idle_id )
            self._idle_shutdown.currentIndexChanged.connect( self._EnableDisableIdleShutdown )
            self._idle_shutdown_max_minutes = QP.MakeQSpinBox( self._shutdown_panel, min=1, max=1440 )
            self._shutdown_work_period = ClientGUITime.TimeDeltaButton( self._shutdown_panel, min = 60, days = True, hours = True, minutes = True )
            # file maintenance throttles (work units per time delta) for idle and active time
            min_unit_value = 1
            max_unit_value = 1000
            min_time_delta = 1
            self._file_maintenance_during_idle = QW.QCheckBox( self._file_maintenance_panel )
            self._file_maintenance_idle_throttle_velocity = ClientGUITime.VelocityCtrl( self._file_maintenance_panel, min_unit_value, max_unit_value, min_time_delta, minutes = True, seconds = True, per_phrase = 'every', unit = 'heavy work units' )
            self._file_maintenance_during_active = QW.QCheckBox( self._file_maintenance_panel )
            self._file_maintenance_active_throttle_velocity = ClientGUITime.VelocityCtrl( self._file_maintenance_panel, min_unit_value, max_unit_value, min_time_delta, minutes = True, seconds = True, per_phrase = 'every', unit = 'heavy work units' )
            tt = 'Different jobs will count for more or less weight. A file metadata reparse will count as one work unit, but quicker jobs like checking for file presence will count as fractions of one and will will work more frequently.'
            tt += os.linesep * 2
            tt += 'Please note that this throttle is not rigorous for long timescales, as file processing history is not currently saved on client exit. If you restart the client, the file manager thinks it has run 0 jobs and will be happy to run until the throttle kicks in again.'
            self._file_maintenance_idle_throttle_velocity.setToolTip( tt )
            self._file_maintenance_active_throttle_velocity.setToolTip( tt )
            # load the current option values into the widgets
            self._idle_normal.setChecked( HC.options[ 'idle_normal' ] )
            self._idle_period.SetValue( HC.options['idle_period'] )
            self._idle_mouse_period.SetValue( HC.options['idle_mouse_period'] )
            self._idle_mode_client_api_timeout.SetValue( self._new_options.GetNoneableInteger( 'idle_mode_client_api_timeout' ) )
            self._system_busy_cpu_percent.setValue( self._new_options.GetInteger( 'system_busy_cpu_percent' ) )
            self._system_busy_cpu_count.SetValue( self._new_options.GetNoneableInteger( 'system_busy_cpu_count' ) )
            self._idle_shutdown.SetValue( HC.options[ 'idle_shutdown' ] )
            self._idle_shutdown_max_minutes.setValue( HC.options['idle_shutdown_max_minutes'] )
            self._shutdown_work_period.SetValue( self._new_options.GetInteger( 'shutdown_work_period' ) )
            self._file_maintenance_during_idle.setChecked( self._new_options.GetBoolean( 'file_maintenance_during_idle' ) )
            # throttle velocities are stored as two separate integers but edited as one ( files, time_delta ) pair
            file_maintenance_idle_throttle_files = self._new_options.GetInteger( 'file_maintenance_idle_throttle_files' )
            file_maintenance_idle_throttle_time_delta = self._new_options.GetInteger( 'file_maintenance_idle_throttle_time_delta' )
            file_maintenance_idle_throttle_velocity = ( file_maintenance_idle_throttle_files, file_maintenance_idle_throttle_time_delta )
            self._file_maintenance_idle_throttle_velocity.SetValue( file_maintenance_idle_throttle_velocity )
            self._file_maintenance_during_active.setChecked( self._new_options.GetBoolean( 'file_maintenance_during_active' ) )
            file_maintenance_active_throttle_files = self._new_options.GetInteger( 'file_maintenance_active_throttle_files' )
            file_maintenance_active_throttle_time_delta = self._new_options.GetInteger( 'file_maintenance_active_throttle_time_delta' )
            file_maintenance_active_throttle_velocity = ( file_maintenance_active_throttle_files, file_maintenance_active_throttle_time_delta )
            self._file_maintenance_active_throttle_velocity.SetValue( file_maintenance_active_throttle_velocity )
            # layout: idle box
            rows = []
            rows.append( ( 'Run maintenance jobs when the client is idle and the system is not otherwise busy: ', self._idle_normal ) )
            rows.append( ( 'Permit idle mode if no general browsing activity has occurred in the past: ', self._idle_period ) )
            rows.append( ( 'Permit idle mode if the mouse has not been moved in the past: ', self._idle_mouse_period ) )
            rows.append( ( 'Permit idle mode if no Client API requests in the past: ', self._idle_mode_client_api_timeout ) )
            hbox = QP.HBoxLayout()
            QP.AddToLayout( hbox, self._system_busy_cpu_percent, CC.FLAGS_CENTER )
            QP.AddToLayout( hbox, ClientGUICommon.BetterStaticText( self._idle_panel, label = '% on ' ), CC.FLAGS_CENTER )
            QP.AddToLayout( hbox, self._system_busy_cpu_count, CC.FLAGS_CENTER )
            # local import -- psutil is only needed here, for the informational core count
            import psutil
            num_cores = psutil.cpu_count()
            QP.AddToLayout( hbox, ClientGUICommon.BetterStaticText( self._idle_panel, label = '(you appear to have {} cores)'.format( num_cores ) ), CC.FLAGS_CENTER )
            rows.append( ( 'Consider the system busy if CPU usage is above: ', hbox ) )
            gridbox = ClientGUICommon.WrapInGrid( self._idle_panel, rows )
            self._idle_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            # layout: shutdown box
            rows = []
            rows.append( ( 'Run jobs on shutdown: ', self._idle_shutdown ) )
            rows.append( ( 'Only run shutdown jobs once per: ', self._shutdown_work_period ) )
            rows.append( ( 'Max number of minutes to run shutdown jobs: ', self._idle_shutdown_max_minutes ) )
            gridbox = ClientGUICommon.WrapInGrid( self._shutdown_panel, rows )
            self._shutdown_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            # explanatory text for the jobs box
            text = '***'
            text += os.linesep
            text +='If you are a new user or do not completely understand these options, please do not touch them! Do not set the client to be idle all the time unless you know what you are doing or are testing something and are prepared for potential problems!'
            text += os.linesep
            text += '***'
            text += os.linesep * 2
            text += 'Sometimes, the client needs to do some heavy maintenance. This could be reformatting the database to keep it running fast or processing a large number of tags from a repository. Typically, these jobs will not allow you to use the gui while they run, and on slower computers--or those with not much memory--they can take a long time to complete.'
            text += os.linesep * 2
            text += 'You can set these jobs to run only when the client is idle, or only during shutdown, or neither, or both. If you leave the client on all the time in the background, focusing on \'idle time\' processing is often ideal. If you have a slow computer, relying on \'shutdown\' processing (which you can manually start when convenient), is often better.'
            text += os.linesep * 2
            text += 'If the client switches from idle to not idle during a job, it will try to abandon it and give you back control. This is not always possible, and even when it is, it will sometimes take several minutes, particularly on slower machines or those on HDDs rather than SSDs.'
            text += os.linesep * 2
            text += 'If the client believes the system is busy, it will generally not start jobs.'
            st = ClientGUICommon.BetterStaticText( self._jobs_panel, label = text )
            st.setWordWrap( True )
            self._jobs_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
            self._jobs_panel.Add( self._idle_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
            self._jobs_panel.Add( self._shutdown_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
            # layout: file maintenance box
            message = 'Scheduled jobs such as reparsing file metadata and regenerating thumbnails are performed in the background.'
            self._file_maintenance_panel.Add( ClientGUICommon.BetterStaticText( self._file_maintenance_panel, label = message ), CC.FLAGS_EXPAND_PERPENDICULAR )
            rows = []
            rows.append( ( 'Run file maintenance during idle time: ', self._file_maintenance_during_idle ) )
            rows.append( ( 'Idle throttle: ', self._file_maintenance_idle_throttle_velocity ) )
            rows.append( ( 'Run file maintenance during normal time: ', self._file_maintenance_during_active ) )
            rows.append( ( 'Normal throttle: ', self._file_maintenance_active_throttle_velocity ) )
            gridbox = ClientGUICommon.WrapInGrid( self._file_maintenance_panel, rows )
            self._file_maintenance_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            # final assembly, then sync the gated widgets' enabled states to the current values
            vbox = QP.VBoxLayout()
            QP.AddToLayout( vbox, self._jobs_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
            QP.AddToLayout( vbox, self._file_maintenance_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
            vbox.addStretch( 1 )
            self.setLayout( vbox )
            self._EnableDisableIdleNormal()
            self._EnableDisableIdleShutdown()
            self._system_busy_cpu_count.valueChanged.connect( self._EnableDisableCPUPercent )
        
        def _EnableDisableCPUPercent( self ):
            """Enable the cpu-percent spinbox only when the cpu-count control is enabled and set to a value."""
            enabled = self._system_busy_cpu_count.isEnabled() and self._system_busy_cpu_count.GetValue() is not None
            self._system_busy_cpu_percent.setEnabled( enabled )
        
        def _EnableDisableIdleNormal( self ):
            """Enable/disable all idle-mode sub-controls based on the idle_normal checkbox."""
            enabled = self._idle_normal.isChecked()
            self._idle_period.setEnabled( enabled )
            self._idle_mouse_period.setEnabled( enabled )
            self._idle_mode_client_api_timeout.setEnabled( enabled )
            self._system_busy_cpu_count.setEnabled( enabled )
            # cpu percent depends on cpu count being enabled, so re-check it after
            self._EnableDisableCPUPercent()
        
        def _EnableDisableIdleShutdown( self ):
            """Enable/disable the shutdown sub-controls based on the shutdown dropdown."""
            enabled = self._idle_shutdown.GetValue() != CC.IDLE_NOT_ON_SHUTDOWN
            self._shutdown_work_period.setEnabled( enabled )
            self._idle_shutdown_max_minutes.setEnabled( enabled )
        
        def UpdateOptions( self ):
            """Write this panel's widget states back into HC.options and the new_options object."""
            HC.options[ 'idle_normal' ] = self._idle_normal.isChecked()
            HC.options[ 'idle_period' ] = self._idle_period.GetValue()
            HC.options[ 'idle_mouse_period' ] = self._idle_mouse_period.GetValue()
            self._new_options.SetNoneableInteger( 'idle_mode_client_api_timeout', self._idle_mode_client_api_timeout.GetValue() )
            self._new_options.SetInteger( 'system_busy_cpu_percent', self._system_busy_cpu_percent.value() )
            self._new_options.SetNoneableInteger( 'system_busy_cpu_count', self._system_busy_cpu_count.GetValue() )
            HC.options[ 'idle_shutdown' ] = self._idle_shutdown.GetValue()
            HC.options[ 'idle_shutdown_max_minutes' ] = self._idle_shutdown_max_minutes.value()
            self._new_options.SetInteger( 'shutdown_work_period', self._shutdown_work_period.GetValue() )
            self._new_options.SetBoolean( 'file_maintenance_during_idle', self._file_maintenance_during_idle.isChecked() )
            # unpack the ( files, time_delta ) pair back into the two stored integers
            file_maintenance_idle_throttle_velocity = self._file_maintenance_idle_throttle_velocity.GetValue()
            ( file_maintenance_idle_throttle_files, file_maintenance_idle_throttle_time_delta ) = file_maintenance_idle_throttle_velocity
            self._new_options.SetInteger( 'file_maintenance_idle_throttle_files', file_maintenance_idle_throttle_files )
            self._new_options.SetInteger( 'file_maintenance_idle_throttle_time_delta', file_maintenance_idle_throttle_time_delta )
            self._new_options.SetBoolean( 'file_maintenance_during_active', self._file_maintenance_during_active.isChecked() )
            file_maintenance_active_throttle_velocity = self._file_maintenance_active_throttle_velocity.GetValue()
            ( file_maintenance_active_throttle_files, file_maintenance_active_throttle_time_delta ) = file_maintenance_active_throttle_velocity
            self._new_options.SetInteger( 'file_maintenance_active_throttle_files', file_maintenance_active_throttle_files )
            self._new_options.SetInteger( 'file_maintenance_active_throttle_time_delta', file_maintenance_active_throttle_time_delta )
class _MediaPanel( QW.QWidget ):
        def __init__( self, parent ):
            """Build the media playback/zoom option widgets, populate them from current options, and lay out.
            
            :param parent: Qt parent widget.
            """
            QW.QWidget.__init__( self, parent )
            self._new_options = HG.client_controller.new_options
            self._animation_start_position = QP.MakeQSpinBox( self, min=0, max=100 )
            self._disable_cv_for_gifs = QW.QCheckBox( self )
            self._disable_cv_for_gifs.setToolTip( 'OpenCV is good at rendering gifs, but if you have problems with it and your graphics card, check this and the less reliable and slower PIL will be used instead. EDIT: OpenCV is much better these days--this is mostly not needed.' )
            self._load_images_with_pil = QW.QCheckBox( self )
            self._load_images_with_pil.setToolTip( 'OpenCV is much faster than PIL, but it is sometimes less reliable. Switch this on if you experience crashes or other unusual problems while importing or viewing certain images. EDIT: OpenCV is much better these days--this is mostly not needed.' )
            self._use_system_ffmpeg = QW.QCheckBox( self )
            self._use_system_ffmpeg.setToolTip( 'Check this to always default to the system ffmpeg in your path, rather than using the static ffmpeg in hydrus\'s bin directory. (requires restart)' )
            self._always_loop_gifs = QW.QCheckBox( self )
            self._always_loop_gifs.setToolTip( 'Some GIFS have metadata specifying how many times they should be played, usually 1. Uncheck this to obey that number.' )
            self._media_viewer_cursor_autohide_time_ms = ClientGUICommon.NoneableSpinCtrl( self, none_phrase = 'do not autohide', min = 100, max = 100000, unit = 'ms' )
            self._anchor_and_hide_canvas_drags = QW.QCheckBox( self )
            self._touchscreen_canvas_drags_unanchor = QW.QCheckBox( self )
            # local import -- presumably to avoid a circular import with the canvas module; TODO confirm
            from hydrus.client.gui.canvas import ClientGUICanvas
            # NOTE(review): created without a parent, unlike the other widgets here; WrapInGrid appears to reparent it later -- confirm intended
            self._media_viewer_zoom_center = ClientGUICommon.BetterChoice()
            for zoom_centerpoint_type in ClientGUICanvas.ZOOM_CENTERPOINT_TYPES:
                self._media_viewer_zoom_center.addItem( ClientGUICanvas.zoom_centerpoints_str_lookup[ zoom_centerpoint_type ], zoom_centerpoint_type )
            tt = 'When you zoom in or out, there is a centerpoint about which the image zooms. This point \'stays still\' while the image expands or shrinks around it. Different centerpoints give different feels, especially if you drag images around a bit.'
            self._media_viewer_zoom_center.setToolTip( tt )
            self._media_zooms = QW.QLineEdit( self )
            self._media_zooms.textChanged.connect( self.EventZoomsChanged )
            self._mpv_conf_path = QP.FilePickerCtrl( self, starting_directory = os.path.join( HC.STATIC_DIR, 'mpv-conf' ) )
            self._animated_scanbar_height = QP.MakeQSpinBox( self, min=1, max=255 )
            self._animated_scanbar_nub_width = QP.MakeQSpinBox( self, min=1, max=63 )
            # per-mime media viewer options, shown in a listctrl with add/edit/delete buttons
            self._media_viewer_panel = ClientGUICommon.StaticBox( self, 'media viewer mime handling' )
            media_viewer_list_panel = ClientGUIListCtrl.BetterListCtrlPanel( self._media_viewer_panel )
            self._media_viewer_options = ClientGUIListCtrl.BetterListCtrl( media_viewer_list_panel, CGLC.COLUMN_LIST_MEDIA_VIEWER_OPTIONS.ID, 20, data_to_tuples_func = self._GetListCtrlData, activation_callback = self.EditMediaViewerOptions, use_simple_delete = True )
            media_viewer_list_panel.SetListCtrl( self._media_viewer_options )
            media_viewer_list_panel.AddButton( 'add', self.AddMediaViewerOptions, enabled_check_func = self._CanAddMediaViewOption )
            media_viewer_list_panel.AddButton( 'edit', self.EditMediaViewerOptions, enabled_only_on_selection = True )
            media_viewer_list_panel.AddDeleteButton( enabled_check_func = self._CanDeleteMediaViewOptions )
            # load the current option values into the widgets
            self._animation_start_position.setValue( int( HC.options['animation_start_position'] * 100.0 ) )
            self._disable_cv_for_gifs.setChecked( self._new_options.GetBoolean( 'disable_cv_for_gifs' ) )
            self._load_images_with_pil.setChecked( self._new_options.GetBoolean( 'load_images_with_pil' ) )
            self._use_system_ffmpeg.setChecked( self._new_options.GetBoolean( 'use_system_ffmpeg' ) )
            self._always_loop_gifs.setChecked( self._new_options.GetBoolean( 'always_loop_gifs' ) )
            self._media_viewer_cursor_autohide_time_ms.SetValue( self._new_options.GetNoneableInteger( 'media_viewer_cursor_autohide_time_ms' ) )
            self._anchor_and_hide_canvas_drags.setChecked( self._new_options.GetBoolean( 'anchor_and_hide_canvas_drags' ) )
            self._touchscreen_canvas_drags_unanchor.setChecked( self._new_options.GetBoolean( 'touchscreen_canvas_drags_unanchor' ) )
            self._animated_scanbar_height.setValue( self._new_options.GetInteger( 'animated_scanbar_height' ) )
            self._animated_scanbar_nub_width.setValue( self._new_options.GetInteger( 'animated_scanbar_nub_width' ) )
            self._media_viewer_zoom_center.SetValue( self._new_options.GetInteger( 'media_viewer_zoom_center' ) )
            media_zooms = self._new_options.GetMediaZooms()
            self._media_zooms.setText( ','.join( ( str( media_zoom ) for media_zoom in media_zooms ) ) )
            all_media_view_options = self._new_options.GetMediaViewOptions()
            for ( mime, view_options ) in all_media_view_options.items():
                data = QP.ListsToTuples( [ mime ] + list( view_options ) )
                self._media_viewer_options.AddDatas( ( data, ) )
            self._media_viewer_options.Sort()
            # layout
            vbox = QP.VBoxLayout()
            text = 'Please be warned that hydrus does not currently zoom in very efficiently at high zooms!'
            text += os.linesep
            text += 'Just be careful at >400%, particularly for already large files--it can lag out and eat a chunk of memory.'
            st = ClientGUICommon.BetterStaticText( self, text )
            st.setObjectName( 'HydrusWarning' )
            QP.AddToLayout( vbox, st )
            rows = []
            rows.append( ( 'Start animations this % in:', self._animation_start_position ) )
            rows.append( ( 'Prefer system FFMPEG:', self._use_system_ffmpeg ) )
            rows.append( ( 'Always Loop GIFs:', self._always_loop_gifs ) )
            rows.append( ( 'Centerpoint for media zooming:', self._media_viewer_zoom_center ) )
            rows.append( ( 'Media zooms:', self._media_zooms ) )
            rows.append( ( 'Set a new mpv.conf on dialog ok?:', self._mpv_conf_path ) )
            rows.append( ( 'Animation scanbar height:', self._animated_scanbar_height ) )
            rows.append( ( 'Animation scanbar nub width:', self._animated_scanbar_nub_width ) )
            rows.append( ( 'Time until mouse cursor autohides on media viewer:', self._media_viewer_cursor_autohide_time_ms ) )
            rows.append( ( 'RECOMMEND WINDOWS ONLY: Hide and anchor mouse cursor on media viewer drags:', self._anchor_and_hide_canvas_drags ) )
            rows.append( ( 'RECOMMEND WINDOWS ONLY: If set to hide and anchor, undo on apparent touchscreen drag:', self._touchscreen_canvas_drags_unanchor ) )
            rows.append( ( 'BUGFIX: Load images with PIL (slower):', self._load_images_with_pil ) )
            rows.append( ( 'BUGFIX: Load gifs with PIL instead of OpenCV (slower, bad transparency):', self._disable_cv_for_gifs ) )
            gridbox = ClientGUICommon.WrapInGrid( self, rows )
            QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            self._media_viewer_panel.Add( media_viewer_list_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
            QP.AddToLayout( vbox, self._media_viewer_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
            self.setLayout( vbox )
def _CanAddMediaViewOption( self ):
return len( self._GetUnsetMediaViewFiletypes() ) > 0
def _CanDeleteMediaViewOptions( self ):
deletable_mimes = set( HC.SEARCHABLE_MIMES )
selected_mimes = set()
for ( mime, media_show_action, media_start_paused, media_start_with_embed, preview_show_action, preview_start_paused, preview_start_with_embed, zoom_info ) in self._media_viewer_options.GetData( only_selected = True ):
selected_mimes.add( mime )
if len( selected_mimes ) == 0:
return False
all_selected_are_deletable = selected_mimes.issubset( deletable_mimes )
return all_selected_are_deletable
def _GetCopyOfGeneralMediaViewOptions( self, desired_mime ):
general_mime_type = HC.mimes_to_general_mimetypes[ desired_mime ]
for ( mime, media_show_action, media_start_paused, media_start_with_embed, preview_show_action, preview_start_paused, preview_start_with_embed, zoom_info ) in self._media_viewer_options.GetData():
if mime == general_mime_type:
view_options = ( desired_mime, media_show_action, media_start_paused, media_start_with_embed, preview_show_action, preview_start_paused, preview_start_with_embed, zoom_info )
return view_options
def _GetUnsetMediaViewFiletypes( self ):
editable_mimes = set( HC.SEARCHABLE_MIMES )
set_mimes = set()
for ( mime, media_show_action, media_start_paused, media_start_with_embed, preview_show_action, preview_start_paused, preview_start_with_embed, zoom_info ) in self._media_viewer_options.GetData():
set_mimes.add( mime )
unset_mimes = editable_mimes.difference( set_mimes )
return unset_mimes
def _GetListCtrlData( self, data ):
    """Convert a media view options 8-tuple into ( display_tuple, sort_tuple ) for the list control."""
    
    ( mime, media_show_action, media_start_paused, media_start_with_embed, preview_show_action, preview_start_paused, preview_start_with_embed, zoom_info ) = data
    
    def describe( show_action, start_paused, start_with_embed ):
        
        # media and preview columns share the same 'action, modifiers' text format
        s = CC.media_viewer_action_string_lookup[ show_action ]
        
        if start_paused:
            
            s += ', start paused'
            
        
        if start_with_embed:
            
            s += ', start with embed button'
            
        
        return s
        
    
    pretty_mime = self._GetPrettyMime( mime )
    pretty_media_show_action = describe( media_show_action, media_start_paused, media_start_with_embed )
    pretty_preview_show_action = describe( preview_show_action, preview_start_paused, preview_start_with_embed )
    
    # zoom info only makes sense when at least one action actually renders the media
    shows_media = not { media_show_action, preview_show_action }.isdisjoint( { CC.MEDIA_VIEWER_ACTION_SHOW_WITH_NATIVE, CC.MEDIA_VIEWER_ACTION_SHOW_WITH_MPV } )
    
    pretty_zoom_info = str( zoom_info ) if shows_media else ''
    
    row = ( pretty_mime, pretty_media_show_action, pretty_preview_show_action, pretty_zoom_info )
    
    # display and sort representations are identical for this list
    return ( row, row )
def _GetPrettyMime( self, mime ):
    """Return a human-readable name for mime; specific filetypes are prefixed with their general group name."""
    
    name = HC.mime_string_lookup[ mime ]
    
    if mime in HC.GENERAL_FILETYPES:
        
        return name
        
    
    group_name = HC.mime_string_lookup[ HC.mimes_to_general_mimetypes[ mime ] ]
    
    return '{}: {}'.format( group_name, name )
def AddMediaViewerOptions( self ):
    """Let the user pick an as-yet-unset filetype and add a specific media view options row for it."""
    
    unset_filetypes = self._GetUnsetMediaViewFiletypes()
    
    if len( unset_filetypes ) == 0:
        
        QW.QMessageBox.warning( self, 'Warning', 'You cannot add any more specific filetype options!' )
        
        return
        
    
    choice_tuples = [ ( self._GetPrettyMime( mime ), mime ) for mime in unset_filetypes ]
    
    try:
        
        mime = ClientGUIDialogsQuick.SelectFromList( self, 'select the filetype to add', choice_tuples, sort_tuples = True )
        
    except HydrusExceptions.CancelledException:
        
        # user backed out of the selection dialog; add nothing
        return
        
    
    # seed the edit dialog with a copy of the filetype's general-group settings
    data = self._GetCopyOfGeneralMediaViewOptions( mime )
    
    title = 'add media view options information'
    
    with ClientGUITopLevelWindowsPanels.DialogEdit( self, title ) as dlg:
        
        panel = ClientGUIScrolledPanelsEdit.EditMediaViewOptionsPanel( dlg, data )
        
        dlg.SetPanel( panel )
        
        if dlg.exec() == QW.QDialog.Accepted:
            
            new_data = panel.GetValue()
            
            self._media_viewer_options.AddDatas( ( new_data, ) )
def EditMediaViewerOptions( self ):
    """Open an edit dialog for each selected media view options row, replacing rows the user accepts."""
    
    # NOTE(review): rows are replaced while iterating the selection — presumably GetData returns a
    # fresh snapshot list so this is safe; confirm against the list control implementation.
    for data in self._media_viewer_options.GetData( only_selected = True ):
        
        title = 'edit media view options information'
        
        with ClientGUITopLevelWindowsPanels.DialogEdit( self, title ) as dlg:
            
            panel = ClientGUIScrolledPanelsEdit.EditMediaViewOptionsPanel( dlg, data )
            
            dlg.SetPanel( panel )
            
            if dlg.exec() == QW.QDialog.Accepted:
                
                new_data = panel.GetValue()
                
                self._media_viewer_options.ReplaceData( data, new_data )
def EventZoomsChanged( self, text ):
    """Live-validate the comma-separated zooms the user typed, flagging the widget invalid on a parse error.
    
    The values are only parsed here, not stored; UpdateOptions does the actual save.
    """
    
    try:
        
        # any non-float element raises ValueError and marks the whole field invalid
        for zoom_text in self._media_zooms.text().split( ',' ):
            
            float( zoom_text )
            
        
        object_name = ''
        
    except ValueError:
        
        object_name = 'HydrusInvalid'
        
    
    self._media_zooms.setObjectName( object_name )
    
    # re-polish so the stylesheet rule keyed on the object name takes visual effect
    self._media_zooms.style().polish( self._media_zooms )
    
    self._media_zooms.update()
def UpdateOptions( self ):
    """Write this media-page's widget states back into the old-style HC.options and the new options object."""
    
    # stored as a 0.0-1.0 fraction; the widget shows a percentage
    HC.options[ 'animation_start_position' ] = self._animation_start_position.value() / 100.0
    
    self._new_options.SetBoolean( 'disable_cv_for_gifs', self._disable_cv_for_gifs.isChecked() )
    self._new_options.SetBoolean( 'load_images_with_pil', self._load_images_with_pil.isChecked() )
    self._new_options.SetBoolean( 'use_system_ffmpeg', self._use_system_ffmpeg.isChecked() )
    self._new_options.SetBoolean( 'always_loop_gifs', self._always_loop_gifs.isChecked() )
    self._new_options.SetBoolean( 'anchor_and_hide_canvas_drags', self._anchor_and_hide_canvas_drags.isChecked() )
    self._new_options.SetBoolean( 'touchscreen_canvas_drags_unanchor', self._touchscreen_canvas_drags_unanchor.isChecked() )
    
    self._new_options.SetNoneableInteger( 'media_viewer_cursor_autohide_time_ms', self._media_viewer_cursor_autohide_time_ms.GetValue() )
    
    # if the user picked an mpv.conf, copy it into the client's expected location; best-effort only
    mpv_conf_path = self._mpv_conf_path.GetPath()
    
    if mpv_conf_path is not None and mpv_conf_path != '' and os.path.exists( mpv_conf_path ) and os.path.isfile( mpv_conf_path ):
        
        dest_mpv_conf_path = HG.client_controller.GetMPVConfPath()
        
        try:
            
            HydrusPaths.MirrorFile( mpv_conf_path, dest_mpv_conf_path )
            
        except Exception as e:
            
            # report but do not abort saving the rest of the options
            HydrusData.ShowText( 'Could not set the mpv conf path "{}" to "{}"! Error follows!'.format( mpv_conf_path, dest_mpv_conf_path ) )
            
            HydrusData.ShowException( e )
            
        
    
    self._new_options.SetInteger( 'animated_scanbar_height', self._animated_scanbar_height.value() )
    self._new_options.SetInteger( 'animated_scanbar_nub_width', self._animated_scanbar_nub_width.value() )
    
    self._new_options.SetInteger( 'media_viewer_zoom_center', self._media_viewer_zoom_center.GetValue() )
    
    try:
        
        # parse the comma-separated zooms; non-positive entries are silently dropped
        media_zooms = [ float( media_zoom ) for media_zoom in self._media_zooms.text().split( ',' ) ]
        
        media_zooms = [ media_zoom for media_zoom in media_zooms if media_zoom > 0.0 ]
        
        if len( media_zooms ) > 0:
            
            self._new_options.SetMediaZooms( media_zooms )
            
        
    except ValueError:
        
        # unparseable field: keep the previously saved zooms and tell the user
        HydrusData.ShowText( 'Could not parse those zooms, so they were not saved!' )
        
    
    # flatten the list-control rows into { mime : [ the other seven values ] }
    mimes_to_media_view_options = {}
    
    for data in self._media_viewer_options.GetData():
        
        data = list( data )
        
        mime = data[0]
        
        value = data[1:]
        
        mimes_to_media_view_options[ mime ] = value
        
    
    self._new_options.SetMediaViewOptions( mimes_to_media_view_options )
class _PopupPanel( QW.QWidget ):
    """Options page for the popup message toaster: message sizing, freeze/hide workarounds, and Client API notifications."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        # create the widgets
        
        self._popup_panel = ClientGUICommon.StaticBox( self, 'popup window toaster' )
        
        self._popup_message_character_width = QP.MakeQSpinBox( self._popup_panel, min = 16, max = 256 )
        
        self._popup_message_force_min_width = QW.QCheckBox( self._popup_panel )
        
        self._freeze_message_manager_when_mouse_on_other_monitor = QW.QCheckBox( self._popup_panel )
        self._freeze_message_manager_when_mouse_on_other_monitor.setToolTip( 'This is useful if you have a virtual desktop and find the popup manager restores strangely when you hop back to the hydrus display.' )
        
        self._freeze_message_manager_when_main_gui_minimised = QW.QCheckBox( self._popup_panel )
        self._freeze_message_manager_when_main_gui_minimised.setToolTip( 'This is useful if the popup toaster restores strangely after minimised changes.' )
        
        self._hide_message_manager_on_gui_iconise = QW.QCheckBox( self._popup_panel )
        self._hide_message_manager_on_gui_iconise.setToolTip( 'If your message manager does not automatically minimise with your main gui, try this. It can lead to unusual show and positioning behaviour on window managers that do not support it, however.' )
        
        self._hide_message_manager_on_gui_deactive = QW.QCheckBox( self._popup_panel )
        self._hide_message_manager_on_gui_deactive.setToolTip( 'If your message manager stays up after you minimise the program to the system tray using a custom window manager, try this out! It hides the popup messages as soon as the main gui loses focus.' )
        
        self._notify_client_api_cookies = QW.QCheckBox( self._popup_panel )
        self._notify_client_api_cookies.setToolTip( 'This will make a short-lived popup message every time you get new cookie information over the Client API.' )
        
        # initialise widget states from the stored options
        
        self._popup_message_character_width.setValue( self._new_options.GetInteger( 'popup_message_character_width' ) )
        
        self._popup_message_force_min_width.setChecked( self._new_options.GetBoolean( 'popup_message_force_min_width' ) )
        
        self._freeze_message_manager_when_mouse_on_other_monitor.setChecked( self._new_options.GetBoolean( 'freeze_message_manager_when_mouse_on_other_monitor' ) )
        
        self._freeze_message_manager_when_main_gui_minimised.setChecked( self._new_options.GetBoolean( 'freeze_message_manager_when_main_gui_minimised' ) )
        
        self._hide_message_manager_on_gui_iconise.setChecked( self._new_options.GetBoolean( 'hide_message_manager_on_gui_iconise' ) )
        self._hide_message_manager_on_gui_deactive.setChecked( self._new_options.GetBoolean( 'hide_message_manager_on_gui_deactive' ) )
        
        self._notify_client_api_cookies.setChecked( self._new_options.GetBoolean( 'notify_client_api_cookies' ) )
        
        # lay everything out
        
        rows = []
        
        rows.append( ( 'Approximate max width of popup messages (in characters): ', self._popup_message_character_width ) )
        rows.append( ( 'BUGFIX: Force this width as the minimum width for all popup messages: ', self._popup_message_force_min_width ) )
        rows.append( ( 'Freeze the popup toaster when mouse is on another display: ', self._freeze_message_manager_when_mouse_on_other_monitor ) )
        rows.append( ( 'Freeze the popup toaster when the main gui is minimised: ', self._freeze_message_manager_when_main_gui_minimised ) )
        rows.append( ( 'BUGFIX: Hide the popup toaster when the main gui is minimised: ', self._hide_message_manager_on_gui_iconise ) )
        rows.append( ( 'BUGFIX: Hide the popup toaster when the main gui loses focus: ', self._hide_message_manager_on_gui_deactive ) )
        rows.append( ( 'Make a short-lived popup on cookie updates through the Client API: ', self._notify_client_api_cookies ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self._popup_panel, rows )
        
        self._popup_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, self._popup_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        """Write this page's widget states back into the new options object."""
        
        self._new_options.SetInteger( 'popup_message_character_width', self._popup_message_character_width.value() )
        
        self._new_options.SetBoolean( 'popup_message_force_min_width', self._popup_message_force_min_width.isChecked() )
        
        self._new_options.SetBoolean( 'freeze_message_manager_when_mouse_on_other_monitor', self._freeze_message_manager_when_mouse_on_other_monitor.isChecked() )
        self._new_options.SetBoolean( 'freeze_message_manager_when_main_gui_minimised', self._freeze_message_manager_when_main_gui_minimised.isChecked() )
        
        self._new_options.SetBoolean( 'hide_message_manager_on_gui_iconise', self._hide_message_manager_on_gui_iconise.isChecked() )
        self._new_options.SetBoolean( 'hide_message_manager_on_gui_deactive', self._hide_message_manager_on_gui_deactive.isChecked() )
        
        self._new_options.SetBoolean( 'notify_client_api_cookies', self._notify_client_api_cookies.isChecked() )
class _RegexPanel( QW.QWidget ):
    """Options page hosting the user's favourite regexes."""
    
    def __init__( self, parent ):
        
        QW.QWidget.__init__( self, parent )
        
        # the edit panel works on the old-style HC.options entry directly
        self._regex_panel = ClientGUIScrolledPanelsEdit.EditRegexFavourites( self, HC.options[ 'regex_favourites' ] )
        
        layout = QP.VBoxLayout()
        
        QP.AddToLayout( layout, self._regex_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.setLayout( layout )
        
    
    def UpdateOptions( self ):
        """Write the edited favourites back to the global options."""
        
        HC.options[ 'regex_favourites' ] = self._regex_panel.GetValue()
class _SearchPanel( QW.QWidget ):
    """Options page for search behaviour: autocomplete dropdown placement, list heights, and system predicate visibility."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        # create the widgets
        
        self._autocomplete_panel = ClientGUICommon.StaticBox( self, 'autocomplete' )
        
        self._autocomplete_float_main_gui = QW.QCheckBox( self._autocomplete_panel )
        
        tt = 'The autocomplete dropdown can either \'float\' on top of the main window, or if that does not work well for you, it can embed into the parent panel.'
        
        self._autocomplete_float_main_gui.setToolTip( tt )
        
        self._autocomplete_float_frames = QW.QCheckBox( self._autocomplete_panel )
        
        tt = 'The autocomplete dropdown can either \'float\' on top of dialogs like _manage tags_, or if that does not work well for you (it can sometimes annoyingly overlap the ok/cancel buttons), it can embed into the parent dialog panel.'
        
        self._autocomplete_float_frames.setToolTip( tt )
        
        self._ac_read_list_height_num_chars = QP.MakeQSpinBox( self._autocomplete_panel, min = 1, max = 128 )
        
        tt = 'Read autocompletes are those in search pages, where you are looking through existing tags to find your files.'
        
        self._ac_read_list_height_num_chars.setToolTip( tt )
        
        self._ac_write_list_height_num_chars = QP.MakeQSpinBox( self._autocomplete_panel, min = 1, max = 128 )
        
        tt = 'Write autocompletes are those in most dialogs, where you are adding new tags to files.'
        
        self._ac_write_list_height_num_chars.setToolTip( tt )
        
        self._always_show_system_everything = QW.QCheckBox( self._autocomplete_panel )
        
        tt = 'After users get some experience with the program and a larger collection, they tend to have less use for system:everything.'
        
        self._always_show_system_everything.setToolTip( tt )
        
        self._filter_inbox_and_archive_predicates = QW.QCheckBox( self._autocomplete_panel )
        
        tt = 'If everything is current in the inbox (or archive), then there is no use listing it or its opposite--it either does not change the search or it produces nothing. If you find it jarring though, turn it off here!'
        
        self._filter_inbox_and_archive_predicates.setToolTip( tt )
        
        # initialise widget states from the stored options
        
        self._autocomplete_float_main_gui.setChecked( self._new_options.GetBoolean( 'autocomplete_float_main_gui' ) )
        
        self._autocomplete_float_frames.setChecked( self._new_options.GetBoolean( 'autocomplete_float_frames' ) )
        
        self._ac_read_list_height_num_chars.setValue( self._new_options.GetInteger( 'ac_read_list_height_num_chars' ) )
        
        self._ac_write_list_height_num_chars.setValue( self._new_options.GetInteger( 'ac_write_list_height_num_chars' ) )
        
        self._always_show_system_everything.setChecked( self._new_options.GetBoolean( 'always_show_system_everything' ) )
        
        self._filter_inbox_and_archive_predicates.setChecked( self._new_options.GetBoolean( 'filter_inbox_and_archive_predicates' ) )
        
        # lay everything out
        
        vbox = QP.VBoxLayout()
        
        message = 'The autocomplete dropdown list is the panel that hangs below the tag input text box on search pages.'
        
        st = ClientGUICommon.BetterStaticText( self._autocomplete_panel, label = message )
        
        self._autocomplete_panel.Add( st, CC.FLAGS_CENTER )
        
        rows = []
        
        rows.append( ( 'Autocomplete results float in main gui: ', self._autocomplete_float_main_gui ) )
        rows.append( ( 'Autocomplete results float in other windows: ', self._autocomplete_float_frames ) )
        rows.append( ( '\'Read\' autocomplete list height: ', self._ac_read_list_height_num_chars ) )
        rows.append( ( '\'Write\' autocomplete list height: ', self._ac_write_list_height_num_chars ) )
        rows.append( ( 'show system:everything even if total files is over 10,000: ', self._always_show_system_everything ) )
        rows.append( ( 'hide inbox and archive system predicates if either has no files: ', self._filter_inbox_and_archive_predicates ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self._autocomplete_panel, rows )
        
        self._autocomplete_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        
        QP.AddToLayout( vbox, self._autocomplete_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        """Write this page's widget states back into the new options object."""
        
        self._new_options.SetBoolean( 'autocomplete_float_main_gui', self._autocomplete_float_main_gui.isChecked() )
        self._new_options.SetBoolean( 'autocomplete_float_frames', self._autocomplete_float_frames.isChecked() )
        
        self._new_options.SetInteger( 'ac_read_list_height_num_chars', self._ac_read_list_height_num_chars.value() )
        self._new_options.SetInteger( 'ac_write_list_height_num_chars', self._ac_write_list_height_num_chars.value() )
        
        self._new_options.SetBoolean( 'always_show_system_everything', self._always_show_system_everything.isChecked() )
        self._new_options.SetBoolean( 'filter_inbox_and_archive_predicates', self._filter_inbox_and_archive_predicates.isChecked() )
class _SortCollectPanel( QW.QWidget ):
    """Options page for default media sort, fallback sort, default collect, and namespace sorting schemes."""
    
    def __init__( self, parent ):
        
        QW.QWidget.__init__( self, parent )
        
        self._default_media_sort = ClientGUIResultsSortCollect.MediaSortControl( self )
        
        self._fallback_media_sort = ClientGUIResultsSortCollect.MediaSortControl( self )
        
        self._save_page_sort_on_change = QW.QCheckBox( self )
        
        # silent = True: do not broadcast collect changes while the user is just editing options
        self._default_media_collect = ClientGUIResultsSortCollect.MediaCollectControl( self, silent = True )
        
        namespace_sorting_box = ClientGUICommon.StaticBox( self, 'namespace sorting' )
        
        self._namespace_sort_by = ClientGUIListBoxes.QueueListBox( namespace_sorting_box, 8, self._ConvertNamespaceTupleToSortString, self._AddNamespaceSort, self._EditNamespaceSort )
        
        # initialise widget states from the stored options
        
        self._new_options = HG.client_controller.new_options
        
        try:
            
            self._default_media_sort.SetSort( self._new_options.GetDefaultSort() )
            
        except:
            
            # NOTE(review): bare except — deliberately falls back to a safe default if the stored
            # sort fails to load/apply; consider narrowing to 'except Exception'
            media_sort = ClientMedia.MediaSort( ( 'system', CC.SORT_FILES_BY_FILESIZE ), CC.SORT_ASC )
            
            self._default_media_sort.SetSort( media_sort )
            
        
        try:
            
            self._fallback_media_sort.SetSort( self._new_options.GetFallbackSort() )
            
        except:
            
            # same fallback pattern as above, but defaulting to import time
            media_sort = ClientMedia.MediaSort( ( 'system', CC.SORT_FILES_BY_IMPORT_TIME ), CC.SORT_ASC )
            
            self._fallback_media_sort.SetSort( media_sort )
            
        
        # the queue list holds the raw sort_data tuples, i.e. sort_type[1] of each namespace sort
        self._namespace_sort_by.AddDatas( [ media_sort.sort_type[1] for media_sort in HG.client_controller.new_options.GetDefaultNamespaceSorts() ] )
        
        self._save_page_sort_on_change.setChecked( self._new_options.GetBoolean( 'save_page_sort_on_change' ) )
        
        # lay everything out
        
        sort_by_text = 'You can manage your namespace sorting schemes here.'
        sort_by_text += os.linesep
        sort_by_text += 'The client will sort media by comparing their namespaces, moving from left to right until an inequality is found.'
        sort_by_text += os.linesep
        sort_by_text += 'Any namespaces here will also appear in your collect-by dropdowns.'
        
        namespace_sorting_box.Add( ClientGUICommon.BetterStaticText( namespace_sorting_box, sort_by_text ), CC.FLAGS_EXPAND_PERPENDICULAR )
        namespace_sorting_box.Add( self._namespace_sort_by, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        rows = []
        
        rows.append( ( 'Default sort: ', self._default_media_sort ) )
        rows.append( ( 'Secondary sort (when primary gives two equal values): ', self._fallback_media_sort ) )
        rows.append( ( 'Update default sort every time a new sort is manually chosen: ', self._save_page_sort_on_change ) )
        rows.append( ( 'Default collect: ', self._default_media_collect ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, namespace_sorting_box, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.setLayout( vbox )
        
    
    def _AddNamespaceSort( self ):
        """Callback for the queue list's add button: edit a fresh creator-series-page namespace sort."""
        
        default = ( ( 'creator', 'series', 'page' ), ClientTags.TAG_DISPLAY_ACTUAL )
        
        return self._EditNamespaceSort( default )
        
    
    def _ConvertNamespaceTupleToSortString( self, sort_data ):
        """Render a ( namespaces, tag_display_type ) tuple as the list's 'ns1-ns2-ns3' display string."""
        
        ( namespaces, tag_display_type ) = sort_data
        
        return '-'.join( namespaces )
        
    
    def _EditNamespaceSort( self, sort_data ):
        """Callback for the queue list's edit button: open the namespace sort edit dialog."""
        
        return ClientGUITags.EditNamespaceSort( self, sort_data )
        
    
    def UpdateOptions( self ):
        """Write this page's widget states back into the new options object."""
        
        self._new_options.SetDefaultSort( self._default_media_sort.GetSort() )
        self._new_options.SetFallbackSort( self._fallback_media_sort.GetSort() )
        self._new_options.SetBoolean( 'save_page_sort_on_change', self._save_page_sort_on_change.isChecked() )
        self._new_options.SetDefaultCollect( self._default_media_collect.GetValue() )
        
        # re-wrap the raw sort_data tuples back into MediaSort objects
        namespace_sorts = [ ClientMedia.MediaSort( sort_type = ( 'namespaces', sort_data ) ) for sort_data in self._namespace_sort_by.GetData() ]
        
        self._new_options.SetDefaultNamespaceSorts( namespace_sorts )
class _SpeedAndMemoryPanel( QW.QWidget ):
def __init__( self, parent, new_options ):
QW.QWidget.__init__( self, parent )
self._new_options = new_options
#
thumbnail_cache_panel = ClientGUICommon.StaticBox( self, 'thumbnail cache' )
self._thumbnail_cache_size = QP.MakeQSpinBox( thumbnail_cache_panel, min=5, max=3000 )
self._thumbnail_cache_size.valueChanged.connect( self.EventThumbnailsUpdate )
self._estimated_number_thumbnails = QW.QLabel( '', thumbnail_cache_panel )
self._thumbnail_cache_timeout = ClientGUITime.TimeDeltaButton( thumbnail_cache_panel, min = 300, days = True, hours = True, minutes = True )
self._thumbnail_cache_timeout.setToolTip( 'The amount of time after which a thumbnail in the cache will naturally be removed, if it is not shunted out due to a new member exceeding the size limit.' )
image_cache_panel = ClientGUICommon.StaticBox( self, 'image cache' )
self._fullscreen_cache_size = QP.MakeQSpinBox( image_cache_panel, min=25, max=8192 )
self._fullscreen_cache_size.valueChanged.connect( self.EventImageCacheUpdate )
self._estimated_number_fullscreens = QW.QLabel( '', image_cache_panel )
self._image_cache_timeout = ClientGUITime.TimeDeltaButton( image_cache_panel, min = 300, days = True, hours = True, minutes = True )
self._image_cache_timeout.setToolTip( 'The amount of time after which a rendered image in the cache will naturally be removed, if it is not shunted out due to a new member exceeding the size limit.' )
self._media_viewer_prefetch_delay_base_ms = QP.MakeQSpinBox( image_cache_panel, min = 0, max = 2000 )
tt = 'How long to wait, after the current image is rendered, to start rendering neighbours. Does not matter so much any more, but if you have CPU lag, you can try boosting it a bit.'
self._media_viewer_prefetch_delay_base_ms.setToolTip( tt )
self._media_viewer_prefetch_num_previous = QP.MakeQSpinBox( image_cache_panel, min = 0, max = 5 )
self._media_viewer_prefetch_num_next = QP.MakeQSpinBox( image_cache_panel, min = 0, max = 5 )
self._image_cache_storage_limit_percentage = QP.MakeQSpinBox( image_cache_panel, min = 20, max = 50 )
self._image_cache_storage_limit_percentage_st = ClientGUICommon.BetterStaticText( image_cache_panel, label = '' )
self._image_cache_prefetch_limit_percentage = QP.MakeQSpinBox( image_cache_panel, min = 5, max = 20 )
self._image_cache_prefetch_limit_percentage_st = ClientGUICommon.BetterStaticText( image_cache_panel, label = '' )
image_tile_cache_panel = ClientGUICommon.StaticBox( self, 'image tile cache' )
self._image_tile_cache_size = ClientGUIControls.BytesControl( image_tile_cache_panel )
self._image_tile_cache_size.valueChanged.connect( self.EventImageTilesUpdate )
self._estimated_number_image_tiles = QW.QLabel( '', image_tile_cache_panel )
self._image_tile_cache_timeout = ClientGUITime.TimeDeltaButton( image_tile_cache_panel, min = 300, hours = True, minutes = True )
self._image_tile_cache_timeout.setToolTip( 'The amount of time after which a rendered image tile in the cache will naturally be removed, if it is not shunted out due to a new member exceeding the size limit.' )
self._ideal_tile_dimension = QP.MakeQSpinBox( image_tile_cache_panel, min = 256, max = 4096 )
self._ideal_tile_dimension.setToolTip( 'This is the square size the system will aim for. Smaller tiles are more memory efficient but prone to warping and other artifacts. Extreme values may waste CPU.' )
#
buffer_panel = ClientGUICommon.StaticBox( self, 'video buffer' )
self._video_buffer_size_mb = QP.MakeQSpinBox( buffer_panel, min=48, max=16*1024 )
self._video_buffer_size_mb.valueChanged.connect( self.EventVideoBufferUpdate )
self._estimated_number_video_frames = QW.QLabel( '', buffer_panel )
#
misc_panel = ClientGUICommon.StaticBox( self, 'misc' )
self._forced_search_limit = ClientGUICommon.NoneableSpinCtrl( misc_panel, '', min = 1, max = 100000 )
#
self._thumbnail_cache_size.setValue( int( HC.options['thumbnail_cache_size'] // 1048576 ) )
self._fullscreen_cache_size.setValue( int( HC.options['fullscreen_cache_size'] // 1048576 ) )
self._image_tile_cache_size.SetValue( self._new_options.GetInteger( 'image_tile_cache_size' ) )
self._thumbnail_cache_timeout.SetValue( self._new_options.GetInteger( 'thumbnail_cache_timeout' ) )
self._image_cache_timeout.SetValue( self._new_options.GetInteger( 'image_cache_timeout' ) )
self._image_tile_cache_timeout.SetValue( self._new_options.GetInteger( 'image_tile_cache_timeout' ) )
self._ideal_tile_dimension.setValue( self._new_options.GetInteger( 'ideal_tile_dimension' ) )
self._video_buffer_size_mb.setValue( self._new_options.GetInteger( 'video_buffer_size_mb' ) )
self._forced_search_limit.SetValue( self._new_options.GetNoneableInteger( 'forced_search_limit' ) )
self._media_viewer_prefetch_delay_base_ms.setValue( self._new_options.GetInteger( 'media_viewer_prefetch_delay_base_ms' ) )
self._media_viewer_prefetch_num_previous.setValue( self._new_options.GetInteger( 'media_viewer_prefetch_num_previous' ) )
self._media_viewer_prefetch_num_next.setValue( self._new_options.GetInteger( 'media_viewer_prefetch_num_next' ) )
self._image_cache_storage_limit_percentage.setValue( self._new_options.GetInteger( 'image_cache_storage_limit_percentage' ) )
self._image_cache_prefetch_limit_percentage.setValue( self._new_options.GetInteger( 'image_cache_prefetch_limit_percentage' ) )
#
vbox = QP.VBoxLayout()
text = 'These options are advanced! PROTIP: Do not go crazy here.'
st = ClientGUICommon.BetterStaticText( self, text )
QP.AddToLayout( vbox, st, CC.FLAGS_CENTER )
#
thumbnails_sizer = QP.HBoxLayout()
QP.AddToLayout( thumbnails_sizer, self._thumbnail_cache_size, CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( thumbnails_sizer, self._estimated_number_thumbnails, CC.FLAGS_CENTER_PERPENDICULAR )
fullscreens_sizer = QP.HBoxLayout()
QP.AddToLayout( fullscreens_sizer, self._fullscreen_cache_size, CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( fullscreens_sizer, self._estimated_number_fullscreens, CC.FLAGS_CENTER_PERPENDICULAR )
image_tiles_sizer = QP.HBoxLayout()
QP.AddToLayout( image_tiles_sizer, self._image_tile_cache_size, CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( image_tiles_sizer, self._estimated_number_image_tiles, CC.FLAGS_CENTER_PERPENDICULAR )
image_cache_storage_sizer = QP.HBoxLayout()
QP.AddToLayout( image_cache_storage_sizer, self._image_cache_storage_limit_percentage, CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( image_cache_storage_sizer, self._image_cache_storage_limit_percentage_st, CC.FLAGS_CENTER_PERPENDICULAR )
image_cache_prefetch_sizer = QP.HBoxLayout()
QP.AddToLayout( image_cache_prefetch_sizer, self._image_cache_prefetch_limit_percentage, CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( image_cache_prefetch_sizer, self._image_cache_prefetch_limit_percentage_st, CC.FLAGS_CENTER_PERPENDICULAR )
video_buffer_sizer = QP.HBoxLayout()
QP.AddToLayout( video_buffer_sizer, self._video_buffer_size_mb, CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( video_buffer_sizer, self._estimated_number_video_frames, CC.FLAGS_CENTER_PERPENDICULAR )
#
text = 'Does not change much, thumbs are cheap.'
st = ClientGUICommon.BetterStaticText( thumbnail_cache_panel, text )
thumbnail_cache_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
rows = []
rows.append( ( 'MB memory reserved for thumbnail cache:', thumbnails_sizer ) )
rows.append( ( 'Thumbnail cache timeout:', self._thumbnail_cache_timeout ) )
gridbox = ClientGUICommon.WrapInGrid( thumbnail_cache_panel, rows )
thumbnail_cache_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
QP.AddToLayout( vbox, thumbnail_cache_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
#
text = 'Important if you want smooth navigation between different images in the media viewer. If you deal with huge images, bump up cache size and max size that can be cached or prefetched, but be prepared to pay the memory price.'
text += os.linesep * 2
text += 'Allowing more prefetch is great, but it needs CPU.'
st = ClientGUICommon.BetterStaticText( image_cache_panel, text )
st.setWordWrap( True )
image_cache_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
rows = []
rows.append( ( 'MB memory reserved for image cache:', fullscreens_sizer ) )
rows.append( ( 'Image cache timeout:', self._image_cache_timeout ) )
rows.append( ( 'Maximum image size (in % of cache) that can be cached:', image_cache_storage_sizer ) )
rows.append( ( 'Maximum image size (in % of cache) that will be prefetched:', image_cache_prefetch_sizer ) )
rows.append( ( 'Base ms delay for media viewer neighbour render prefetch:', self._media_viewer_prefetch_delay_base_ms ) )
rows.append( ( 'Num previous to prefetch:', self._media_viewer_prefetch_num_previous ) )
rows.append( ( 'Num next to prefetch:', self._media_viewer_prefetch_num_next ) )
gridbox = ClientGUICommon.WrapInGrid( image_cache_panel, rows )
image_cache_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
QP.AddToLayout( vbox, image_cache_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
#
text = 'Important if you do a lot of zooming in and out on the same image or a small number of comparison images.'
st = ClientGUICommon.BetterStaticText( image_tile_cache_panel, text )
image_tile_cache_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
rows = []
rows.append( ( 'MB memory reserved for image tile cache:', image_tiles_sizer ) )
rows.append( ( 'Image tile cache timeout:', self._image_tile_cache_timeout ) )
rows.append( ( 'Ideal tile width/height px:', self._ideal_tile_dimension ) )
gridbox = ClientGUICommon.WrapInGrid( image_tile_cache_panel, rows )
image_tile_cache_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
QP.AddToLayout( vbox, image_tile_cache_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
#
text = 'This old option does not apply to mpv! It only applies to the native hydrus animation renderer!'
text += os.linesep
text += 'Hydrus video rendering is CPU intensive.'
text += os.linesep
text += 'If you have a lot of memory, you can set a generous potential video buffer to compensate.'
text += os.linesep
text += 'If the video buffer can hold an entire video, it only needs to be rendered once and will play and loop very smoothly.'
text += os.linesep
text += 'PROTIP: Do not go crazy here.'
st = ClientGUICommon.BetterStaticText( buffer_panel, text )
st.setWordWrap( True )
buffer_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
rows = []
rows.append( ( 'MB memory for video buffer: ', video_buffer_sizer ) )
gridbox = ClientGUICommon.WrapInGrid( buffer_panel, rows )
buffer_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
QP.AddToLayout( vbox, buffer_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
#
rows = []
rows.append( ( 'Forced system:limit for all searches: ', self._forced_search_limit ) )
gridbox = ClientGUICommon.WrapInGrid( misc_panel, rows )
misc_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
QP.AddToLayout( vbox, misc_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
#
vbox.addStretch( 1 )
self.setLayout( vbox )
#
self._image_cache_storage_limit_percentage.valueChanged.connect( self.EventImageCacheUpdate )
self._image_cache_prefetch_limit_percentage.valueChanged.connect( self.EventImageCacheUpdate )
self.EventImageCacheUpdate()
self.EventThumbnailsUpdate( self._thumbnail_cache_size.value() )
self.EventImageTilesUpdate()
self.EventVideoBufferUpdate( self._video_buffer_size_mb.value() )
def EventImageCacheUpdate( self ):
cache_size = self._fullscreen_cache_size.value() * 1048576
display_size = ClientGUIFunctions.GetDisplaySize( self )
estimated_bytes_per_fullscreen = 3 * display_size.width() * display_size.height()
estimate = cache_size // estimated_bytes_per_fullscreen
self._estimated_number_fullscreens.setText( '(about {}-{} images the size of your screen)'.format( HydrusData.ToHumanInt( estimate // 2 ), HydrusData.ToHumanInt( estimate * 2 ) ) )
num_pixels = cache_size * ( self._image_cache_storage_limit_percentage.value() / 100 ) / 3
unit_square = num_pixels / ( 16 * 9 )
unit_length = unit_square ** 0.5
resolution = ( int( 16 * unit_length ), int( 9 * unit_length ) )
self._image_cache_storage_limit_percentage_st.setText( 'about a {} image'.format( HydrusData.ConvertResolutionToPrettyString( resolution ) ) )
num_pixels = cache_size * ( self._image_cache_prefetch_limit_percentage.value() / 100 ) / 3
unit_square = num_pixels / ( 16 * 9 )
unit_length = unit_square ** 0.5
resolution = ( int( 16 * unit_length ), int( 9 * unit_length ) )
self._image_cache_prefetch_limit_percentage_st.setText( 'about a {} image'.format( HydrusData.ConvertResolutionToPrettyString( resolution ) ) )
def EventImageTilesUpdate( self ):
value = self._image_tile_cache_size.GetValue()
display_size = ClientGUIFunctions.GetDisplaySize( self )
estimated_bytes_per_fullscreen = 3 * display_size.width() * display_size.height()
estimate = value // estimated_bytes_per_fullscreen
self._estimated_number_image_tiles.setText( '(about {} fullscreens)'.format( HydrusData.ToHumanInt( estimate ) ) )
def EventThumbnailsUpdate( self, value ):
( thumbnail_width, thumbnail_height ) = HC.options[ 'thumbnail_dimensions' ]
res_string = HydrusData.ConvertResolutionToPrettyString( ( thumbnail_width, thumbnail_height ) )
estimated_bytes_per_thumb = 3 * thumbnail_width * thumbnail_height
estimated_thumbs = ( value * 1024 * 1024 ) // estimated_bytes_per_thumb
self._estimated_number_thumbnails.setText( '(at '+res_string+', about '+HydrusData.ToHumanInt(estimated_thumbs)+' thumbnails)' )
def EventVideoBufferUpdate( self, value ):
    """Refresh the '(about n frames of 720p video)' label when the buffer size (in MB) changes."""
    
    # one 720p frame at 3 bytes per pixel; // already yields an int
    bytes_per_720p_frame = 1280 * 720 * 3
    
    estimated_720p_frames = ( value * 1024 * 1024 ) // bytes_per_720p_frame
    
    self._estimated_number_video_frames.setText( '(about {} frames of 720p video)'.format( HydrusData.ToHumanInt( estimated_720p_frames ) ) )
def UpdateOptions( self ):
    """Write this page's cache/prefetch widget values back into the two option stores."""
    
    MEGABYTE = 1048576
    
    # these two legacy options live in the old HC.options dict, in bytes
    HC.options[ 'thumbnail_cache_size' ] = self._thumbnail_cache_size.value() * MEGABYTE
    HC.options[ 'fullscreen_cache_size' ] = self._fullscreen_cache_size.value() * MEGABYTE
    
    self._new_options.SetInteger( 'image_tile_cache_size', self._image_tile_cache_size.GetValue() )
    
    # these controls expose GetValue()
    for ( option_name, control ) in (
        ( 'thumbnail_cache_timeout', self._thumbnail_cache_timeout ),
        ( 'image_cache_timeout', self._image_cache_timeout ),
        ( 'image_tile_cache_timeout', self._image_tile_cache_timeout )
    ):
        
        self._new_options.SetInteger( option_name, control.GetValue() )
    
    # these are plain spinboxes exposing value()
    for ( option_name, spinbox ) in (
        ( 'ideal_tile_dimension', self._ideal_tile_dimension ),
        ( 'media_viewer_prefetch_delay_base_ms', self._media_viewer_prefetch_delay_base_ms ),
        ( 'media_viewer_prefetch_num_previous', self._media_viewer_prefetch_num_previous ),
        ( 'media_viewer_prefetch_num_next', self._media_viewer_prefetch_num_next ),
        ( 'image_cache_storage_limit_percentage', self._image_cache_storage_limit_percentage ),
        ( 'image_cache_prefetch_limit_percentage', self._image_cache_prefetch_limit_percentage ),
        ( 'video_buffer_size_mb', self._video_buffer_size_mb )
    ):
        
        self._new_options.SetInteger( option_name, spinbox.value() )
    
    self._new_options.SetNoneableInteger( 'forced_search_limit', self._forced_search_limit.GetValue() )
class _StylePanel( QW.QWidget ):
    """Options page for choosing the Qt widget style and stylesheet.
    
    Selections are previewed live via StyleChanged and persisted in UpdateOptions.
    A data value of None in either dropdown means 'use the default'.
    """
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        #
        
        self._qt_style_name = ClientGUICommon.BetterChoice( self )
        self._qt_stylesheet_name = ClientGUICommon.BetterChoice( self )
        
        # None = default; the original style name is shown for reference in the label
        self._qt_style_name.addItem( 'use default ("{}")'.format( ClientGUIStyle.ORIGINAL_STYLE_NAME ), None )
        
        try:
            
            for name in ClientGUIStyle.GetAvailableStyles():
                
                self._qt_style_name.addItem( name, name )
            
        except HydrusExceptions.DataMissing as e:
            
            HydrusData.ShowException( e )
        
        self._qt_stylesheet_name.addItem( 'use default', None )
        
        try:
            
            for name in ClientGUIStyle.GetAvailableStylesheets():
                
                self._qt_stylesheet_name.addItem( name, name )
            
        except HydrusExceptions.DataMissing as e:
            
            HydrusData.ShowException( e )
        
        #
        
        self._qt_style_name.SetValue( self._new_options.GetNoneableString( 'qt_style_name' ) )
        self._qt_stylesheet_name.SetValue( self._new_options.GetNoneableString( 'qt_stylesheet_name' ) )
        
        #
        
        vbox = QP.VBoxLayout()
        
        #
        
        text = 'The current styles are what your Qt has available, the stylesheets are what .css and .qss files are currently in install_dir/static/qss.'
        
        st = ClientGUICommon.BetterStaticText( self, label = text )
        
        QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        rows = []
        
        rows.append( ( 'Qt style:', self._qt_style_name ) )
        rows.append( ( 'Qt stylesheet:', self._qt_stylesheet_name ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        self.setLayout( vbox )
        
        # apply changes immediately so the user can preview them
        self._qt_style_name.currentIndexChanged.connect( self.StyleChanged )
        self._qt_stylesheet_name.currentIndexChanged.connect( self.StyleChanged )
    
    def StyleChanged( self ):
        """Apply the currently selected style and stylesheet right away, reporting failures via a dialog."""
        
        qt_style_name = self._qt_style_name.GetValue()
        qt_stylesheet_name = self._qt_stylesheet_name.GetValue()
        
        try:
            
            if qt_style_name is None:
                
                # None selection -> revert to the style the app started with
                ClientGUIStyle.SetStyleFromName( ClientGUIStyle.ORIGINAL_STYLE_NAME )
                
            else:
                
                ClientGUIStyle.SetStyleFromName( qt_style_name )
            
        except Exception as e:
            
            QW.QMessageBox.critical( self, 'Critical', 'Could not apply style: {}'.format( str( e ) ) )
        
        try:
            
            if qt_stylesheet_name is None:
                
                ClientGUIStyle.ClearStylesheet()
                
            else:
                
                ClientGUIStyle.SetStylesheetFromPath( qt_stylesheet_name )
            
        except Exception as e:
            
            QW.QMessageBox.critical( self, 'Critical', 'Could not apply stylesheet: {}'.format( str( e ) ) )
    
    def UpdateOptions( self ):
        """Persist the selected style/stylesheet names (None = default) to the options object."""
        
        self._new_options.SetNoneableString( 'qt_style_name', self._qt_style_name.GetValue() )
        self._new_options.SetNoneableString( 'qt_stylesheet_name', self._qt_stylesheet_name.GetValue() )
class _SystemPanel( QW.QWidget ):
    """Options page for system-sleep settings: grace periods applied after a wake-from-sleep."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        #
        
        sleep_panel = ClientGUICommon.StaticBox( self, 'system sleep' )
        
        # seconds to wait after a detected wake before allowing new network access
        self._wake_delay_period = QP.MakeQSpinBox( sleep_panel, min = 0, max = 60 )
        
        tt = 'It sometimes takes a few seconds for your network adapter to reconnect after a wake. This adds a grace period after a detected wake-from-sleep to allow your OS to sort that out before Hydrus starts making requests.'
        
        self._wake_delay_period.setToolTip( tt )
        
        self._file_system_waits_on_wakeup = QW.QCheckBox( sleep_panel )
        self._file_system_waits_on_wakeup.setToolTip( 'This is useful if your hydrus is stored on a NAS that takes a few seconds to get going after your machine resumes from sleep.' )
        
        # load current values
        
        self._wake_delay_period.setValue( self._new_options.GetInteger( 'wake_delay_period' ) )
        self._file_system_waits_on_wakeup.setChecked( self._new_options.GetBoolean( 'file_system_waits_on_wakeup' ) )
        
        # layout
        
        rows = []
        
        rows.append( ( 'After a wake from system sleep, wait this many seconds before allowing new network access:', self._wake_delay_period ) )
        rows.append( ( 'Include the file system in this wait: ', self._file_system_waits_on_wakeup ) )
        
        gridbox = ClientGUICommon.WrapInGrid( sleep_panel, rows )
        
        sleep_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        #
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, sleep_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
    
    def UpdateOptions( self ):
        """Persist the sleep-related settings to the options object."""
        
        self._new_options.SetInteger( 'wake_delay_period', self._wake_delay_period.value() )
        self._new_options.SetBoolean( 'file_system_waits_on_wakeup', self._file_system_waits_on_wakeup.isChecked() )
class _SystemTrayPanel( QW.QWidget ):
    """Options page for system tray behaviour: icon visibility and minimise/close/start-in-tray."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        self._always_show_system_tray_icon = QW.QCheckBox( self )
        self._minimise_client_to_system_tray = QW.QCheckBox( self )
        self._close_client_to_system_tray = QW.QCheckBox( self )
        self._start_client_in_system_tray = QW.QCheckBox( self )
        
        # load current values
        
        self._always_show_system_tray_icon.setChecked( self._new_options.GetBoolean( 'always_show_system_tray_icon' ) )
        self._minimise_client_to_system_tray.setChecked( self._new_options.GetBoolean( 'minimise_client_to_system_tray' ) )
        self._close_client_to_system_tray.setChecked( self._new_options.GetBoolean( 'close_client_to_system_tray' ) )
        self._start_client_in_system_tray.setChecked( self._new_options.GetBoolean( 'start_client_in_system_tray' ) )
        
        # layout
        
        vbox = QP.VBoxLayout()
        
        rows = []
        
        rows.append( ( 'Always show the hydrus system tray icon: ', self._always_show_system_tray_icon ) )
        rows.append( ( 'Minimise the main window to system tray: ', self._minimise_client_to_system_tray ) )
        rows.append( ( 'Close the main window to system tray: ', self._close_client_to_system_tray ) )
        rows.append( ( 'Start the client minimised to system tray: ', self._start_client_in_system_tray ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        # local import -- presumably to defer loading tray machinery until this panel is built; confirm before hoisting
        from hydrus.client.gui import ClientGUISystemTray
        
        if not ClientGUISystemTray.SystemTrayAvailable():
            
            # no supported tray at all: explain and disable every control
            QP.AddToLayout( vbox, ClientGUICommon.BetterStaticText( self, 'Unfortunately, your system does not seem to have a supported system tray.' ), CC.FLAGS_EXPAND_PERPENDICULAR )
            
            self._always_show_system_tray_icon.setEnabled( False )
            self._minimise_client_to_system_tray.setEnabled( False )
            self._close_client_to_system_tray.setEnabled( False )
            self._start_client_in_system_tray.setEnabled( False )
            
        elif not HC.PLATFORM_WINDOWS:
            
            # non-Windows tray support is experimental: only advanced-mode users get live controls
            if not HG.client_controller.new_options.GetBoolean( 'advanced_mode' ):
                
                label = 'This is turned off for non-advanced non-Windows users for now.'
                
                self._always_show_system_tray_icon.setEnabled( False )
                self._minimise_client_to_system_tray.setEnabled( False )
                self._close_client_to_system_tray.setEnabled( False )
                self._start_client_in_system_tray.setEnabled( False )
                
            else:
                
                label = 'This can be buggy/crashy on non-Windows, hydev will keep working on this.'
            
            QP.AddToLayout( vbox, ClientGUICommon.BetterStaticText( self, label ), CC.FLAGS_EXPAND_PERPENDICULAR )
        
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
    
    def UpdateOptions( self ):
        """Persist the four tray-behaviour booleans to the options object."""
        
        self._new_options.SetBoolean( 'always_show_system_tray_icon', self._always_show_system_tray_icon.isChecked() )
        self._new_options.SetBoolean( 'minimise_client_to_system_tray', self._minimise_client_to_system_tray.isChecked() )
        self._new_options.SetBoolean( 'close_client_to_system_tray', self._close_client_to_system_tray.isChecked() )
        self._new_options.SetBoolean( 'start_client_in_system_tray', self._start_client_in_system_tray.isChecked() )
class _TagsPanel( QW.QWidget ):
    """Options page for general tag settings (default services, sort, parent expansion) and favourite tags."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        #
        
        general_panel = ClientGUICommon.StaticBox( self, 'general tag options' )
        
        self._default_tag_sort = ClientGUITagSorting.TagSortControl( general_panel, self._new_options.GetDefaultTagSort(), show_siblings = True )
        
        self._default_tag_repository = ClientGUICommon.BetterChoice( general_panel )
        
        self._default_tag_service_search_page = ClientGUICommon.BetterChoice( general_panel )
        
        self._expand_parents_on_storage_taglists = QW.QCheckBox( general_panel )
        self._expand_parents_on_storage_autocomplete_taglists = QW.QCheckBox( general_panel )
        
        self._ac_select_first_with_count = QW.QCheckBox( general_panel )
        
        #
        
        favourites_panel = ClientGUICommon.StaticBox( self, 'favourite tags' )
        
        desc = 'These tags will appear in your tag autocomplete results area, under the \'favourites\' tab.'
        
        favourites_st = ClientGUICommon.BetterStaticText( favourites_panel, desc )
        
        favourites_st.setWordWrap( True )
        
        self._favourites = ClientGUIListBoxes.ListBoxTagsStringsAddRemove( favourites_panel, CC.COMBINED_TAG_SERVICE_KEY, ClientTags.TAG_DISPLAY_STORAGE )
        
        self._favourites_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( favourites_panel, self._favourites.AddTags, CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_TAG_SERVICE_KEY, show_paste_button = True )
        
        # populate the service dropdowns: 'all known tags' first, then every real tag service
        
        self._default_tag_service_search_page.addItem( 'all known tags', CC.COMBINED_TAG_SERVICE_KEY )
        
        services = HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES )
        
        for service in services:
            
            self._default_tag_repository.addItem( service.GetName(), service.GetServiceKey() )
            
            self._default_tag_service_search_page.addItem( service.GetName(), service.GetServiceKey() )
        
        default_tag_repository_key = HC.options[ 'default_tag_repository' ]
        
        self._default_tag_repository.SetValue( default_tag_repository_key )
        
        self._default_tag_service_search_page.SetValue( new_options.GetKey( 'default_tag_service_search_page' ) )
        
        self._expand_parents_on_storage_taglists.setChecked( self._new_options.GetBoolean( 'expand_parents_on_storage_taglists' ) )
        self._expand_parents_on_storage_taglists.setToolTip( 'This affects taglists in places like the manage tags dialog, where you edit tags as they actually are, and implied parents hang below tags.' )
        
        self._expand_parents_on_storage_autocomplete_taglists.setChecked( self._new_options.GetBoolean( 'expand_parents_on_storage_autocomplete_taglists' ) )
        self._expand_parents_on_storage_autocomplete_taglists.setToolTip( 'This affects the autocomplete results taglist.' )
        
        self._ac_select_first_with_count.setChecked( self._new_options.GetBoolean( 'ac_select_first_with_count' ) )
        
        #
        
        self._favourites.SetTags( new_options.GetStringList( 'favourite_tags' ) )
        
        # layout
        
        vbox = QP.VBoxLayout()
        
        rows = []
        
        rows.append( ( 'Default tag service in manage tag dialogs: ', self._default_tag_repository ) )
        rows.append( ( 'Default tag service in search pages: ', self._default_tag_service_search_page ) )
        rows.append( ( 'Default tag sort: ', self._default_tag_sort ) )
        rows.append( ( 'Show parents expanded by default on edit/write taglists: ', self._expand_parents_on_storage_taglists ) )
        rows.append( ( 'Show parents expanded by default on edit/write autocomplete taglists: ', self._expand_parents_on_storage_autocomplete_taglists ) )
        rows.append( ( 'By default, select the first tag result with actual count in write-autocomplete: ', self._ac_select_first_with_count ) )
        
        gridbox = ClientGUICommon.WrapInGrid( general_panel, rows )
        
        general_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        QP.AddToLayout( vbox, general_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        #
        
        favourites_panel.Add( favourites_st, CC.FLAGS_EXPAND_PERPENDICULAR )
        favourites_panel.Add( self._favourites, CC.FLAGS_EXPAND_BOTH_WAYS )
        favourites_panel.Add( self._favourites_input )
        
        QP.AddToLayout( vbox, favourites_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        #
        
        self.setLayout( vbox )
    
    def UpdateOptions( self ):
        """Persist general tag settings and the favourite tags list to the option stores."""
        
        # the default tag repository still lives in the legacy HC.options dict
        HC.options[ 'default_tag_repository' ] = self._default_tag_repository.GetValue()
        
        self._new_options.SetDefaultTagSort( self._default_tag_sort.GetValue() )
        
        self._new_options.SetBoolean( 'expand_parents_on_storage_taglists', self._expand_parents_on_storage_taglists.isChecked() )
        self._new_options.SetBoolean( 'expand_parents_on_storage_autocomplete_taglists', self._expand_parents_on_storage_autocomplete_taglists.isChecked() )
        
        self._new_options.SetBoolean( 'ac_select_first_with_count', self._ac_select_first_with_count.isChecked() )
        
        self._new_options.SetKey( 'default_tag_service_search_page', self._default_tag_service_search_page.GetValue() )
        
        #
        
        self._new_options.SetStringList( 'favourite_tags', list( self._favourites.GetTags() ) )
class _TagPresentationPanel( QW.QWidget ):
    """Options page for how tags are presented: thumbnail/media-viewer summaries, namespace rendering, and namespace colours."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        # tag summary generator buttons, one per presentation location
        
        tag_summary_generator = self._new_options.GetTagSummaryGenerator( 'thumbnail_top' )
        
        self._thumbnail_top = ClientGUITags.TagSummaryGeneratorButton( self, tag_summary_generator )
        
        tag_summary_generator = self._new_options.GetTagSummaryGenerator( 'thumbnail_bottom_right' )
        
        self._thumbnail_bottom_right = ClientGUITags.TagSummaryGeneratorButton( self, tag_summary_generator )
        
        tag_summary_generator = self._new_options.GetTagSummaryGenerator( 'media_viewer_top' )
        
        self._media_viewer_top = ClientGUITags.TagSummaryGeneratorButton( self, tag_summary_generator )
        
        #
        
        render_panel = ClientGUICommon.StaticBox( self, 'namespace rendering' )
        
        render_st = ClientGUICommon.BetterStaticText( render_panel, label = 'Namespaced tags are stored and directly edited in hydrus as "namespace:subtag", but most presentation windows can display them differently.' )
        
        self._show_namespaces = QW.QCheckBox( render_panel )
        self._namespace_connector = QW.QLineEdit( render_panel )
        self._replace_tag_underscores_with_spaces = QW.QCheckBox( render_panel )
        
        #
        
        namespace_colours_panel = ClientGUICommon.StaticBox( self, 'namespace colours' )
        
        self._namespace_colours = ClientGUIListBoxes.ListBoxTagsColourOptions( namespace_colours_panel, HC.options[ 'namespace_colours' ] )
        
        self._edit_namespace_colour = QW.QPushButton( 'edit selected', namespace_colours_panel )
        self._edit_namespace_colour.clicked.connect( self.EventEditNamespaceColour )
        
        # typing a namespace and hitting enter adds it with a random colour
        self._new_namespace_colour = QW.QLineEdit( namespace_colours_panel )
        self._new_namespace_colour.installEventFilter( ClientGUICommon.TextCatchEnterEventFilter( self._new_namespace_colour, self.AddNamespaceColour ) )
        
        # load current values
        
        self._show_namespaces.setChecked( new_options.GetBoolean( 'show_namespaces' ) )
        self._namespace_connector.setText( new_options.GetString( 'namespace_connector' ) )
        self._replace_tag_underscores_with_spaces.setChecked( new_options.GetBoolean( 'replace_tag_underscores_with_spaces' ) )
        
        # layout
        
        namespace_colours_panel.Add( self._namespace_colours, CC.FLAGS_EXPAND_BOTH_WAYS )
        namespace_colours_panel.Add( self._new_namespace_colour, CC.FLAGS_EXPAND_PERPENDICULAR )
        namespace_colours_panel.Add( self._edit_namespace_colour, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        #
        
        vbox = QP.VBoxLayout()
        
        #
        
        rows = []
        
        rows.append( ( 'On thumbnail top:', self._thumbnail_top ) )
        rows.append( ( 'On thumbnail bottom-right:', self._thumbnail_bottom_right ) )
        rows.append( ( 'On media viewer top:', self._media_viewer_top ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        #
        
        rows = []
        
        rows.append( ( 'Show namespaces: ', self._show_namespaces ) )
        rows.append( ( 'If shown, namespace connecting string: ', self._namespace_connector ) )
        rows.append( ( 'EXPERIMENTAL: Replace all underscores with spaces: ', self._replace_tag_underscores_with_spaces ) )
        
        gridbox = ClientGUICommon.WrapInGrid( render_panel, rows )
        
        render_panel.Add( render_st, CC.FLAGS_EXPAND_PERPENDICULAR )
        render_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        QP.AddToLayout( vbox, render_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        #
        
        QP.AddToLayout( vbox, namespace_colours_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        #
        
        self.setLayout( vbox )
    
    def EventEditNamespaceColour( self ):
        """Open a colour picker for each currently selected namespace and apply any valid choice."""
        
        results = self._namespace_colours.GetSelectedNamespaceColours()
        
        for ( namespace, ( r, g, b ) ) in list( results.items() ):
            
            colour = QG.QColor( r, g, b )
            
            colour = QW.QColorDialog.getColor( colour, self, 'Namespace colour', QW.QColorDialog.ShowAlphaChannel )
            
            # an invalid colour means the user cancelled the dialog
            if colour.isValid():
                
                self._namespace_colours.SetNamespaceColour( namespace, colour )
    
    def AddNamespaceColour( self ):
        """Add the typed namespace to the colour list with a random colour, then clear the input."""
        
        namespace = self._new_namespace_colour.text()
        
        if namespace != '':
            
            self._namespace_colours.SetNamespaceColour( namespace, QG.QColor( random.randint(0,255), random.randint(0,255), random.randint(0,255) ) )
            
            self._new_namespace_colour.clear()
    
    def UpdateOptions( self ):
        """Persist tag summary generators, namespace rendering flags, and namespace colours."""
        
        self._new_options.SetTagSummaryGenerator( 'thumbnail_top', self._thumbnail_top.GetValue() )
        self._new_options.SetTagSummaryGenerator( 'thumbnail_bottom_right', self._thumbnail_bottom_right.GetValue() )
        self._new_options.SetTagSummaryGenerator( 'media_viewer_top', self._media_viewer_top.GetValue() )
        
        self._new_options.SetBoolean( 'show_namespaces', self._show_namespaces.isChecked() )
        self._new_options.SetString( 'namespace_connector', self._namespace_connector.text() )
        self._new_options.SetBoolean( 'replace_tag_underscores_with_spaces', self._replace_tag_underscores_with_spaces.isChecked() )
        
        # namespace colours still live in the legacy HC.options dict
        HC.options[ 'namespace_colours' ] = self._namespace_colours.GetNamespaceColours()
class _TagSuggestionsPanel( QW.QWidget ):
    """Options page for the 'suggested tags' column in the manage tags dialog.
    
    Four suggestion sources live in a notebook: per-service favourites, statistically
    related tags, file lookup scripts, and recently used tags. Per-service favourite
    edits are staged in _suggested_favourites_dict and only persisted in UpdateOptions.
    """
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        suggested_tags_panel = ClientGUICommon.StaticBox( self, 'suggested tags' )
        
        self._suggested_tags_width = QP.MakeQSpinBox( suggested_tags_panel, min=20, max=65535 )
        
        self._suggested_tags_layout = ClientGUICommon.BetterChoice( suggested_tags_panel )
        
        self._suggested_tags_layout.addItem( 'notebook', 'notebook' )
        self._suggested_tags_layout.addItem( 'side-by-side', 'columns' )
        
        suggest_tags_panel_notebook = QW.QTabWidget( suggested_tags_panel )
        
        # favourites tab: per-tag-service favourite suggestion lists
        
        suggested_tags_favourites_panel = QW.QWidget( suggest_tags_panel_notebook )
        
        suggested_tags_favourites_panel.setMinimumWidth( 400 )
        
        self._suggested_favourites_services = ClientGUICommon.BetterChoice( suggested_tags_favourites_panel )
        
        tag_services = HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES )
        
        for tag_service in tag_services:
            
            self._suggested_favourites_services.addItem( tag_service.GetName(), tag_service.GetServiceKey() )
        
        self._suggested_favourites = ClientGUIListBoxes.ListBoxTagsStringsAddRemove( suggested_tags_favourites_panel, CC.COMBINED_TAG_SERVICE_KEY, ClientTags.TAG_DISPLAY_STORAGE )
        
        # which service's favourites the listbox currently shows; None until first selection
        self._current_suggested_favourites_service = None
        
        # staged edits per service key, written back in UpdateOptions
        self._suggested_favourites_dict = {}
        
        self._suggested_favourites_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( suggested_tags_favourites_panel, self._suggested_favourites.AddTags, CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_TAG_SERVICE_KEY, show_paste_button = True )
        
        # related tab: database-driven related-tag search settings
        
        suggested_tags_related_panel = QW.QWidget( suggest_tags_panel_notebook )
        
        self._show_related_tags = QW.QCheckBox( suggested_tags_related_panel )
        
        self._related_tags_search_1_duration_ms = QP.MakeQSpinBox( suggested_tags_related_panel, min=50, max=60000 )
        self._related_tags_search_2_duration_ms = QP.MakeQSpinBox( suggested_tags_related_panel, min=50, max=60000 )
        self._related_tags_search_3_duration_ms = QP.MakeQSpinBox( suggested_tags_related_panel, min=50, max=60000 )
        
        # file lookup scripts tab
        
        suggested_tags_file_lookup_script_panel = QW.QWidget( suggest_tags_panel_notebook )
        
        self._show_file_lookup_script_tags = QW.QCheckBox( suggested_tags_file_lookup_script_panel )
        
        self._favourite_file_lookup_script = ClientGUICommon.BetterChoice( suggested_tags_file_lookup_script_panel )
        
        script_names = sorted( HG.client_controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_PARSE_ROOT_FILE_LOOKUP ) )
        
        for name in script_names:
            
            self._favourite_file_lookup_script.addItem( name, name )
        
        # recent tags tab
        
        suggested_tags_recent_panel = QW.QWidget( suggest_tags_panel_notebook )
        
        self._num_recent_tags = ClientGUICommon.NoneableSpinCtrl( suggested_tags_recent_panel, 'number of recent tags to show', min = 1, none_phrase = 'do not show' )
        
        # load current values
        
        self._suggested_tags_width.setValue( self._new_options.GetInteger( 'suggested_tags_width' ) )
        
        self._suggested_tags_layout.SetValue( self._new_options.GetNoneableString( 'suggested_tags_layout' ) )
        
        self._show_related_tags.setChecked( self._new_options.GetBoolean( 'show_related_tags' ) )
        
        self._related_tags_search_1_duration_ms.setValue( self._new_options.GetInteger( 'related_tags_search_1_duration_ms' ) )
        self._related_tags_search_2_duration_ms.setValue( self._new_options.GetInteger( 'related_tags_search_2_duration_ms' ) )
        self._related_tags_search_3_duration_ms.setValue( self._new_options.GetInteger( 'related_tags_search_3_duration_ms' ) )
        
        self._show_file_lookup_script_tags.setChecked( self._new_options.GetBoolean( 'show_file_lookup_script_tags' ) )
        
        self._favourite_file_lookup_script.SetValue( self._new_options.GetNoneableString( 'favourite_file_lookup_script' ) )
        
        self._num_recent_tags.SetValue( self._new_options.GetNoneableInteger( 'num_recent_tags' ) )
        
        # layout: favourites tab
        
        panel_vbox = QP.VBoxLayout()
        
        QP.AddToLayout( panel_vbox, self._suggested_favourites_services, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( panel_vbox, self._suggested_favourites, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( panel_vbox, self._suggested_favourites_input, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        suggested_tags_favourites_panel.setLayout( panel_vbox )
        
        # layout: related tab
        
        panel_vbox = QP.VBoxLayout()
        
        rows = []
        
        rows.append( ( 'Show related tags on single-file manage tags windows: ', self._show_related_tags ) )
        rows.append( ( 'Initial search duration (ms): ', self._related_tags_search_1_duration_ms ) )
        rows.append( ( 'Medium search duration (ms): ', self._related_tags_search_2_duration_ms ) )
        rows.append( ( 'Thorough search duration (ms): ', self._related_tags_search_3_duration_ms ) )
        
        gridbox = ClientGUICommon.WrapInGrid( suggested_tags_related_panel, rows )
        
        desc = 'This will search the database for statistically related tags based on what your focused file already has.'
        
        QP.AddToLayout( panel_vbox, ClientGUICommon.BetterStaticText(suggested_tags_related_panel,desc), CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( panel_vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        suggested_tags_related_panel.setLayout( panel_vbox )
        
        # layout: file lookup scripts tab
        
        panel_vbox = QP.VBoxLayout()
        
        rows = []
        
        rows.append( ( 'Show file lookup scripts on single-file manage tags windows: ', self._show_file_lookup_script_tags ) )
        rows.append( ( 'Favourite file lookup script: ', self._favourite_file_lookup_script ) )
        
        gridbox = ClientGUICommon.WrapInGrid( suggested_tags_file_lookup_script_panel, rows )
        
        QP.AddToLayout( panel_vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        suggested_tags_file_lookup_script_panel.setLayout( panel_vbox )
        
        # layout: recent tab
        
        panel_vbox = QP.VBoxLayout()
        
        QP.AddToLayout( panel_vbox, self._num_recent_tags, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        panel_vbox.addStretch( 1 )
        
        suggested_tags_recent_panel.setLayout( panel_vbox )
        
        # assemble notebook and outer panel
        
        suggest_tags_panel_notebook.addTab( suggested_tags_favourites_panel, 'favourites' )
        suggest_tags_panel_notebook.addTab( suggested_tags_related_panel, 'related' )
        suggest_tags_panel_notebook.addTab( suggested_tags_file_lookup_script_panel, 'file lookup scripts' )
        suggest_tags_panel_notebook.addTab( suggested_tags_recent_panel, 'recent' )
        
        #
        
        rows = []
        
        rows.append( ( 'Width of suggested tags columns: ', self._suggested_tags_width ) )
        rows.append( ( 'Column layout: ', self._suggested_tags_layout ) )
        
        gridbox = ClientGUICommon.WrapInGrid( suggested_tags_panel, rows )
        
        desc = 'The manage tags dialog can provide several kinds of tag suggestions. For simplicity, most are turned off by default.'
        
        suggested_tags_panel.Add( ClientGUICommon.BetterStaticText( suggested_tags_panel, desc ), CC.FLAGS_EXPAND_PERPENDICULAR )
        suggested_tags_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        suggested_tags_panel.Add( suggest_tags_panel_notebook, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        #
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, suggested_tags_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.setLayout( vbox )
        
        # load the favourites for the initially selected service
        
        self._suggested_favourites_services.currentIndexChanged.connect( self.EventSuggestedFavouritesService )
        
        self.EventSuggestedFavouritesService( None )
    
    def _SaveCurrentSuggestedFavourites( self ):
        """Stage the currently displayed favourites list under its service key before switching away."""
        
        if self._current_suggested_favourites_service is not None:
            
            self._suggested_favourites_dict[ self._current_suggested_favourites_service ] = self._suggested_favourites.GetTags()
    
    def EventSuggestedFavouritesService( self, index ):
        """Swap the favourites listbox over to the newly selected tag service.
        
        The 'index' argument comes from the Qt signal and is not used directly.
        """
        
        self._SaveCurrentSuggestedFavourites()
        
        self._current_suggested_favourites_service = self._suggested_favourites_services.GetValue()
        
        # prefer staged (unsaved) edits over the stored options
        if self._current_suggested_favourites_service in self._suggested_favourites_dict:
            
            favourites = self._suggested_favourites_dict[ self._current_suggested_favourites_service ]
            
        else:
            
            favourites = self._new_options.GetSuggestedTagsFavourites( self._current_suggested_favourites_service )
        
        self._suggested_favourites.SetTagServiceKey( self._current_suggested_favourites_service )
        self._suggested_favourites.SetTags( favourites )
        
        self._suggested_favourites_input.SetTagServiceKey( self._current_suggested_favourites_service )
        self._suggested_favourites_input.SetDisplayTagServiceKey( self._current_suggested_favourites_service )
    
    def UpdateOptions( self ):
        """Persist all suggestion settings, including every staged per-service favourites list."""
        
        self._new_options.SetInteger( 'suggested_tags_width', self._suggested_tags_width.value() )
        self._new_options.SetNoneableString( 'suggested_tags_layout', self._suggested_tags_layout.GetValue() )
        
        # make sure the list currently on screen is staged too
        self._SaveCurrentSuggestedFavourites()
        
        for ( service_key, favourites ) in list(self._suggested_favourites_dict.items()):
            
            self._new_options.SetSuggestedTagsFavourites( service_key, favourites )
        
        self._new_options.SetBoolean( 'show_related_tags', self._show_related_tags.isChecked() )
        
        self._new_options.SetInteger( 'related_tags_search_1_duration_ms', self._related_tags_search_1_duration_ms.value() )
        self._new_options.SetInteger( 'related_tags_search_2_duration_ms', self._related_tags_search_2_duration_ms.value() )
        self._new_options.SetInteger( 'related_tags_search_3_duration_ms', self._related_tags_search_3_duration_ms.value() )
        
        self._new_options.SetBoolean( 'show_file_lookup_script_tags', self._show_file_lookup_script_tags.isChecked() )
        self._new_options.SetNoneableString( 'favourite_file_lookup_script', self._favourite_file_lookup_script.GetValue() )
        
        self._new_options.SetNoneableInteger( 'num_recent_tags', self._num_recent_tags.GetValue() )
class _ThumbnailsPanel( QW.QWidget ):
    """Options page for thumbnail dimensions, borders, scrolling behaviour, and panel background image.
    
    Fix over the previous revision: UpdateOptions used a bare 'except: pass' around the
    scroll-rate validation, which would silently swallow ANY error (including
    SystemExit/KeyboardInterrupt). It now catches only the ValueError that float() can
    raise on bad input, keeping the intended best-effort behaviour.
    """
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        self._thumbnail_width = QP.MakeQSpinBox( self, min=20, max=2048 )
        self._thumbnail_height = QP.MakeQSpinBox( self, min=20, max=2048 )
        
        self._thumbnail_border = QP.MakeQSpinBox( self, min=0, max=20 )
        self._thumbnail_margin = QP.MakeQSpinBox( self, min=0, max=20 )
        
        self._video_thumbnail_percentage_in = QP.MakeQSpinBox( self, min=0, max=100 )
        
        # stored as a string; validated as a float on save
        self._thumbnail_scroll_rate = QW.QLineEdit( self )
        
        self._thumbnail_fill = QW.QCheckBox( self )
        
        self._thumbnail_visibility_scroll_percent = QP.MakeQSpinBox( self, min=1, max=99 )
        self._thumbnail_visibility_scroll_percent.setToolTip( 'Lower numbers will cause fewer scrolls, higher numbers more.' )
        
        self._media_background_bmp_path = QP.FilePickerCtrl( self )
        
        # load current values
        
        ( thumbnail_width, thumbnail_height ) = HC.options[ 'thumbnail_dimensions' ]
        
        self._thumbnail_width.setValue( thumbnail_width )
        self._thumbnail_height.setValue( thumbnail_height )
        
        self._thumbnail_border.setValue( self._new_options.GetInteger( 'thumbnail_border' ) )
        self._thumbnail_margin.setValue( self._new_options.GetInteger( 'thumbnail_margin' ) )
        
        self._video_thumbnail_percentage_in.setValue( self._new_options.GetInteger( 'video_thumbnail_percentage_in' ) )
        
        self._thumbnail_scroll_rate.setText( self._new_options.GetString( 'thumbnail_scroll_rate' ) )
        
        self._thumbnail_fill.setChecked( self._new_options.GetBoolean( 'thumbnail_fill' ) )
        
        self._thumbnail_visibility_scroll_percent.setValue( self._new_options.GetInteger( 'thumbnail_visibility_scroll_percent' ) )
        
        media_background_bmp_path = self._new_options.GetNoneableString( 'media_background_bmp_path' )
        
        # None = no background image; the picker only takes a real path
        if media_background_bmp_path is not None:
            
            self._media_background_bmp_path.SetPath( media_background_bmp_path )
        
        # layout
        
        rows = []
        
        rows.append( ( 'Thumbnail width: ', self._thumbnail_width ) )
        rows.append( ( 'Thumbnail height: ', self._thumbnail_height ) )
        rows.append( ( 'Thumbnail border: ', self._thumbnail_border ) )
        rows.append( ( 'Thumbnail margin: ', self._thumbnail_margin ) )
        rows.append( ( 'Generate video thumbnails this % in: ', self._video_thumbnail_percentage_in ) )
        rows.append( ( 'Do not scroll down on key navigation if thumbnail at least this % visible: ', self._thumbnail_visibility_scroll_percent ) )
        rows.append( ( 'EXPERIMENTAL: Scroll thumbnails at this rate per scroll tick: ', self._thumbnail_scroll_rate ) )
        rows.append( ( 'EXPERIMENTAL: Zoom thumbnails so they \'fill\' their space: ', self._thumbnail_fill ) )
        rows.append( ( 'EXPERIMENTAL: Image path for thumbnail panel background image (set blank to clear): ', self._media_background_bmp_path ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        self.setLayout( vbox )
    
    def UpdateOptions( self ):
        """Persist the thumbnail settings.
        
        An unparseable scroll rate is discarded (the previously stored value is kept);
        an empty background image path is stored as None, meaning 'no background image'.
        """
        
        new_thumbnail_dimensions = [ self._thumbnail_width.value(), self._thumbnail_height.value() ]
        
        HC.options[ 'thumbnail_dimensions' ] = new_thumbnail_dimensions
        
        self._new_options.SetInteger( 'thumbnail_border', self._thumbnail_border.value() )
        self._new_options.SetInteger( 'thumbnail_margin', self._thumbnail_margin.value() )
        
        self._new_options.SetInteger( 'video_thumbnail_percentage_in', self._video_thumbnail_percentage_in.value() )
        
        thumbnail_scroll_rate = self._thumbnail_scroll_rate.text()
        
        try:
            
            float( thumbnail_scroll_rate )  # validation only; the option itself is stored as a string
            
        except ValueError:
            
            # bad user input: keep the old stored value rather than crash.
            # was previously a bare 'except:', which hid every possible error
            pass
            
        else:
            
            self._new_options.SetString( 'thumbnail_scroll_rate', thumbnail_scroll_rate )
        
        self._new_options.SetBoolean( 'thumbnail_fill', self._thumbnail_fill.isChecked() )
        
        self._new_options.SetInteger( 'thumbnail_visibility_scroll_percent', self._thumbnail_visibility_scroll_percent.value() )
        
        media_background_bmp_path = self._media_background_bmp_path.GetPath()
        
        if media_background_bmp_path == '':
            
            media_background_bmp_path = None
        
        self._new_options.SetNoneableString( 'media_background_bmp_path', media_background_bmp_path )
def CommitChanges( self ):
    """Run every active options page's UpdateOptions, then write both option stores to the database.
    
    Any failure during the database write is reported to the user with a full traceback.
    """
    
    for page in self._listbook.GetActivePages():
        
        page.UpdateOptions()
    
    try:
        
        HG.client_controller.WriteSynchronous( 'save_options', HC.options )
        
        HG.client_controller.WriteSynchronous( 'serialisable', self._new_options )
        
    except Exception:
        
        # was a bare 'except:', which would also have swallowed
        # SystemExit/KeyboardInterrupt; report the error instead
        QW.QMessageBox.critical( self, 'Error', traceback.format_exc() )
class ManageURLsPanel( ClientGUIScrolledPanels.ManagePanel ):
    """Dialog panel for viewing, adding, and removing the known URLs of media files.
    
    Edits are applied immediately to local duplicates of the media and staged
    as content updates in self._pending_content_updates; the database is only
    written when CommitChanges is called.
    """
    
    def __init__( self, parent, media ):
        """Build the URL list, input box, and copy/paste buttons for the given media."""
        
        ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
        
        media = ClientMedia.FlattenMedia( media )
        
        # work on duplicates so staged edits do not mutate the caller's media objects
        self._current_media = [ m.Duplicate() for m in media ]
        
        self._multiple_files_warning = ClientGUICommon.BetterStaticText( self, label = 'Warning: you are editing urls for multiple files!\nBe very careful about adding URLs here, as they will apply to everything.\nAdding the same URL to multiple files is only appropriate for gallery-type URLs!' )
        self._multiple_files_warning.setObjectName( 'HydrusWarning' )
        
        # the multi-file warning is only relevant when editing more than one file
        if len( self._current_media ) == 1:
            
            self._multiple_files_warning.hide()
            
        
        self._urls_listbox = QW.QListWidget( self )
        self._urls_listbox.setSortingEnabled( True )
        self._urls_listbox.setSelectionMode( QW.QAbstractItemView.ExtendedSelection )
        # double-clicking a row removes that URL (and puts it back in the input box)
        self._urls_listbox.itemDoubleClicked.connect( self.EventListDoubleClick )
        
        self._listbox_event_filter = QP.WidgetEventFilter( self._urls_listbox )
        self._listbox_event_filter.EVT_KEY_DOWN( self.EventListKeyDown )
        
        ( width, height ) = ClientGUIFunctions.ConvertTextToPixels( self._urls_listbox, ( 120, 10 ) )
        
        self._urls_listbox.setMinimumWidth( width )
        self._urls_listbox.setMinimumHeight( height )
        
        self._url_input = QW.QLineEdit( self )
        # pressing enter in the input box adds the typed URL (or OKs the dialog if empty)
        self._url_input.installEventFilter( ClientGUICommon.TextCatchEnterEventFilter( self._url_input, self.AddURL ) )
        
        self._copy_button = ClientGUICommon.BetterButton( self, 'copy all', self._Copy )
        self._paste_button = ClientGUICommon.BetterButton( self, 'paste', self._Paste )
        
        # NOTE(review): these two sets appear unreferenced anywhere else in this
        # class -- possibly vestigial. TODO confirm and remove if dead.
        self._urls_to_add = set()
        self._urls_to_remove = set()
        
        #
        
        # content updates staged here are only written to the db in CommitChanges
        self._pending_content_updates = []
        
        # url -> number of current media carrying it; rebuilt by _UpdateList
        self._current_urls_count = collections.Counter()
        
        self._UpdateList()
        
        #
        
        hbox = QP.HBoxLayout()
        
        QP.AddToLayout( hbox, self._copy_button, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( hbox, self._paste_button, CC.FLAGS_CENTER_PERPENDICULAR )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, self._multiple_files_warning, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._urls_listbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( vbox, self._url_input, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, hbox, CC.FLAGS_ON_RIGHT )
        
        self.widget().setLayout( vbox )
        
        self._my_shortcut_handler = ClientGUIShortcuts.ShortcutsHandler( self, [ 'global', 'media', 'main_gui' ] )
        
        ClientGUIFunctions.SetFocusLater( self._url_input )
        
    
    def _Copy( self ):
        """Copy all currently listed URLs to the clipboard, one per line, sorted."""
        
        urls = sorted( self._current_urls_count.keys() )
        
        text = os.linesep.join( urls )
        
        HG.client_controller.pub( 'clipboard', 'text', text )
        
    
    def _EnterURL( self, url, only_add = False ):
        """Normalise the url and stage an ADD content update for media that lack it.
        
        NOTE(review): the only_add parameter is not referenced in this body --
        TODO confirm intended behaviour.
        """
        
        normalised_url = HG.client_controller.network_engine.domain_manager.NormaliseURL( url )
        
        addee_media = set()
        
        for m in self._current_media:
            
            locations_manager = m.GetLocationsManager()
            
            if normalised_url not in locations_manager.GetURLs():
                
                addee_media.add( m )
                
            
        
        if len( addee_media ) > 0:
            
            addee_hashes = { m.GetHash() for m in addee_media }
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD, ( ( normalised_url, ), addee_hashes ) )
            
            # apply to the local duplicates immediately so the list refresh shows it
            for m in addee_media:
                
                m.GetMediaResult().ProcessContentUpdate( CC.COMBINED_LOCAL_FILE_SERVICE_KEY, content_update )
                
            
            self._pending_content_updates.append( content_update )
            
        
        #
        
        self._UpdateList()
        
    
    def _Paste( self ):
        """Add every non-empty newline-separated URL found on the clipboard."""
        
        try:
            
            raw_text = HG.client_controller.GetClipboardText()
            
        except HydrusExceptions.DataMissing as e:
            
            QW.QMessageBox.warning( self, 'Warning', str(e) )
            
            return
            
        
        try:
            
            for url in HydrusText.DeserialiseNewlinedTexts( raw_text ):
                
                if url != '':
                    
                    self._EnterURL( url, only_add = True )
                    
                
            
        except Exception as e:
            
            QW.QMessageBox.warning( self, 'Warning', 'I could not understand what was in the clipboard: {}'.format( e ) )
            
        
    
    def _RemoveURL( self, url ):
        """Stage a DELETE content update for every current media that carries url."""
        
        removee_media = set()
        
        for m in self._current_media:
            
            locations_manager = m.GetLocationsManager()
            
            if url in locations_manager.GetURLs():
                
                removee_media.add( m )
                
            
        
        if len( removee_media ) > 0:
            
            removee_hashes = { m.GetHash() for m in removee_media }
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_DELETE, ( ( url, ), removee_hashes ) )
            
            # apply to the local duplicates immediately so the list refresh shows it
            for m in removee_media:
                
                m.GetMediaResult().ProcessContentUpdate( CC.COMBINED_LOCAL_FILE_SERVICE_KEY, content_update )
                
            
            self._pending_content_updates.append( content_update )
            
        
        #
        
        self._UpdateList()
        
    
    def _SetSearchFocus( self ):
        """Move keyboard focus to the URL input box."""
        
        self._url_input.setFocus( QC.Qt.OtherFocusReason )
        
    
    def _UpdateList( self ):
        """Rebuild the listbox and the url->count Counter from the current media."""
        
        self._urls_listbox.clear()
        
        self._current_urls_count = collections.Counter()
        
        for m in self._current_media:
            
            locations_manager = m.GetLocationsManager()
            
            for url in locations_manager.GetURLs():
                
                self._current_urls_count[ url ] += 1
                
            
        
        for ( url, count ) in self._current_urls_count.items():
            
            # counts are only interesting when editing multiple files at once
            if len( self._current_media ) == 1:
                
                label = url
                
            else:
                
                label = '{} ({})'.format( url, count )
                
            
            item = QW.QListWidgetItem()
            
            item.setText( label )
            # stash the raw url so selection handlers do not have to parse the label
            item.setData( QC.Qt.UserRole, url )
            
            self._urls_listbox.addItem( item )
            
        
    
    def EventListDoubleClick( self, item ):
        """Remove the selected URLs; if exactly one, put it back in the input box for editing."""
        
        urls = [ QP.GetClientData( self._urls_listbox, selection.row() ) for selection in list( self._urls_listbox.selectedIndexes() ) ]
        
        for url in urls:
            
            self._RemoveURL( url )
            
        
        if len( urls ) == 1:
            
            url = urls[0]
            
            self._url_input.setText( url )
            
        
    
    def EventListKeyDown( self, event ):
        """Delete selected URLs on a delete key; otherwise let the event pass through."""
        
        ( modifier, key ) = ClientGUIShortcuts.ConvertKeyEventToSimpleTuple( event )
        
        if key in ClientGUIShortcuts.DELETE_KEYS_QT:
            
            urls = [ QP.GetClientData( self._urls_listbox, selection.row() ) for selection in list( self._urls_listbox.selectedIndexes() ) ]
            
            for url in urls:
                
                self._RemoveURL( url )
                
            
        else:
            
            return True # was: event.ignore()
            
        
    
    def AddURL( self ):
        """Add the URL currently typed in the input box; an empty box OKs the dialog."""
        
        url = self._url_input.text()
        
        if url == '':
            
            self.parentWidget().DoOK()
            
        else:
            
            try:
                
                self._EnterURL( url )
                
                self._url_input.clear()
                
            except Exception as e:
                
                QW.QMessageBox.warning( self, 'Warning', 'I could not add that URL: {}'.format( e ) )
                
            
        
    
    def CommitChanges( self ):
        """Write all staged URL add/delete content updates to the database."""
        
        if len( self._pending_content_updates ) > 0:
            
            service_keys_to_content_updates = { CC.COMBINED_LOCAL_FILE_SERVICE_KEY : self._pending_content_updates }
            
            HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
            
        
    
    def ProcessApplicationCommand( self, command: CAC.ApplicationCommand ):
        """Handle dialog-level shortcuts; returns True if the command was consumed."""
        
        command_processed = True
        
        if command.IsSimpleCommand():
            
            action = command.GetSimpleAction()
            
            if action == CAC.SIMPLE_MANAGE_FILE_URLS:
                
                # the same shortcut that opened this dialog closes it
                self._OKParent()
                
            elif action == CAC.SIMPLE_SET_SEARCH_FOCUS:
                
                self._SetSearchFocus()
                
            else:
                
                command_processed = False
                
            
        else:
            
            command_processed = False
            
        
        return command_processed
class RepairFileSystemPanel( ClientGUIScrolledPanels.ManagePanel ):
    """Boot-time dialog for relocating missing file-storage subdirectories.
    
    missing_locations is an iterable of ( incorrect_location, prefix ) pairs.
    Prefixes starting with 'f' hold original files and MUST be corrected;
    't'/'r' prefixes hold regenerable thumbnails and may be left missing.
    """
    
    def __init__( self, parent, missing_locations ):
        """Build the explanation text, the prefix list, and the set/add buttons."""
        
        ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
        
        self._only_thumbs = True
        
        # prefix -> last-known (wrong) base location
        self._incorrect_locations = {}
        # prefix -> ( candidate base location, exists-on-disk bool )
        self._correct_locations = {}
        
        for ( incorrect_location, prefix ) in missing_locations:
            
            self._incorrect_locations[ prefix ] = incorrect_location
            
            # 'f' prefixes are original-file storage, so not thumbs-only
            if prefix.startswith( 'f' ):
                
                self._only_thumbs = False
                
            
        
        text = 'This dialog has launched because some expected file storage directories were not found. This is a serious error. You have two options:'
        text += os.linesep * 2
        text += '1) If you know what these should be (e.g. you recently remapped their external drive to another location), update the paths here manually. For most users, this will be clicking _add a possibly correct location_ and then select the new folder where the subdirectories all went. You can repeat this if your folders are missing in multiple locations. Check everything reports _ok!_'
        text += os.linesep * 2
        text += 'Although it is best if you can find everything, you only _have_ to fix the subdirectories starting with \'f\', which store your original files. Those starting \'t\' and \'r\' are for your thumbnails, which can be regenerated with a bit of work.'
        text += os.linesep * 2
        text += 'Then hit \'apply\', and the client will launch. You should double-check all your locations under database->migrate database immediately.'
        text += os.linesep * 2
        text += '2) If the locations are not available, or you do not know what they should be, or you wish to fix this outside of the program, hit \'cancel\' to gracefully cancel client boot. Feel free to contact hydrus dev for help.'
        
        if self._only_thumbs:
            
            text += os.linesep * 2
            text += 'SPECIAL NOTE FOR YOUR SITUATION: The only paths missing are thumbnail paths. If you cannot recover these folders, you can hit apply to create empty paths at the original or corrected locations and then run a maintenance routine to regenerate the thumbnails from their originals.'
            
        
        st = ClientGUICommon.BetterStaticText( self, text )
        
        st.setWordWrap( True )
        
        self._locations = ClientGUIListCtrl.BetterListCtrl( self, CGLC.COLUMN_LIST_REPAIR_LOCATIONS.ID, 12, self._ConvertPrefixToListCtrlTuples, activation_callback = self._SetLocations )
        
        self._set_button = ClientGUICommon.BetterButton( self, 'set correct location', self._SetLocations )
        self._add_button = ClientGUICommon.BetterButton( self, 'add a possibly correct location (let the client figure out what it contains)', self._AddLocation )
        
        # add a button here for 'try to fill them in for me'. you give it a dir, and it tries to figure out and fill in the prefixes for you
        
        #
        
        self._locations.AddDatas( [ prefix for ( incorrect_location, prefix ) in missing_locations ] )
        
        self._locations.Sort()
        
        #
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._locations, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( vbox, self._set_button, CC.FLAGS_ON_RIGHT )
        QP.AddToLayout( vbox, self._add_button, CC.FLAGS_ON_RIGHT )
        
        self.widget().setLayout( vbox )
        
    
    def _AddLocation( self ):
        """Ask for a candidate directory and record it for every prefix found under it.
        
        Only prefixes whose subdirectory actually exists under the chosen path
        are recorded, so a wrong guess never overwrites a previous good entry.
        """
        
        with QP.DirDialog( self, 'Select the potential correct location.' ) as dlg:
            
            if dlg.exec() == QW.QDialog.Accepted:
                
                path = dlg.GetPath()
                
                for prefix in self._locations.GetData():
                    
                    ok = os.path.exists( os.path.join( path, prefix ) )
                    
                    if ok:
                        
                        self._correct_locations[ prefix ] = ( path, ok )
                        
                    
                
                self._locations.UpdateDatas()
                
            
        
    
    def _ConvertPrefixToListCtrlTuples( self, prefix ):
        """Produce the ( display, sort ) row tuples for one prefix in the list ctrl."""
        
        incorrect_location = self._incorrect_locations[ prefix ]
        
        if prefix in self._correct_locations:
            
            ( correct_location, ok ) = self._correct_locations[ prefix ]
            
            if ok:
                
                pretty_ok = 'ok!'
                
            else:
                
                pretty_ok = 'not found'
                
            
        else:
            
            # no candidate entered yet for this prefix
            correct_location = ''
            ok = None
            pretty_ok = ''
            
        
        pretty_incorrect_location = incorrect_location
        pretty_prefix = prefix
        pretty_correct_location = correct_location
        
        display_tuple = ( pretty_incorrect_location, pretty_prefix, pretty_correct_location, pretty_ok )
        sort_tuple = ( incorrect_location, prefix, correct_location, ok )
        
        return ( display_tuple, sort_tuple )
        
    
    def _GetValue( self ):
        """Validate corrections and return ( correct_rows, thumb_problems ).
        
        Raises HydrusExceptions.VetoException if any 'f' (original file) prefix
        is missing or unverified. Thumbnail prefixes may fall back to their
        incorrect location, in which case thumb_problems is True.
        """
        
        correct_rows = []
        
        thumb_problems = False
        
        for prefix in self._locations.GetData():
            
            incorrect_location = self._incorrect_locations[ prefix ]
            
            if prefix not in self._correct_locations:
                
                if prefix.startswith( 'f' ):
                    
                    raise HydrusExceptions.VetoException( 'You did not correct all the file locations!' )
                    
                else:
                    
                    thumb_problems = True
                    
                
                # fall back to the old location for a missing thumbnail prefix
                correct_location = incorrect_location
                
            else:
                
                ( correct_location, ok ) = self._correct_locations[ prefix ]
                
                if not ok:
                    
                    if prefix.startswith( 'f' ):
                        
                        raise HydrusExceptions.VetoException( 'You did not find all the correct file locations!' )
                        
                    else:
                        
                        thumb_problems = True
                        
                    
                
            
            correct_rows.append( ( prefix, correct_location ) )
            
        
        return ( correct_rows, thumb_problems )
        
    
    def _SetLocations( self ):
        """Ask for a directory and assign it to the selected prefixes, verified or not.
        
        Unlike _AddLocation, this records the path even when the subdirectory
        is not found (ok will be False), since the user set it explicitly.
        """
        
        prefixes = self._locations.GetData( only_selected = True )
        
        if len( prefixes ) > 0:
            
            with QP.DirDialog( self, 'Select correct location.' ) as dlg:
                
                if dlg.exec() == QW.QDialog.Accepted:
                    
                    path = dlg.GetPath()
                    
                    for prefix in prefixes:
                        
                        ok = os.path.exists( os.path.join( path, prefix ) )
                        
                        self._correct_locations[ prefix ] = ( path, ok )
                        
                    
                    self._locations.UpdateDatas()
                    
                
            
        
    
    def CheckValid( self ):
        """Raise a veto if the current corrections are not acceptable."""
        
        # raises veto if invalid
        self._GetValue()
        
    
    def CommitChanges( self ):
        """Write the corrected prefix->location rows to the database."""
        
        ( correct_rows, thumb_problems ) = self._GetValue()
        
        HG.client_controller.WriteSynchronous( 'repair_client_files', correct_rows )
        
    
    def UserIsOKToOK( self ):
        """If only thumbnail paths remain broken, confirm with the user before applying."""
        
        ( correct_rows, thumb_problems ) = self._GetValue()
        
        if thumb_problems:
            
            message = 'Some or all of your incorrect paths have not been corrected, but they are all thumbnail paths.'
            message += os.linesep * 2
            message += 'Would you like instead to create new empty subdirectories at the previous (or corrected, if you have entered them) locations?'
            message += os.linesep * 2
            message += 'You can run database->regenerate->thumbnails to fill them up again.'
            
            result = ClientGUIDialogsQuick.GetYesNo( self, message )
            
            if result != QW.QDialog.Accepted:
                
                return False
                
            
        
        return True
| 51.109224 | 396 | 0.61604 | import collections
import os
import random
import re
import traceback
from qtpy import QtCore as QC
from qtpy import QtWidgets as QW
from qtpy import QtGui as QG
from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusExceptions
from hydrus.core import HydrusGlobals as HG
from hydrus.core import HydrusPaths
from hydrus.core import HydrusSerialisable
from hydrus.core import HydrusTags
from hydrus.core import HydrusText
from hydrus.client import ClientApplicationCommand as CAC
from hydrus.client import ClientConstants as CC
from hydrus.client.gui import ClientGUIDialogs
from hydrus.client.gui import ClientGUIDialogsQuick
from hydrus.client.gui import ClientGUIFunctions
from hydrus.client.gui import ClientGUIImport
from hydrus.client.gui import ClientGUIScrolledPanels
from hydrus.client.gui import ClientGUIScrolledPanelsEdit
from hydrus.client.gui import ClientGUIShortcuts
from hydrus.client.gui import ClientGUIStyle
from hydrus.client.gui import ClientGUITags
from hydrus.client.gui import ClientGUITagSorting
from hydrus.client.gui import ClientGUITime
from hydrus.client.gui import ClientGUITopLevelWindowsPanels
from hydrus.client.gui import QtPorting as QP
from hydrus.client.gui.lists import ClientGUIListBoxes
from hydrus.client.gui.lists import ClientGUIListConstants as CGLC
from hydrus.client.gui.lists import ClientGUIListCtrl
from hydrus.client.gui.pages import ClientGUIResultsSortCollect
from hydrus.client.gui.search import ClientGUIACDropdown
from hydrus.client.gui.search import ClientGUISearch
from hydrus.client.gui.widgets import ClientGUICommon
from hydrus.client.gui.widgets import ClientGUIControls
from hydrus.client.media import ClientMedia
from hydrus.client.metadata import ClientTags
from hydrus.client.networking import ClientNetworkingSessions
class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
def __init__( self, parent ):
ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
self._new_options = HG.client_controller.new_options
self._listbook = ClientGUICommon.ListBook( self )
self._listbook.AddPage( 'gui', 'gui', self._GUIPanel( self._listbook ) )
self._listbook.AddPage( 'gui pages', 'gui pages', self._GUIPagesPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'connection', 'connection', self._ConnectionPanel( self._listbook ) )
self._listbook.AddPage( 'external programs', 'external programs', self._ExternalProgramsPanel( self._listbook ) )
self._listbook.AddPage( 'files and trash', 'files and trash', self._FilesAndTrashPanel( self._listbook ) )
self._listbook.AddPage( 'file viewing statistics', 'file viewing statistics', self._FileViewingStatisticsPanel( self._listbook ) )
self._listbook.AddPage( 'speed and memory', 'speed and memory', self._SpeedAndMemoryPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'maintenance and processing', 'maintenance and processing', self._MaintenanceAndProcessingPanel( self._listbook ) )
self._listbook.AddPage( 'media', 'media', self._MediaPanel( self._listbook ) )
self._listbook.AddPage( 'audio', 'audio', self._AudioPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'system tray', 'system tray', self._SystemTrayPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'search', 'search', self._SearchPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'colours', 'colours', self._ColoursPanel( self._listbook ) )
self._listbook.AddPage( 'popups', 'popups', self._PopupPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'regex favourites', 'regex favourites', self._RegexPanel( self._listbook ) )
self._listbook.AddPage( 'sort/collect', 'sort/collect', self._SortCollectPanel( self._listbook ) )
self._listbook.AddPage( 'downloading', 'downloading', self._DownloadingPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'duplicates', 'duplicates', self._DuplicatesPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'importing', 'importing', self._ImportingPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'style', 'style', self._StylePanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'tag presentation', 'tag presentation', self._TagPresentationPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'tag suggestions', 'tag suggestions', self._TagSuggestionsPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'tags', 'tags', self._TagsPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'thumbnails', 'thumbnails', self._ThumbnailsPanel( self._listbook, self._new_options ) )
self._listbook.AddPage( 'system', 'system', self._SystemPanel( self._listbook, self._new_options ) )
vbox = QP.VBoxLayout()
QP.AddToLayout( vbox, self._listbook, CC.FLAGS_EXPAND_BOTH_WAYS )
self.widget().setLayout( vbox )
class _AudioPanel( QW.QWidget ):
def __init__( self, parent, new_options ):
QW.QWidget.__init__( self, parent )
self._new_options = new_options
self._preview_uses_its_own_audio_volume = QW.QCheckBox( self )
self._has_audio_label = QW.QLineEdit( self )
tt = 'If unchecked, this media canvas will use the \'global\' audio volume slider. If checked, this media canvas will have its own separate one.'
tt += os.linesep * 2
tt += 'Keep this on if you would like the preview viewer to be quieter than the main media viewer.'
self._preview_uses_its_own_audio_volume.setChecked( self._new_options.GetBoolean( 'preview_uses_its_own_audio_volume' ) )
self._preview_uses_its_own_audio_volume.setToolTip( tt )
self._has_audio_label.setText( self._new_options.GetString( 'has_audio_label' ) )
vbox = QP.VBoxLayout()
rows = []
rows.append( ( 'The preview window has its own volume: ', self._preview_uses_its_own_audio_volume ) )
rows.append( ( 'Label for files with audio: ', self._has_audio_label ) )
gridbox = ClientGUICommon.WrapInGrid( self, rows )
QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.addStretch( 1 )
self.setLayout( vbox )
def UpdateOptions( self ):
self._new_options.SetBoolean( 'preview_uses_its_own_audio_volume', self._preview_uses_its_own_audio_volume.isChecked() )
self._new_options.SetString( 'has_audio_label', self._has_audio_label.text() )
class _ColoursPanel( QW.QWidget ):
def __init__( self, parent ):
QW.QWidget.__init__( self, parent )
self._new_options = HG.client_controller.new_options
coloursets_panel = ClientGUICommon.StaticBox( self, 'coloursets' )
self._current_colourset = ClientGUICommon.BetterChoice( coloursets_panel )
self._current_colourset.addItem( 'default', 'default' )
self._current_colourset.addItem( 'darkmode', 'darkmode' )
self._current_colourset.SetValue( self._new_options.GetString( 'current_colourset' ) )
self._notebook = QW.QTabWidget( coloursets_panel )
self._gui_colours = {}
for colourset in ( 'default', 'darkmode' ):
self._gui_colours[ colourset ] = {}
colour_panel = QW.QWidget( self._notebook )
colour_types = []
colour_types.append( CC.COLOUR_THUMB_BACKGROUND )
colour_types.append( CC.COLOUR_THUMB_BACKGROUND_SELECTED )
colour_types.append( CC.COLOUR_THUMB_BACKGROUND_REMOTE )
colour_types.append( CC.COLOUR_THUMB_BACKGROUND_REMOTE_SELECTED )
colour_types.append( CC.COLOUR_THUMB_BORDER )
colour_types.append( CC.COLOUR_THUMB_BORDER_SELECTED )
colour_types.append( CC.COLOUR_THUMB_BORDER_REMOTE )
colour_types.append( CC.COLOUR_THUMB_BORDER_REMOTE_SELECTED )
colour_types.append( CC.COLOUR_THUMBGRID_BACKGROUND )
colour_types.append( CC.COLOUR_AUTOCOMPLETE_BACKGROUND )
colour_types.append( CC.COLOUR_MEDIA_BACKGROUND )
colour_types.append( CC.COLOUR_MEDIA_TEXT )
colour_types.append( CC.COLOUR_TAGS_BOX )
for colour_type in colour_types:
ctrl = ClientGUICommon.BetterColourControl( colour_panel )
ctrl.setMaximumWidth( 20 )
ctrl.SetColour( self._new_options.GetColour( colour_type, colourset ) )
self._gui_colours[ colourset ][ colour_type ] = ctrl
rows = []
hbox = QP.HBoxLayout()
QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BACKGROUND], CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BACKGROUND_SELECTED], CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BACKGROUND_REMOTE], CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BACKGROUND_REMOTE_SELECTED], CC.FLAGS_CENTER_PERPENDICULAR )
rows.append( ( 'thumbnail background (local: normal/selected, remote: normal/selected): ', hbox ) )
hbox = QP.HBoxLayout()
QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BORDER], CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BORDER_SELECTED], CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BORDER_REMOTE], CC.FLAGS_CENTER_PERPENDICULAR )
QP.AddToLayout( hbox, self._gui_colours[colourset][CC.COLOUR_THUMB_BORDER_REMOTE_SELECTED], CC.FLAGS_CENTER_PERPENDICULAR )
rows.append( ( 'thumbnail border (local: normal/selected, remote: normal/selected): ', hbox ) )
rows.append( ( 'thumbnail grid background: ', self._gui_colours[ colourset ][ CC.COLOUR_THUMBGRID_BACKGROUND ] ) )
rows.append( ( 'autocomplete background: ', self._gui_colours[ colourset ][ CC.COLOUR_AUTOCOMPLETE_BACKGROUND ] ) )
rows.append( ( 'media viewer background: ', self._gui_colours[ colourset ][ CC.COLOUR_MEDIA_BACKGROUND ] ) )
rows.append( ( 'media viewer text: ', self._gui_colours[ colourset ][ CC.COLOUR_MEDIA_TEXT ] ) )
rows.append( ( 'tags box background: ', self._gui_colours[ colourset ][ CC.COLOUR_TAGS_BOX ] ) )
gridbox = ClientGUICommon.WrapInGrid( colour_panel, rows )
colour_panel.setLayout( gridbox )
select = colourset == 'default'
self._notebook.addTab( colour_panel, colourset )
if select: self._notebook.setCurrentWidget( colour_panel )
coloursets_panel.Add( ClientGUICommon.WrapInText( self._current_colourset, coloursets_panel, 'current colourset: ' ), CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
coloursets_panel.Add( self._notebook, CC.FLAGS_EXPAND_BOTH_WAYS )
vbox = QP.VBoxLayout()
QP.AddToLayout( vbox, coloursets_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.addStretch( 1 )
self.setLayout( vbox )
def UpdateOptions( self ):
for colourset in self._gui_colours:
for ( colour_type, ctrl ) in list(self._gui_colours[ colourset ].items()):
colour = ctrl.GetColour()
self._new_options.SetColour( colour_type, colourset, colour )
self._new_options.SetString( 'current_colourset', self._current_colourset.GetValue() )
class _ConnectionPanel( QW.QWidget ):
def __init__( self, parent ):
QW.QWidget.__init__( self, parent )
self._new_options = HG.client_controller.new_options
general = ClientGUICommon.StaticBox( self, 'general' )
self._verify_regular_https = QW.QCheckBox( general )
if self._new_options.GetBoolean( 'advanced_mode' ):
network_timeout_min = 1
network_timeout_max = 86400 * 30
error_wait_time_min = 1
error_wait_time_max = 86400 * 30
max_network_jobs_max = 1000
max_network_jobs_per_domain_max = 100
else:
network_timeout_min = 3
network_timeout_max = 600
error_wait_time_min = 3
error_wait_time_max = 1800
max_network_jobs_max = 30
max_network_jobs_per_domain_max = 5
self._network_timeout = QP.MakeQSpinBox( general, min = network_timeout_min, max = network_timeout_max )
self._network_timeout.setToolTip( 'If a network connection cannot be made in this duration or, if once started, it experiences uninterrupted inactivity for six times this duration, it will be abandoned.' )
self._connection_error_wait_time = QP.MakeQSpinBox( general, min = error_wait_time_min, max = error_wait_time_max )
self._connection_error_wait_time.setToolTip( 'If a network connection times out as above, it will wait increasing multiples of this base time before retrying.' )
self._serverside_bandwidth_wait_time = QP.MakeQSpinBox( general, min = error_wait_time_min, max = error_wait_time_max )
self._serverside_bandwidth_wait_time.setToolTip( 'If a server returns a failure status code indicating it is short on bandwidth, the network job will wait increasing multiples of this base time before retrying.' )
self._domain_network_infrastructure_error_velocity = ClientGUITime.VelocityCtrl( general, 0, 100, 30, hours = True, minutes = True, seconds = True, per_phrase = 'within', unit = 'errors' )
self._max_network_jobs = QP.MakeQSpinBox( general, min = 1, max = max_network_jobs_max )
self._max_network_jobs_per_domain = QP.MakeQSpinBox( general, min = 1, max = max_network_jobs_per_domain_max )
proxy_panel = ClientGUICommon.StaticBox( self, 'proxy settings' )
self._http_proxy = ClientGUICommon.NoneableTextCtrl( proxy_panel )
self._https_proxy = ClientGUICommon.NoneableTextCtrl( proxy_panel )
self._no_proxy = ClientGUICommon.NoneableTextCtrl( proxy_panel )
self._verify_regular_https.setChecked( self._new_options.GetBoolean( 'verify_regular_https' ) )
self._http_proxy.SetValue( self._new_options.GetNoneableString( 'http_proxy' ) )
self._https_proxy.SetValue( self._new_options.GetNoneableString( 'https_proxy' ) )
self._no_proxy.SetValue( self._new_options.GetNoneableString( 'no_proxy' ) )
self._network_timeout.setValue( self._new_options.GetInteger( 'network_timeout' ) )
self._connection_error_wait_time.setValue( self._new_options.GetInteger( 'connection_error_wait_time' ) )
self._serverside_bandwidth_wait_time.setValue( self._new_options.GetInteger( 'serverside_bandwidth_wait_time' ) )
number = self._new_options.GetInteger( 'domain_network_infrastructure_error_number' )
time_delta = self._new_options.GetInteger( 'domain_network_infrastructure_error_time_delta' )
self._domain_network_infrastructure_error_velocity.SetValue( ( number, time_delta ) )
self._max_network_jobs.setValue( self._new_options.GetInteger( 'max_network_jobs' ) )
self._max_network_jobs_per_domain.setValue( self._new_options.GetInteger( 'max_network_jobs_per_domain' ) )
if self._new_options.GetBoolean( 'advanced_mode' ):
label = 'As you are in advanced mode, these options have very low and high limits. Be very careful about lowering delay time or raising max number of connections too far, as things will break.'
st = ClientGUICommon.BetterStaticText( general, label = label )
st.setObjectName( 'HydrusWarning' )
st.setWordWrap( True )
general.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
rows = []
rows.append( ( 'network timeout (seconds): ', self._network_timeout ) )
rows.append( ( 'connection error retry wait (seconds): ', self._connection_error_wait_time ) )
rows.append( ( 'serverside bandwidth retry wait (seconds): ', self._serverside_bandwidth_wait_time ) )
rows.append( ( 'Halt new jobs as long as this many network infrastructure errors on their domain (0 for never wait): ', self._domain_network_infrastructure_error_velocity ) )
rows.append( ( 'max number of simultaneous active network jobs: ', self._max_network_jobs ) )
rows.append( ( 'max number of simultaneous active network jobs per domain: ', self._max_network_jobs_per_domain ) )
rows.append( ( 'BUGFIX: verify regular https traffic:', self._verify_regular_https ) )
gridbox = ClientGUICommon.WrapInGrid( general, rows )
general.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
text = 'Enter strings such as "http://ip:port" or "http://user:pass@ip:port" to use for http and https traffic. It should take effect immediately on dialog ok.'
text += os.linesep * 2
text += 'NO PROXY DOES NOT WORK UNLESS YOU HAVE A CUSTOM BUILD OF REQUESTS, SORRY! no_proxy takes the form of comma-separated hosts/domains, just as in curl or the NO_PROXY environment variable. When http and/or https proxies are set, they will not be used for these.'
text += os.linesep * 2
if ClientNetworkingSessions.SOCKS_PROXY_OK:
text += 'It looks like you have socks support! You should also be able to enter (socks4 or) "socks5://ip:port".'
text += os.linesep
text += 'Use socks4a or socks5h to force remote DNS resolution, on the proxy server.'
else:
text += 'It does not look like you have socks support! If you want it, try adding "pysocks" (or "requests[socks]")!'
st = ClientGUICommon.BetterStaticText( proxy_panel, text )
st.setWordWrap( True )
proxy_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
rows = []
rows.append( ( 'http: ', self._http_proxy ) )
rows.append( ( 'https: ', self._https_proxy ) )
rows.append( ( 'no_proxy: ', self._no_proxy ) )
gridbox = ClientGUICommon.WrapInGrid( proxy_panel, rows )
proxy_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
vbox = QP.VBoxLayout()
QP.AddToLayout( vbox, general, CC.FLAGS_EXPAND_PERPENDICULAR )
QP.AddToLayout( vbox, proxy_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.addStretch( 1 )
self.setLayout( vbox )
def UpdateOptions( self ):
self._new_options.SetBoolean( 'verify_regular_https', self._verify_regular_https.isChecked() )
self._new_options.SetNoneableString( 'http_proxy', self._http_proxy.GetValue() )
self._new_options.SetNoneableString( 'https_proxy', self._https_proxy.GetValue() )
self._new_options.SetNoneableString( 'no_proxy', self._no_proxy.GetValue() )
self._new_options.SetInteger( 'network_timeout', self._network_timeout.value() )
self._new_options.SetInteger( 'connection_error_wait_time', self._connection_error_wait_time.value() )
self._new_options.SetInteger( 'serverside_bandwidth_wait_time', self._serverside_bandwidth_wait_time.value() )
self._new_options.SetInteger( 'max_network_jobs', self._max_network_jobs.value() )
self._new_options.SetInteger( 'max_network_jobs_per_domain', self._max_network_jobs_per_domain.value() )
( number, time_delta ) = self._domain_network_infrastructure_error_velocity.GetValue()
self._new_options.SetInteger( 'domain_network_infrastructure_error_number', number )
self._new_options.SetInteger( 'domain_network_infrastructure_error_time_delta', time_delta )
class _DownloadingPanel( QW.QWidget ):
    """Options page covering gallery downloaders, subscriptions, watchers, and misc downloader settings."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        # gallery downloader section
        
        gallery_downloader = ClientGUICommon.StaticBox( self, 'gallery downloader' )
        
        gug_key_and_name = HG.client_controller.network_engine.domain_manager.GetDefaultGUGKeyAndName()
        
        self._default_gug = ClientGUIImport.GUGKeyAndNameSelector( gallery_downloader, gug_key_and_name )
        
        self._gallery_page_wait_period_pages = QP.MakeQSpinBox( gallery_downloader, min=1, max=120 )
        self._gallery_file_limit = ClientGUICommon.NoneableSpinCtrl( gallery_downloader, none_phrase = 'no limit', min = 1, max = 1000000 )
        
        self._highlight_new_query = QW.QCheckBox( gallery_downloader )
        
        # subscriptions section
        
        subscriptions = ClientGUICommon.StaticBox( self, 'subscriptions' )
        
        self._gallery_page_wait_period_subscriptions = QP.MakeQSpinBox( subscriptions, min=1, max=30 )
        self._max_simultaneous_subscriptions = QP.MakeQSpinBox( subscriptions, min=1, max=100 )
        
        self._subscription_file_error_cancel_threshold = ClientGUICommon.NoneableSpinCtrl( subscriptions, min = 1, max = 1000000, unit = 'errors' )
        self._subscription_file_error_cancel_threshold.setToolTip( 'This is a simple patch and will be replaced with a better "retry network errors later" system at some point, but is useful to increase if you have subs to unreliable websites.' )
        
        self._process_subs_in_random_order = QW.QCheckBox( subscriptions )
        self._process_subs_in_random_order.setToolTip( 'Processing in random order is useful whenever bandwidth is tight, as it stops an \'aardvark\' subscription from always getting first whack at what is available. Otherwise, they will be processed in alphabetical order.' )
        
        checker_options = self._new_options.GetDefaultSubscriptionCheckerOptions()
        
        self._subscription_checker_options = ClientGUIImport.CheckerOptionsButton( subscriptions, checker_options )
        
        # watchers section
        
        watchers = ClientGUICommon.StaticBox( self, 'watchers' )
        
        self._watcher_page_wait_period = QP.MakeQSpinBox( watchers, min=1, max=120 )
        self._highlight_new_watcher = QW.QCheckBox( watchers )
        
        checker_options = self._new_options.GetDefaultWatcherCheckerOptions()
        
        self._watcher_checker_options = ClientGUIImport.CheckerOptionsButton( watchers, checker_options )
        
        # misc section
        
        misc = ClientGUICommon.StaticBox( self, 'misc' )
        
        self._pause_character = QW.QLineEdit( misc )
        self._stop_character = QW.QLineEdit( misc )
        
        self._show_new_on_file_seed_short_summary = QW.QCheckBox( misc )
        self._show_deleted_on_file_seed_short_summary = QW.QCheckBox( misc )
        
        # advanced users may want short error delays; everyone else gets a ten-minute floor
        delay_min = 1 if self._new_options.GetBoolean( 'advanced_mode' ) else 600
        
        self._subscription_network_error_delay = ClientGUITime.TimeDeltaButton( misc, min = delay_min, days = True, hours = True, minutes = True, seconds = True )
        self._subscription_other_error_delay = ClientGUITime.TimeDeltaButton( misc, min = delay_min, days = True, hours = True, minutes = True, seconds = True )
        self._downloader_network_error_delay = ClientGUITime.TimeDeltaButton( misc, min = delay_min, days = True, hours = True, minutes = True, seconds = True )
        
        # shared tooltip for every gallery-page wait spinbox
        
        bullet_block = os.linesep.join( (
            '- So they do not compete with file downloads for bandwidth, leading to very unbalanced 20/4400-type queues.',
            '- So you do not get 1000 items in your queue before realising you did not like that tag anyway.',
            '- To give servers a break (some gallery pages can be CPU-expensive to generate).'
        ) )
        
        gallery_page_tt = ( os.linesep * 2 ).join( (
            'Gallery page fetches are heavy requests with unusual fetch-time requirements. It is important they not wait too long, but it is also useful to throttle them:',
            bullet_block,
            'These delays/lots are per-domain.',
            'If you do not understand this stuff, you can just leave it alone.'
        ) )
        
        # initialise widget values from the stored options
        
        self._gallery_page_wait_period_pages.setValue( self._new_options.GetInteger( 'gallery_page_wait_period_pages' ) )
        self._gallery_page_wait_period_pages.setToolTip( gallery_page_tt )
        self._gallery_file_limit.SetValue( HC.options['gallery_file_limit'] )
        self._highlight_new_query.setChecked( self._new_options.GetBoolean( 'highlight_new_query' ) )
        
        self._gallery_page_wait_period_subscriptions.setValue( self._new_options.GetInteger( 'gallery_page_wait_period_subscriptions' ) )
        self._gallery_page_wait_period_subscriptions.setToolTip( gallery_page_tt )
        self._max_simultaneous_subscriptions.setValue( self._new_options.GetInteger( 'max_simultaneous_subscriptions' ) )
        self._subscription_file_error_cancel_threshold.SetValue( self._new_options.GetNoneableInteger( 'subscription_file_error_cancel_threshold' ) )
        self._process_subs_in_random_order.setChecked( self._new_options.GetBoolean( 'process_subs_in_random_order' ) )
        
        self._pause_character.setText( self._new_options.GetString( 'pause_character' ) )
        self._stop_character.setText( self._new_options.GetString( 'stop_character' ) )
        self._show_new_on_file_seed_short_summary.setChecked( self._new_options.GetBoolean( 'show_new_on_file_seed_short_summary' ) )
        self._show_deleted_on_file_seed_short_summary.setChecked( self._new_options.GetBoolean( 'show_deleted_on_file_seed_short_summary' ) )
        
        self._watcher_page_wait_period.setValue( self._new_options.GetInteger( 'watcher_page_wait_period' ) )
        self._watcher_page_wait_period.setToolTip( gallery_page_tt )
        self._highlight_new_watcher.setChecked( self._new_options.GetBoolean( 'highlight_new_watcher' ) )
        
        self._subscription_network_error_delay.SetValue( self._new_options.GetInteger( 'subscription_network_error_delay' ) )
        self._subscription_other_error_delay.SetValue( self._new_options.GetInteger( 'subscription_other_error_delay' ) )
        self._downloader_network_error_delay.SetValue( self._new_options.GetInteger( 'downloader_network_error_delay' ) )
        
        # layout
        
        rows = [
            ( 'Default download source:', self._default_gug ),
            ( 'If new query entered and no current highlight, highlight the new query:', self._highlight_new_query ),
            ( 'Additional fixed time (in seconds) to wait between gallery page fetches:', self._gallery_page_wait_period_pages ),
            ( 'By default, stop searching once this many files are found:', self._gallery_file_limit )
        ]
        
        gallery_downloader.Add( ClientGUICommon.WrapInGrid( gallery_downloader, rows ), CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        rows = [
            ( 'Additional fixed time (in seconds) to wait between gallery page fetches:', self._gallery_page_wait_period_subscriptions ),
            ( 'Maximum number of subscriptions that can sync simultaneously:', self._max_simultaneous_subscriptions ),
            ( 'If a subscription has this many failed file imports, stop and continue later:', self._subscription_file_error_cancel_threshold ),
            ( 'Sync subscriptions in random order:', self._process_subs_in_random_order )
        ]
        
        subscriptions.Add( ClientGUICommon.WrapInGrid( subscriptions, rows ), CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        subscriptions.Add( self._subscription_checker_options, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        rows = [
            ( 'Additional fixed time (in seconds) to wait between watcher checks:', self._watcher_page_wait_period ),
            ( 'If new watcher entered and no current highlight, highlight the new watcher:', self._highlight_new_watcher )
        ]
        
        watchers.Add( ClientGUICommon.WrapInGrid( watchers, rows ), CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        watchers.Add( self._watcher_checker_options, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        rows = [
            ( 'Pause character:', self._pause_character ),
            ( 'Stop character:', self._stop_character ),
            ( 'Show a \'N\' (for \'new\') count on short file import summaries:', self._show_new_on_file_seed_short_summary ),
            ( 'Show a \'D\' (for \'deleted\') count on short file import summaries:', self._show_deleted_on_file_seed_short_summary ),
            ( 'Delay time on a gallery/watcher network error:', self._downloader_network_error_delay ),
            ( 'Delay time on a subscription network error:', self._subscription_network_error_delay ),
            ( 'Delay time on a subscription other error:', self._subscription_other_error_delay )
        ]
        
        misc.Add( ClientGUICommon.WrapInGrid( misc, rows ), CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        vbox = QP.VBoxLayout()
        
        for box in ( gallery_downloader, subscriptions, watchers, misc ):
            
            QP.AddToLayout( vbox, box, CC.FLAGS_EXPAND_PERPENDICULAR )
            
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        """Persist the panel's widget values back into the client options."""
        
        HG.client_controller.network_engine.domain_manager.SetDefaultGUGKeyAndName( self._default_gug.GetValue() )
        
        self._new_options.SetInteger( 'gallery_page_wait_period_pages', self._gallery_page_wait_period_pages.value() )
        
        # gallery_file_limit still lives in the legacy HC.options dict
        HC.options[ 'gallery_file_limit' ] = self._gallery_file_limit.GetValue()
        
        self._new_options.SetBoolean( 'highlight_new_query', self._highlight_new_query.isChecked() )
        
        self._new_options.SetInteger( 'gallery_page_wait_period_subscriptions', self._gallery_page_wait_period_subscriptions.value() )
        self._new_options.SetInteger( 'max_simultaneous_subscriptions', self._max_simultaneous_subscriptions.value() )
        self._new_options.SetNoneableInteger( 'subscription_file_error_cancel_threshold', self._subscription_file_error_cancel_threshold.GetValue() )
        self._new_options.SetBoolean( 'process_subs_in_random_order', self._process_subs_in_random_order.isChecked() )
        
        self._new_options.SetInteger( 'watcher_page_wait_period', self._watcher_page_wait_period.value() )
        self._new_options.SetBoolean( 'highlight_new_watcher', self._highlight_new_watcher.isChecked() )
        
        self._new_options.SetDefaultWatcherCheckerOptions( self._watcher_checker_options.GetValue() )
        self._new_options.SetDefaultSubscriptionCheckerOptions( self._subscription_checker_options.GetValue() )
        
        self._new_options.SetString( 'pause_character', self._pause_character.text() )
        self._new_options.SetString( 'stop_character', self._stop_character.text() )
        self._new_options.SetBoolean( 'show_new_on_file_seed_short_summary', self._show_new_on_file_seed_short_summary.isChecked() )
        self._new_options.SetBoolean( 'show_deleted_on_file_seed_short_summary', self._show_deleted_on_file_seed_short_summary.isChecked() )
        
        self._new_options.SetInteger( 'subscription_network_error_delay', self._subscription_network_error_delay.GetValue() )
        self._new_options.SetInteger( 'subscription_other_error_delay', self._subscription_other_error_delay.GetValue() )
        self._new_options.SetInteger( 'downloader_network_error_delay', self._downloader_network_error_delay.GetValue() )
class _DuplicatesPanel( QW.QWidget ):
    """Options page for the duplicate filter's comparison score weights and batch size."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        weights_panel = ClientGUICommon.StaticBox( self, 'duplicate filter comparison score weights' )
        
        # ( option name, row label ), in display order; widgets are stored as self._<option name>
        score_specs = [
            ( 'duplicate_comparison_score_higher_jpeg_quality', 'Score for jpeg with non-trivially higher jpeg quality:' ),
            ( 'duplicate_comparison_score_much_higher_jpeg_quality', 'Score for jpeg with significantly higher jpeg quality:' ),
            ( 'duplicate_comparison_score_higher_filesize', 'Score for file with non-trivially higher filesize:' ),
            ( 'duplicate_comparison_score_much_higher_filesize', 'Score for file with significantly higher filesize:' ),
            ( 'duplicate_comparison_score_higher_resolution', 'Score for file with higher resolution (as num pixels):' ),
            ( 'duplicate_comparison_score_much_higher_resolution', 'Score for file with significantly higher resolution (as num pixels):' ),
            ( 'duplicate_comparison_score_more_tags', 'Score for file with more tags:' ),
            ( 'duplicate_comparison_score_older', 'Score for file with non-trivially earlier import time:' ),
            ( 'duplicate_comparison_score_nicer_ratio', 'Score for file with \'nicer\' resolution ratio:' )
        ]
        
        rows = []
        
        for ( option_name, label_text ) in score_specs:
            
            spinbox = QP.MakeQSpinBox( weights_panel, min=-100, max=100 )
            
            spinbox.setValue( self._new_options.GetInteger( option_name ) )
            
            setattr( self, '_' + option_name, spinbox )
            
            rows.append( ( label_text, spinbox ) )
            
        
        self._duplicate_comparison_score_nicer_ratio.setToolTip( 'For instance, 16:9 vs 640:357.')
        
        self._duplicate_filter_max_batch_size = QP.MakeQSpinBox( self, min = 10, max = 1024 )
        self._duplicate_filter_max_batch_size.setValue( self._new_options.GetInteger( 'duplicate_filter_max_batch_size' ) )
        
        # layout
        
        label = ( os.linesep * 2 ).join( (
            'When processing potential duplicate pairs in the duplicate filter, the client tries to present the \'best\' file first. It judges the two files on a variety of potential differences, each with a score. The file with the greatest total score is presented first. Here you can tinker with these scores.',
            'I recommend you leave all these as positive numbers, but if you wish, you can set a negative number to reduce the score.'
        ) )
        
        st = ClientGUICommon.BetterStaticText( weights_panel, label )
        st.setWordWrap( True )
        
        weights_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
        weights_panel.Add( ClientGUICommon.WrapInGrid( weights_panel, rows ), CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, weights_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        batch_rows = [ ( 'Max size of duplicate filter pair batches:', self._duplicate_filter_max_batch_size ) ]
        
        QP.AddToLayout( vbox, ClientGUICommon.WrapInGrid( self, batch_rows ), CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        """Write every score weight and the batch size back into the client options."""
        
        for option_name in (
            'duplicate_comparison_score_higher_jpeg_quality',
            'duplicate_comparison_score_much_higher_jpeg_quality',
            'duplicate_comparison_score_higher_filesize',
            'duplicate_comparison_score_much_higher_filesize',
            'duplicate_comparison_score_higher_resolution',
            'duplicate_comparison_score_much_higher_resolution',
            'duplicate_comparison_score_more_tags',
            'duplicate_comparison_score_older',
            'duplicate_comparison_score_nicer_ratio',
            'duplicate_filter_max_batch_size'
        ):
            
            self._new_options.SetInteger( option_name, getattr( self, '_' + option_name ).value() )
class _ExternalProgramsPanel( QW.QWidget ):
    """Options page for 'open externally' launch paths and the manual web browser path."""
    
    def __init__( self, parent ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = HG.client_controller.new_options
        
        mime_panel = ClientGUICommon.StaticBox( self, '\'open externally\' launch paths' )
        
        self._web_browser_path = QW.QLineEdit( mime_panel )
        
        self._mime_launch_listctrl = ClientGUIListCtrl.BetterListCtrl( mime_panel, CGLC.COLUMN_LIST_EXTERNAL_PROGRAMS.ID, 15, self._ConvertMimeToListCtrlTuples, activation_callback = self._EditMimeLaunch )
        
        web_browser_path = self._new_options.GetNoneableString( 'web_browser_path' )
        
        if web_browser_path is not None:
            
            self._web_browser_path.setText( web_browser_path )
            
        
        # one row per searchable filetype; a None launch path means 'use the OS default'
        self._mime_launch_listctrl.AddDatas( [ ( mime, self._new_options.GetMimeLaunch( mime ) ) for mime in HC.SEARCHABLE_MIMES ] )
        
        self._mime_launch_listctrl.Sort()
        
        # layout
        
        vbox = QP.VBoxLayout()
        
        text = 'Setting a specific web browser path here--like \'C:\\program files\\firefox\\firefox.exe "%path%"\'--can help with the \'share->open->in web browser\' command, which is buggy working with OS defaults, particularly on Windows. It also fixes #anchors, which are dropped in some OSes using default means. Use the same %path% format for the \'open externally\' commands below.'
        
        st = ClientGUICommon.BetterStaticText( mime_panel, text )
        st.setWordWrap( True )
        
        mime_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        rows = [ ( 'Manual web browser launch path: ', self._web_browser_path ) ]
        
        mime_panel.Add( ClientGUICommon.WrapInGrid( mime_panel, rows ), CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        mime_panel.Add( self._mime_launch_listctrl, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        QP.AddToLayout( vbox, mime_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.setLayout( vbox )
        
    
    def _ConvertMimeToListCtrlTuples( self, data ):
        """Render a ( mime, launch_path ) pair into ( display, sort ) tuples for the list ctrl."""
        
        ( mime, launch_path ) = data
        
        pretty_mime = HC.mime_string_lookup[ mime ]
        
        if launch_path is None:
            
            pretty_launch_path = 'default: {}'.format( HydrusPaths.GetDefaultLaunchPath() )
            
        else:
            
            pretty_launch_path = launch_path
            
        
        display_tuple = ( pretty_mime, pretty_launch_path )
        
        # sorted exactly as displayed
        return ( display_tuple, display_tuple )
        
    
    def _EditMimeLaunch( self ):
        """Prompt the user for a new launch path for every selected row."""
        
        for ( mime, launch_path ) in self._mime_launch_listctrl.GetData( only_selected = True ):
            
            message = ( os.linesep * 2 ).join( (
                'Enter the new launch path for {}'.format( HC.mime_string_lookup[ mime ] ),
                'Hydrus will insert the file\'s full path wherever you put %path%, even multiple times!',
                'Set as blank to reset to default.'
            ) )
            
            default = 'program "%path%"' if launch_path is None else launch_path
            
            with ClientGUIDialogs.DialogTextEntry( self, message, default = default, allow_blank = True ) as dlg:
                
                if dlg.exec() != QW.QDialog.Accepted:
                    
                    # user cancelled; stop editing the remaining selection
                    break
                    
                
                new_launch_path = dlg.GetValue()
                
                if new_launch_path == '':
                    
                    new_launch_path = None
                    
                
                if new_launch_path not in ( launch_path, default ):
                    
                    self._mime_launch_listctrl.DeleteDatas( [ ( mime, launch_path ) ] )
                    self._mime_launch_listctrl.AddDatas( [ ( mime, new_launch_path ) ] )
                    
                
            
        
        self._mime_launch_listctrl.Sort()
        
    
    def UpdateOptions( self ):
        """Persist the web browser path and every per-mime launch path."""
        
        web_browser_path = self._web_browser_path.text()
        
        if web_browser_path == '':
            
            web_browser_path = None
            
        
        self._new_options.SetNoneableString( 'web_browser_path', web_browser_path )
        
        for ( mime, launch_path ) in self._mime_launch_listctrl.GetData():
            
            self._new_options.SetMimeLaunch( mime, launch_path )
class _FilesAndTrashPanel( QW.QWidget ):
    """Options page for export location, trash behaviour, the archived-file delete lock,
    and the advanced file deletion dialog with its custom reasons.
    
    Fix: the class aliases self._new_options = HG.client_controller.new_options but four
    calls bypassed the alias and reached through HG.client_controller.new_options directly;
    all access now goes through self._new_options (the same object, so behaviour is unchanged).
    """
    
    def __init__( self, parent ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = HG.client_controller.new_options
        
        self._export_location = QP.DirPickerCtrl( self )
        
        self._prefix_hash_when_copying = QW.QCheckBox( self )
        self._prefix_hash_when_copying.setToolTip( 'If you often paste hashes into boorus, check this to automatically prefix with the type, like "md5:2496dabcbd69e3c56a5d8caabb7acde5".' )
        
        self._delete_to_recycle_bin = QW.QCheckBox( self )
        
        self._confirm_trash = QW.QCheckBox( self )
        self._confirm_archive = QW.QCheckBox( self )
        
        self._remove_filtered_files = QW.QCheckBox( self )
        self._remove_trashed_files = QW.QCheckBox( self )
        
        # None on either control means 'no limit'
        self._trash_max_age = ClientGUICommon.NoneableSpinCtrl( self, '', none_phrase = 'no age limit', min = 0, max = 8640 )
        self._trash_max_size = ClientGUICommon.NoneableSpinCtrl( self, '', none_phrase = 'no size limit', min = 0, max = 20480 )
        
        delete_lock_panel = ClientGUICommon.StaticBox( self, 'delete lock' )
        
        self._delete_lock_for_archived_files = QW.QCheckBox( delete_lock_panel )
        
        advanced_file_deletion_panel = ClientGUICommon.StaticBox( self, 'advanced file deletion and custom reasons' )
        
        self._use_advanced_file_deletion_dialog = QW.QCheckBox( advanced_file_deletion_panel )
        self._use_advanced_file_deletion_dialog.setToolTip( 'If this is set, the client will present a more complicated file deletion confirmation dialog that will permit you to set your own deletion reason and perform \'clean\' deletes that leave no deletion record (making later re-import easier).' )
        
        self._remember_last_advanced_file_deletion_special_action = QW.QCheckBox( advanced_file_deletion_panel )
        self._remember_last_advanced_file_deletion_special_action.setToolTip( 'This will try to remember and restore the last action you set, whether that was trash, physical delete, or physical delete and clear history.')
        
        self._remember_last_advanced_file_deletion_reason = QW.QCheckBox( advanced_file_deletion_panel )
        self._remember_last_advanced_file_deletion_reason.setToolTip( 'This will remember and restore the last reason you set for a delete.' )
        
        self._advanced_file_deletion_reasons = ClientGUIListBoxes.QueueListBox( advanced_file_deletion_panel, 5, str, add_callable = self._AddAFDR, edit_callable = self._EditAFDR )
        
        # initialise widget values from the stored options
        
        if HC.options[ 'export_path' ] is not None:
            
            abs_path = HydrusPaths.ConvertPortablePathToAbsPath( HC.options[ 'export_path' ] )
            
            if abs_path is not None:
                
                self._export_location.SetPath( abs_path )
                
            
        
        self._prefix_hash_when_copying.setChecked( self._new_options.GetBoolean( 'prefix_hash_when_copying' ) )
        
        self._delete_to_recycle_bin.setChecked( HC.options[ 'delete_to_recycle_bin' ] )
        self._confirm_trash.setChecked( HC.options[ 'confirm_trash' ] )
        self._confirm_archive.setChecked( HC.options[ 'confirm_archive' ] )
        self._remove_filtered_files.setChecked( HC.options[ 'remove_filtered_files' ] )
        self._remove_trashed_files.setChecked( HC.options[ 'remove_trashed_files' ] )
        self._trash_max_age.SetValue( HC.options[ 'trash_max_age' ] )
        self._trash_max_size.SetValue( HC.options[ 'trash_max_size' ] )
        
        self._delete_lock_for_archived_files.setChecked( self._new_options.GetBoolean( 'delete_lock_for_archived_files' ) )
        
        self._use_advanced_file_deletion_dialog.setChecked( self._new_options.GetBoolean( 'use_advanced_file_deletion_dialog' ) )
        
        # enable/disable the dependent controls whenever the master checkbox changes
        self._use_advanced_file_deletion_dialog.clicked.connect( self._UpdateAdvancedControls )
        
        self._remember_last_advanced_file_deletion_special_action.setChecked( self._new_options.GetBoolean( 'remember_last_advanced_file_deletion_special_action' ) )
        self._remember_last_advanced_file_deletion_reason.setChecked( self._new_options.GetBoolean( 'remember_last_advanced_file_deletion_reason' ) )
        
        self._advanced_file_deletion_reasons.AddDatas( self._new_options.GetStringList( 'advanced_file_deletion_reasons' ) )
        
        self._UpdateAdvancedControls()
        
        # layout
        
        vbox = QP.VBoxLayout()
        
        text = 'If you set the default export directory blank, the client will use \'hydrus_export\' under the current user\'s home directory.'
        
        QP.AddToLayout( vbox, ClientGUICommon.BetterStaticText(self,text), CC.FLAGS_CENTER )
        
        rows = []
        
        rows.append( ( 'When copying a file hashes, prefix with booru-friendly hash type: ', self._prefix_hash_when_copying ) )
        rows.append( ( 'Confirm sending files to trash: ', self._confirm_trash ) )
        rows.append( ( 'Confirm sending more than one file to archive or inbox: ', self._confirm_archive ) )
        rows.append( ( 'When deleting files or folders, send them to the OS\'s recycle bin: ', self._delete_to_recycle_bin ) )
        rows.append( ( 'Remove files from view when they are filtered: ', self._remove_filtered_files ) )
        rows.append( ( 'Remove files from view when they are sent to the trash: ', self._remove_trashed_files ) )
        rows.append( ( 'Number of hours a file can be in the trash before being deleted: ', self._trash_max_age ) )
        rows.append( ( 'Maximum size of trash (MB): ', self._trash_max_size ) )
        rows.append( ( 'Default export directory: ', self._export_location ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        rows = []
        
        rows.append( ( 'Do not permit archived files to be trashed or deleted: ', self._delete_lock_for_archived_files ) )
        
        gridbox = ClientGUICommon.WrapInGrid( delete_lock_panel, rows )
        
        delete_lock_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        QP.AddToLayout( vbox, delete_lock_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        rows = []
        
        rows.append( ( 'Use the advanced file deletion dialog: ', self._use_advanced_file_deletion_dialog ) )
        rows.append( ( 'Remember the last action: ', self._remember_last_advanced_file_deletion_special_action ) )
        rows.append( ( 'Remember the last reason: ', self._remember_last_advanced_file_deletion_reason ) )
        
        gridbox = ClientGUICommon.WrapInGrid( advanced_file_deletion_panel, rows )
        
        advanced_file_deletion_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        advanced_file_deletion_panel.Add( self._advanced_file_deletion_reasons, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        QP.AddToLayout( vbox, advanced_file_deletion_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.setLayout( vbox )
        
    
    def _AddAFDR( self ):
        """Add-callable for the reasons list: seed the edit dialog with a default reason."""
        
        reason = 'I do not like the file.'
        
        return self._EditAFDR( reason )
        
    
    def _EditAFDR( self, reason ):
        """Edit-callable for the reasons list: prompt for new text, veto on cancel."""
        
        with ClientGUIDialogs.DialogTextEntry( self, 'enter the reason', default = reason, allow_blank = False ) as dlg:
            
            if dlg.exec() == QW.QDialog.Accepted:
                
                reason = dlg.GetValue()
                
                return reason
                
            else:
                
                # tells the QueueListBox to abandon the add/edit
                raise HydrusExceptions.VetoException()
                
            
        
    
    def _UpdateAdvancedControls( self ):
        """Enable the advanced-deletion sub-controls only while the master checkbox is on."""
        
        advanced_enabled = self._use_advanced_file_deletion_dialog.isChecked()
        
        self._remember_last_advanced_file_deletion_special_action.setEnabled( advanced_enabled )
        self._remember_last_advanced_file_deletion_reason.setEnabled( advanced_enabled )
        self._advanced_file_deletion_reasons.setEnabled( advanced_enabled )
        
    
    def UpdateOptions( self ):
        """Persist the panel's widget values into HC.options and the new options object."""
        
        HC.options[ 'export_path' ] = HydrusPaths.ConvertAbsPathToPortablePath( self._export_location.GetPath() )
        
        self._new_options.SetBoolean( 'prefix_hash_when_copying', self._prefix_hash_when_copying.isChecked() )
        
        HC.options[ 'delete_to_recycle_bin' ] = self._delete_to_recycle_bin.isChecked()
        HC.options[ 'confirm_trash' ] = self._confirm_trash.isChecked()
        HC.options[ 'confirm_archive' ] = self._confirm_archive.isChecked()
        HC.options[ 'remove_filtered_files' ] = self._remove_filtered_files.isChecked()
        HC.options[ 'remove_trashed_files' ] = self._remove_trashed_files.isChecked()
        HC.options[ 'trash_max_age' ] = self._trash_max_age.GetValue()
        HC.options[ 'trash_max_size' ] = self._trash_max_size.GetValue()
        
        self._new_options.SetBoolean( 'delete_lock_for_archived_files', self._delete_lock_for_archived_files.isChecked() )
        
        self._new_options.SetBoolean( 'use_advanced_file_deletion_dialog', self._use_advanced_file_deletion_dialog.isChecked() )
        
        self._new_options.SetStringList( 'advanced_file_deletion_reasons', self._advanced_file_deletion_reasons.GetData() )
        
        self._new_options.SetBoolean( 'remember_last_advanced_file_deletion_special_action', self._remember_last_advanced_file_deletion_special_action.isChecked() )
        self._new_options.SetBoolean( 'remember_last_advanced_file_deletion_reason', self._remember_last_advanced_file_deletion_reason.isChecked() )
class _FileViewingStatisticsPanel( QW.QWidget ):
def __init__( self, parent ):
QW.QWidget.__init__( self, parent )
self._new_options = HG.client_controller.new_options
self._file_viewing_statistics_active = QW.QCheckBox( self )
self._file_viewing_statistics_active_on_dupe_filter = QW.QCheckBox( self )
self._file_viewing_statistics_media_min_time = ClientGUICommon.NoneableSpinCtrl( self )
self._file_viewing_statistics_media_max_time = ClientGUICommon.NoneableSpinCtrl( self )
self._file_viewing_statistics_preview_min_time = ClientGUICommon.NoneableSpinCtrl( self )
self._file_viewing_statistics_preview_max_time = ClientGUICommon.NoneableSpinCtrl( self )
self._file_viewing_stats_menu_display = ClientGUICommon.BetterChoice( self )
self._file_viewing_stats_menu_display.addItem( 'do not show', CC.FILE_VIEWING_STATS_MENU_DISPLAY_NONE )
self._file_viewing_stats_menu_display.addItem( 'show media', CC.FILE_VIEWING_STATS_MENU_DISPLAY_MEDIA_ONLY )
self._file_viewing_stats_menu_display.addItem( 'show media, and put preview in a submenu', CC.FILE_VIEWING_STATS_MENU_DISPLAY_MEDIA_AND_PREVIEW_IN_SUBMENU )
self._file_viewing_stats_menu_display.addItem( 'show media and preview in two lines', CC.FILE_VIEWING_STATS_MENU_DISPLAY_MEDIA_AND_PREVIEW_STACKED )
self._file_viewing_stats_menu_display.addItem( 'show media and preview combined', CC.FILE_VIEWING_STATS_MENU_DISPLAY_MEDIA_AND_PREVIEW_SUMMED )
#
self._file_viewing_statistics_active.setChecked( self._new_options.GetBoolean( 'file_viewing_statistics_active' ) )
self._file_viewing_statistics_active_on_dupe_filter.setChecked( self._new_options.GetBoolean( 'file_viewing_statistics_active_on_dupe_filter' ) )
self._file_viewing_statistics_media_min_time.SetValue( self._new_options.GetNoneableInteger( 'file_viewing_statistics_media_min_time' ) )
self._file_viewing_statistics_media_max_time.SetValue( self._new_options.GetNoneableInteger( 'file_viewing_statistics_media_max_time' ) )
self._file_viewing_statistics_preview_min_time.SetValue( self._new_options.GetNoneableInteger( 'file_viewing_statistics_preview_min_time' ) )
self._file_viewing_statistics_preview_max_time.SetValue( self._new_options.GetNoneableInteger( 'file_viewing_statistics_preview_max_time' ) )
self._file_viewing_stats_menu_display.SetValue( self._new_options.GetInteger( 'file_viewing_stats_menu_display' ) )
#
vbox = QP.VBoxLayout()
rows = []
rows.append( ( 'Enable file viewing statistics tracking?:', self._file_viewing_statistics_active ) )
rows.append( ( 'Enable file viewing statistics tracking on the duplicate filter?:', self._file_viewing_statistics_active_on_dupe_filter ) )
rows.append( ( 'Min time to view on media viewer to count as a view (seconds):', self._file_viewing_statistics_media_min_time ) )
rows.append( ( 'Cap any view on the media viewer to this maximum time (seconds):', self._file_viewing_statistics_media_max_time ) )
rows.append( ( 'Min time to view on preview viewer to count as a view (seconds):', self._file_viewing_statistics_preview_min_time ) )
rows.append( ( 'Cap any view on the preview viewer to this maximum time (seconds):', self._file_viewing_statistics_preview_max_time ) )
rows.append( ( 'Show media/preview viewing stats on media right-click menus?:', self._file_viewing_stats_menu_display ) )
gridbox = ClientGUICommon.WrapInGrid( self, rows )
QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
vbox.addStretch( 1 )
self.setLayout( vbox )
def UpdateOptions( self ):
    """Write the panel's current widget state back into the client options store."""
    
    new_options = self._new_options
    
    # checkbox-backed booleans
    new_options.SetBoolean( 'file_viewing_statistics_active', self._file_viewing_statistics_active.isChecked() )
    new_options.SetBoolean( 'file_viewing_statistics_active_on_dupe_filter', self._file_viewing_statistics_active_on_dupe_filter.isChecked() )
    
    # min/max view-time thresholds; each control may yield None (meaning 'no limit')
    noneable_time_controls = (
        ( 'file_viewing_statistics_media_min_time', self._file_viewing_statistics_media_min_time ),
        ( 'file_viewing_statistics_media_max_time', self._file_viewing_statistics_media_max_time ),
        ( 'file_viewing_statistics_preview_min_time', self._file_viewing_statistics_preview_min_time ),
        ( 'file_viewing_statistics_preview_max_time', self._file_viewing_statistics_preview_max_time )
    )
    
    for ( option_name, control ) in noneable_time_controls:
        
        new_options.SetNoneableInteger( option_name, control.GetValue() )
        
    
    new_options.SetInteger( 'file_viewing_stats_menu_display', self._file_viewing_stats_menu_display.GetValue() )
class _GUIPanel( QW.QWidget ):
    """Options page covering main-window behaviour, misc UI bugfixes, and saved frame locations."""
    
    def __init__( self, parent ):
        
        QW.QWidget.__init__( self, parent )
        
        # -- widget creation --
        
        self._main_gui_panel = ClientGUICommon.StaticBox( self, 'main window' )
        
        self._app_display_name = QW.QLineEdit( self._main_gui_panel )
        self._app_display_name.setToolTip( 'This is placed in every window title, with current version name. Rename if you want to personalise or differentiate.' )
        
        self._confirm_client_exit = QW.QCheckBox( self._main_gui_panel )
        
        self._activate_window_on_tag_search_page_activation = QW.QCheckBox( self._main_gui_panel )
        
        tt = 'Middle-clicking one or more tags in a taglist will cause the creation of a new search page for those tags. If you do this from the media viewer or a child manage tags dialog, do you want to switch immediately to the main gui?'
        
        self._activate_window_on_tag_search_page_activation.setToolTip( tt )
        
        # misc fixes and experimental toggles
        
        self._misc_panel = ClientGUICommon.StaticBox( self, 'misc' )
        
        self._always_show_iso_time = QW.QCheckBox( self._misc_panel )
        tt = 'In many places across the program (typically import status lists), the client will state a timestamp as "5 days ago". If you would prefer a standard ISO string, like "2018-03-01 12:40:23", check this.'
        self._always_show_iso_time.setToolTip( tt )
        
        self._human_bytes_sig_figs = QP.MakeQSpinBox( self._misc_panel, min = 1, max = 6 )
        self._human_bytes_sig_figs.setToolTip( 'When the program presents a bytes size above 1KB, like 21.3KB or 4.11GB, how many total digits do we want in the number? 2 or 3 is best.')
        
        self._discord_dnd_fix = QW.QCheckBox( self._misc_panel )
        self._discord_dnd_fix.setToolTip( 'This makes small file drag-and-drops a little laggier in exchange for discord support.' )
        
        self._discord_dnd_filename_pattern = QW.QLineEdit( self._misc_panel )
        self._discord_dnd_filename_pattern.setToolTip( 'When discord DnD is enabled, this will use this export phrase to rename your files. If no filename can be generated, hash will be used instead.' )
        
        self._secret_discord_dnd_fix = QW.QCheckBox( self._misc_panel )
        self._secret_discord_dnd_fix.setToolTip( 'This saves the lag but is potentially dangerous, as it (may) treat the from-db-files-drag as a move rather than a copy and hence only works when the drop destination will not consume the files. It requires an additional secret Alternate key to unlock.' )
        
        self._do_macos_debug_dialog_menus = QW.QCheckBox( self._misc_panel )
        self._do_macos_debug_dialog_menus.setToolTip( 'There is a bug in Big Sur Qt regarding interacting with some menus in dialogs. The menus show but cannot be clicked. This shows the menu items in a debug dialog instead.' )
        
        self._use_qt_file_dialogs = QW.QCheckBox( self._misc_panel )
        self._use_qt_file_dialogs.setToolTip( 'If you get crashes opening file/directory dialogs, try this.' )
        
        # frame locations list and edit button
        
        frame_locations_panel = ClientGUICommon.StaticBox( self, 'frame locations' )
        
        self._frame_locations = ClientGUIListCtrl.BetterListCtrl( frame_locations_panel, CGLC.COLUMN_LIST_FRAME_LOCATIONS.ID, 15, data_to_tuples_func = lambda x: (self._GetPrettyFrameLocationInfo( x ), self._GetPrettyFrameLocationInfo( x )), activation_callback = self.EditFrameLocations )
        
        self._frame_locations_edit_button = QW.QPushButton( 'edit', frame_locations_panel )
        self._frame_locations_edit_button.clicked.connect( self.EditFrameLocations )
        
        # -- populate from current option values --
        
        self._new_options = HG.client_controller.new_options
        
        self._app_display_name.setText( self._new_options.GetString( 'app_display_name' ) )
        
        self._confirm_client_exit.setChecked( HC.options[ 'confirm_client_exit' ] )
        
        self._activate_window_on_tag_search_page_activation.setChecked( self._new_options.GetBoolean( 'activate_window_on_tag_search_page_activation' ) )
        
        self._always_show_iso_time.setChecked( self._new_options.GetBoolean( 'always_show_iso_time' ) )
        
        self._human_bytes_sig_figs.setValue( self._new_options.GetInteger( 'human_bytes_sig_figs' ) )
        
        self._discord_dnd_fix.setChecked( self._new_options.GetBoolean( 'discord_dnd_fix' ) )
        
        self._discord_dnd_filename_pattern.setText( self._new_options.GetString( 'discord_dnd_filename_pattern' ) )
        
        self._secret_discord_dnd_fix.setChecked( self._new_options.GetBoolean( 'secret_discord_dnd_fix' ) )
        
        self._do_macos_debug_dialog_menus.setChecked( self._new_options.GetBoolean( 'do_macos_debug_dialog_menus' ) )
        
        self._use_qt_file_dialogs.setChecked( self._new_options.GetBoolean( 'use_qt_file_dialogs' ) )
        
        # each frame location row is ( name, *info ) flattened into one tuple for the list ctrl
        for ( name, info ) in self._new_options.GetFrameLocations():
            
            listctrl_list = QP.ListsToTuples( [ name ] + list( info ) )
            
            self._frame_locations.AddDatas( ( listctrl_list, ) )
            
        
        #self._frame_locations.SortListItems( col = 0 )
        
        # -- layout --
        
        rows = []
        
        rows.append( ( 'Application display name: ', self._app_display_name ) )
        rows.append( ( 'Confirm client exit: ', self._confirm_client_exit ) )
        rows.append( ( 'Switch to main window when opening tag search page from media viewer: ', self._activate_window_on_tag_search_page_activation ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self._main_gui_panel, rows )
        
        self._main_gui_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        rows = []
        
        rows.append( ( 'Prefer ISO time ("2018-03-01 12:40:23") to "5 days ago": ', self._always_show_iso_time ) )
        rows.append( ( 'BUGFIX: Discord file drag-and-drop fix (works for <=25, <200MB file DnDs): ', self._discord_dnd_fix ) )
        rows.append( ( 'Discord drag-and-drop filename pattern: ', self._discord_dnd_filename_pattern ) )
        rows.append( ( 'Export pattern shortcuts: ', ClientGUICommon.ExportPatternButton( self ) ) )
        rows.append( ( 'EXPERIMENTAL: Bytes strings >1KB pseudo significant figures: ', self._human_bytes_sig_figs ) )
        rows.append( ( 'EXPERIMENTAL BUGFIX: Secret discord file drag-and-drop fix: ', self._secret_discord_dnd_fix ) )
        rows.append( ( 'BUGFIX: If on macOS, show dialog menus in a debug menu: ', self._do_macos_debug_dialog_menus ) )
        rows.append( ( 'ANTI-CRASH BUGFIX: Use Qt file/directory selection dialogs, rather than OS native: ', self._use_qt_file_dialogs ) )
        
        # NOTE(review): parent here is self, unlike the _main_gui_panel grid above -- presumably harmless since the grid is reparented on Add, but confirm
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        self._misc_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        text = 'Here you can override the current and default values for many frame and dialog sizing and positioning variables.'
        text += os.linesep
        text += 'This is an advanced control. If you aren\'t confident of what you are doing here, come back later!'
        
        frame_locations_panel.Add( QW.QLabel( text, frame_locations_panel ), CC.FLAGS_EXPAND_PERPENDICULAR )
        frame_locations_panel.Add( self._frame_locations, CC.FLAGS_EXPAND_BOTH_WAYS )
        frame_locations_panel.Add( self._frame_locations_edit_button, CC.FLAGS_ON_RIGHT )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, self._main_gui_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, self._misc_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, frame_locations_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.setLayout( vbox )
        
    
    def _GetPrettyFrameLocationInfo( self, listctrl_list ):
        """Return the frame-location tuple with every element stringified for display."""
        
        pretty_listctrl_list = []
        
        for item in listctrl_list:
            
            pretty_listctrl_list.append( str( item ) )
            
        
        return pretty_listctrl_list
        
    
    def EditFrameLocations( self ):
        """Open an edit dialog for each selected frame-location row, replacing the row on OK."""
        
        for listctrl_list in self._frame_locations.GetData( only_selected = True ):
            
            title = 'set frame location information'
            
            with ClientGUITopLevelWindowsPanels.DialogEdit( self, title ) as dlg:
                
                panel = ClientGUIScrolledPanelsEdit.EditFrameLocationPanel( dlg, listctrl_list )
                
                dlg.SetPanel( panel )
                
                if dlg.exec() == QW.QDialog.Accepted:
                    
                    new_listctrl_list = panel.GetValue()
                    
                    self._frame_locations.ReplaceData( listctrl_list, new_listctrl_list )
                    
                
            
        
    
    def UpdateOptions( self ):
        """Write widget state back into HC.options and the new_options store."""
        
        HC.options[ 'confirm_client_exit' ] = self._confirm_client_exit.isChecked()
        
        self._new_options.SetBoolean( 'always_show_iso_time', self._always_show_iso_time.isChecked() )
        
        self._new_options.SetInteger( 'human_bytes_sig_figs', self._human_bytes_sig_figs.value() )
        
        self._new_options.SetBoolean( 'activate_window_on_tag_search_page_activation', self._activate_window_on_tag_search_page_activation.isChecked() )
        
        app_display_name = self._app_display_name.text()
        
        # an empty display name would produce blank window titles, so fall back to the default
        if app_display_name == '':
            
            app_display_name = 'hydrus client'
            
        
        self._new_options.SetString( 'app_display_name', app_display_name )
        
        self._new_options.SetBoolean( 'discord_dnd_fix', self._discord_dnd_fix.isChecked() )
        
        self._new_options.SetString( 'discord_dnd_filename_pattern', self._discord_dnd_filename_pattern.text() )
        
        self._new_options.SetBoolean( 'secret_discord_dnd_fix', self._secret_discord_dnd_fix.isChecked() )
        
        self._new_options.SetBoolean( 'do_macos_debug_dialog_menus', self._do_macos_debug_dialog_menus.isChecked() )
        
        self._new_options.SetBoolean( 'use_qt_file_dialogs', self._use_qt_file_dialogs.isChecked() )
        
        for listctrl_list in self._frame_locations.GetData():
            
            ( name, remember_size, remember_position, last_size, last_position, default_gravity, default_position, maximised, fullscreen ) = listctrl_list
            
            self._new_options.SetFrameLocation( name, remember_size, remember_position, last_size, last_position, default_gravity, default_position, maximised, fullscreen )
class _GUIPagesPanel( QW.QWidget ):
    """Options page covering gui sessions, page tab behaviour/naming, and page-level controls."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        # -- widget creation --
        
        self._sessions_panel = ClientGUICommon.StaticBox( self, 'sessions' )
        
        self._default_gui_session = QW.QComboBox( self._sessions_panel )
        
        self._last_session_save_period_minutes = QP.MakeQSpinBox( self._sessions_panel, min = 1, max = 1440 )
        
        self._only_save_last_session_during_idle = QW.QCheckBox( self._sessions_panel )
        
        self._only_save_last_session_during_idle.setToolTip( 'This is useful if you usually have a very large session (200,000+ files/import items open) and a client that is always on.' )
        
        self._number_of_gui_session_backups = QP.MakeQSpinBox( self._sessions_panel, min = 1, max = 32 )
        
        self._number_of_gui_session_backups.setToolTip( 'The client keeps multiple rolling backups of your gui sessions. If you have very large sessions, you might like to reduce this number.' )
        
        self._show_session_size_warnings = QW.QCheckBox( self._sessions_panel )
        
        self._show_session_size_warnings.setToolTip( 'This will give you a once-per-boot warning popup if your active session contains more than 10M weight.' )
        
        self._pages_panel = ClientGUICommon.StaticBox( self, 'pages' )
        
        self._default_new_page_goes = ClientGUICommon.BetterChoice( self._pages_panel )
        
        for value in [ CC.NEW_PAGE_GOES_FAR_LEFT, CC.NEW_PAGE_GOES_LEFT_OF_CURRENT, CC.NEW_PAGE_GOES_RIGHT_OF_CURRENT, CC.NEW_PAGE_GOES_FAR_RIGHT ]:
            
            self._default_new_page_goes.addItem( CC.new_page_goes_string_lookup[ value], value )
            
        
        self._notebook_tab_alignment = ClientGUICommon.BetterChoice( self._pages_panel )
        
        for value in [ CC.DIRECTION_UP, CC.DIRECTION_LEFT, CC.DIRECTION_RIGHT, CC.DIRECTION_DOWN ]:
            
            self._notebook_tab_alignment.addItem( CC.directions_alignment_string_lookup[ value ], value )
            
        
        self._total_pages_warning = QP.MakeQSpinBox( self._pages_panel, min=5, max=500 )
        
        self._reverse_page_shift_drag_behaviour = QW.QCheckBox( self._pages_panel )
        self._reverse_page_shift_drag_behaviour.setToolTip( 'By default, holding down shift when you drop off a page tab means the client will not \'chase\' the page tab. This makes this behaviour default, with shift-drop meaning to chase.' )
        
        self._page_names_panel = ClientGUICommon.StaticBox( self._pages_panel, 'page tab names' )
        
        self._max_page_name_chars = QP.MakeQSpinBox( self._page_names_panel, min=1, max=256 )
        
        self._elide_page_tab_names = QW.QCheckBox( self._page_names_panel )
        
        self._page_file_count_display = ClientGUICommon.BetterChoice( self._page_names_panel )
        
        for display_type in ( CC.PAGE_FILE_COUNT_DISPLAY_ALL, CC.PAGE_FILE_COUNT_DISPLAY_ONLY_IMPORTERS, CC.PAGE_FILE_COUNT_DISPLAY_NONE ):
            
            self._page_file_count_display.addItem( CC.page_file_count_display_string_lookup[ display_type], display_type )
            
        
        self._import_page_progress_display = QW.QCheckBox( self._page_names_panel )
        
        self._controls_panel = ClientGUICommon.StaticBox( self, 'controls' )
        
        self._set_search_focus_on_page_change = QW.QCheckBox( self._controls_panel )
        
        self._hide_preview = QW.QCheckBox( self._controls_panel )
        
        # -- populate from current option values --
        
        gui_session_names = HG.client_controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_GUI_SESSION_CONTAINER )
        
        # 'last session' is always offered, even before one has been saved
        if CC.LAST_SESSION_SESSION_NAME not in gui_session_names:
            
            gui_session_names.insert( 0, CC.LAST_SESSION_SESSION_NAME )
            
        
        self._default_gui_session.addItem( 'just a blank page', None )
        
        for name in gui_session_names:
            
            self._default_gui_session.addItem( name, name )
            
        
        try:
            
            QP.SetStringSelection( self._default_gui_session, HC.options['default_gui_session'] )
            
        except:
            
            # the saved session name may no longer exist--fall back to the blank page entry
            self._default_gui_session.setCurrentIndex( 0 )
            
        
        self._last_session_save_period_minutes.setValue( self._new_options.GetInteger( 'last_session_save_period_minutes' ) )
        self._only_save_last_session_during_idle.setChecked( self._new_options.GetBoolean( 'only_save_last_session_during_idle' ) )
        self._number_of_gui_session_backups.setValue( self._new_options.GetInteger( 'number_of_gui_session_backups' ) )
        self._show_session_size_warnings.setChecked( self._new_options.GetBoolean( 'show_session_size_warnings' ) )
        
        self._default_new_page_goes.SetValue( self._new_options.GetInteger( 'default_new_page_goes' ) )
        self._notebook_tab_alignment.SetValue( self._new_options.GetInteger( 'notebook_tab_alignment' ) )
        self._max_page_name_chars.setValue( self._new_options.GetInteger( 'max_page_name_chars' ) )
        self._elide_page_tab_names.setChecked( self._new_options.GetBoolean( 'elide_page_tab_names' ) )
        self._page_file_count_display.SetValue( self._new_options.GetInteger( 'page_file_count_display' ) )
        self._import_page_progress_display.setChecked( self._new_options.GetBoolean( 'import_page_progress_display' ) )
        self._total_pages_warning.setValue( self._new_options.GetInteger( 'total_pages_warning' ) )
        self._reverse_page_shift_drag_behaviour.setChecked( self._new_options.GetBoolean( 'reverse_page_shift_drag_behaviour' ) )
        
        self._set_search_focus_on_page_change.setChecked( self._new_options.GetBoolean( 'set_search_focus_on_page_change' ) )
        self._hide_preview.setChecked( HC.options[ 'hide_preview' ] )
        
        # -- layout --
        
        rows = []
        
        rows.append( ( 'Default session on startup: ', self._default_gui_session ) )
        rows.append( ( 'If \'last session\' above, autosave it how often (minutes)?', self._last_session_save_period_minutes ) )
        rows.append( ( 'If \'last session\' above, only autosave during idle time?', self._only_save_last_session_during_idle ) )
        rows.append( ( 'Number of session backups to keep: ', self._number_of_gui_session_backups ) )
        rows.append( ( 'Show warning popup if session size exceeds 10,000,000: ', self._show_session_size_warnings ) )
        
        sessions_gridbox = ClientGUICommon.WrapInGrid( self._sessions_panel, rows )
        
        self._sessions_panel.Add( sessions_gridbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        
        rows = []
        
        rows.append( ( 'By default, put new page tabs on: ', self._default_new_page_goes ) )
        rows.append( ( 'Notebook tab alignment: ', self._notebook_tab_alignment ) )
        rows.append( ( 'Reverse page tab shift-drag behaviour: ', self._reverse_page_shift_drag_behaviour ) )
        rows.append( ( 'Warn at this many total pages: ', self._total_pages_warning ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self._pages_panel, rows )
        
        rows = []
        
        rows.append( ( 'Max characters to display in a page name: ', self._max_page_name_chars ) )
        rows.append( ( 'When there are too many tabs to fit, \'...\' elide their names so they fit: ', self._elide_page_tab_names ) )
        rows.append( ( 'Show page file count after its name: ', self._page_file_count_display ) )
        rows.append( ( 'Show import page x/y progress after its name: ', self._import_page_progress_display ) )
        
        page_names_gridbox = ClientGUICommon.WrapInGrid( self._page_names_panel, rows )
        
        label = 'If you have enough pages in a row, left/right arrows will appear to navigate them back and forth.'
        label += os.linesep
        label += 'Due to an unfortunate Qt issue, the tab bar will scroll so the current tab is right-most visible whenever a page is renamed.'
        label += os.linesep
        label += 'Therefore, if you set pages to have current file count or import progress in their name (which will update from time to time), do not put import pages in a long row of tabs, as it will reset scroll position on every progress update.'
        label += os.linesep
        label += 'Just make some nested \'page of pages\' so they are not all in the same row.'
        
        st = ClientGUICommon.BetterStaticText( self._page_names_panel, label )
        
        st.setWordWrap( True )
        
        self._page_names_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
        self._page_names_panel.Add( page_names_gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        self._pages_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        self._pages_panel.Add( self._page_names_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        rows = []
        
        rows.append( ( 'When switching to a page, focus its text input field (if any): ', self._set_search_focus_on_page_change ) )
        rows.append( ( 'Hide the bottom-left preview window: ', self._hide_preview ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self._controls_panel, rows )
        
        self._controls_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, self._sessions_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, self._pages_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._controls_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        """Write widget state back into HC.options and the new_options store."""
        
        HC.options[ 'default_gui_session' ] = self._default_gui_session.currentText()
        
        self._new_options.SetInteger( 'notebook_tab_alignment', self._notebook_tab_alignment.GetValue() )
        
        self._new_options.SetInteger( 'last_session_save_period_minutes', self._last_session_save_period_minutes.value() )
        self._new_options.SetInteger( 'number_of_gui_session_backups', self._number_of_gui_session_backups.value() )
        self._new_options.SetBoolean( 'show_session_size_warnings', self._show_session_size_warnings.isChecked() )
        
        self._new_options.SetBoolean( 'only_save_last_session_during_idle', self._only_save_last_session_during_idle.isChecked() )
        
        self._new_options.SetInteger( 'default_new_page_goes', self._default_new_page_goes.GetValue() )
        
        self._new_options.SetInteger( 'max_page_name_chars', self._max_page_name_chars.value() )
        
        self._new_options.SetBoolean( 'elide_page_tab_names', self._elide_page_tab_names.isChecked() )
        
        self._new_options.SetInteger( 'page_file_count_display', self._page_file_count_display.GetValue() )
        self._new_options.SetBoolean( 'import_page_progress_display', self._import_page_progress_display.isChecked() )
        
        self._new_options.SetInteger( 'total_pages_warning', self._total_pages_warning.value() )
        
        self._new_options.SetBoolean( 'reverse_page_shift_drag_behaviour', self._reverse_page_shift_drag_behaviour.isChecked() )
        
        self._new_options.SetBoolean( 'set_search_focus_on_page_change', self._set_search_focus_on_page_change.isChecked() )
        
        HC.options[ 'hide_preview' ] = self._hide_preview.isChecked()
class _ImportingPanel( QW.QWidget ):
    """Options page for the default file import options used by importers."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        fios_box = ClientGUICommon.StaticBox( self, 'default file import options' )
        
        show_downloader_options = True
        
        # one edit button per import context, each holding its own default options object
        self._quiet_fios = ClientGUIImport.FileImportOptionsButton( fios_box, self._new_options.GetDefaultFileImportOptions( 'quiet' ), show_downloader_options )
        self._loud_fios = ClientGUIImport.FileImportOptionsButton( fios_box, self._new_options.GetDefaultFileImportOptions( 'loud' ), show_downloader_options )
        
        rows = [
            ( 'For \'quiet\' import contexts like import folders and subscriptions:', self._quiet_fios ),
            ( 'For import contexts that work on pages:', self._loud_fios )
        ]
        
        fios_box.Add( ClientGUICommon.WrapInGrid( fios_box, rows ), CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, fios_box, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox.addStretch( 1 )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        """Commit the edited default file import options back to the options store."""
        
        for ( context, button ) in ( ( 'quiet', self._quiet_fios ), ( 'loud', self._loud_fios ) ):
            
            self._new_options.SetDefaultFileImportOptions( context, button.GetValue() )
class _MaintenanceAndProcessingPanel( QW.QWidget ):
def __init__( self, parent ):
QW.QWidget.__init__( self, parent )
self._new_options = HG.client_controller.new_options
self._jobs_panel = ClientGUICommon.StaticBox( self, 'when to run high cpu jobs' )
self._file_maintenance_panel = ClientGUICommon.StaticBox( self, 'file maintenance' )
self._idle_panel = ClientGUICommon.StaticBox( self._jobs_panel, 'idle' )
self._shutdown_panel = ClientGUICommon.StaticBox( self._jobs_panel, 'shutdown' )
self._idle_normal = QW.QCheckBox( self._idle_panel )
self._idle_normal.clicked.connect( self._EnableDisableIdleNormal )
self._idle_period = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, '', min = 1, max = 1000, multiplier = 60, unit = 'minutes', none_phrase = 'ignore normal browsing' )
self._idle_mouse_period = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, '', min = 1, max = 1000, multiplier = 60, unit = 'minutes', none_phrase = 'ignore mouse movements' )
self._idle_mode_client_api_timeout = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, '', min = 1, max = 1000, multiplier = 60, unit = 'minutes', none_phrase = 'ignore client api' )
self._system_busy_cpu_percent = QP.MakeQSpinBox( self._idle_panel, min = 5, max = 99 )
self._system_busy_cpu_count = ClientGUICommon.NoneableSpinCtrl( self._idle_panel, min = 1, max = 64, unit = 'cores', none_phrase = 'ignore cpu usage' )
self._idle_shutdown = ClientGUICommon.BetterChoice( self._shutdown_panel )
for idle_id in ( CC.IDLE_NOT_ON_SHUTDOWN, CC.IDLE_ON_SHUTDOWN, CC.IDLE_ON_SHUTDOWN_ASK_FIRST ):
self._idle_shutdown.addItem( CC.idle_string_lookup[ idle_id], idle_id )
self._idle_shutdown.currentIndexChanged.connect( self._EnableDisableIdleShutdown )
self._idle_shutdown_max_minutes = QP.MakeQSpinBox( self._shutdown_panel, min=1, max=1440 )
self._shutdown_work_period = ClientGUITime.TimeDeltaButton( self._shutdown_panel, min = 60, days = True, hours = True, minutes = True )
min_unit_value = 1
max_unit_value = 1000
min_time_delta = 1
self._file_maintenance_during_idle = QW.QCheckBox( self._file_maintenance_panel )
self._file_maintenance_idle_throttle_velocity = ClientGUITime.VelocityCtrl( self._file_maintenance_panel, min_unit_value, max_unit_value, min_time_delta, minutes = True, seconds = True, per_phrase = 'every', unit = 'heavy work units' )
self._file_maintenance_during_active = QW.QCheckBox( self._file_maintenance_panel )
self._file_maintenance_active_throttle_velocity = ClientGUITime.VelocityCtrl( self._file_maintenance_panel, min_unit_value, max_unit_value, min_time_delta, minutes = True, seconds = True, per_phrase = 'every', unit = 'heavy work units' )
tt = 'Different jobs will count for more or less weight. A file metadata reparse will count as one work unit, but quicker jobs like checking for file presence will count as fractions of one and will will work more frequently.'
tt += os.linesep * 2
tt += 'Please note that this throttle is not rigorous for long timescales, as file processing history is not currently saved on client exit. If you restart the client, the file manager thinks it has run 0 jobs and will be happy to run until the throttle kicks in again.'
self._file_maintenance_idle_throttle_velocity.setToolTip( tt )
self._file_maintenance_active_throttle_velocity.setToolTip( tt )
self._idle_normal.setChecked( HC.options[ 'idle_normal' ] )
self._idle_period.SetValue( HC.options['idle_period'] )
self._idle_mouse_period.SetValue( HC.options['idle_mouse_period'] )
self._idle_mode_client_api_timeout.SetValue( self._new_options.GetNoneableInteger( 'idle_mode_client_api_timeout' ) )
self._system_busy_cpu_percent.setValue( self._new_options.GetInteger( 'system_busy_cpu_percent' ) )
self._system_busy_cpu_count.SetValue( self._new_options.GetNoneableInteger( 'system_busy_cpu_count' ) )
self._idle_shutdown.SetValue( HC.options[ 'idle_shutdown' ] )
self._idle_shutdown_max_minutes.setValue( HC.options['idle_shutdown_max_minutes'] )
self._shutdown_work_period.SetValue( self._new_options.GetInteger( 'shutdown_work_period' ) )
self._file_maintenance_during_idle.setChecked( self._new_options.GetBoolean( 'file_maintenance_during_idle' ) )
file_maintenance_idle_throttle_files = self._new_options.GetInteger( 'file_maintenance_idle_throttle_files' )
file_maintenance_idle_throttle_time_delta = self._new_options.GetInteger( 'file_maintenance_idle_throttle_time_delta' )
file_maintenance_idle_throttle_velocity = ( file_maintenance_idle_throttle_files, file_maintenance_idle_throttle_time_delta )
self._file_maintenance_idle_throttle_velocity.SetValue( file_maintenance_idle_throttle_velocity )
self._file_maintenance_during_active.setChecked( self._new_options.GetBoolean( 'file_maintenance_during_active' ) )
file_maintenance_active_throttle_files = self._new_options.GetInteger( 'file_maintenance_active_throttle_files' )
file_maintenance_active_throttle_time_delta = self._new_options.GetInteger( 'file_maintenance_active_throttle_time_delta' )
file_maintenance_active_throttle_velocity = ( file_maintenance_active_throttle_files, file_maintenance_active_throttle_time_delta )
self._file_maintenance_active_throttle_velocity.SetValue( file_maintenance_active_throttle_velocity )
rows = []
rows.append( ( 'Run maintenance jobs when the client is idle and the system is not otherwise busy: ', self._idle_normal ) )
rows.append( ( 'Permit idle mode if no general browsing activity has occurred in the past: ', self._idle_period ) )
rows.append( ( 'Permit idle mode if the mouse has not been moved in the past: ', self._idle_mouse_period ) )
rows.append( ( 'Permit idle mode if no Client API requests in the past: ', self._idle_mode_client_api_timeout ) )
hbox = QP.HBoxLayout()
QP.AddToLayout( hbox, self._system_busy_cpu_percent, CC.FLAGS_CENTER )
QP.AddToLayout( hbox, ClientGUICommon.BetterStaticText( self._idle_panel, label = '% on ' ), CC.FLAGS_CENTER )
QP.AddToLayout( hbox, self._system_busy_cpu_count, CC.FLAGS_CENTER )
import psutil
num_cores = psutil.cpu_count()
QP.AddToLayout( hbox, ClientGUICommon.BetterStaticText( self._idle_panel, label = '(you appear to have {} cores)'.format( num_cores ) ), CC.FLAGS_CENTER )
rows.append( ( 'Consider the system busy if CPU usage is above: ', hbox ) )
gridbox = ClientGUICommon.WrapInGrid( self._idle_panel, rows )
self._idle_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
rows = []
rows.append( ( 'Run jobs on shutdown: ', self._idle_shutdown ) )
rows.append( ( 'Only run shutdown jobs once per: ', self._shutdown_work_period ) )
rows.append( ( 'Max number of minutes to run shutdown jobs: ', self._idle_shutdown_max_minutes ) )
gridbox = ClientGUICommon.WrapInGrid( self._shutdown_panel, rows )
self._shutdown_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
text = '***'
text += os.linesep
text +='If you are a new user or do not completely understand these options, please do not touch them! Do not set the client to be idle all the time unless you know what you are doing or are testing something and are prepared for potential problems!'
text += os.linesep
text += '***'
text += os.linesep * 2
text += 'Sometimes, the client needs to do some heavy maintenance. This could be reformatting the database to keep it running fast or processing a large number of tags from a repository. Typically, these jobs will not allow you to use the gui while they run, and on slower computers--or those with not much memory--they can take a long time to complete.'
text += os.linesep * 2
text += 'You can set these jobs to run only when the client is idle, or only during shutdown, or neither, or both. If you leave the client on all the time in the background, focusing on \'idle time\' processing is often ideal. If you have a slow computer, relying on \'shutdown\' processing (which you can manually start when convenient), is often better.'
text += os.linesep * 2
text += 'If the client switches from idle to not idle during a job, it will try to abandon it and give you back control. This is not always possible, and even when it is, it will sometimes take several minutes, particularly on slower machines or those on HDDs rather than SSDs.'
text += os.linesep * 2
text += 'If the client believes the system is busy, it will generally not start jobs.'
st = ClientGUICommon.BetterStaticText( self._jobs_panel, label = text )
st.setWordWrap( True )
self._jobs_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
self._jobs_panel.Add( self._idle_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
self._jobs_panel.Add( self._shutdown_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
message = 'Scheduled jobs such as reparsing file metadata and regenerating thumbnails are performed in the background.'
self._file_maintenance_panel.Add( ClientGUICommon.BetterStaticText( self._file_maintenance_panel, label = message ), CC.FLAGS_EXPAND_PERPENDICULAR )
rows = []
rows.append( ( 'Run file maintenance during idle time: ', self._file_maintenance_during_idle ) )
rows.append( ( 'Idle throttle: ', self._file_maintenance_idle_throttle_velocity ) )
rows.append( ( 'Run file maintenance during normal time: ', self._file_maintenance_during_active ) )
rows.append( ( 'Normal throttle: ', self._file_maintenance_active_throttle_velocity ) )
gridbox = ClientGUICommon.WrapInGrid( self._file_maintenance_panel, rows )
self._file_maintenance_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
vbox = QP.VBoxLayout()
QP.AddToLayout( vbox, self._jobs_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
QP.AddToLayout( vbox, self._file_maintenance_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
vbox.addStretch( 1 )
self.setLayout( vbox )
self._EnableDisableIdleNormal()
self._EnableDisableIdleShutdown()
self._system_busy_cpu_count.valueChanged.connect( self._EnableDisableCPUPercent )
def _EnableDisableCPUPercent( self ):
enabled = self._system_busy_cpu_count.isEnabled() and self._system_busy_cpu_count.GetValue() is not None
self._system_busy_cpu_percent.setEnabled( enabled )
def _EnableDisableIdleNormal( self ):
enabled = self._idle_normal.isChecked()
self._idle_period.setEnabled( enabled )
self._idle_mouse_period.setEnabled( enabled )
self._idle_mode_client_api_timeout.setEnabled( enabled )
self._system_busy_cpu_count.setEnabled( enabled )
self._EnableDisableCPUPercent()
def _EnableDisableIdleShutdown( self ):
enabled = self._idle_shutdown.GetValue() != CC.IDLE_NOT_ON_SHUTDOWN
self._shutdown_work_period.setEnabled( enabled )
self._idle_shutdown_max_minutes.setEnabled( enabled )
        def UpdateOptions( self ):
            """Flush the panel's current widget values into the global HC.options dict and the new_options object.
            
            Called by the enclosing options dialog when the user hits ok.
            """
            HC.options[ 'idle_normal' ] = self._idle_normal.isChecked()
            HC.options[ 'idle_period' ] = self._idle_period.GetValue()
            HC.options[ 'idle_mouse_period' ] = self._idle_mouse_period.GetValue()
            self._new_options.SetNoneableInteger( 'idle_mode_client_api_timeout', self._idle_mode_client_api_timeout.GetValue() )
            self._new_options.SetInteger( 'system_busy_cpu_percent', self._system_busy_cpu_percent.value() )
            self._new_options.SetNoneableInteger( 'system_busy_cpu_count', self._system_busy_cpu_count.GetValue() )
            HC.options[ 'idle_shutdown' ] = self._idle_shutdown.GetValue()
            HC.options[ 'idle_shutdown_max_minutes' ] = self._idle_shutdown_max_minutes.value()
            self._new_options.SetInteger( 'shutdown_work_period', self._shutdown_work_period.GetValue() )
            self._new_options.SetBoolean( 'file_maintenance_during_idle', self._file_maintenance_during_idle.isChecked() )
            # the velocity controls return a ( num_files, time_delta ) pair, persisted as two separate integers
            file_maintenance_idle_throttle_velocity = self._file_maintenance_idle_throttle_velocity.GetValue()
            ( file_maintenance_idle_throttle_files, file_maintenance_idle_throttle_time_delta ) = file_maintenance_idle_throttle_velocity
            self._new_options.SetInteger( 'file_maintenance_idle_throttle_files', file_maintenance_idle_throttle_files )
            self._new_options.SetInteger( 'file_maintenance_idle_throttle_time_delta', file_maintenance_idle_throttle_time_delta )
            self._new_options.SetBoolean( 'file_maintenance_during_active', self._file_maintenance_during_active.isChecked() )
            # same ( num_files, time_delta ) unpack for the 'active' throttle
            file_maintenance_active_throttle_velocity = self._file_maintenance_active_throttle_velocity.GetValue()
            ( file_maintenance_active_throttle_files, file_maintenance_active_throttle_time_delta ) = file_maintenance_active_throttle_velocity
            self._new_options.SetInteger( 'file_maintenance_active_throttle_files', file_maintenance_active_throttle_files )
            self._new_options.SetInteger( 'file_maintenance_active_throttle_time_delta', file_maintenance_active_throttle_time_delta )
    class _MediaPanel( QW.QWidget ):
        """Options page panel for media viewer behaviour, animation playback, zooms, and per-mime view handling."""
        def __init__( self, parent ):
            QW.QWidget.__init__( self, parent )
            self._new_options = HG.client_controller.new_options
            # stored in HC.options as a 0.0-1.0 fraction; edited here as a 0-100 percentage
            self._animation_start_position = QP.MakeQSpinBox( self, min=0, max=100 )
            self._disable_cv_for_gifs = QW.QCheckBox( self )
            self._disable_cv_for_gifs.setToolTip( 'OpenCV is good at rendering gifs, but if you have problems with it and your graphics card, check this and the less reliable and slower PIL will be used instead. EDIT: OpenCV is much better these days--this is mostly not needed.' )
            self._load_images_with_pil = QW.QCheckBox( self )
            self._load_images_with_pil.setToolTip( 'OpenCV is much faster than PIL, but it is sometimes less reliable. Switch this on if you experience crashes or other unusual problems while importing or viewing certain images. EDIT: OpenCV is much better these days--this is mostly not needed.' )
            self._use_system_ffmpeg = QW.QCheckBox( self )
            self._use_system_ffmpeg.setToolTip( 'Check this to always default to the system ffmpeg in your path, rather than using the static ffmpeg in hydrus\'s bin directory. (requires restart)' )
            self._always_loop_gifs = QW.QCheckBox( self )
            self._always_loop_gifs.setToolTip( 'Some GIFS have metadata specifying how many times they should be played, usually 1. Uncheck this to obey that number.' )
            self._media_viewer_cursor_autohide_time_ms = ClientGUICommon.NoneableSpinCtrl( self, none_phrase = 'do not autohide', min = 100, max = 100000, unit = 'ms' )
            self._anchor_and_hide_canvas_drags = QW.QCheckBox( self )
            self._touchscreen_canvas_drags_unanchor = QW.QCheckBox( self )
            # local import -- presumably avoids a module-level import cycle with the canvas code; TODO confirm
            from hydrus.client.gui.canvas import ClientGUICanvas
            self._media_viewer_zoom_center = ClientGUICommon.BetterChoice()
            for zoom_centerpoint_type in ClientGUICanvas.ZOOM_CENTERPOINT_TYPES:
                self._media_viewer_zoom_center.addItem( ClientGUICanvas.zoom_centerpoints_str_lookup[ zoom_centerpoint_type ], zoom_centerpoint_type )
            tt = 'When you zoom in or out, there is a centerpoint about which the image zooms. This point \'stays still\' while the image expands or shrinks around it. Different centerpoints give different feels, especially if you drag images around a bit.'
            self._media_viewer_zoom_center.setToolTip( tt )
            # free-text comma-separated zoom list; validated live by EventZoomsChanged
            self._media_zooms = QW.QLineEdit( self )
            self._media_zooms.textChanged.connect( self.EventZoomsChanged )
            self._mpv_conf_path = QP.FilePickerCtrl( self, starting_directory = os.path.join( HC.STATIC_DIR, 'mpv-conf' ) )
            self._animated_scanbar_height = QP.MakeQSpinBox( self, min=1, max=255 )
            self._animated_scanbar_nub_width = QP.MakeQSpinBox( self, min=1, max=63 )
            # per-mime media viewer handling list and its add/edit/delete buttons
            self._media_viewer_panel = ClientGUICommon.StaticBox( self, 'media viewer mime handling' )
            media_viewer_list_panel = ClientGUIListCtrl.BetterListCtrlPanel( self._media_viewer_panel )
            self._media_viewer_options = ClientGUIListCtrl.BetterListCtrl( media_viewer_list_panel, CGLC.COLUMN_LIST_MEDIA_VIEWER_OPTIONS.ID, 20, data_to_tuples_func = self._GetListCtrlData, activation_callback = self.EditMediaViewerOptions, use_simple_delete = True )
            media_viewer_list_panel.SetListCtrl( self._media_viewer_options )
            media_viewer_list_panel.AddButton( 'add', self.AddMediaViewerOptions, enabled_check_func = self._CanAddMediaViewOption )
            media_viewer_list_panel.AddButton( 'edit', self.EditMediaViewerOptions, enabled_only_on_selection = True )
            media_viewer_list_panel.AddDeleteButton( enabled_check_func = self._CanDeleteMediaViewOptions )
            #
            # populate the widgets from the currently saved option values
            self._animation_start_position.setValue( int( HC.options['animation_start_position'] * 100.0 ) )
            self._disable_cv_for_gifs.setChecked( self._new_options.GetBoolean( 'disable_cv_for_gifs' ) )
            self._load_images_with_pil.setChecked( self._new_options.GetBoolean( 'load_images_with_pil' ) )
            self._use_system_ffmpeg.setChecked( self._new_options.GetBoolean( 'use_system_ffmpeg' ) )
            self._always_loop_gifs.setChecked( self._new_options.GetBoolean( 'always_loop_gifs' ) )
            self._media_viewer_cursor_autohide_time_ms.SetValue( self._new_options.GetNoneableInteger( 'media_viewer_cursor_autohide_time_ms' ) )
            self._anchor_and_hide_canvas_drags.setChecked( self._new_options.GetBoolean( 'anchor_and_hide_canvas_drags' ) )
            self._touchscreen_canvas_drags_unanchor.setChecked( self._new_options.GetBoolean( 'touchscreen_canvas_drags_unanchor' ) )
            self._animated_scanbar_height.setValue( self._new_options.GetInteger( 'animated_scanbar_height' ) )
            self._animated_scanbar_nub_width.setValue( self._new_options.GetInteger( 'animated_scanbar_nub_width' ) )
            self._media_viewer_zoom_center.SetValue( self._new_options.GetInteger( 'media_viewer_zoom_center' ) )
            media_zooms = self._new_options.GetMediaZooms()
            self._media_zooms.setText( ','.join( ( str( media_zoom ) for media_zoom in media_zooms ) ) )
            # each list row is a flat tuple: ( mime, *view_options )
            all_media_view_options = self._new_options.GetMediaViewOptions()
            for ( mime, view_options ) in all_media_view_options.items():
                data = QP.ListsToTuples( [ mime ] + list( view_options ) )
                self._media_viewer_options.AddDatas( ( data, ) )
            self._media_viewer_options.Sort()
            #
            # lay everything out
            vbox = QP.VBoxLayout()
            text = 'Please be warned that hydrus does not currently zoom in very efficiently at high zooms!'
            text += os.linesep
            text += 'Just be careful at >400%, particularly for already large files--it can lag out and eat a chunk of memory.'
            st = ClientGUICommon.BetterStaticText( self, text )
            st.setObjectName( 'HydrusWarning' )
            QP.AddToLayout( vbox, st )
            rows = []
            rows.append( ( 'Start animations this % in:', self._animation_start_position ) )
            rows.append( ( 'Prefer system FFMPEG:', self._use_system_ffmpeg ) )
            rows.append( ( 'Always Loop GIFs:', self._always_loop_gifs ) )
            rows.append( ( 'Centerpoint for media zooming:', self._media_viewer_zoom_center ) )
            rows.append( ( 'Media zooms:', self._media_zooms ) )
            rows.append( ( 'Set a new mpv.conf on dialog ok?:', self._mpv_conf_path ) )
            rows.append( ( 'Animation scanbar height:', self._animated_scanbar_height ) )
            rows.append( ( 'Animation scanbar nub width:', self._animated_scanbar_nub_width ) )
            rows.append( ( 'Time until mouse cursor autohides on media viewer:', self._media_viewer_cursor_autohide_time_ms ) )
            rows.append( ( 'RECOMMEND WINDOWS ONLY: Hide and anchor mouse cursor on media viewer drags:', self._anchor_and_hide_canvas_drags ) )
            rows.append( ( 'RECOMMEND WINDOWS ONLY: If set to hide and anchor, undo on apparent touchscreen drag:', self._touchscreen_canvas_drags_unanchor ) )
            rows.append( ( 'BUGFIX: Load images with PIL (slower):', self._load_images_with_pil ) )
            rows.append( ( 'BUGFIX: Load gifs with PIL instead of OpenCV (slower, bad transparency):', self._disable_cv_for_gifs ) )
            gridbox = ClientGUICommon.WrapInGrid( self, rows )
            QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            self._media_viewer_panel.Add( media_viewer_list_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
            QP.AddToLayout( vbox, self._media_viewer_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
            self.setLayout( vbox )
        def _CanAddMediaViewOption( self ):
            """True if at least one searchable filetype still has no specific row in the list."""
            return len( self._GetUnsetMediaViewFiletypes() ) > 0
        def _CanDeleteMediaViewOptions( self ):
            """True if something is selected and every selected row is a specific (deletable) filetype, not a general mime class."""
            deletable_mimes = set( HC.SEARCHABLE_MIMES )
            selected_mimes = set()
            for ( mime, media_show_action, media_start_paused, media_start_with_embed, preview_show_action, preview_start_paused, preview_start_with_embed, zoom_info ) in self._media_viewer_options.GetData( only_selected = True ):
                selected_mimes.add( mime )
            if len( selected_mimes ) == 0:
                return False
            all_selected_are_deletable = selected_mimes.issubset( deletable_mimes )
            return all_selected_are_deletable
        def _GetCopyOfGeneralMediaViewOptions( self, desired_mime ):
            """Return a new view-options row for desired_mime, copying the settings of its general mime class's row.
            
            NOTE(review): implicitly returns None if the general mime class has no row -- callers appear to assume one always exists; confirm.
            """
            general_mime_type = HC.mimes_to_general_mimetypes[ desired_mime ]
            for ( mime, media_show_action, media_start_paused, media_start_with_embed, preview_show_action, preview_start_paused, preview_start_with_embed, zoom_info ) in self._media_viewer_options.GetData():
                if mime == general_mime_type:
                    view_options = ( desired_mime, media_show_action, media_start_paused, media_start_with_embed, preview_show_action, preview_start_paused, preview_start_with_embed, zoom_info )
                    return view_options
        def _GetUnsetMediaViewFiletypes( self ):
            """Return the set of searchable mimes that do not yet have a specific row in the list."""
            editable_mimes = set( HC.SEARCHABLE_MIMES )
            set_mimes = set()
            for ( mime, media_show_action, media_start_paused, media_start_with_embed, preview_show_action, preview_start_paused, preview_start_with_embed, zoom_info ) in self._media_viewer_options.GetData():
                set_mimes.add( mime )
            unset_mimes = editable_mimes.difference( set_mimes )
            return unset_mimes
        def _GetListCtrlData( self, data ):
            """Convert a stored row tuple into ( display_tuple, sort_tuple ) for the list control."""
            ( mime, media_show_action, media_start_paused, media_start_with_embed, preview_show_action, preview_start_paused, preview_start_with_embed, zoom_info ) = data
            pretty_mime = self._GetPrettyMime( mime )
            pretty_media_show_action = CC.media_viewer_action_string_lookup[ media_show_action ]
            if media_start_paused:
                pretty_media_show_action += ', start paused'
            if media_start_with_embed:
                pretty_media_show_action += ', start with embed button'
            pretty_preview_show_action = CC.media_viewer_action_string_lookup[ preview_show_action ]
            if preview_start_paused:
                pretty_preview_show_action += ', start paused'
            if preview_start_with_embed:
                pretty_preview_show_action += ', start with embed button'
            # zoom info is only meaningful when the file is actually shown natively or with mpv somewhere
            no_show = { media_show_action, preview_show_action }.isdisjoint( { CC.MEDIA_VIEWER_ACTION_SHOW_WITH_NATIVE, CC.MEDIA_VIEWER_ACTION_SHOW_WITH_MPV } )
            if no_show:
                pretty_zoom_info = ''
            else:
                pretty_zoom_info = str( zoom_info )
            display_tuple = ( pretty_mime, pretty_media_show_action, pretty_preview_show_action, pretty_zoom_info )
            sort_tuple = ( pretty_mime, pretty_media_show_action, pretty_preview_show_action, pretty_zoom_info )
            return ( display_tuple, sort_tuple )
        def _GetPrettyMime( self, mime ):
            """Return a human-readable name for a mime, prefixing specific mimes with their general class name."""
            pretty_mime = HC.mime_string_lookup[ mime ]
            if mime not in HC.GENERAL_FILETYPES:
                pretty_mime = '{}: {}'.format( HC.mime_string_lookup[ HC.mimes_to_general_mimetypes[ mime ] ], pretty_mime )
            return pretty_mime
        def AddMediaViewerOptions( self ):
            """Let the user pick an unset filetype and add a specific view-options row for it via an edit dialog."""
            unset_filetypes = self._GetUnsetMediaViewFiletypes()
            if len( unset_filetypes ) == 0:
                QW.QMessageBox.warning( self, 'Warning', 'You cannot add any more specific filetype options!' )
                return
            choice_tuples = [ ( self._GetPrettyMime( mime ), mime ) for mime in unset_filetypes ]
            try:
                mime = ClientGUIDialogsQuick.SelectFromList( self, 'select the filetype to add', choice_tuples, sort_tuples = True )
            except HydrusExceptions.CancelledException:
                # user backed out of the selection dialog
                return
            # seed the dialog with the settings of the mime's general class
            data = self._GetCopyOfGeneralMediaViewOptions( mime )
            title = 'add media view options information'
            with ClientGUITopLevelWindowsPanels.DialogEdit( self, title ) as dlg:
                panel = ClientGUIScrolledPanelsEdit.EditMediaViewOptionsPanel( dlg, data )
                dlg.SetPanel( panel )
                if dlg.exec() == QW.QDialog.Accepted:
                    new_data = panel.GetValue()
                    self._media_viewer_options.AddDatas( ( new_data, ) )
        def EditMediaViewerOptions( self ):
            """Open an edit dialog for each selected row in turn, replacing the row on ok."""
            for data in self._media_viewer_options.GetData( only_selected = True ):
                title = 'edit media view options information'
                with ClientGUITopLevelWindowsPanels.DialogEdit( self, title ) as dlg:
                    panel = ClientGUIScrolledPanelsEdit.EditMediaViewOptionsPanel( dlg, data )
                    dlg.SetPanel( panel )
                    if dlg.exec() == QW.QDialog.Accepted:
                        new_data = panel.GetValue()
                        self._media_viewer_options.ReplaceData( data, new_data )
        def EventZoomsChanged( self, text ):
            """Live-validate the media zooms text box, flagging it with the 'HydrusInvalid' style on a parse failure.
            
            The parsed list is discarded here; the actual save happens in UpdateOptions.
            """
            try:
                # parse purely for validation -- float() raises ValueError on bad input
                media_zooms = [ float( media_zoom ) for media_zoom in self._media_zooms.text().split( ',' ) ]
                self._media_zooms.setObjectName( '' )
            except ValueError:
                self._media_zooms.setObjectName( 'HydrusInvalid' )
            # re-polish so the stylesheet picks up the objectName change
            self._media_zooms.style().polish( self._media_zooms )
            self._media_zooms.update()
        def UpdateOptions( self ):
            """Flush the panel's widget values into HC.options / new_options, copy any chosen mpv.conf into place, and save the per-mime view options."""
            HC.options[ 'animation_start_position' ] = self._animation_start_position.value() / 100.0
            self._new_options.SetBoolean( 'disable_cv_for_gifs', self._disable_cv_for_gifs.isChecked() )
            self._new_options.SetBoolean( 'load_images_with_pil', self._load_images_with_pil.isChecked() )
            self._new_options.SetBoolean( 'use_system_ffmpeg', self._use_system_ffmpeg.isChecked() )
            self._new_options.SetBoolean( 'always_loop_gifs', self._always_loop_gifs.isChecked() )
            self._new_options.SetBoolean( 'anchor_and_hide_canvas_drags', self._anchor_and_hide_canvas_drags.isChecked() )
            self._new_options.SetBoolean( 'touchscreen_canvas_drags_unanchor', self._touchscreen_canvas_drags_unanchor.isChecked() )
            self._new_options.SetNoneableInteger( 'media_viewer_cursor_autohide_time_ms', self._media_viewer_cursor_autohide_time_ms.GetValue() )
            # if the user picked a valid mpv.conf file, mirror it over the client's live conf
            mpv_conf_path = self._mpv_conf_path.GetPath()
            if mpv_conf_path is not None and mpv_conf_path != '' and os.path.exists( mpv_conf_path ) and os.path.isfile( mpv_conf_path ):
                dest_mpv_conf_path = HG.client_controller.GetMPVConfPath()
                try:
                    HydrusPaths.MirrorFile( mpv_conf_path, dest_mpv_conf_path )
                except Exception as e:
                    # best-effort: report the failure but do not abort the rest of the option save
                    HydrusData.ShowText( 'Could not set the mpv conf path "{}" to "{}"! Error follows!'.format( mpv_conf_path, dest_mpv_conf_path ) )
                    HydrusData.ShowException( e )
            self._new_options.SetInteger( 'animated_scanbar_height', self._animated_scanbar_height.value() )
            self._new_options.SetInteger( 'animated_scanbar_nub_width', self._animated_scanbar_nub_width.value() )
            self._new_options.SetInteger( 'media_viewer_zoom_center', self._media_viewer_zoom_center.GetValue() )
            try:
                media_zooms = [ float( media_zoom ) for media_zoom in self._media_zooms.text().split( ',' ) ]
                # non-positive zooms are silently dropped
                media_zooms = [ media_zoom for media_zoom in media_zooms if media_zoom > 0.0 ]
                if len( media_zooms ) > 0:
                    self._new_options.SetMediaZooms( media_zooms )
            except ValueError:
                HydrusData.ShowText( 'Could not parse those zooms, so they were not saved!' )
            # convert list rows back into the { mime : view_options } mapping the options object stores
            mimes_to_media_view_options = {}
            for data in self._media_viewer_options.GetData():
                data = list( data )
                mime = data[0]
                value = data[1:]
                mimes_to_media_view_options[ mime ] = value
            self._new_options.SetMediaViewOptions( mimes_to_media_view_options )
    class _PopupPanel( QW.QWidget ):
        """Options page panel for the popup message toaster's sizing, freezing, and hiding behaviour."""
        def __init__( self, parent, new_options ):
            QW.QWidget.__init__( self, parent )
            self._new_options = new_options
            #
            # build the widgets
            self._popup_panel = ClientGUICommon.StaticBox( self, 'popup window toaster' )
            self._popup_message_character_width = QP.MakeQSpinBox( self._popup_panel, min = 16, max = 256 )
            self._popup_message_force_min_width = QW.QCheckBox( self._popup_panel )
            self._freeze_message_manager_when_mouse_on_other_monitor = QW.QCheckBox( self._popup_panel )
            self._freeze_message_manager_when_mouse_on_other_monitor.setToolTip( 'This is useful if you have a virtual desktop and find the popup manager restores strangely when you hop back to the hydrus display.' )
            self._freeze_message_manager_when_main_gui_minimised = QW.QCheckBox( self._popup_panel )
            self._freeze_message_manager_when_main_gui_minimised.setToolTip( 'This is useful if the popup toaster restores strangely after minimised changes.' )
            self._hide_message_manager_on_gui_iconise = QW.QCheckBox( self._popup_panel )
            self._hide_message_manager_on_gui_iconise.setToolTip( 'If your message manager does not automatically minimise with your main gui, try this. It can lead to unusual show and positioning behaviour on window managers that do not support it, however.' )
            self._hide_message_manager_on_gui_deactive = QW.QCheckBox( self._popup_panel )
            self._hide_message_manager_on_gui_deactive.setToolTip( 'If your message manager stays up after you minimise the program to the system tray using a custom window manager, try this out! It hides the popup messages as soon as the main gui loses focus.' )
            self._notify_client_api_cookies = QW.QCheckBox( self._popup_panel )
            self._notify_client_api_cookies.setToolTip( 'This will make a short-lived popup message every time you get new cookie information over the Client API.' )
            #
            # populate from the currently saved option values
            self._popup_message_character_width.setValue( self._new_options.GetInteger( 'popup_message_character_width' ) )
            self._popup_message_force_min_width.setChecked( self._new_options.GetBoolean( 'popup_message_force_min_width' ) )
            self._freeze_message_manager_when_mouse_on_other_monitor.setChecked( self._new_options.GetBoolean( 'freeze_message_manager_when_mouse_on_other_monitor' ) )
            self._freeze_message_manager_when_main_gui_minimised.setChecked( self._new_options.GetBoolean( 'freeze_message_manager_when_main_gui_minimised' ) )
            self._hide_message_manager_on_gui_iconise.setChecked( self._new_options.GetBoolean( 'hide_message_manager_on_gui_iconise' ) )
            self._hide_message_manager_on_gui_deactive.setChecked( self._new_options.GetBoolean( 'hide_message_manager_on_gui_deactive' ) )
            self._notify_client_api_cookies.setChecked( self._new_options.GetBoolean( 'notify_client_api_cookies' ) )
            #
            # lay everything out
            rows = []
            rows.append( ( 'Approximate max width of popup messages (in characters): ', self._popup_message_character_width ) )
            rows.append( ( 'BUGFIX: Force this width as the minimum width for all popup messages: ', self._popup_message_force_min_width ) )
            rows.append( ( 'Freeze the popup toaster when mouse is on another display: ', self._freeze_message_manager_when_mouse_on_other_monitor ) )
            rows.append( ( 'Freeze the popup toaster when the main gui is minimised: ', self._freeze_message_manager_when_main_gui_minimised ) )
            rows.append( ( 'BUGFIX: Hide the popup toaster when the main gui is minimised: ', self._hide_message_manager_on_gui_iconise ) )
            rows.append( ( 'BUGFIX: Hide the popup toaster when the main gui loses focus: ', self._hide_message_manager_on_gui_deactive ) )
            rows.append( ( 'Make a short-lived popup on cookie updates through the Client API: ', self._notify_client_api_cookies ) )
            gridbox = ClientGUICommon.WrapInGrid( self._popup_panel, rows )
            self._popup_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            vbox = QP.VBoxLayout()
            QP.AddToLayout( vbox, self._popup_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            vbox.addStretch( 1 )
            self.setLayout( vbox )
        def UpdateOptions( self ):
            """Flush the panel's current widget values back into the new_options object. Called by the options dialog on ok."""
            self._new_options.SetInteger( 'popup_message_character_width', self._popup_message_character_width.value() )
            self._new_options.SetBoolean( 'popup_message_force_min_width', self._popup_message_force_min_width.isChecked() )
            self._new_options.SetBoolean( 'freeze_message_manager_when_mouse_on_other_monitor', self._freeze_message_manager_when_mouse_on_other_monitor.isChecked() )
            self._new_options.SetBoolean( 'freeze_message_manager_when_main_gui_minimised', self._freeze_message_manager_when_main_gui_minimised.isChecked() )
            self._new_options.SetBoolean( 'hide_message_manager_on_gui_iconise', self._hide_message_manager_on_gui_iconise.isChecked() )
            self._new_options.SetBoolean( 'hide_message_manager_on_gui_deactive', self._hide_message_manager_on_gui_deactive.isChecked() )
            self._new_options.SetBoolean( 'notify_client_api_cookies', self._notify_client_api_cookies.isChecked() )
class _RegexPanel( QW.QWidget ):
def __init__( self, parent ):
QW.QWidget.__init__( self, parent )
regex_favourites = HC.options[ 'regex_favourites' ]
self._regex_panel = ClientGUIScrolledPanelsEdit.EditRegexFavourites( self, regex_favourites )
vbox = QP.VBoxLayout()
QP.AddToLayout( vbox, self._regex_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
self.setLayout( vbox )
def UpdateOptions( self ):
regex_favourites = self._regex_panel.GetValue()
HC.options[ 'regex_favourites' ] = regex_favourites
    class _SearchPanel( QW.QWidget ):
        """Options page panel for autocomplete dropdown behaviour and search predicate display."""
        def __init__( self, parent, new_options ):
            QW.QWidget.__init__( self, parent )
            self._new_options = new_options
            #
            # build the widgets
            self._autocomplete_panel = ClientGUICommon.StaticBox( self, 'autocomplete' )
            self._autocomplete_float_main_gui = QW.QCheckBox( self._autocomplete_panel )
            tt = 'The autocomplete dropdown can either \'float\' on top of the main window, or if that does not work well for you, it can embed into the parent panel.'
            self._autocomplete_float_main_gui.setToolTip( tt )
            self._autocomplete_float_frames = QW.QCheckBox( self._autocomplete_panel )
            tt = 'The autocomplete dropdown can either \'float\' on top of dialogs like _manage tags_, or if that does not work well for you (it can sometimes annoyingly overlap the ok/cancel buttons), it can embed into the parent dialog panel.'
            self._autocomplete_float_frames.setToolTip( tt )
            self._ac_read_list_height_num_chars = QP.MakeQSpinBox( self._autocomplete_panel, min = 1, max = 128 )
            tt = 'Read autocompletes are those in search pages, where you are looking through existing tags to find your files.'
            self._ac_read_list_height_num_chars.setToolTip( tt )
            self._ac_write_list_height_num_chars = QP.MakeQSpinBox( self._autocomplete_panel, min = 1, max = 128 )
            tt = 'Write autocompletes are those in most dialogs, where you are adding new tags to files.'
            self._ac_write_list_height_num_chars.setToolTip( tt )
            self._always_show_system_everything = QW.QCheckBox( self._autocomplete_panel )
            tt = 'After users get some experience with the program and a larger collection, they tend to have less use for system:everything.'
            self._always_show_system_everything.setToolTip( tt )
            self._filter_inbox_and_archive_predicates = QW.QCheckBox( self._autocomplete_panel )
            tt = 'If everything is current in the inbox (or archive), then there is no use listing it or its opposite--it either does not change the search or it produces nothing. If you find it jarring though, turn it off here!'
            self._filter_inbox_and_archive_predicates.setToolTip( tt )
            #
            # populate from the currently saved option values
            self._autocomplete_float_main_gui.setChecked( self._new_options.GetBoolean( 'autocomplete_float_main_gui' ) )
            self._autocomplete_float_frames.setChecked( self._new_options.GetBoolean( 'autocomplete_float_frames' ) )
            self._ac_read_list_height_num_chars.setValue( self._new_options.GetInteger( 'ac_read_list_height_num_chars' ) )
            self._ac_write_list_height_num_chars.setValue( self._new_options.GetInteger( 'ac_write_list_height_num_chars' ) )
            self._always_show_system_everything.setChecked( self._new_options.GetBoolean( 'always_show_system_everything' ) )
            self._filter_inbox_and_archive_predicates.setChecked( self._new_options.GetBoolean( 'filter_inbox_and_archive_predicates' ) )
            #
            # lay everything out
            vbox = QP.VBoxLayout()
            message = 'The autocomplete dropdown list is the panel that hangs below the tag input text box on search pages.'
            st = ClientGUICommon.BetterStaticText( self._autocomplete_panel, label = message )
            self._autocomplete_panel.Add( st, CC.FLAGS_CENTER )
            rows = []
            #
            rows.append( ( 'Autocomplete results float in main gui: ', self._autocomplete_float_main_gui ) )
            rows.append( ( 'Autocomplete results float in other windows: ', self._autocomplete_float_frames ) )
            rows.append( ( '\'Read\' autocomplete list height: ', self._ac_read_list_height_num_chars ) )
            rows.append( ( '\'Write\' autocomplete list height: ', self._ac_write_list_height_num_chars ) )
            rows.append( ( 'show system:everything even if total files is over 10,000: ', self._always_show_system_everything ) )
            rows.append( ( 'hide inbox and archive system predicates if either has no files: ', self._filter_inbox_and_archive_predicates ) )
            gridbox = ClientGUICommon.WrapInGrid( self._autocomplete_panel, rows )
            self._autocomplete_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
            #
            QP.AddToLayout( vbox, self._autocomplete_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
            vbox.addStretch( 1 )
            self.setLayout( vbox )
        def UpdateOptions( self ):
            """Flush the panel's current widget values back into the new_options object. Called by the options dialog on ok."""
            self._new_options.SetBoolean( 'autocomplete_float_main_gui', self._autocomplete_float_main_gui.isChecked() )
            self._new_options.SetBoolean( 'autocomplete_float_frames', self._autocomplete_float_frames.isChecked() )
            self._new_options.SetInteger( 'ac_read_list_height_num_chars', self._ac_read_list_height_num_chars.value() )
            self._new_options.SetInteger( 'ac_write_list_height_num_chars', self._ac_write_list_height_num_chars.value() )
            self._new_options.SetBoolean( 'always_show_system_everything', self._always_show_system_everything.isChecked() )
            self._new_options.SetBoolean( 'filter_inbox_and_archive_predicates', self._filter_inbox_and_archive_predicates.isChecked() )
class _SortCollectPanel( QW.QWidget ):
def __init__( self, parent ):
QW.QWidget.__init__( self, parent )
self._default_media_sort = ClientGUIResultsSortCollect.MediaSortControl( self )
self._fallback_media_sort = ClientGUIResultsSortCollect.MediaSortControl( self )
self._save_page_sort_on_change = QW.QCheckBox( self )
self._default_media_collect = ClientGUIResultsSortCollect.MediaCollectControl( self, silent = True )
namespace_sorting_box = ClientGUICommon.StaticBox( self, 'namespace sorting' )
self._namespace_sort_by = ClientGUIListBoxes.QueueListBox( namespace_sorting_box, 8, self._ConvertNamespaceTupleToSortString, self._AddNamespaceSort, self._EditNamespaceSort )
#
self._new_options = HG.client_controller.new_options
try:
self._default_media_sort.SetSort( self._new_options.GetDefaultSort() )
except:
media_sort = ClientMedia.MediaSort( ( 'system', CC.SORT_FILES_BY_FILESIZE ), CC.SORT_ASC )
self._default_media_sort.SetSort( media_sort )
try:
self._fallback_media_sort.SetSort( self._new_options.GetFallbackSort() )
except:
media_sort = ClientMedia.MediaSort( ( 'system', CC.SORT_FILES_BY_IMPORT_TIME ), CC.SORT_ASC )
self._fallback_media_sort.SetSort( media_sort )
self._namespace_sort_by.AddDatas( [ media_sort.sort_type[1] for media_sort in HG.client_controller.new_options.GetDefaultNamespaceSorts() ] )
self._save_page_sort_on_change.setChecked( self._new_options.GetBoolean( 'save_page_sort_on_change' ) )
#
sort_by_text = 'You can manage your namespace sorting schemes here.'
sort_by_text += os.linesep
sort_by_text += 'The client will sort media by comparing their namespaces, moving from left to right until an inequality is found.'
sort_by_text += os.linesep
sort_by_text += 'Any namespaces here will also appear in your collect-by dropdowns.'
namespace_sorting_box.Add( ClientGUICommon.BetterStaticText( namespace_sorting_box, sort_by_text ), CC.FLAGS_EXPAND_PERPENDICULAR )
namespace_sorting_box.Add( self._namespace_sort_by, CC.FLAGS_EXPAND_BOTH_WAYS )
rows = []
rows.append( ( 'Default sort: ', self._default_media_sort ) )
rows.append( ( 'Secondary sort (when primary gives two equal values): ', self._fallback_media_sort ) )
rows.append( ( 'Update default sort every time a new sort is manually chosen: ', self._save_page_sort_on_change ) )
rows.append( ( 'Default collect: ', self._default_media_collect ) )
gridbox = ClientGUICommon.WrapInGrid( self, rows )
vbox = QP.VBoxLayout()
QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
QP.AddToLayout( vbox, namespace_sorting_box, CC.FLAGS_EXPAND_BOTH_WAYS )
self.setLayout( vbox )
def _AddNamespaceSort( self ):
default = ( ( 'creator', 'series', 'page' ), ClientTags.TAG_DISPLAY_ACTUAL )
return self._EditNamespaceSort( default )
def _ConvertNamespaceTupleToSortString( self, sort_data ):
( namespaces, tag_display_type ) = sort_data
return '-'.join( namespaces )
def _EditNamespaceSort( self, sort_data ):
return ClientGUITags.EditNamespaceSort( self, sort_data )
def UpdateOptions( self ):
self._new_options.SetDefaultSort( self._default_media_sort.GetSort() )
self._new_options.SetFallbackSort( self._fallback_media_sort.GetSort() )
self._new_options.SetBoolean( 'save_page_sort_on_change', self._save_page_sort_on_change.isChecked() )
self._new_options.SetDefaultCollect( self._default_media_collect.GetValue() )
namespace_sorts = [ ClientMedia.MediaSort( sort_type = ( 'namespaces', sort_data ) ) for sort_data in self._namespace_sort_by.GetData() ]
self._new_options.SetDefaultNamespaceSorts( namespace_sorts )
class _SpeedAndMemoryPanel( QW.QWidget ):
    """Options page for performance knobs: thumbnail/image/image-tile cache
    sizes and timeouts, media-viewer prefetch, the native video buffer, and a
    forced search limit. Widget state is read from HC.options and the
    new_options store in __init__ and written back in UpdateOptions; the
    Event*Update handlers refresh the human-readable estimate labels."""

    def __init__( self, parent, new_options ):
        """parent: containing widget; new_options: the client options store."""
        QW.QWidget.__init__( self, parent )
        self._new_options = new_options
        # --- widget construction ---
        thumbnail_cache_panel = ClientGUICommon.StaticBox( self, 'thumbnail cache' )
        self._thumbnail_cache_size = QP.MakeQSpinBox( thumbnail_cache_panel, min=5, max=3000 )  # MB
        self._thumbnail_cache_size.valueChanged.connect( self.EventThumbnailsUpdate )
        self._estimated_number_thumbnails = QW.QLabel( '', thumbnail_cache_panel )
        self._thumbnail_cache_timeout = ClientGUITime.TimeDeltaButton( thumbnail_cache_panel, min = 300, days = True, hours = True, minutes = True )
        self._thumbnail_cache_timeout.setToolTip( 'The amount of time after which a thumbnail in the cache will naturally be removed, if it is not shunted out due to a new member exceeding the size limit.' )
        image_cache_panel = ClientGUICommon.StaticBox( self, 'image cache' )
        self._fullscreen_cache_size = QP.MakeQSpinBox( image_cache_panel, min=25, max=8192 )  # MB
        self._fullscreen_cache_size.valueChanged.connect( self.EventImageCacheUpdate )
        self._estimated_number_fullscreens = QW.QLabel( '', image_cache_panel )
        self._image_cache_timeout = ClientGUITime.TimeDeltaButton( image_cache_panel, min = 300, days = True, hours = True, minutes = True )
        self._image_cache_timeout.setToolTip( 'The amount of time after which a rendered image in the cache will naturally be removed, if it is not shunted out due to a new member exceeding the size limit.' )
        self._media_viewer_prefetch_delay_base_ms = QP.MakeQSpinBox( image_cache_panel, min = 0, max = 2000 )
        tt = 'How long to wait, after the current image is rendered, to start rendering neighbours. Does not matter so much any more, but if you have CPU lag, you can try boosting it a bit.'
        self._media_viewer_prefetch_delay_base_ms.setToolTip( tt )
        self._media_viewer_prefetch_num_previous = QP.MakeQSpinBox( image_cache_panel, min = 0, max = 5 )
        self._media_viewer_prefetch_num_next = QP.MakeQSpinBox( image_cache_panel, min = 0, max = 5 )
        # percentage limits each get a companion static text showing the
        # equivalent 16:9 resolution (filled in by EventImageCacheUpdate)
        self._image_cache_storage_limit_percentage = QP.MakeQSpinBox( image_cache_panel, min = 20, max = 50 )
        self._image_cache_storage_limit_percentage_st = ClientGUICommon.BetterStaticText( image_cache_panel, label = '' )
        self._image_cache_prefetch_limit_percentage = QP.MakeQSpinBox( image_cache_panel, min = 5, max = 20 )
        self._image_cache_prefetch_limit_percentage_st = ClientGUICommon.BetterStaticText( image_cache_panel, label = '' )
        image_tile_cache_panel = ClientGUICommon.StaticBox( self, 'image tile cache' )
        self._image_tile_cache_size = ClientGUIControls.BytesControl( image_tile_cache_panel )  # bytes, not MB
        self._image_tile_cache_size.valueChanged.connect( self.EventImageTilesUpdate )
        self._estimated_number_image_tiles = QW.QLabel( '', image_tile_cache_panel )
        self._image_tile_cache_timeout = ClientGUITime.TimeDeltaButton( image_tile_cache_panel, min = 300, hours = True, minutes = True )
        self._image_tile_cache_timeout.setToolTip( 'The amount of time after which a rendered image tile in the cache will naturally be removed, if it is not shunted out due to a new member exceeding the size limit.' )
        self._ideal_tile_dimension = QP.MakeQSpinBox( image_tile_cache_panel, min = 256, max = 4096 )
        self._ideal_tile_dimension.setToolTip( 'This is the square size the system will aim for. Smaller tiles are more memory efficient but prone to warping and other artifacts. Extreme values may waste CPU.' )
        #
        buffer_panel = ClientGUICommon.StaticBox( self, 'video buffer' )
        self._video_buffer_size_mb = QP.MakeQSpinBox( buffer_panel, min=48, max=16*1024 )
        self._video_buffer_size_mb.valueChanged.connect( self.EventVideoBufferUpdate )
        self._estimated_number_video_frames = QW.QLabel( '', buffer_panel )
        #
        misc_panel = ClientGUICommon.StaticBox( self, 'misc' )
        self._forced_search_limit = ClientGUICommon.NoneableSpinCtrl( misc_panel, '', min = 1, max = 100000 )
        # --- populate widgets from stored options ---
        # legacy HC.options values are stored in bytes; spinboxes show MB
        self._thumbnail_cache_size.setValue( int( HC.options['thumbnail_cache_size'] // 1048576 ) )
        self._fullscreen_cache_size.setValue( int( HC.options['fullscreen_cache_size'] // 1048576 ) )
        self._image_tile_cache_size.SetValue( self._new_options.GetInteger( 'image_tile_cache_size' ) )
        self._thumbnail_cache_timeout.SetValue( self._new_options.GetInteger( 'thumbnail_cache_timeout' ) )
        self._image_cache_timeout.SetValue( self._new_options.GetInteger( 'image_cache_timeout' ) )
        self._image_tile_cache_timeout.SetValue( self._new_options.GetInteger( 'image_tile_cache_timeout' ) )
        self._ideal_tile_dimension.setValue( self._new_options.GetInteger( 'ideal_tile_dimension' ) )
        self._video_buffer_size_mb.setValue( self._new_options.GetInteger( 'video_buffer_size_mb' ) )
        self._forced_search_limit.SetValue( self._new_options.GetNoneableInteger( 'forced_search_limit' ) )
        self._media_viewer_prefetch_delay_base_ms.setValue( self._new_options.GetInteger( 'media_viewer_prefetch_delay_base_ms' ) )
        self._media_viewer_prefetch_num_previous.setValue( self._new_options.GetInteger( 'media_viewer_prefetch_num_previous' ) )
        self._media_viewer_prefetch_num_next.setValue( self._new_options.GetInteger( 'media_viewer_prefetch_num_next' ) )
        self._image_cache_storage_limit_percentage.setValue( self._new_options.GetInteger( 'image_cache_storage_limit_percentage' ) )
        self._image_cache_prefetch_limit_percentage.setValue( self._new_options.GetInteger( 'image_cache_prefetch_limit_percentage' ) )
        # --- layout ---
        vbox = QP.VBoxLayout()
        text = 'These options are advanced! PROTIP: Do not go crazy here.'
        st = ClientGUICommon.BetterStaticText( self, text )
        QP.AddToLayout( vbox, st, CC.FLAGS_CENTER )
        # pair each size control with its estimate label
        thumbnails_sizer = QP.HBoxLayout()
        QP.AddToLayout( thumbnails_sizer, self._thumbnail_cache_size, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( thumbnails_sizer, self._estimated_number_thumbnails, CC.FLAGS_CENTER_PERPENDICULAR )
        fullscreens_sizer = QP.HBoxLayout()
        QP.AddToLayout( fullscreens_sizer, self._fullscreen_cache_size, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( fullscreens_sizer, self._estimated_number_fullscreens, CC.FLAGS_CENTER_PERPENDICULAR )
        image_tiles_sizer = QP.HBoxLayout()
        QP.AddToLayout( image_tiles_sizer, self._image_tile_cache_size, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( image_tiles_sizer, self._estimated_number_image_tiles, CC.FLAGS_CENTER_PERPENDICULAR )
        image_cache_storage_sizer = QP.HBoxLayout()
        QP.AddToLayout( image_cache_storage_sizer, self._image_cache_storage_limit_percentage, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( image_cache_storage_sizer, self._image_cache_storage_limit_percentage_st, CC.FLAGS_CENTER_PERPENDICULAR )
        image_cache_prefetch_sizer = QP.HBoxLayout()
        QP.AddToLayout( image_cache_prefetch_sizer, self._image_cache_prefetch_limit_percentage, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( image_cache_prefetch_sizer, self._image_cache_prefetch_limit_percentage_st, CC.FLAGS_CENTER_PERPENDICULAR )
        video_buffer_sizer = QP.HBoxLayout()
        QP.AddToLayout( video_buffer_sizer, self._video_buffer_size_mb, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( video_buffer_sizer, self._estimated_number_video_frames, CC.FLAGS_CENTER_PERPENDICULAR )
        # thumbnail cache section
        text = 'Does not change much, thumbs are cheap.'
        st = ClientGUICommon.BetterStaticText( thumbnail_cache_panel, text )
        thumbnail_cache_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
        rows = []
        rows.append( ( 'MB memory reserved for thumbnail cache:', thumbnails_sizer ) )
        rows.append( ( 'Thumbnail cache timeout:', self._thumbnail_cache_timeout ) )
        gridbox = ClientGUICommon.WrapInGrid( thumbnail_cache_panel, rows )
        thumbnail_cache_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, thumbnail_cache_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        # image cache section
        text = 'Important if you want smooth navigation between different images in the media viewer. If you deal with huge images, bump up cache size and max size that can be cached or prefetched, but be prepared to pay the memory price.'
        text += os.linesep * 2
        text += 'Allowing more prefetch is great, but it needs CPU.'
        st = ClientGUICommon.BetterStaticText( image_cache_panel, text )
        st.setWordWrap( True )
        image_cache_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
        rows = []
        rows.append( ( 'MB memory reserved for image cache:', fullscreens_sizer ) )
        rows.append( ( 'Image cache timeout:', self._image_cache_timeout ) )
        rows.append( ( 'Maximum image size (in % of cache) that can be cached:', image_cache_storage_sizer ) )
        rows.append( ( 'Maximum image size (in % of cache) that will be prefetched:', image_cache_prefetch_sizer ) )
        rows.append( ( 'Base ms delay for media viewer neighbour render prefetch:', self._media_viewer_prefetch_delay_base_ms ) )
        rows.append( ( 'Num previous to prefetch:', self._media_viewer_prefetch_num_previous ) )
        rows.append( ( 'Num next to prefetch:', self._media_viewer_prefetch_num_next ) )
        gridbox = ClientGUICommon.WrapInGrid( image_cache_panel, rows )
        image_cache_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, image_cache_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        # image tile cache section
        text = 'Important if you do a lot of zooming in and out on the same image or a small number of comparison images.'
        st = ClientGUICommon.BetterStaticText( image_tile_cache_panel, text )
        image_tile_cache_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
        rows = []
        rows.append( ( 'MB memory reserved for image tile cache:', image_tiles_sizer ) )
        rows.append( ( 'Image tile cache timeout:', self._image_tile_cache_timeout ) )
        rows.append( ( 'Ideal tile width/height px:', self._ideal_tile_dimension ) )
        gridbox = ClientGUICommon.WrapInGrid( image_tile_cache_panel, rows )
        image_tile_cache_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, image_tile_cache_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        # video buffer section (native renderer only, not mpv)
        text = 'This old option does not apply to mpv! It only applies to the native hydrus animation renderer!'
        text += os.linesep
        text += 'Hydrus video rendering is CPU intensive.'
        text += os.linesep
        text += 'If you have a lot of memory, you can set a generous potential video buffer to compensate.'
        text += os.linesep
        text += 'If the video buffer can hold an entire video, it only needs to be rendered once and will play and loop very smoothly.'
        text += os.linesep
        text += 'PROTIP: Do not go crazy here.'
        st = ClientGUICommon.BetterStaticText( buffer_panel, text )
        st.setWordWrap( True )
        buffer_panel.Add( st, CC.FLAGS_EXPAND_PERPENDICULAR )
        rows = []
        rows.append( ( 'MB memory for video buffer: ', video_buffer_sizer ) )
        gridbox = ClientGUICommon.WrapInGrid( buffer_panel, rows )
        buffer_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, buffer_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        # misc section
        rows = []
        rows.append( ( 'Forced system:limit for all searches: ', self._forced_search_limit ) )
        gridbox = ClientGUICommon.WrapInGrid( misc_panel, rows )
        misc_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, misc_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        #
        vbox.addStretch( 1 )
        self.setLayout( vbox )
        # --- signal hookups and initial estimate refresh ---
        self._image_cache_storage_limit_percentage.valueChanged.connect( self.EventImageCacheUpdate )
        self._image_cache_prefetch_limit_percentage.valueChanged.connect( self.EventImageCacheUpdate )
        self.EventImageCacheUpdate()
        self.EventThumbnailsUpdate( self._thumbnail_cache_size.value() )
        self.EventImageTilesUpdate()
        self.EventVideoBufferUpdate( self._video_buffer_size_mb.value() )

    def EventImageCacheUpdate( self ):
        """Refresh the three estimate labels for the image cache section.

        Estimates assume 3 bytes per pixel and, for the percentage labels, a
        16:9 image shape.
        """
        cache_size = self._fullscreen_cache_size.value() * 1048576  # MB -> bytes
        display_size = ClientGUIFunctions.GetDisplaySize( self )
        estimated_bytes_per_fullscreen = 3 * display_size.width() * display_size.height()
        estimate = cache_size // estimated_bytes_per_fullscreen
        self._estimated_number_fullscreens.setText( '(about {}-{} images the size of your screen)'.format( HydrusData.ToHumanInt( estimate // 2 ), HydrusData.ToHumanInt( estimate * 2 ) ) )
        # storage limit: largest 16:9 image that fits in the allowed slice
        num_pixels = cache_size * ( self._image_cache_storage_limit_percentage.value() / 100 ) / 3
        unit_square = num_pixels / ( 16 * 9 )
        unit_length = unit_square ** 0.5
        resolution = ( int( 16 * unit_length ), int( 9 * unit_length ) )
        self._image_cache_storage_limit_percentage_st.setText( 'about a {} image'.format( HydrusData.ConvertResolutionToPrettyString( resolution ) ) )
        # prefetch limit: same calculation with the prefetch percentage
        num_pixels = cache_size * ( self._image_cache_prefetch_limit_percentage.value() / 100 ) / 3
        unit_square = num_pixels / ( 16 * 9 )
        unit_length = unit_square ** 0.5
        resolution = ( int( 16 * unit_length ), int( 9 * unit_length ) )
        self._image_cache_prefetch_limit_percentage_st.setText( 'about a {} image'.format( HydrusData.ConvertResolutionToPrettyString( resolution ) ) )

    def EventImageTilesUpdate( self ):
        """Refresh the tile-cache estimate label ('fullscreens' of tiles)."""
        value = self._image_tile_cache_size.GetValue()  # bytes
        display_size = ClientGUIFunctions.GetDisplaySize( self )
        estimated_bytes_per_fullscreen = 3 * display_size.width() * display_size.height()
        estimate = value // estimated_bytes_per_fullscreen
        self._estimated_number_image_tiles.setText( '(about {} fullscreens)'.format( HydrusData.ToHumanInt( estimate ) ) )

    def EventThumbnailsUpdate( self, value ):
        """Refresh the thumbnail estimate label. value is the spinbox MB value."""
        ( thumbnail_width, thumbnail_height ) = HC.options[ 'thumbnail_dimensions' ]
        res_string = HydrusData.ConvertResolutionToPrettyString( ( thumbnail_width, thumbnail_height ) )
        estimated_bytes_per_thumb = 3 * thumbnail_width * thumbnail_height
        estimated_thumbs = ( value * 1024 * 1024 ) // estimated_bytes_per_thumb
        self._estimated_number_thumbnails.setText( '(at '+res_string+', about '+HydrusData.ToHumanInt(estimated_thumbs)+' thumbnails)' )

    def EventVideoBufferUpdate( self, value ):
        """Refresh the video-buffer estimate label. value is the spinbox MB value."""
        estimated_720p_frames = int( ( value * 1024 * 1024 ) // ( 1280 * 720 * 3 ) )
        self._estimated_number_video_frames.setText( '(about '+HydrusData.ToHumanInt(estimated_720p_frames)+' frames of 720p video)' )

    def UpdateOptions( self ):
        """Write all widget values back to HC.options and the new_options store."""
        HC.options[ 'thumbnail_cache_size' ] = self._thumbnail_cache_size.value() * 1048576  # MB -> bytes
        HC.options[ 'fullscreen_cache_size' ] = self._fullscreen_cache_size.value() * 1048576
        self._new_options.SetInteger( 'image_tile_cache_size', self._image_tile_cache_size.GetValue() )
        self._new_options.SetInteger( 'thumbnail_cache_timeout', self._thumbnail_cache_timeout.GetValue() )
        self._new_options.SetInteger( 'image_cache_timeout', self._image_cache_timeout.GetValue() )
        self._new_options.SetInteger( 'image_tile_cache_timeout', self._image_tile_cache_timeout.GetValue() )
        self._new_options.SetInteger( 'ideal_tile_dimension', self._ideal_tile_dimension.value() )
        self._new_options.SetInteger( 'media_viewer_prefetch_delay_base_ms', self._media_viewer_prefetch_delay_base_ms.value() )
        self._new_options.SetInteger( 'media_viewer_prefetch_num_previous', self._media_viewer_prefetch_num_previous.value() )
        self._new_options.SetInteger( 'media_viewer_prefetch_num_next', self._media_viewer_prefetch_num_next.value() )
        self._new_options.SetInteger( 'image_cache_storage_limit_percentage', self._image_cache_storage_limit_percentage.value() )
        self._new_options.SetInteger( 'image_cache_prefetch_limit_percentage', self._image_cache_prefetch_limit_percentage.value() )
        self._new_options.SetInteger( 'video_buffer_size_mb', self._video_buffer_size_mb.value() )
        self._new_options.SetNoneableInteger( 'forced_search_limit', self._forced_search_limit.GetValue() )
class _StylePanel( QW.QWidget ):
    """Options page for choosing the Qt widget style and stylesheet.

    Changes are applied live via StyleChanged so the user can preview them;
    UpdateOptions persists the choices to the options store."""

    def __init__( self, parent, new_options ):
        QW.QWidget.__init__( self, parent )
        self._new_options = new_options
        # --- widget construction ---
        self._qt_style_name = ClientGUICommon.BetterChoice( self )
        self._qt_stylesheet_name = ClientGUICommon.BetterChoice( self )
        # None stands for 'use default' in both dropdowns
        self._qt_style_name.addItem( 'use default ("{}")'.format( ClientGUIStyle.ORIGINAL_STYLE_NAME ), None )
        try:
            for name in ClientGUIStyle.GetAvailableStyles():
                self._qt_style_name.addItem( name, name )
        except HydrusExceptions.DataMissing as e:
            # style discovery failed; show the error but keep the dialog usable
            HydrusData.ShowException( e )
        self._qt_stylesheet_name.addItem( 'use default', None )
        try:
            for name in ClientGUIStyle.GetAvailableStylesheets():
                self._qt_stylesheet_name.addItem( name, name )
        except HydrusExceptions.DataMissing as e:
            HydrusData.ShowException( e )
        # --- populate from stored options ---
        self._qt_style_name.SetValue( self._new_options.GetNoneableString( 'qt_style_name' ) )
        self._qt_stylesheet_name.SetValue( self._new_options.GetNoneableString( 'qt_stylesheet_name' ) )
        # --- layout ---
        vbox = QP.VBoxLayout()
        #
        text = 'The current styles are what your Qt has available, the stylesheets are what .css and .qss files are currently in install_dir/static/qss.'
        st = ClientGUICommon.BetterStaticText( self, label = text )
        QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )
        rows = []
        rows.append( ( 'Qt style:', self._qt_style_name ) )
        rows.append( ( 'Qt stylesheet:', self._qt_stylesheet_name ) )
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        self.setLayout( vbox )
        # apply style changes immediately for live preview
        self._qt_style_name.currentIndexChanged.connect( self.StyleChanged )
        self._qt_stylesheet_name.currentIndexChanged.connect( self.StyleChanged )

    def StyleChanged( self ):
        """Apply the currently selected style and stylesheet to the running app.

        Each application is attempted independently so a failing style does not
        prevent the stylesheet from applying (and vice versa).
        """
        qt_style_name = self._qt_style_name.GetValue()
        qt_stylesheet_name = self._qt_stylesheet_name.GetValue()
        try:
            if qt_style_name is None:
                # None selection means: revert to the style active at boot
                ClientGUIStyle.SetStyleFromName( ClientGUIStyle.ORIGINAL_STYLE_NAME )
            else:
                ClientGUIStyle.SetStyleFromName( qt_style_name )
        except Exception as e:
            QW.QMessageBox.critical( self, 'Critical', 'Could not apply style: {}'.format( str( e ) ) )
        try:
            if qt_stylesheet_name is None:
                ClientGUIStyle.ClearStylesheet()
            else:
                ClientGUIStyle.SetStylesheetFromPath( qt_stylesheet_name )
        except Exception as e:
            QW.QMessageBox.critical( self, 'Critical', 'Could not apply stylesheet: {}'.format( str( e ) ) )

    def UpdateOptions( self ):
        """Persist the selected style/stylesheet names (or None for defaults)."""
        self._new_options.SetNoneableString( 'qt_style_name', self._qt_style_name.GetValue() )
        self._new_options.SetNoneableString( 'qt_stylesheet_name', self._qt_stylesheet_name.GetValue() )
class _SystemPanel( QW.QWidget ):
    """Options page for system-sleep behaviour: a post-wake network grace
    period and whether the file system shares that wait."""

    def __init__( self, parent, new_options ):
        QW.QWidget.__init__( self, parent )
        self._new_options = new_options
        # build the widgets
        sleep_panel = ClientGUICommon.StaticBox( self, 'system sleep' )
        self._wake_delay_period = QP.MakeQSpinBox( sleep_panel, min = 0, max = 60 )
        self._wake_delay_period.setToolTip( 'It sometimes takes a few seconds for your network adapter to reconnect after a wake. This adds a grace period after a detected wake-from-sleep to allow your OS to sort that out before Hydrus starts making requests.' )
        self._file_system_waits_on_wakeup = QW.QCheckBox( sleep_panel )
        self._file_system_waits_on_wakeup.setToolTip( 'This is useful if your hydrus is stored on a NAS that takes a few seconds to get going after your machine resumes from sleep.' )
        # load current values
        self._wake_delay_period.setValue( self._new_options.GetInteger( 'wake_delay_period' ) )
        self._file_system_waits_on_wakeup.setChecked( self._new_options.GetBoolean( 'file_system_waits_on_wakeup' ) )
        # lay it all out
        rows = [
            ( 'After a wake from system sleep, wait this many seconds before allowing new network access:', self._wake_delay_period ),
            ( 'Include the file system in this wait: ', self._file_system_waits_on_wakeup ),
        ]
        grid = ClientGUICommon.WrapInGrid( sleep_panel, rows )
        sleep_panel.Add( grid, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        layout = QP.VBoxLayout()
        QP.AddToLayout( layout, sleep_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        layout.addStretch( 1 )
        self.setLayout( layout )

    def UpdateOptions( self ):
        """Write the sleep-related widget values back to the options store."""
        self._new_options.SetInteger( 'wake_delay_period', self._wake_delay_period.value() )
        self._new_options.SetBoolean( 'file_system_waits_on_wakeup', self._file_system_waits_on_wakeup.isChecked() )
class _SystemTrayPanel( QW.QWidget ):
    """Options page for system tray behaviour (icon visibility, minimise/close
    to tray, start minimised). Controls are disabled when no supported tray
    exists or, on non-Windows, when the user is not in advanced mode."""

    def __init__( self, parent, new_options ):
        QW.QWidget.__init__( self, parent )
        self._new_options = new_options
        self._always_show_system_tray_icon = QW.QCheckBox( self )
        self._minimise_client_to_system_tray = QW.QCheckBox( self )
        self._close_client_to_system_tray = QW.QCheckBox( self )
        self._start_client_in_system_tray = QW.QCheckBox( self )
        # --- populate from stored options ---
        self._always_show_system_tray_icon.setChecked( self._new_options.GetBoolean( 'always_show_system_tray_icon' ) )
        self._minimise_client_to_system_tray.setChecked( self._new_options.GetBoolean( 'minimise_client_to_system_tray' ) )
        self._close_client_to_system_tray.setChecked( self._new_options.GetBoolean( 'close_client_to_system_tray' ) )
        self._start_client_in_system_tray.setChecked( self._new_options.GetBoolean( 'start_client_in_system_tray' ) )
        # --- layout ---
        vbox = QP.VBoxLayout()
        rows = []
        rows.append( ( 'Always show the hydrus system tray icon: ', self._always_show_system_tray_icon ) )
        rows.append( ( 'Minimise the main window to system tray: ', self._minimise_client_to_system_tray ) )
        rows.append( ( 'Close the main window to system tray: ', self._close_client_to_system_tray ) )
        rows.append( ( 'Start the client minimised to system tray: ', self._start_client_in_system_tray ) )
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        # local import; presumably avoids a module-level import cycle or an
        # early Qt dependency -- confirm against project convention
        from hydrus.client.gui import ClientGUISystemTray
        if not ClientGUISystemTray.SystemTrayAvailable():
            # no tray support at all: explain and lock every control
            QP.AddToLayout( vbox, ClientGUICommon.BetterStaticText( self, 'Unfortunately, your system does not seem to have a supported system tray.' ), CC.FLAGS_EXPAND_PERPENDICULAR )
            self._always_show_system_tray_icon.setEnabled( False )
            self._minimise_client_to_system_tray.setEnabled( False )
            self._close_client_to_system_tray.setEnabled( False )
            self._start_client_in_system_tray.setEnabled( False )
        elif not HC.PLATFORM_WINDOWS:
            # tray exists but is flaky off-Windows: advanced users get it with
            # a warning, everyone else gets the controls disabled
            if not HG.client_controller.new_options.GetBoolean( 'advanced_mode' ):
                label = 'This is turned off for non-advanced non-Windows users for now.'
                self._always_show_system_tray_icon.setEnabled( False )
                self._minimise_client_to_system_tray.setEnabled( False )
                self._close_client_to_system_tray.setEnabled( False )
                self._start_client_in_system_tray.setEnabled( False )
            else:
                label = 'This can be buggy/crashy on non-Windows, hydev will keep working on this.'
            QP.AddToLayout( vbox, ClientGUICommon.BetterStaticText( self, label ), CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.addStretch( 1 )
        self.setLayout( vbox )

    def UpdateOptions( self ):
        """Write the tray checkbox states back to the options store."""
        self._new_options.SetBoolean( 'always_show_system_tray_icon', self._always_show_system_tray_icon.isChecked() )
        self._new_options.SetBoolean( 'minimise_client_to_system_tray', self._minimise_client_to_system_tray.isChecked() )
        self._new_options.SetBoolean( 'close_client_to_system_tray', self._close_client_to_system_tray.isChecked() )
        self._new_options.SetBoolean( 'start_client_in_system_tray', self._start_client_in_system_tray.isChecked() )
class _TagsPanel( QW.QWidget ):
    """Options page for general tag behaviour: default services for dialogs
    and search pages, default sort, parent-expansion defaults, autocomplete
    selection behaviour, and the favourite tags list."""

    def __init__( self, parent, new_options ):
        QW.QWidget.__init__( self, parent )
        self._new_options = new_options
        # --- widget construction ---
        general_panel = ClientGUICommon.StaticBox( self, 'general tag options' )
        self._default_tag_sort = ClientGUITagSorting.TagSortControl( general_panel, self._new_options.GetDefaultTagSort(), show_siblings = True )
        self._default_tag_repository = ClientGUICommon.BetterChoice( general_panel )
        self._default_tag_service_search_page = ClientGUICommon.BetterChoice( general_panel )
        self._expand_parents_on_storage_taglists = QW.QCheckBox( general_panel )
        self._expand_parents_on_storage_autocomplete_taglists = QW.QCheckBox( general_panel )
        self._ac_select_first_with_count = QW.QCheckBox( general_panel )
        #
        favourites_panel = ClientGUICommon.StaticBox( self, 'favourite tags' )
        desc = 'These tags will appear in your tag autocomplete results area, under the \'favourites\' tab.'
        favourites_st = ClientGUICommon.BetterStaticText( favourites_panel, desc )
        favourites_st.setWordWrap( True )
        self._favourites = ClientGUIListBoxes.ListBoxTagsStringsAddRemove( favourites_panel, CC.COMBINED_TAG_SERVICE_KEY, ClientTags.TAG_DISPLAY_STORAGE )
        self._favourites_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( favourites_panel, self._favourites.AddTags, CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_TAG_SERVICE_KEY, show_paste_button = True )
        # --- populate service dropdowns and current values ---
        # search-page dropdown additionally offers 'all known tags'
        self._default_tag_service_search_page.addItem( 'all known tags', CC.COMBINED_TAG_SERVICE_KEY )
        services = HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES )
        for service in services:
            self._default_tag_repository.addItem( service.GetName(), service.GetServiceKey() )
            self._default_tag_service_search_page.addItem( service.GetName(), service.GetServiceKey() )
        default_tag_repository_key = HC.options[ 'default_tag_repository' ]
        self._default_tag_repository.SetValue( default_tag_repository_key )
        self._default_tag_service_search_page.SetValue( new_options.GetKey( 'default_tag_service_search_page' ) )
        self._expand_parents_on_storage_taglists.setChecked( self._new_options.GetBoolean( 'expand_parents_on_storage_taglists' ) )
        self._expand_parents_on_storage_taglists.setToolTip( 'This affects taglists in places like the manage tags dialog, where you edit tags as they actually are, and implied parents hang below tags.' )
        self._expand_parents_on_storage_autocomplete_taglists.setChecked( self._new_options.GetBoolean( 'expand_parents_on_storage_autocomplete_taglists' ) )
        self._expand_parents_on_storage_autocomplete_taglists.setToolTip( 'This affects the autocomplete results taglist.' )
        self._ac_select_first_with_count.setChecked( self._new_options.GetBoolean( 'ac_select_first_with_count' ) )
        #
        self._favourites.SetTags( new_options.GetStringList( 'favourite_tags' ) )
        # --- layout ---
        vbox = QP.VBoxLayout()
        rows = []
        rows.append( ( 'Default tag service in manage tag dialogs: ', self._default_tag_repository ) )
        rows.append( ( 'Default tag service in search pages: ', self._default_tag_service_search_page ) )
        rows.append( ( 'Default tag sort: ', self._default_tag_sort ) )
        rows.append( ( 'Show parents expanded by default on edit/write taglists: ', self._expand_parents_on_storage_taglists ) )
        rows.append( ( 'Show parents expanded by default on edit/write autocomplete taglists: ', self._expand_parents_on_storage_autocomplete_taglists ) )
        rows.append( ( 'By default, select the first tag result with actual count in write-autocomplete: ', self._ac_select_first_with_count ) )
        gridbox = ClientGUICommon.WrapInGrid( general_panel, rows )
        general_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, general_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        #
        favourites_panel.Add( favourites_st, CC.FLAGS_EXPAND_PERPENDICULAR )
        favourites_panel.Add( self._favourites, CC.FLAGS_EXPAND_BOTH_WAYS )
        favourites_panel.Add( self._favourites_input )
        QP.AddToLayout( vbox, favourites_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        #
        self.setLayout( vbox )

    def UpdateOptions( self ):
        """Write widget state back to HC.options and the new_options store."""
        HC.options[ 'default_tag_repository' ] = self._default_tag_repository.GetValue()
        self._new_options.SetDefaultTagSort( self._default_tag_sort.GetValue() )
        self._new_options.SetBoolean( 'expand_parents_on_storage_taglists', self._expand_parents_on_storage_taglists.isChecked() )
        self._new_options.SetBoolean( 'expand_parents_on_storage_autocomplete_taglists', self._expand_parents_on_storage_autocomplete_taglists.isChecked() )
        self._new_options.SetBoolean( 'ac_select_first_with_count', self._ac_select_first_with_count.isChecked() )
        self._new_options.SetKey( 'default_tag_service_search_page', self._default_tag_service_search_page.GetValue() )
        #
        self._new_options.SetStringList( 'favourite_tags', list( self._favourites.GetTags() ) )
class _TagPresentationPanel( QW.QWidget ):
    """Options page for how tags look: tag summary generators for thumbnails
    and the media viewer, namespace rendering, and per-namespace colours."""

    def __init__( self, parent, new_options ):
        QW.QWidget.__init__( self, parent )
        self._new_options = new_options
        # --- tag summary generator buttons, one per presentation slot ---
        tag_summary_generator = self._new_options.GetTagSummaryGenerator( 'thumbnail_top' )
        self._thumbnail_top = ClientGUITags.TagSummaryGeneratorButton( self, tag_summary_generator )
        tag_summary_generator = self._new_options.GetTagSummaryGenerator( 'thumbnail_bottom_right' )
        self._thumbnail_bottom_right = ClientGUITags.TagSummaryGeneratorButton( self, tag_summary_generator )
        tag_summary_generator = self._new_options.GetTagSummaryGenerator( 'media_viewer_top' )
        self._media_viewer_top = ClientGUITags.TagSummaryGeneratorButton( self, tag_summary_generator )
        # --- namespace rendering widgets ---
        render_panel = ClientGUICommon.StaticBox( self, 'namespace rendering' )
        render_st = ClientGUICommon.BetterStaticText( render_panel, label = 'Namespaced tags are stored and directly edited in hydrus as "namespace:subtag", but most presentation windows can display them differently.' )
        self._show_namespaces = QW.QCheckBox( render_panel )
        self._namespace_connector = QW.QLineEdit( render_panel )
        self._replace_tag_underscores_with_spaces = QW.QCheckBox( render_panel )
        # --- namespace colour widgets ---
        namespace_colours_panel = ClientGUICommon.StaticBox( self, 'namespace colours' )
        self._namespace_colours = ClientGUIListBoxes.ListBoxTagsColourOptions( namespace_colours_panel, HC.options[ 'namespace_colours' ] )
        self._edit_namespace_colour = QW.QPushButton( 'edit selected', namespace_colours_panel )
        self._edit_namespace_colour.clicked.connect( self.EventEditNamespaceColour )
        # typing a namespace and hitting enter adds it with a random colour
        self._new_namespace_colour = QW.QLineEdit( namespace_colours_panel )
        self._new_namespace_colour.installEventFilter( ClientGUICommon.TextCatchEnterEventFilter( self._new_namespace_colour, self.AddNamespaceColour ) )
        # --- populate from stored options ---
        self._show_namespaces.setChecked( new_options.GetBoolean( 'show_namespaces' ) )
        self._namespace_connector.setText( new_options.GetString( 'namespace_connector' ) )
        self._replace_tag_underscores_with_spaces.setChecked( new_options.GetBoolean( 'replace_tag_underscores_with_spaces' ) )
        # --- layout ---
        namespace_colours_panel.Add( self._namespace_colours, CC.FLAGS_EXPAND_BOTH_WAYS )
        namespace_colours_panel.Add( self._new_namespace_colour, CC.FLAGS_EXPAND_PERPENDICULAR )
        namespace_colours_panel.Add( self._edit_namespace_colour, CC.FLAGS_EXPAND_PERPENDICULAR )
        #
        vbox = QP.VBoxLayout()
        #
        rows = []
        rows.append( ( 'On thumbnail top:', self._thumbnail_top ) )
        rows.append( ( 'On thumbnail bottom-right:', self._thumbnail_bottom_right ) )
        rows.append( ( 'On media viewer top:', self._media_viewer_top ) )
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        #
        rows = []
        rows.append( ( 'Show namespaces: ', self._show_namespaces ) )
        rows.append( ( 'If shown, namespace connecting string: ', self._namespace_connector ) )
        rows.append( ( 'EXPERIMENTAL: Replace all underscores with spaces: ', self._replace_tag_underscores_with_spaces ) )
        gridbox = ClientGUICommon.WrapInGrid( render_panel, rows )
        render_panel.Add( render_st, CC.FLAGS_EXPAND_PERPENDICULAR )
        render_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        QP.AddToLayout( vbox, render_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        #
        QP.AddToLayout( vbox, namespace_colours_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        #
        self.setLayout( vbox )

    def EventEditNamespaceColour( self ):
        """Open a colour picker for each selected namespace and store the pick."""
        results = self._namespace_colours.GetSelectedNamespaceColours()
        for ( namespace, ( r, g, b ) ) in list( results.items() ):
            colour = QG.QColor( r, g, b )
            colour = QW.QColorDialog.getColor( colour, self, 'Namespace colour', QW.QColorDialog.ShowAlphaChannel )
            # an invalid colour means the user cancelled the dialog
            if colour.isValid():
                self._namespace_colours.SetNamespaceColour( namespace, colour )

    def AddNamespaceColour( self ):
        """Add the typed namespace to the colour list with a random colour."""
        namespace = self._new_namespace_colour.text()
        if namespace != '':
            self._namespace_colours.SetNamespaceColour( namespace, QG.QColor( random.randint(0,255), random.randint(0,255), random.randint(0,255) ) )
            self._new_namespace_colour.clear()

    def UpdateOptions( self ):
        """Write presentation widget state back to the options stores."""
        self._new_options.SetTagSummaryGenerator( 'thumbnail_top', self._thumbnail_top.GetValue() )
        self._new_options.SetTagSummaryGenerator( 'thumbnail_bottom_right', self._thumbnail_bottom_right.GetValue() )
        self._new_options.SetTagSummaryGenerator( 'media_viewer_top', self._media_viewer_top.GetValue() )
        self._new_options.SetBoolean( 'show_namespaces', self._show_namespaces.isChecked() )
        self._new_options.SetString( 'namespace_connector', self._namespace_connector.text() )
        self._new_options.SetBoolean( 'replace_tag_underscores_with_spaces', self._replace_tag_underscores_with_spaces.isChecked() )
        HC.options[ 'namespace_colours' ] = self._namespace_colours.GetNamespaceColours()
class _TagSuggestionsPanel( QW.QWidget ):
    """Options page configuring the 'suggested tags' column of the manage tags dialog.
    
    Covers the four suggesters: favourites, related tags, file lookup scripts
    and recent tags, each on its own notebook tab.
    """
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        suggested_tags_panel = ClientGUICommon.StaticBox( self, 'suggested tags' )
        
        self._suggested_tags_width = QP.MakeQSpinBox( suggested_tags_panel, min=20, max=65535 )
        
        self._suggested_tags_layout = ClientGUICommon.BetterChoice( suggested_tags_panel )
        
        self._suggested_tags_layout.addItem( 'notebook', 'notebook' )
        self._suggested_tags_layout.addItem( 'side-by-side', 'columns' )
        
        suggest_tags_panel_notebook = QW.QTabWidget( suggested_tags_panel )
        
        # favourites tab
        
        suggested_tags_favourites_panel = QW.QWidget( suggest_tags_panel_notebook )
        
        suggested_tags_favourites_panel.setMinimumWidth( 400 )
        
        self._suggested_favourites_services = ClientGUICommon.BetterChoice( suggested_tags_favourites_panel )
        
        tag_services = HG.client_controller.services_manager.GetServices( HC.REAL_TAG_SERVICES )
        
        for tag_service in tag_services:
            
            self._suggested_favourites_services.addItem( tag_service.GetName(), tag_service.GetServiceKey() )
            
        
        self._suggested_favourites = ClientGUIListBoxes.ListBoxTagsStringsAddRemove( suggested_tags_favourites_panel, CC.COMBINED_TAG_SERVICE_KEY, ClientTags.TAG_DISPLAY_STORAGE )
        
        # per-service favourites edited this session, keyed by service key
        self._current_suggested_favourites_service = None
        self._suggested_favourites_dict = {}
        
        self._suggested_favourites_input = ClientGUIACDropdown.AutoCompleteDropdownTagsWrite( suggested_tags_favourites_panel, self._suggested_favourites.AddTags, CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_TAG_SERVICE_KEY, show_paste_button = True )
        
        # related tags tab
        
        suggested_tags_related_panel = QW.QWidget( suggest_tags_panel_notebook )
        
        self._show_related_tags = QW.QCheckBox( suggested_tags_related_panel )
        
        self._related_tags_search_1_duration_ms = QP.MakeQSpinBox( suggested_tags_related_panel, min=50, max=60000 )
        self._related_tags_search_2_duration_ms = QP.MakeQSpinBox( suggested_tags_related_panel, min=50, max=60000 )
        self._related_tags_search_3_duration_ms = QP.MakeQSpinBox( suggested_tags_related_panel, min=50, max=60000 )
        
        # file lookup scripts tab
        
        suggested_tags_file_lookup_script_panel = QW.QWidget( suggest_tags_panel_notebook )
        
        self._show_file_lookup_script_tags = QW.QCheckBox( suggested_tags_file_lookup_script_panel )
        
        self._favourite_file_lookup_script = ClientGUICommon.BetterChoice( suggested_tags_file_lookup_script_panel )
        
        script_names = sorted( HG.client_controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_PARSE_ROOT_FILE_LOOKUP ) )
        
        for name in script_names:
            
            self._favourite_file_lookup_script.addItem( name, name )
            
        
        # recent tags tab
        
        suggested_tags_recent_panel = QW.QWidget( suggest_tags_panel_notebook )
        
        self._num_recent_tags = ClientGUICommon.NoneableSpinCtrl( suggested_tags_recent_panel, 'number of recent tags to show', min = 1, none_phrase = 'do not show' )
        
        # populate the controls from the current option values
        
        self._suggested_tags_width.setValue( self._new_options.GetInteger( 'suggested_tags_width' ) )
        self._suggested_tags_layout.SetValue( self._new_options.GetNoneableString( 'suggested_tags_layout' ) )
        self._show_related_tags.setChecked( self._new_options.GetBoolean( 'show_related_tags' ) )
        self._related_tags_search_1_duration_ms.setValue( self._new_options.GetInteger( 'related_tags_search_1_duration_ms' ) )
        self._related_tags_search_2_duration_ms.setValue( self._new_options.GetInteger( 'related_tags_search_2_duration_ms' ) )
        self._related_tags_search_3_duration_ms.setValue( self._new_options.GetInteger( 'related_tags_search_3_duration_ms' ) )
        self._show_file_lookup_script_tags.setChecked( self._new_options.GetBoolean( 'show_file_lookup_script_tags' ) )
        self._favourite_file_lookup_script.SetValue( self._new_options.GetNoneableString( 'favourite_file_lookup_script' ) )
        self._num_recent_tags.SetValue( self._new_options.GetNoneableInteger( 'num_recent_tags' ) )
        
        # lay out the favourites tab
        
        panel_vbox = QP.VBoxLayout()
        
        QP.AddToLayout( panel_vbox, self._suggested_favourites_services, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( panel_vbox, self._suggested_favourites, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( panel_vbox, self._suggested_favourites_input, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        suggested_tags_favourites_panel.setLayout( panel_vbox )
        
        # lay out the related tags tab
        
        panel_vbox = QP.VBoxLayout()
        
        rows = []
        
        rows.append( ( 'Show related tags on single-file manage tags windows: ', self._show_related_tags ) )
        rows.append( ( 'Initial search duration (ms): ', self._related_tags_search_1_duration_ms ) )
        rows.append( ( 'Medium search duration (ms): ', self._related_tags_search_2_duration_ms ) )
        rows.append( ( 'Thorough search duration (ms): ', self._related_tags_search_3_duration_ms ) )
        
        gridbox = ClientGUICommon.WrapInGrid( suggested_tags_related_panel, rows )
        
        desc = 'This will search the database for statistically related tags based on what your focused file already has.'
        
        QP.AddToLayout( panel_vbox, ClientGUICommon.BetterStaticText(suggested_tags_related_panel,desc), CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( panel_vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        suggested_tags_related_panel.setLayout( panel_vbox )
        
        # lay out the file lookup scripts tab
        
        panel_vbox = QP.VBoxLayout()
        
        rows = []
        
        rows.append( ( 'Show file lookup scripts on single-file manage tags windows: ', self._show_file_lookup_script_tags ) )
        rows.append( ( 'Favourite file lookup script: ', self._favourite_file_lookup_script ) )
        
        gridbox = ClientGUICommon.WrapInGrid( suggested_tags_file_lookup_script_panel, rows )
        
        QP.AddToLayout( panel_vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        suggested_tags_file_lookup_script_panel.setLayout( panel_vbox )
        
        # lay out the recent tags tab
        
        panel_vbox = QP.VBoxLayout()
        
        QP.AddToLayout( panel_vbox, self._num_recent_tags, CC.FLAGS_EXPAND_PERPENDICULAR )
        panel_vbox.addStretch( 1 )
        
        suggested_tags_recent_panel.setLayout( panel_vbox )
        
        # assemble the notebook and the overall panel
        
        suggest_tags_panel_notebook.addTab( suggested_tags_favourites_panel, 'favourites' )
        suggest_tags_panel_notebook.addTab( suggested_tags_related_panel, 'related' )
        suggest_tags_panel_notebook.addTab( suggested_tags_file_lookup_script_panel, 'file lookup scripts' )
        suggest_tags_panel_notebook.addTab( suggested_tags_recent_panel, 'recent' )
        
        rows = []
        
        rows.append( ( 'Width of suggested tags columns: ', self._suggested_tags_width ) )
        rows.append( ( 'Column layout: ', self._suggested_tags_layout ) )
        
        gridbox = ClientGUICommon.WrapInGrid( suggested_tags_panel, rows )
        
        desc = 'The manage tags dialog can provide several kinds of tag suggestions. For simplicity, most are turned off by default.'
        
        suggested_tags_panel.Add( ClientGUICommon.BetterStaticText( suggested_tags_panel, desc ), CC.FLAGS_EXPAND_PERPENDICULAR )
        suggested_tags_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        suggested_tags_panel.Add( suggest_tags_panel_notebook, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, suggested_tags_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.setLayout( vbox )
        
        self._suggested_favourites_services.currentIndexChanged.connect( self.EventSuggestedFavouritesService )
        
        # load favourites for the initially selected service
        self.EventSuggestedFavouritesService( None )
        
    
    def _SaveCurrentSuggestedFavourites( self ):
        """Stash the currently edited favourites list under its service key."""
        
        if self._current_suggested_favourites_service is not None:
            
            self._suggested_favourites_dict[ self._current_suggested_favourites_service ] = self._suggested_favourites.GetTags()
            
        
    
    def EventSuggestedFavouritesService( self, index ):
        """Switch the favourites editor to the newly selected tag service.
        
        *index* is the Qt signal payload and is not used here.
        """
        
        self._SaveCurrentSuggestedFavourites()
        
        self._current_suggested_favourites_service = self._suggested_favourites_services.GetValue()
        
        # prefer edits made this session; otherwise load the stored favourites
        if self._current_suggested_favourites_service in self._suggested_favourites_dict:
            
            favourites = self._suggested_favourites_dict[ self._current_suggested_favourites_service ]
            
        else:
            
            favourites = self._new_options.GetSuggestedTagsFavourites( self._current_suggested_favourites_service )
            
        
        self._suggested_favourites.SetTagServiceKey( self._current_suggested_favourites_service )
        self._suggested_favourites.SetTags( favourites )
        self._suggested_favourites_input.SetTagServiceKey( self._current_suggested_favourites_service )
        self._suggested_favourites_input.SetDisplayTagServiceKey( self._current_suggested_favourites_service )
        
    
    def UpdateOptions( self ):
        """Write the panel's current state back into the new options object."""
        
        self._new_options.SetInteger( 'suggested_tags_width', self._suggested_tags_width.value() )
        self._new_options.SetNoneableString( 'suggested_tags_layout', self._suggested_tags_layout.GetValue() )
        
        self._SaveCurrentSuggestedFavourites()
        
        for ( service_key, favourites ) in list(self._suggested_favourites_dict.items()):
            
            self._new_options.SetSuggestedTagsFavourites( service_key, favourites )
            
        
        self._new_options.SetBoolean( 'show_related_tags', self._show_related_tags.isChecked() )
        self._new_options.SetInteger( 'related_tags_search_1_duration_ms', self._related_tags_search_1_duration_ms.value() )
        self._new_options.SetInteger( 'related_tags_search_2_duration_ms', self._related_tags_search_2_duration_ms.value() )
        self._new_options.SetInteger( 'related_tags_search_3_duration_ms', self._related_tags_search_3_duration_ms.value() )
        self._new_options.SetBoolean( 'show_file_lookup_script_tags', self._show_file_lookup_script_tags.isChecked() )
        self._new_options.SetNoneableString( 'favourite_file_lookup_script', self._favourite_file_lookup_script.GetValue() )
        self._new_options.SetNoneableInteger( 'num_recent_tags', self._num_recent_tags.GetValue() )
class _ThumbnailsPanel( QW.QWidget ):
    """Options page for thumbnail dimensions, borders, scrolling and background image."""
    
    def __init__( self, parent, new_options ):
        
        QW.QWidget.__init__( self, parent )
        
        self._new_options = new_options
        
        self._thumbnail_width = QP.MakeQSpinBox( self, min=20, max=2048 )
        self._thumbnail_height = QP.MakeQSpinBox( self, min=20, max=2048 )
        self._thumbnail_border = QP.MakeQSpinBox( self, min=0, max=20 )
        self._thumbnail_margin = QP.MakeQSpinBox( self, min=0, max=20 )
        
        self._video_thumbnail_percentage_in = QP.MakeQSpinBox( self, min=0, max=100 )
        
        self._thumbnail_scroll_rate = QW.QLineEdit( self )
        
        self._thumbnail_fill = QW.QCheckBox( self )
        
        self._thumbnail_visibility_scroll_percent = QP.MakeQSpinBox( self, min=1, max=99 )
        self._thumbnail_visibility_scroll_percent.setToolTip( 'Lower numbers will cause fewer scrolls, higher numbers more.' )
        
        self._media_background_bmp_path = QP.FilePickerCtrl( self )
        
        # populate the controls from the current option values
        
        ( thumbnail_width, thumbnail_height ) = HC.options[ 'thumbnail_dimensions' ]
        
        self._thumbnail_width.setValue( thumbnail_width )
        self._thumbnail_height.setValue( thumbnail_height )
        self._thumbnail_border.setValue( self._new_options.GetInteger( 'thumbnail_border' ) )
        self._thumbnail_margin.setValue( self._new_options.GetInteger( 'thumbnail_margin' ) )
        self._video_thumbnail_percentage_in.setValue( self._new_options.GetInteger( 'video_thumbnail_percentage_in' ) )
        self._thumbnail_scroll_rate.setText( self._new_options.GetString( 'thumbnail_scroll_rate' ) )
        self._thumbnail_fill.setChecked( self._new_options.GetBoolean( 'thumbnail_fill' ) )
        self._thumbnail_visibility_scroll_percent.setValue( self._new_options.GetInteger( 'thumbnail_visibility_scroll_percent' ) )
        
        media_background_bmp_path = self._new_options.GetNoneableString( 'media_background_bmp_path' )
        
        if media_background_bmp_path is not None:
            
            self._media_background_bmp_path.SetPath( media_background_bmp_path )
            
        
        # layout
        
        rows = []
        
        rows.append( ( 'Thumbnail width: ', self._thumbnail_width ) )
        rows.append( ( 'Thumbnail height: ', self._thumbnail_height ) )
        rows.append( ( 'Thumbnail border: ', self._thumbnail_border ) )
        rows.append( ( 'Thumbnail margin: ', self._thumbnail_margin ) )
        rows.append( ( 'Generate video thumbnails this % in: ', self._video_thumbnail_percentage_in ) )
        rows.append( ( 'Do not scroll down on key navigation if thumbnail at least this % visible: ', self._thumbnail_visibility_scroll_percent ) )
        rows.append( ( 'EXPERIMENTAL: Scroll thumbnails at this rate per scroll tick: ', self._thumbnail_scroll_rate ) )
        rows.append( ( 'EXPERIMENTAL: Zoom thumbnails so they \'fill\' their space: ', self._thumbnail_fill ) )
        rows.append( ( 'EXPERIMENTAL: Image path for thumbnail panel background image (set blank to clear): ', self._media_background_bmp_path ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        
        self.setLayout( vbox )
        
    
    def UpdateOptions( self ):
        """Write the panel's state back to HC.options and the new options object."""
        
        new_thumbnail_dimensions = [self._thumbnail_width.value(), self._thumbnail_height.value()]
        
        HC.options[ 'thumbnail_dimensions' ] = new_thumbnail_dimensions
        
        self._new_options.SetInteger( 'thumbnail_border', self._thumbnail_border.value() )
        self._new_options.SetInteger( 'thumbnail_margin', self._thumbnail_margin.value() )
        self._new_options.SetInteger( 'video_thumbnail_percentage_in', self._video_thumbnail_percentage_in.value() )
        
        # only save the scroll rate when it parses as a float; keep the old value otherwise.
        # the previous bare 'except: pass' also swallowed KeyboardInterrupt/SystemExit.
        thumbnail_scroll_rate = self._thumbnail_scroll_rate.text()
        
        try:
            
            float( thumbnail_scroll_rate )
            
        except ValueError:
            
            pass
            
        else:
            
            self._new_options.SetString( 'thumbnail_scroll_rate', thumbnail_scroll_rate )
            
        
        self._new_options.SetBoolean( 'thumbnail_fill', self._thumbnail_fill.isChecked() )
        self._new_options.SetInteger( 'thumbnail_visibility_scroll_percent', self._thumbnail_visibility_scroll_percent.value() )
        
        media_background_bmp_path = self._media_background_bmp_path.GetPath()
        
        if media_background_bmp_path == '':
            
            media_background_bmp_path = None
            
        
        self._new_options.SetNoneableString( 'media_background_bmp_path', media_background_bmp_path )
def CommitChanges( self ):
    """Push every active options page's state into the options objects and save them."""
    
    for page in self._listbook.GetActivePages():
        
        page.UpdateOptions()
        
    
    try:
        
        HG.client_controller.WriteSynchronous( 'save_options', HC.options )
        
        HG.client_controller.WriteSynchronous( 'serialisable', self._new_options )
        
    except:
        
        # surface any save failure to the user instead of crashing the dialog
        QW.QMessageBox.critical( self, 'Error', traceback.format_exc() )
class ManageURLsPanel( ClientGUIScrolledPanels.ManagePanel ):
    """Dialog panel for viewing, adding and removing known URLs on one or more files.
    
    Edits are applied to local media copies immediately and committed to the
    database as content updates when the dialog is OKed.
    """
    
    def __init__( self, parent, media ):
        
        ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
        
        media = ClientMedia.FlattenMedia( media )
        
        # work on copies so edits can be collected and committed in one go
        self._current_media = [ m.Duplicate() for m in media ]
        
        self._multiple_files_warning = ClientGUICommon.BetterStaticText( self, label = 'Warning: you are editing urls for multiple files!\nBe very careful about adding URLs here, as they will apply to everything.\nAdding the same URL to multiple files is only appropriate for gallery-type URLs!' )
        self._multiple_files_warning.setObjectName( 'HydrusWarning' )
        
        if len( self._current_media ) == 1:
            
            self._multiple_files_warning.hide()
            
        
        self._urls_listbox = QW.QListWidget( self )
        self._urls_listbox.setSortingEnabled( True )
        self._urls_listbox.setSelectionMode( QW.QAbstractItemView.ExtendedSelection )
        self._urls_listbox.itemDoubleClicked.connect( self.EventListDoubleClick )
        
        self._listbox_event_filter = QP.WidgetEventFilter( self._urls_listbox )
        self._listbox_event_filter.EVT_KEY_DOWN( self.EventListKeyDown )
        
        ( width, height ) = ClientGUIFunctions.ConvertTextToPixels( self._urls_listbox, ( 120, 10 ) )
        
        self._urls_listbox.setMinimumWidth( width )
        self._urls_listbox.setMinimumHeight( height )
        
        self._url_input = QW.QLineEdit( self )
        # pressing enter in the input box adds the url
        self._url_input.installEventFilter( ClientGUICommon.TextCatchEnterEventFilter( self._url_input, self.AddURL ) )
        
        self._copy_button = ClientGUICommon.BetterButton( self, 'copy all', self._Copy )
        self._paste_button = ClientGUICommon.BetterButton( self, 'paste', self._Paste )
        
        # NOTE(review): these two sets appear unused in this class -- confirm before removing
        self._urls_to_add = set()
        self._urls_to_remove = set()
        
        # content updates accumulated here and written in CommitChanges
        self._pending_content_updates = []
        
        self._current_urls_count = collections.Counter()
        
        self._UpdateList()
        
        hbox = QP.HBoxLayout()
        
        QP.AddToLayout( hbox, self._copy_button, CC.FLAGS_CENTER_PERPENDICULAR )
        QP.AddToLayout( hbox, self._paste_button, CC.FLAGS_CENTER_PERPENDICULAR )
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, self._multiple_files_warning, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._urls_listbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( vbox, self._url_input, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, hbox, CC.FLAGS_ON_RIGHT )
        
        self.widget().setLayout( vbox )
        
        self._my_shortcut_handler = ClientGUIShortcuts.ShortcutsHandler( self, [ 'global', 'media', 'main_gui' ] )
        
        ClientGUIFunctions.SetFocusLater( self._url_input )
        
    
    def _Copy( self ):
        """Copy all currently listed urls to the clipboard, one per line."""
        
        urls = sorted( self._current_urls_count.keys() )
        
        text = os.linesep.join( urls )
        
        HG.client_controller.pub( 'clipboard', 'text', text )
        
    
    def _EnterURL( self, url, only_add = False ):
        """Normalise *url* and add it to every current file that lacks it.
        
        NOTE(review): *only_add* is never read in this body -- confirm whether
        callers still need it.
        """
        
        normalised_url = HG.client_controller.network_engine.domain_manager.NormaliseURL( url )
        
        addee_media = set()
        
        for m in self._current_media:
            
            locations_manager = m.GetLocationsManager()
            
            if normalised_url not in locations_manager.GetURLs():
                
                addee_media.add( m )
                
            
        
        if len( addee_media ) > 0:
            
            addee_hashes = { m.GetHash() for m in addee_media }
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD, ( ( normalised_url, ), addee_hashes ) )
            
            # apply to the local media copies now; the db write happens in CommitChanges
            for m in addee_media:
                
                m.GetMediaResult().ProcessContentUpdate( CC.COMBINED_LOCAL_FILE_SERVICE_KEY, content_update )
                
            
            self._pending_content_updates.append( content_update )
            
        
        self._UpdateList()
        
    
    def _Paste( self ):
        """Add every non-empty line of clipboard text as a url."""
        
        try:
            
            raw_text = HG.client_controller.GetClipboardText()
            
        except HydrusExceptions.DataMissing as e:
            
            QW.QMessageBox.warning( self, 'Warning', str(e) )
            
            return
            
        
        try:
            
            for url in HydrusText.DeserialiseNewlinedTexts( raw_text ):
                
                if url != '':
                    
                    self._EnterURL( url, only_add = True )
                    
                
            
        except Exception as e:
            
            QW.QMessageBox.warning( self, 'Warning', 'I could not understand what was in the clipboard: {}'.format( e ) )
            
        
    
    def _RemoveURL( self, url ):
        """Remove *url* from every current file that has it."""
        
        removee_media = set()
        
        for m in self._current_media:
            
            locations_manager = m.GetLocationsManager()
            
            if url in locations_manager.GetURLs():
                
                removee_media.add( m )
                
            
        
        if len( removee_media ) > 0:
            
            removee_hashes = { m.GetHash() for m in removee_media }
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_DELETE, ( ( url, ), removee_hashes ) )
            
            # apply to the local media copies now; the db write happens in CommitChanges
            for m in removee_media:
                
                m.GetMediaResult().ProcessContentUpdate( CC.COMBINED_LOCAL_FILE_SERVICE_KEY, content_update )
                
            
            self._pending_content_updates.append( content_update )
            
        
        self._UpdateList()
        
    
    def _SetSearchFocus( self ):
        """Move keyboard focus to the url input box."""
        
        self._url_input.setFocus( QC.Qt.OtherFocusReason )
        
    
    def _UpdateList( self ):
        """Rebuild the listbox and the per-url counts from the current media."""
        
        self._urls_listbox.clear()
        
        self._current_urls_count = collections.Counter()
        
        for m in self._current_media:
            
            locations_manager = m.GetLocationsManager()
            
            for url in locations_manager.GetURLs():
                
                self._current_urls_count[ url ] += 1
                
            
        
        for ( url, count ) in self._current_urls_count.items():
            
            if len( self._current_media ) == 1:
                
                label = url
                
            else:
                
                # with multiple files, show how many of them carry this url
                label = '{} ({})'.format( url, count )
                
            
            item = QW.QListWidgetItem()
            
            item.setText( label )
            # the raw url travels in UserRole so selection handlers can fetch it
            item.setData( QC.Qt.UserRole, url )
            
            self._urls_listbox.addItem( item )
            
        
    
    def EventListDoubleClick( self, item ):
        """Remove the selected urls; a single removal is put back in the input for editing."""
        
        urls = [ QP.GetClientData( self._urls_listbox, selection.row() ) for selection in list( self._urls_listbox.selectedIndexes() ) ]
        
        for url in urls:
            
            self._RemoveURL( url )
            
        
        if len( urls ) == 1:
            
            url = urls[0]
            
            self._url_input.setText( url )
            
        
    
    def EventListKeyDown( self, event ):
        """Delete-type keys remove the selected urls; other keys propagate."""
        
        ( modifier, key ) = ClientGUIShortcuts.ConvertKeyEventToSimpleTuple( event )
        
        if key in ClientGUIShortcuts.DELETE_KEYS_QT:
            
            urls = [ QP.GetClientData( self._urls_listbox, selection.row() ) for selection in list( self._urls_listbox.selectedIndexes() ) ]
            
            for url in urls:
                
                self._RemoveURL( url )
                
            
        else:
            
            return True # was: event.ignore()
            
        
    
    def AddURL( self ):
        """Add the typed url; an empty input OKs the parent dialog instead."""
        
        url = self._url_input.text()
        
        if url == '':
            
            self.parentWidget().DoOK()
            
        else:
            
            try:
                
                self._EnterURL( url )
                
                self._url_input.clear()
                
            except Exception as e:
                
                QW.QMessageBox.warning( self, 'Warning', 'I could not add that URL: {}'.format( e ) )
                
            
        
    
    def CommitChanges( self ):
        """Write any pending url content updates to the database."""
        
        if len( self._pending_content_updates ) > 0:
            
            service_keys_to_content_updates = { CC.COMBINED_LOCAL_FILE_SERVICE_KEY : self._pending_content_updates }
            
            HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
            
        
    
    def ProcessApplicationCommand( self, command: CAC.ApplicationCommand ):
        """Handle shortcut commands aimed at this panel; return True when consumed."""
        
        command_processed = True
        
        if command.IsSimpleCommand():
            
            action = command.GetSimpleAction()
            
            if action == CAC.SIMPLE_MANAGE_FILE_URLS:
                
                # the same shortcut that opens the dialog also OKs it
                self._OKParent()
                
            elif action == CAC.SIMPLE_SET_SEARCH_FOCUS:
                
                self._SetSearchFocus()
                
            else:
                
                command_processed = False
                
            
        else:
            
            command_processed = False
            
        
        return command_processed
class RepairFileSystemPanel( ClientGUIScrolledPanels.ManagePanel ):
    """Boot-time dialog that lets the user remap missing client file storage subdirectories.
    
    'f' prefixes hold original files and must be resolved; 't'/'r' prefixes are
    thumbnails, which may be left unresolved and regenerated later.
    """
    
    def __init__( self, parent, missing_locations ):
        
        ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
        
        self._only_thumbs = True
        
        # prefix -> last-known (wrong) base location
        self._incorrect_locations = {}
        # prefix -> ( candidate base location, bool: subdirectory found there )
        self._correct_locations = {}
        
        for ( incorrect_location, prefix ) in missing_locations:
            
            self._incorrect_locations[ prefix ] = incorrect_location
            
            if prefix.startswith( 'f' ):
                
                self._only_thumbs = False
                
            
        
        text = 'This dialog has launched because some expected file storage directories were not found. This is a serious error. You have two options:'
        text += os.linesep * 2
        text += '1) If you know what these should be (e.g. you recently remapped their external drive to another location), update the paths here manually. For most users, this will be clicking _add a possibly correct location_ and then select the new folder where the subdirectories all went. You can repeat this if your folders are missing in multiple locations. Check everything reports _ok!_'
        text += os.linesep * 2
        text += 'Although it is best if you can find everything, you only _have_ to fix the subdirectories starting with \'f\', which store your original files. Those starting \'t\' and \'r\' are for your thumbnails, which can be regenerated with a bit of work.'
        text += os.linesep * 2
        text += 'Then hit \'apply\', and the client will launch. You should double-check all your locations under database->migrate database immediately.'
        text += os.linesep * 2
        text += '2) If the locations are not available, or you do not know what they should be, or you wish to fix this outside of the program, hit \'cancel\' to gracefully cancel client boot. Feel free to contact hydrus dev for help.'
        
        if self._only_thumbs:
            
            text += os.linesep * 2
            text += 'SPECIAL NOTE FOR YOUR SITUATION: The only paths missing are thumbnail paths. If you cannot recover these folders, you can hit apply to create empty paths at the original or corrected locations and then run a maintenance routine to regenerate the thumbnails from their originals.'
            
        
        st = ClientGUICommon.BetterStaticText( self, text )
        st.setWordWrap( True )
        
        self._locations = ClientGUIListCtrl.BetterListCtrl( self, CGLC.COLUMN_LIST_REPAIR_LOCATIONS.ID, 12, self._ConvertPrefixToListCtrlTuples, activation_callback = self._SetLocations )
        
        self._set_button = ClientGUICommon.BetterButton( self, 'set correct location', self._SetLocations )
        self._add_button = ClientGUICommon.BetterButton( self, 'add a possibly correct location (let the client figure out what it contains)', self._AddLocation )
        
        # add a button here for 'try to fill them in for me'. you give it a dir, and it tries to figure out and fill in the prefixes for you
        
        self._locations.AddDatas( [ prefix for ( incorrect_location, prefix ) in missing_locations ] )
        
        self._locations.Sort()
        
        vbox = QP.VBoxLayout()
        
        QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )
        QP.AddToLayout( vbox, self._locations, CC.FLAGS_EXPAND_BOTH_WAYS )
        QP.AddToLayout( vbox, self._set_button, CC.FLAGS_ON_RIGHT )
        QP.AddToLayout( vbox, self._add_button, CC.FLAGS_ON_RIGHT )
        
        self.widget().setLayout( vbox )
        
    
    def _AddLocation( self ):
        """Ask for a candidate directory and record every prefix that exists beneath it."""
        
        with QP.DirDialog( self, 'Select the potential correct location.' ) as dlg:
            
            if dlg.exec() == QW.QDialog.Accepted:
                
                path = dlg.GetPath()
                
                for prefix in self._locations.GetData():
                    
                    ok = os.path.exists( os.path.join( path, prefix ) )
                    
                    # only record the candidate when the subdirectory was actually found
                    if ok:
                        
                        self._correct_locations[ prefix ] = ( path, ok )
                        
                    
                
                self._locations.UpdateDatas()
                
            
        
    
    def _ConvertPrefixToListCtrlTuples( self, prefix ):
        """Build the ( display, sort ) tuples for one prefix row of the list control."""
        
        incorrect_location = self._incorrect_locations[ prefix ]
        
        if prefix in self._correct_locations:
            
            ( correct_location, ok ) = self._correct_locations[ prefix ]
            
            if ok:
                
                pretty_ok = 'ok!'
                
            else:
                
                pretty_ok = 'not found'
                
            
        else:
            
            # no candidate set yet
            correct_location = ''
            ok = None
            pretty_ok = ''
            
        
        pretty_incorrect_location = incorrect_location
        pretty_prefix = prefix
        pretty_correct_location = correct_location
        
        display_tuple = ( pretty_incorrect_location, pretty_prefix, pretty_correct_location, pretty_ok )
        sort_tuple = ( incorrect_location, prefix, correct_location, ok )
        
        return ( display_tuple, sort_tuple )
        
    
    def _GetValue( self ):
        """Return ( correct_rows, thumb_problems ).
        
        Raises HydrusExceptions.VetoException when a file ('f') prefix is missing
        or unresolved; unresolved thumbnail prefixes only set thumb_problems.
        """
        
        correct_rows = []
        
        thumb_problems = False
        
        for prefix in self._locations.GetData():
            
            incorrect_location = self._incorrect_locations[ prefix ]
            
            if prefix not in self._correct_locations:
                
                if prefix.startswith( 'f' ):
                    
                    raise HydrusExceptions.VetoException( 'You did not correct all the file locations!' )
                    
                else:
                    
                    thumb_problems = True
                    
                    # fall back to recreating the thumbnail dir at the old location
                    correct_location = incorrect_location
                    
                
            else:
                
                ( correct_location, ok ) = self._correct_locations[ prefix ]
                
                if not ok:
                    
                    if prefix.startswith( 'f' ):
                        
                        raise HydrusExceptions.VetoException( 'You did not find all the correct file locations!' )
                        
                    else:
                        
                        thumb_problems = True
                        
                    
                
            
            correct_rows.append( ( prefix, correct_location ) )
            
        
        return ( correct_rows, thumb_problems )
        
    
    def _SetLocations( self ):
        """Ask for one directory and assign it to all selected prefixes, found or not."""
        
        prefixes = self._locations.GetData( only_selected = True )
        
        if len( prefixes ) > 0:
            
            with QP.DirDialog( self, 'Select correct location.' ) as dlg:
                
                if dlg.exec() == QW.QDialog.Accepted:
                    
                    path = dlg.GetPath()
                    
                    for prefix in prefixes:
                        
                        ok = os.path.exists( os.path.join( path, prefix ) )
                        
                        self._correct_locations[ prefix ] = ( path, ok )
                        
                    
                    self._locations.UpdateDatas()
                    
                
            
        
    
    def CheckValid( self ):
        """Raise VetoException if the current mapping is not acceptable."""
        
        # raises veto if invalid
        self._GetValue()
        
    
    def CommitChanges( self ):
        """Write the corrected prefix locations to the database."""
        
        ( correct_rows, thumb_problems ) = self._GetValue()
        
        HG.client_controller.WriteSynchronous( 'repair_client_files', correct_rows )
        
    
    def UserIsOKToOK( self ):
        """Confirm with the user when only thumbnail paths remain unresolved."""
        
        ( correct_rows, thumb_problems ) = self._GetValue()
        
        if thumb_problems:
            
            message = 'Some or all of your incorrect paths have not been corrected, but they are all thumbnail paths.'
            message += os.linesep * 2
            message += 'Would you like instead to create new empty subdirectories at the previous (or corrected, if you have entered them) locations?'
            message += os.linesep * 2
            message += 'You can run database->regenerate->thumbnails to fill them up again.'
            
            result = ClientGUIDialogsQuick.GetYesNo( self, message )
            
            if result != QW.QDialog.Accepted:
                
                return False
                
            
        
        return True
| true | true |
f7370f304d2f3d2c71675dfd32940ea74d4f55c8 | 1,146 | py | Python | parallelbar/tools.py | dubovikmaster/parallelbar | 9ae8aa51c8ea89e3842cc3428aa488cf37066338 | [
"MIT"
] | 4 | 2021-08-05T11:45:20.000Z | 2021-12-26T15:19:16.000Z | parallelbar/tools.py | dubovikmaster/parallelbar | 9ae8aa51c8ea89e3842cc3428aa488cf37066338 | [
"MIT"
] | null | null | null | parallelbar/tools.py | dubovikmaster/parallelbar | 9ae8aa51c8ea89e3842cc3428aa488cf37066338 | [
"MIT"
] | 1 | 2022-01-26T17:49:22.000Z | 2022-01-26T17:49:22.000Z | from math import sin, cos, radians
def func_args_unpack(func, args):
    """Call *func* with *args* unpacked as positional arguments."""
    positional = tuple(args)
    return func(*positional)
def get_len(iterable, total):
    """Return ``len(iterable)`` when it is sized, otherwise fall back to *total*."""
    try:
        return iterable.__len__()
    except AttributeError:
        # plain iterators/generators have no __len__
        return total
def cpu_bench(number):
    """CPU busy-work benchmark.

    Multiplies sin(a)**2 + cos(a)**2 (mathematically 1.0) over *number*
    angles, so the result stays ~1.0 while burning cycles.
    """
    result = 1.0
    for degree in range(number):
        theta = radians(degree)
        result = result * (sin(theta) ** 2 + cos(theta) ** 2)
    return result
def fibonacci(number):
    """Return the *number*-th Fibonacci value (fib(0) == 0, fib(1) == 1).

    Iterative O(n) implementation; the previous naive double recursion was
    O(2**n) and unusable beyond small inputs. Inputs <= 1 (including
    negatives) are returned unchanged, matching the original behaviour.
    """
    if number <= 1:
        return number
    previous, current = 0, 1
    for _ in range(number - 1):
        previous, current = current, previous + current
    return current
def iterate_by_pack(iterable, pack_size: int = 1):
    """Yield consecutive lists of at most *pack_size* items from *iterable*.

    The final pack may be shorter. Raises ValueError (on first iteration,
    since this is a generator) when *pack_size* is not positive.
    """
    if pack_size < 1:
        raise ValueError("pack_size must be greater than 0")
    source = iter(iterable)
    end = object()  # unique marker for exhaustion
    exhausted = False
    while not exhausted:
        chunk = []
        while len(chunk) < pack_size:
            value = next(source, end)
            if value is end:
                exhausted = True
                break
            chunk.append(value)
        if chunk:
            yield chunk
def get_packs_count(array, pack_size):
    """Return how many packs of *pack_size* are needed to cover *array* (ceil division)."""
    full_packs, remainder = divmod(len(array), pack_size)
    return full_packs + 1 if remainder else full_packs
| 21.222222 | 60 | 0.596859 | from math import sin, cos, radians
def func_args_unpack(func, args):
return func(*args)
def get_len(iterable, total):
try:
length = iterable.__len__()
except AttributeError:
length = total
return length
def cpu_bench(number):
product = 1.0
for elem in range(number):
angle = radians(elem)
product *= sin(angle)**2 + cos(angle)**2
return product
def fibonacci(number):
if number <= 1:
return number
else:
return fibonacci(number-2) + fibonacci(number-1)
def iterate_by_pack(iterable, pack_size: int = 1):
if pack_size < 1:
raise ValueError("pack_size must be greater than 0")
iterator = iter(iterable)
sentinel = object()
item = None
while item is not sentinel:
pack = []
for _ in range(pack_size):
item = next(iterator, sentinel)
if item is sentinel:
break
pack.append(item)
if pack:
yield pack
def get_packs_count(array, pack_size):
total, extra = divmod(len(array), pack_size)
if extra:
total += 1
return total
| true | true |
f73711d86f4b2d494a534b3efcb1258c44779b25 | 1,366 | py | Python | Code/scripts/select_user.py | Alerovere/WALIS | ddd9ea65055f2849ee59e7be7d39629218ff14e4 | [
"MIT"
] | null | null | null | Code/scripts/select_user.py | Alerovere/WALIS | ddd9ea65055f2849ee59e7be7d39629218ff14e4 | [
"MIT"
] | null | null | null | Code/scripts/select_user.py | Alerovere/WALIS | ddd9ea65055f2849ee59e7be7d39629218ff14e4 | [
"MIT"
] | null | null | null | ## From the dictionary in connection.py, extract the dataframes
# Unpack the WALIS dataframes (copies, so downstream edits do not touch the originals).
rsl=walis_dict[0].copy()
countries=walis_dict[1].copy()
regions=walis_dict[2].copy()
MIS_ages=walis_dict[3].copy()
references=walis_dict[4].copy()
hrzpos=walis_dict[5].copy()
rslind=walis_dict[6].copy()
sldatum=walis_dict[7].copy()
vrt_meas=walis_dict[8].copy()
useries=walis_dict[9].copy()
aar=walis_dict[10].copy()
luminescence=walis_dict[11].copy()
esr=walis_dict[12].copy()
strat=walis_dict[13].copy()
other=walis_dict[14].copy()
user=walis_dict[15].copy()
Summary=walis_dict[18].copy()
## Create a list of users who have compiled RSL datapoints and U-Series RSL data
rslCreators = walis_dict[0].copy()
rslCreators=pd.DataFrame(rslCreators.Createdby)
useriesCreators = walis_dict[9].copy()
# keep only the U-series rows that carry an RSL estimate
useriesCreators.drop(useriesCreators[useriesCreators.RSL_Estimate_avaliable != 'Yes'].index, inplace=True)
useriesCreators=pd.DataFrame(useriesCreators.Createdby)
users = pd.concat([rslCreators, useriesCreators]).drop_duplicates('Createdby').reset_index()
# prepend a synthetic 'WALIS Admin' row so it is always selectable
users.loc[-1] = ['','WALIS Admin']
users.index = users.index + 1
# map login names to display names
users_dict = dict(zip(user.login, user.name))
# NOTE(review): 'WALIS Admin' is a display name, not a login -- presumably it
# maps to NaN here; confirm this is intended before relying on the widget value.
users['Createdby']=users['Createdby'].map(users_dict)
users.sort_values(['Createdby'],inplace=True)
# multi-select widget listing all compilers, defaulting to the admin entry
multiUsr = widgets.SelectMultiple(
    options=users.Createdby,
    rows=15,
    columns=3,
    disabled=False,
    value=['WALIS Admin'])
MIS_ages=walis_dict[3].copy()
references=walis_dict[4].copy()
hrzpos=walis_dict[5].copy()
rslind=walis_dict[6].copy()
sldatum=walis_dict[7].copy()
vrt_meas=walis_dict[8].copy()
useries=walis_dict[9].copy()
aar=walis_dict[10].copy()
luminescence=walis_dict[11].copy()
esr=walis_dict[12].copy()
strat=walis_dict[13].copy()
other=walis_dict[14].copy()
user=walis_dict[15].copy()
Summary=walis_dict[18].copy()
by)
useriesCreators = walis_dict[9].copy()
useriesCreators.drop(useriesCreators[useriesCreators.RSL_Estimate_avaliable != 'Yes'].index, inplace=True)
useriesCreators=pd.DataFrame(useriesCreators.Createdby)
users = pd.concat([rslCreators, useriesCreators]).drop_duplicates('Createdby').reset_index()
users.loc[-1] = ['','WALIS Admin']
users.index = users.index + 1
users_dict = dict(zip(user.login, user.name))
users['Createdby']=users['Createdby'].map(users_dict)
users.sort_values(['Createdby'],inplace=True)
multiUsr = widgets.SelectMultiple(
options=users.Createdby,
rows=15,
columns=3,
disabled=False,
value=['WALIS Admin']) | true | true |
f7371207afddb1e5f2666d504ad248b681b95ded | 513 | py | Python | Bio/PDB/PDBExceptions.py | sbassi/biopython | b41975bb8363171add80d19903861f3d8cffe405 | [
"PostgreSQL"
] | 1 | 2019-08-05T08:39:27.000Z | 2019-08-05T08:39:27.000Z | Bio/PDB/PDBExceptions.py | sbassi/biopython | b41975bb8363171add80d19903861f3d8cffe405 | [
"PostgreSQL"
] | null | null | null | Bio/PDB/PDBExceptions.py | sbassi/biopython | b41975bb8363171add80d19903861f3d8cffe405 | [
"PostgreSQL"
] | null | null | null | # Copyright (C) 2002, Thomas Hamelryck (thamelry@binf.ku.dk)
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Some Bio.PDB-specific exceptions."""
# General error
class PDBException(Exception):
    """General PDB-related error."""

    pass
# The PDB file cannot be unambiguously represented in the SMCRA
# data structure
class PDBConstructionException(Exception):
    """The PDB file cannot be unambiguously represented in the SMCRA data structure."""

    pass
class PDBConstructionWarning(Warning):
    """Warning category for problems encountered during PDB construction."""

    pass
| 27 | 70 | 0.762183 |
class PDBException(Exception):
pass
class PDBConstructionException(Exception):
pass
class PDBConstructionWarning(Warning):
pass
| true | true |
f7371517b360fd9b2af5fd089b116986f003ead3 | 794 | py | Python | handlers/user/delivery_status.py | ndoubleu/telegram | 64bd05ac5b84e5d34b8583d2dbfe7a6fdf5c485d | [
"MIT"
] | null | null | null | handlers/user/delivery_status.py | ndoubleu/telegram | 64bd05ac5b84e5d34b8583d2dbfe7a6fdf5c485d | [
"MIT"
] | null | null | null | handlers/user/delivery_status.py | ndoubleu/telegram | 64bd05ac5b84e5d34b8583d2dbfe7a6fdf5c485d | [
"MIT"
] | null | null | null |
from aiogram.types import Message
from loader import dp, db
from .menu import delivery_status
from filters import IsUser
@dp.message_handler(IsUser(), text=delivery_status)
async def process_delivery_status(message: Message):
    """Handle the "delivery status" menu button for a regular user.

    Fetches this chat's orders and either reports that none are active or
    delegates rendering to delivery_status_answer().
    """
    # Rows for this chat id; the row layout is defined by the DB layer —
    # presumably order number lives at index 3 (see delivery_status_answer).
    orders = db.fetchall('SELECT * FROM orders WHERE cid=?', (message.chat.id,))
    # Empty result -> "no active orders" notice, otherwise a per-order summary.
    if len(orders) == 0: await message.answer('У вас нет активных заказов.')
    else: await delivery_status_answer(message, orders)
async def delivery_status_answer(message, orders):
    """Send a single message summarising the delivery state of each order.

    Each order row contributes one line; order[3] is used as the order number.
    """
    statuses = [
        ' Ваш заказ готовится.',
        ' Уже в пути!',
        ' Прибыл и ждет вас!'
    ]
    chunks = []
    for order in orders:
        # Only the first status text is ever used for every order.
        chunks.append(f'Заказ <b>№{order[3]}</b>' + statuses[0] + '\n\n')
    await message.answer(''.join(chunks))
from aiogram.types import Message
from loader import dp, db
from .menu import delivery_status
from filters import IsUser
@dp.message_handler(IsUser(), text=delivery_status)
async def process_delivery_status(message: Message):
orders = db.fetchall('SELECT * FROM orders WHERE cid=?', (message.chat.id,))
if len(orders) == 0: await message.answer('У вас нет активных заказов.')
else: await delivery_status_answer(message, orders)
async def delivery_status_answer(message, orders):
res = ''
for order in orders:
res += f'Заказ <b>№{order[3]}</b>'
answer = [
' Ваш заказ готовится.',
' Уже в пути!',
' Прибыл и ждет вас!'
]
res += answer[0]
res += '\n\n'
await message.answer(res) | true | true |
f73715de65315cedc09bbca8f2fcb037fe388093 | 5,046 | py | Python | dbutils/sqlite2del.py | faisalthaheem/deep-learning-scripts | 7b44e8520f59c63bca18f00e8916d76ee22bd193 | [
"Apache-2.0"
] | null | null | null | dbutils/sqlite2del.py | faisalthaheem/deep-learning-scripts | 7b44e8520f59c63bca18f00e8916d76ee22bd193 | [
"Apache-2.0"
] | 10 | 2019-10-30T15:33:44.000Z | 2022-03-11T23:50:31.000Z | dbutils/sqlite2del.py | faisalthaheem/deep-learning-scripts | 7b44e8520f59c63bca18f00e8916d76ee22bd193 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2019 Faisal Thaheem
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse
import os
import sys
import sqlite3
import time
import pprint
import traceback
import logging
import csv
import cv2
from objclass import getObjectClass
from tqdm import tqdm
ap = argparse.ArgumentParser()
ap.add_argument("-ln", "--lite.names", required=True,
	help="comma seperated list of sqlite db files to look up image info from")
ap.add_argument("-dp", "--data.path", required=True,
	help="comma seperated list of folders containing images to process")
args = vars(ap.parse_args())
# create the module logger: DEBUG and above to file, ERROR and above to console
logger = logging.getLogger('sqlite2del')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
fh = logging.FileHandler('sqlite2del.log')
fh.setLevel(logging.DEBUG)
# create console handler with a higher log level
# NOTE(review): only ERROR-level records reach the console; this is why the
# progress messages in processDataDir are emitted with logger.error.
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
# comma-separated CLI values -> lists of database files and image folders
sqlitedbs = args["lite.names"].split(',')
datapaths = args["data.path"].split(',')
logger.info("Will be looking up following databases for info...")
logger.info(str(sqlitedbs))
logger.info("And, will be deleting files from..")
logger.info(str(datapaths))
# open connections / cursors keyed by database path (populated by loadsqlitedbs)
dbconns = {}
dbcursors = {}
def loadsqlitedbs():
    """Open a connection and cursor for every path listed in ``sqlitedbs``.

    Populates the module-level ``dbconns`` and ``dbcursors`` dicts, keyed by
    database path.  Paths that do not exist are skipped and recorded as None
    so later lookups can test for availability.
    """
    logger.info("Instantiating sqlite dbs")
    for db in sqlitedbs:
        logger.info("Opening database " + db)
        if os.path.exists(db):
            conn = sqlite3.connect(db)
            dbconns[db] = conn
            cursor = conn.cursor()
            dbcursors[db] = cursor
            logger.info("database [{}] opened".format(db))
        else:
            # Logger.warn is a deprecated alias; warning() is the supported API.
            logger.warning("database [{}] does not exist, skipping".format(db))
            dbcursors[db] = None
            dbconns[db] = None
    logger.info("dbs opened")
def closedbs():
    """Close every cursor and connection previously opened by loadsqlitedbs()."""
    logger.info("Closing dbs")
    for db in sqlitedbs:
        cursor = dbcursors[db]
        if cursor is not None:
            cursor.close()
        conn = dbconns[db]
        if conn is not None:
            conn.close()
    logger.info("DBs closed")
def lookupFile(nameWithoutExtension, filePath):
    """Look up plate bounding-box info for a file across all opened databases.

    Returns the tuple (y1, x1, y2, x2, width, height, classId, className);
    every element is None when no database has a non-deleted row for the name.
    NOTE(review): classId/className are never assigned and are always returned
    as None, and imheight/imwidth are fetched but discarded — confirm intent.
    """
    y1,x1,y2,x2,width,height,classId,className = None,None,None,None,None,None,None,None
    # NOTE(review): these globals are declared but never read or written here.
    global targetWidth, targetHeight, crop_height, crop_width, crop_enabled
    for db in sqlitedbs:
        try:
            cursor = dbcursors[db]
            if cursor is not None:
                # Look up plate information for the requested name.
                # NOTE(review): the name is interpolated directly into the SQL
                # text; a parameterized query ("filename = ?") would be safer.
                query = "SELECT y1,x1,y2,x2,width,height,imheight,imwidth FROM plates WHERE filename = '{}' and isdeleted = 0".format(nameWithoutExtension)
                cursor.execute(query)
                row = cursor.fetchone()
                if row is not None:
                    y1,x1,y2,x2,width,height,imheight,imwidth = int(row[0]),int(row[1]),int(row[2]),int(row[3]),int(row[4]),int(row[5]),int(row[6]),int(row[7])
                    break
        # Bare except: any DB error is logged and the next database is tried.
        except:
            logger.error(traceback.format_exc())
    return y1,x1,y2,x2,width,height,classId,className
def processDataDir(datapath):
    """Walk ``datapath`` and look up plate info for every file found.

    Uses logger.error for progress messages deliberately: the console handler
    is configured at ERROR level, so lower levels would only reach the log file.
    """
    logger.error("Processing data path [{}]".format(datapath))
    for root, dirs, files in os.walk(datapath):
        totalFiles = len(files)
        logger.error("Processing [{}] file(s) in root [{}]".format(totalFiles, root))
        for i in tqdm(range(0,totalFiles)):
            fileName = files[i]
            filePath = os.path.join(root,fileName)
            #logger.info("Processing.. {} [{} of {}]".format(filePath, i, totalFiles))
            #y1,x1,y2,x2,width,height = lookupFile(os.path.splitext(fileName)[0])
            # NOTE(review): the full file name (with extension) is passed here,
            # while the commented variant used the stem — confirm which form
            # matches the 'filename' column in the plates table.
            y1,x1,y2,x2,width,height,classId,className = lookupFile(fileName, filePath)
            if y1 is None:
                # NOTE(review): Logger.warn is a deprecated alias of warning().
                logger.warn("Could not get information for [{}] @ [{}]".format(fileName,filePath))
                continue
            # NOTE(review): placeholder — the deletion step this script is named
            # for has not been implemented; the try/except body does nothing.
            try:
                pass
            except:
                logger.error(traceback.format_exc())
# Script entry: open the databases, process every data folder, then clean up,
# timing the whole run.
time_start = time.time()
loadsqlitedbs()
for path in datapaths:
    processDataDir(path)
#clean up part
closedbs()
time_end = time.time()
logger.info("Took [{}] s to process request".format(time_end-time_start))
import argparse
import os
import sys
import sqlite3
import time
import pprint
import traceback
import logging
import csv
import cv2
from objclass import getObjectClass
from tqdm import tqdm
ap = argparse.ArgumentParser()
ap.add_argument("-ln", "--lite.names", required=True,
help="comma seperated list of sqlite db files to look up image info from")
ap.add_argument("-dp", "--data.path", required=True,
help="comma seperated list of folders containing images to process")
args = vars(ap.parse_args())
logger = logging.getLogger('sqlite2del')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler('sqlite2del.log')
fh.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
logger.addHandler(fh)
logger.addHandler(ch)
sqlitedbs = args["lite.names"].split(',')
datapaths = args["data.path"].split(',')
logger.info("Will be looking up following databases for info...")
logger.info(str(sqlitedbs))
logger.info("And, will be deleting files from..")
logger.info(str(datapaths))
dbconns = {}
dbcursors = {}
def loadsqlitedbs():
logger.info("Instantiating sqlite dbs")
for db in sqlitedbs:
logger.info("Opening database " + db)
if os.path.exists(db):
conn = sqlite3.connect(db)
dbconns[db] = conn
cursor = conn.cursor()
dbcursors[db] = cursor
logger.info("database [{}] opened".format(db))
else:
logger.warn("database [{}] does not exist, skipping".format(db))
dbcursors[db] = None
dbconns[db] = None
logger.info("dbs opened")
def closedbs():
logger.info("Closing dbs")
for db in sqlitedbs:
if dbcursors[db] is not None:
dbcursors[db].close()
if dbconns[db] is not None:
dbconns[db].close()
logger.info("DBs closed")
def lookupFile(nameWithoutExtension, filePath):
y1,x1,y2,x2,width,height,classId,className = None,None,None,None,None,None,None,None
global targetWidth, targetHeight, crop_height, crop_width, crop_enabled
for db in sqlitedbs:
try:
cursor = dbcursors[db]
if cursor is not None:
query = "SELECT y1,x1,y2,x2,width,height,imheight,imwidth FROM plates WHERE filename = '{}' and isdeleted = 0".format(nameWithoutExtension)
cursor.execute(query)
row = cursor.fetchone()
if row is not None:
y1,x1,y2,x2,width,height,imheight,imwidth = int(row[0]),int(row[1]),int(row[2]),int(row[3]),int(row[4]),int(row[5]),int(row[6]),int(row[7])
break
except:
logger.error(traceback.format_exc())
return y1,x1,y2,x2,width,height,classId,className
def processDataDir(datapath):
logger.error("Processing data path [{}]".format(datapath))
for root, dirs, files in os.walk(datapath):
totalFiles = len(files)
logger.error("Processing [{}] file(s) in root [{}]".format(totalFiles, root))
for i in tqdm(range(0,totalFiles)):
fileName = files[i]
filePath = os.path.join(root,fileName)
y1,x1,y2,x2,width,height,classId,className = lookupFile(fileName, filePath)
if y1 is None:
logger.warn("Could not get information for [{}] @ [{}]".format(fileName,filePath))
continue
try:
pass
except:
logger.error(traceback.format_exc())
time_start = time.time()
loadsqlitedbs()
for path in datapaths:
processDataDir(path)
closedbs()
time_end = time.time()
logger.info("Took [{}] s to process request".format(time_end-time_start)) | true | true |
f7371720556a549f75162cd9c52d4b42d0aff5f5 | 2,660 | py | Python | openmdao/surrogate_models/tests/test_map.py | gjkennedy/OpenMDAO | 06897b584403cce34bc106dd2840aa07eea69e96 | [
"Apache-2.0"
] | null | null | null | openmdao/surrogate_models/tests/test_map.py | gjkennedy/OpenMDAO | 06897b584403cce34bc106dd2840aa07eea69e96 | [
"Apache-2.0"
] | 1 | 2015-08-12T17:58:18.000Z | 2015-08-12T17:58:18.000Z | openmdao/surrogate_models/tests/test_map.py | gjkennedy/OpenMDAO | 06897b584403cce34bc106dd2840aa07eea69e96 | [
"Apache-2.0"
] | 1 | 2021-01-17T14:03:48.000Z | 2021-01-17T14:03:48.000Z | from openmdao.api import Group, Problem, MetaModelUnStructuredComp, NearestNeighbor
from openmdao.utils.assert_utils import assert_near_equal
import numpy as np
import unittest
class CompressorMap(MetaModelUnStructuredComp):
    """Unstructured metamodel of a compressor map.

    Inputs: corrected speed ``Nc``, operating line ``Rline`` and blend
    parameter ``alpha``.  Outputs ``PR``, ``eff`` and ``Wc`` are each
    predicted by a linear nearest-neighbor surrogate; training points are
    supplied through the component's ``train:*`` options.
    """
    def __init__(self):
        super(CompressorMap, self).__init__()
        self.add_input('Nc', val=1.0)
        self.add_input('Rline', val=2.0)
        self.add_input('alpha', val=0.0)
        self.add_output('PR', val=1.0, surrogate=NearestNeighbor(interpolant_type='linear'))
        self.add_output('eff', val=1.0, surrogate=NearestNeighbor(interpolant_type='linear'))
        self.add_output('Wc', val=1.0, surrogate=NearestNeighbor(interpolant_type='linear'))
class TestMap(unittest.TestCase):
    """Train CompressorMap on analytic functions and check its predictions."""

    def test_comp_map(self):
        """Surrogate must reproduce PR = Nc*Rline+alpha (and analogues) within tol."""
        # create compressor map and save reference to options (for training data)
        c = CompressorMap()
        m = c.options
        # add compressor map to problem
        p = Problem()
        p.model.add_subsystem('compmap', c)
        p.setup()
        # train metamodel on a full Nc x Rline x alpha grid of analytic values
        Nc = np.array([0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1])
        Rline = np.array([1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.2, 2.4, 2.6, 2.8, 3.0])
        alpha = np.array([0.0, 1.0])
        Nc_mat, Rline_mat, alpha_mat = np.meshgrid(Nc, Rline, alpha, sparse=False)
        m['train:Nc'] = Nc_mat.flatten()
        m['train:Rline'] = Rline_mat.flatten()
        m['train:alpha'] = alpha_mat.flatten()
        m['train:PR'] = m['train:Nc']*m['train:Rline']+m['train:alpha']
        m['train:eff'] = m['train:Nc']*m['train:Rline']**2+m['train:alpha']
        m['train:Wc'] = m['train:Nc']**2*m['train:Rline']**2+m['train:alpha']
        # check predicted values at a training point (0.9, 2.0, 0.0)
        p['compmap.Nc'] = 0.9
        p['compmap.Rline'] = 2.0
        p['compmap.alpha'] = 0.0
        p.run_model()
        tol = 1e-1
        assert_near_equal(p['compmap.PR'], p['compmap.Nc']*p['compmap.Rline']+p['compmap.alpha'], tol)
        assert_near_equal(p['compmap.eff'], p['compmap.Nc']*p['compmap.Rline']**2+p['compmap.alpha'], tol)
        assert_near_equal(p['compmap.Wc'], p['compmap.Nc']**2*p['compmap.Rline']**2+p['compmap.alpha'], tol)
        # and at an off-grid point, exercising the linear interpolant
        p['compmap.Nc'] = 0.95
        p['compmap.Rline'] = 2.1
        p['compmap.alpha'] = 0.0
        p.run_model()
        assert_near_equal(p['compmap.PR'], p['compmap.Nc']*p['compmap.Rline']+p['compmap.alpha'], tol)
        assert_near_equal(p['compmap.eff'], p['compmap.Nc']*p['compmap.Rline']**2+p['compmap.alpha'], tol)
        assert_near_equal(p['compmap.Wc'], p['compmap.Nc']**2*p['compmap.Rline']**2+p['compmap.alpha'], tol)
if __name__ == "__main__":
unittest.main()
| 37.464789 | 108 | 0.605263 | from openmdao.api import Group, Problem, MetaModelUnStructuredComp, NearestNeighbor
from openmdao.utils.assert_utils import assert_near_equal
import numpy as np
import unittest
class CompressorMap(MetaModelUnStructuredComp):
def __init__(self):
super(CompressorMap, self).__init__()
self.add_input('Nc', val=1.0)
self.add_input('Rline', val=2.0)
self.add_input('alpha', val=0.0)
self.add_output('PR', val=1.0, surrogate=NearestNeighbor(interpolant_type='linear'))
self.add_output('eff', val=1.0, surrogate=NearestNeighbor(interpolant_type='linear'))
self.add_output('Wc', val=1.0, surrogate=NearestNeighbor(interpolant_type='linear'))
class TestMap(unittest.TestCase):
def test_comp_map(self):
c = CompressorMap()
m = c.options
p = Problem()
p.model.add_subsystem('compmap', c)
p.setup()
Nc = np.array([0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1])
Rline = np.array([1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.2, 2.4, 2.6, 2.8, 3.0])
alpha = np.array([0.0, 1.0])
Nc_mat, Rline_mat, alpha_mat = np.meshgrid(Nc, Rline, alpha, sparse=False)
m['train:Nc'] = Nc_mat.flatten()
m['train:Rline'] = Rline_mat.flatten()
m['train:alpha'] = alpha_mat.flatten()
m['train:PR'] = m['train:Nc']*m['train:Rline']+m['train:alpha']
m['train:eff'] = m['train:Nc']*m['train:Rline']**2+m['train:alpha']
m['train:Wc'] = m['train:Nc']**2*m['train:Rline']**2+m['train:alpha']
p['compmap.Nc'] = 0.9
p['compmap.Rline'] = 2.0
p['compmap.alpha'] = 0.0
p.run_model()
tol = 1e-1
assert_near_equal(p['compmap.PR'], p['compmap.Nc']*p['compmap.Rline']+p['compmap.alpha'], tol)
assert_near_equal(p['compmap.eff'], p['compmap.Nc']*p['compmap.Rline']**2+p['compmap.alpha'], tol)
assert_near_equal(p['compmap.Wc'], p['compmap.Nc']**2*p['compmap.Rline']**2+p['compmap.alpha'], tol)
p['compmap.Nc'] = 0.95
p['compmap.Rline'] = 2.1
p['compmap.alpha'] = 0.0
p.run_model()
assert_near_equal(p['compmap.PR'], p['compmap.Nc']*p['compmap.Rline']+p['compmap.alpha'], tol)
assert_near_equal(p['compmap.eff'], p['compmap.Nc']*p['compmap.Rline']**2+p['compmap.alpha'], tol)
assert_near_equal(p['compmap.Wc'], p['compmap.Nc']**2*p['compmap.Rline']**2+p['compmap.alpha'], tol)
if __name__ == "__main__":
unittest.main()
| true | true |
f737179ac7bc892cee2bb77d3de95f7641166dcf | 15,543 | py | Python | BA_MDI1.py | acadianshadow237/BA_MDI1 | 73e0e87c15ff083ce860f7a09fa2de3a3c71c215 | [
"MIT"
] | null | null | null | BA_MDI1.py | acadianshadow237/BA_MDI1 | 73e0e87c15ff083ce860f7a09fa2de3a3c71c215 | [
"MIT"
] | null | null | null | BA_MDI1.py | acadianshadow237/BA_MDI1 | 73e0e87c15ff083ce860f7a09fa2de3a3c71c215 | [
"MIT"
] | null | null | null |
from PySide6 import QtCore
from PySide6 import QtGui
from PySide6 import QtWidgets
import argparse
import sys, os
from Models.login_model import login_stuff, url_builder
from helpers.helpers1 import db_tables
from Views.sample_login import LoginForm
from Views.all_view import VAS_view,VCRH_view ,VCCP_view,VCRH_Edit, VProj_Edit,VCRLD_Edit, RPE_Edit
from Views.editmenu import splitSections, tweakSections, moveSections, copySections
from helpers.mmenus import menu_cascade
from Controllers.orm_select import county_select
from Controllers.controller import connectToDatabase
from Models.my_tables_model import gather_tables
from Controllers.my_MDIArea import MdiArea
__version__ = "1.0.0"
__my_debug__ = True
# -----------------------------------------------------------------------------
# Main window class.
# -----------------------------------------------------------------------------
class MainWindow(QtWidgets.QMainWindow):
    """Top-level MDI window for the Pavement Management application.

    Holds the shared login/session state and opens view/edit sub-windows
    based on ``self.document_type`` (1..10, see onOpen).
    """
    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        self.mdiArea = QtWidgets.QMdiArea()
        self.setCentralWidget(self.mdiArea)
        self.AppTitle = self.tr("MDI Pavement Management")
        self.AppVersion = __version__
        # Database/session state; populated by onLogin, cleared by onLogout.
        self.my_login = login_stuff()
        self.my_url = None
        self.my_session = None
        self.login_flag = False
        self.my_db_tables = db_tables
        self.index = 0
        # Setup main window.
        self.setWindowTitle(self.AppTitle)
        self.setWindowIcon(QtGui.QIcon.fromTheme('utilities-system-monitor'))
        self.resize(1200, 600)
        # document_type selects which view class onOpen instantiates (1..10).
        self.document_type=0
        self.count = 0
        menu_cascade(self)
        self.statusbarAct = QtGui.QAction(self.tr("&Statusbar"), self)
        self.statusbarAct.setCheckable(True)
        self.statusbarAct.setChecked(True)
        self.statusbarAct.setStatusTip(self.tr("Show or hide the statusbar in the current window"))
        self.statusbarAct.toggled.connect(self.onToggleStatusBar)

    def onOpen(self):
        """Open a new MDI sub-window for the current ``document_type``.

        Each branch instantiates the matching view widget, assigns it a unique
        fileName ("<prefix><running count>") and shows it as a sub-window.
        """
        if self.document_type == 1:
            # NOTE(review): my_list is never used.
            my_list=[]
            #self.actionUpdate.triggered.connect(self.onEdit)
            fileName = "VAS"
            the_doc = VAS_view(self)
            self.count = self.count +1
            the_doc.fileName = fileName + str(self.count)
            sub = self.mdiArea.addSubWindow(the_doc)
            sub.show()
            #self.loadDocument(fileName)
        if self.document_type == 2:
            fileName = "VCCP"
            the_doc = VCCP_view(self)
            self.count = self.count + 1
            the_doc.fileName = fileName + str(self.count)
            sub = self.mdiArea.addSubWindow(the_doc)
            sub.show()
            #self.loadDocument(fileName)
        if self.document_type == 3:
            fileName = 'VCRH'
            the_doc = VCRH_Edit(self)
            self.count = self.count + 1
            the_doc.fileName = fileName + str(self.count)
            sub = self.mdiArea.addSubWindow(the_doc)
            sub.show()
        if self.document_type == 4:
            fileName = 'VCRLD'
            the_doc = VCRLD_Edit(self)
            self.count = self.count + 1
            the_doc.fileName = fileName + str(self.count)
            sub = self.mdiArea.addSubWindow(the_doc)
            sub.show()
        if self.document_type == 5:
            fileName = 'VProj'
            the_doc = VProj_Edit(self)
            self.count = self.count + 1
            the_doc.fileName = fileName + str(self.count)
            sub = self.mdiArea.addSubWindow(the_doc)
            sub.show()
        if self.document_type == 6:
            fileName = 'EditLayers'
            the_doc = RPE_Edit(self)
            self.count = self.count + 1
            the_doc.fileName = fileName + str(self.count)
            sub = self.mdiArea.addSubWindow(the_doc)
            sub.show()
        if self.document_type == 7:
            fileName = 'Split_Section'
            the_doc = splitSections(self)
            self.count = self.count+1
            the_doc.fileName = fileName + str(self.count)
            sub = self.mdiArea.addSubWindow(the_doc)
            sub.show()
        if self.document_type == 8:
            fileName = 'Tweak_Section'
            the_doc = tweakSections(self)
            self.count = self.count+1
            the_doc.fileName = fileName + str(self.count)
            sub = self.mdiArea.addSubWindow(the_doc)
            sub.show()
        if self.document_type == 9:
            fileName = 'Move_Section'
            the_doc = moveSections(self)
            self.count = self.count+1
            the_doc.fileName = fileName + str(self.count)
            sub = self.mdiArea.addSubWindow(the_doc)
            sub.show()
        if self.document_type == 10:
            fileName = 'Copy_Section'
            the_doc = copySections(self)
            self.count = self.count+1
            the_doc.fileName = fileName + str(self.count)
            sub = self.mdiArea.addSubWindow(the_doc)
            sub.show()

    def onToggleStatusBar(self):
        """Toggles the visibility of the status bar."""
        self.statusBar().setVisible(self.statusbarAct.isChecked())

    def onContents(self):
        """Show the (placeholder) help contents dialog."""
        QtWidgets.QMessageBox.information(self, self.tr("Contents"), self.tr("<p>Please refer to...</p>"))

    def onAbout(self):
        """Show the About dialog with application title and version."""
        QtWidgets.QMessageBox.information(self, self.tr("About"),
            self.tr("<p><strong>{}</strong></p>"
            "<p>Version {}</p>"
            "<p>Authors: ...</p>").format(self.AppTitle, self.AppVersion)
        )

    def onLogin(self):
        """Establish a database session, via hard-coded debug values or the login form."""
        if __my_debug__ == True:
            # NOTE(review): credentials are hard-coded for the debug path —
            # do not ship with __my_debug__ enabled.
            self.login_flag = True
            self.my_db_tables = db_tables
            #self.my_url = f'oracle+cx_oracle://USER_MISS:password@ORACLEDEV01:1521/GISDEV'
            self.my_login = login_stuff()
            self.my_login.user_name = 'USER_MISS'
            self.my_login.user_password = 'password'
            self.my_login.database_name = 'GISDEV'
            self.my_login.database_type = 0
            self.my_login.schema ='USER_MISS'
            self.my_login.server_name = 'ORACLEDEV01'
            self.my_url = url_builder(self.my_login)
            self.my_session = connectToDatabase(self.my_url)
            self.document_type = 1
            gather_tables(self)
            menu_cascade(self)
        else:
            self.my_login = login_stuff()
            self.my_url = None
            self.login_flag = False
            self.document_type = 0
            myresults = LoginForm(self)
            myresults.exec()
            if myresults.login_flag == True:
                # NOTE(review): attribute is spelled my_db_table here but
                # my_db_tables everywhere else — likely a typo; confirm.
                self.my_db_table = myresults.db
                self.my_url = myresults.my_url
                self.my_login = myresults.my_login
                self.login_flag = True
                self.my_session = connectToDatabase(self.my_url)
                menu_cascade(self)
            else:
                print('Failed')
                pass

    def onLogout(self):
        """Tear down the session, close all sub-windows and reset the menus."""
        self.my_url = None
        self.db = None
        self.my_login = None
        self.login_flag = False
        self.document_type = 0
        self.my_session.close()
        for item in self.mdiArea.subWindowList():
            item.close()
        menu_cascade(self)

    def onExit(self):
        """Close all sub-windows and the main window."""
        # NOTE(review): subWindowList() yields QMdiSubWindow wrappers; the
        # my_session attribute lives on the wrapped widget, so item.my_session
        # presumably raises AttributeError — verify (cf. onLogout, which does
        # not touch per-window sessions).
        for item in self.mdiArea.subWindowList():
            item.my_session.close()
            item.close()
        self.close()
        pass

    # View Menu Area: each handler sets the document_type and opens the view.
    def onVAS(self):
        self.document_type = 1
        #menu_cascade(self)
        ##my_doc = Ui_VAS_Form()
        self.onOpen()
    def onVCRHVCRLDVPROJ(self):
        self.document_type = 2
        #menu_cascade(self)
        self.onOpen()
    def onVCRH(self):
        self.document_type = 3
        #menu_cascade(self)
        self.onOpen()
    def onVCRLD(self):
        self.document_type = 4
        #menu_cascade(self)
        self.onOpen()
    def onVPRJ(self):
        self.document_type = 5
        #menu_cascade(self)
        self.onOpen()
    def onLog(self):
        # Placeholder handler.
        print('Made it to Log')

    ## project menu area (placeholders except onEditLayers)
    def onNewProject(self):
        print('Made it to New Project')
    def onRenameProject(self):
        print('made it to rename project')
    def onCopyProject(self):
        print('made it to Copy Project')
    def onEditLayers(self):
        self.document_type = 6
        #menu_cascade(self)
        self.onOpen()

    # edit Menu Layers
    def onSplitSection(self):
        self.document_type = 7
        self.onOpen()
    def onTweakSection(self):
        self.document_type = 8
        self.onOpen()
    def onMoveSection(self):
        self.document_type = 9
        self.onOpen()
    def onCopySection(self):
        self.document_type = 10
        self.onOpen()

    def loadDocument(self, filename):
        """Load document from filename.

        NOTE(review): this path looks unfinished — QMainWindow.statusBar is a
        method, so self.statusBar.showMessage will fail (should be
        self.statusBar().showMessage); self.mdiArea is a plain QMdiArea with no
        addDocument(); and refreshAct/closeAct are never created in __init__.
        Confirm before wiring CLI file loading (main() calls this).
        """
        #for index in range(self.mdiArea.count()):
        #    document = self.mdiArea.widget(index)
        #    if document:
        #        if filename == document.filename:
        #            self.mdiArea.setCurrentIndex(index)
        #            document.reload()
        #            return
        # Else load from file and create new document tab.
        self.statusBar.showMessage(self.tr("Loading..."), 2500)
        document = Document(filename, self)
        index = self.mdiArea.addDocument(document)
        #self.mdiArea.setCurrentIndex(index)
        index.show()
        # After loading a connection file, it is possible to refresh the current module.
        self.refreshAct.setEnabled(True)
        self.statusBar.showMessage(self.tr("Successfully loaded file"), 2500)
        # Enable close action
        #self.closeAct.setEnabled(self.mdiArea.count())
class MdiArea(QtWidgets.QWidget):
    """Document container widget.

    NOTE(review): this definition shadows the MdiArea imported from
    Controllers.my_MDIArea at the top of the file — confirm which one is
    intended.  Several methods (widget, count, currentIndex, removeTab,
    indexOf, setTabText) are not provided by QWidget; the code reads as if it
    were ported from a QTabWidget subclass and will fail if called as-is.
    """
    def __init__(self, parent=None):
        super(MdiArea, self).__init__(parent)
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self._is_untitled = True
        self.index = 0
    def addDocument(self, document):
        """Wrap *document* in a QMdiSubWindow and hook its change signals."""
        document.fileChanged.connect(self.onFileChanged)
        document.fileLoaded.connect(self.onFileLoaded)
        sub = QtWidgets.QMdiSubWindow()
        sub.setWidget(document)
        return sub
    def currentDocument(self):
        """Return current active document."""
        return self.widget(self.currentIndex())
    def documents(self):
        """Returns iterator of all documents."""
        for index in range(self.count()):
            yield self.widget(index)
    def closeDocument(self):
        """Close current active document. Provided for convenience.
        """
        index = self.currentIndex()
        # Finally remove tab by index.
        self.removeTab(index)
    def setDocumentChanged(self, document, changed):
        """Prefix the document's tab label with '*' when it has unsaved changes."""
        index = self.indexOf(document)
        label = document.filename
        self.setTabText(index, "{}{}".format('*' if changed else '', label))
    def checkTimestamps(self):
        """Ask every document to compare its on-disk timestamp with the loaded one."""
        for document in self.documents():
            document.checkTimestamp()
    def onFileLoaded(self, document):
        # Freshly (re)loaded: clear the modified marker.
        self.setDocumentChanged(document, False)
    def onFileChanged(self, document):
        # File changed on disk: show the modified marker.
        self.setDocumentChanged(document, True)
class Document(QtWidgets.QWidget):
    """Generic document widget.

    Copies the session state from *my_parent* (the MainWindow) and builds the
    inner edit widget for the active document_type.
    """
    fileLoaded = QtCore.Signal(QtWidgets.QWidget)
    fileChanged = QtCore.Signal(QtWidgets.QWidget)
    def __init__(self, filename, my_parent, parent=None):
        super(Document, self).__init__(parent)
        # Snapshot of the main window's session/login state.
        self.document_type = my_parent.document_type
        self.my_db_tables = my_parent.my_db_tables
        self.my_url = my_parent.my_url
        self.my_login = my_parent.my_login
        self.my_session = my_parent.my_session
        self.filename =filename
        self.documentEdit = self.createDocumentEdit()
        self.my_parent = my_parent
        #self.warningLabel = self.createWarningLabel()
        #layout = QtWidgets.QVBoxLayout()
        #layout.addWidget(self.warningLabel)
        #layout.addWidget(self.textEdit)
        #layout.setSpacing(0)
        #layout.setContentsMargins(0, 0, 0, 0)
        #self.setLayout(layout)
        ## Load the file.
        QtCore.QCoreApplication.instance().processEvents()
        self.reload()
    def createDocumentEdit(self):
        """Build the inner view widget for the current document_type.

        NOTE(review): only types 1 and 2 are handled; any other value leaves
        my_document unbound and the return raises UnboundLocalError — confirm
        whether the remaining types should be supported here.
        """
        if self.document_type == 1:
            my_document = VAS_view(self)
        if self.document_type == 2:
            my_document = VCCP_view(self)
        # Disable editing.
        #textEdit.setReadOnly(True)
        ## Set a monospace font for content.
        #textEdit.setFont(QtGui.QFont("Monospace", 10))
        return my_document
    #def createWarningLabel(self):
    #    label = QtWidgets.QLabel(self)
    #    label.setObjectName("warningLabel")
    #    label.setStyleSheet(
    #        "padding: 16px;"
    #        "background-color: #f9ac3a;"
    #        "border: none;"
    #    )
    #    label.setWordWrap(True)
    #    label.hide()
    #    return label
    def reload(self):
        # NOTE(review): file loading is disabled; self.timestamp is therefore
        # never set, which breaks checkTimestamp() below.
        #"""Reload from file."""
        #with open(self.filename) as f:
        #    self.timestamp = os.path.getmtime(self.filename)
        #    self.textEdit.setText(f.read())
        #    self.fileLoaded.emit(self)
        #self.clearWarning()
        pass
    def clearWarning(self):
        """Clear the warning badge located at the top of the document."""
        # NOTE(review): warningLabel creation is commented out in __init__, so
        # this and showWarning would raise AttributeError if called.
        self.warningLabel.clear()
        self.warningLabel.hide()
    def showWarning(self, message):
        """Show a warning badge displaying a message located at the top of the document."""
        self.warningLabel.setText(message)
        self.warningLabel.show()
    def checkTimestamp(self):
        """Compare the file's mtime with the loaded timestamp and warn on change."""
        timestamp = os.path.getmtime(self.filename)
        if timestamp > self.timestamp:
            self.showWarning(self.tr("<strong>The file {} changed on disk.</strong> Reload (hit Ctrl+R) to see the changes.").format(self.filename))
            self.fileChanged.emit(self)
        else:
            self.clearWarning()
# -----------------------------------------------------------------------------
# Parsing command line arguments
# -----------------------------------------------------------------------------
def parse_args():
    """Parse command line arguments.

    Returns the argparse namespace; ``filename`` collects zero or more
    positional file paths to open at startup.
    """
    parser = argparse.ArgumentParser(prog=os.path.basename(__file__), description="")
    parser.add_argument('filename', nargs="*", metavar='<file>', help="file")
    # Each option string must be a separate argument; the previous single
    # string '-V, --version' registered one malformed option instead of the
    # intended -V/--version pair.
    parser.add_argument('-V', '--version', action='version',
                        version='%(prog)s {}'.format(__version__))
    return parser.parse_args()
# -----------------------------------------------------------------------------
# Main routine
# -----------------------------------------------------------------------------
def main():
    """Main routine: build the Qt application, show the window, run the loop."""
    args = parse_args()
    # Create application and main window.
    app = QtWidgets.QApplication(sys.argv)
    window = MainWindow()
    window.show()
    # Open each file named on the command line.
    # NOTE(review): MainWindow.loadDocument currently references self.statusBar
    # without calling it, so passing files here will raise — confirm first.
    for filename in args.filename:
        window.loadDocument(filename)
    # Run execution loop.
    return app.exec()
if __name__ == '__main__':
    exit(main())
| 31.785276 | 148 | 0.57505 |
from PySide6 import QtCore
from PySide6 import QtGui
from PySide6 import QtWidgets
import argparse
import sys, os
from Models.login_model import login_stuff, url_builder
from helpers.helpers1 import db_tables
from Views.sample_login import LoginForm
from Views.all_view import VAS_view,VCRH_view ,VCCP_view,VCRH_Edit, VProj_Edit,VCRLD_Edit, RPE_Edit
from Views.editmenu import splitSections, tweakSections, moveSections, copySections
from helpers.mmenus import menu_cascade
from Controllers.orm_select import county_select
from Controllers.controller import connectToDatabase
from Models.my_tables_model import gather_tables
from Controllers.my_MDIArea import MdiArea
__version__ = "1.0.0"
__my_debug__ = True
class MainWindow(QtWidgets.QMainWindow):
def __init__(self, parent=None):
super(MainWindow, self).__init__(parent)
self.mdiArea = QtWidgets.QMdiArea()
self.setCentralWidget(self.mdiArea)
self.AppTitle = self.tr("MDI Pavement Management")
self.AppVersion = __version__
self.my_login = login_stuff()
self.my_url = None
self.my_session = None
self.login_flag = False
self.my_db_tables = db_tables
self.index = 0
self.setWindowTitle(self.AppTitle)
self.setWindowIcon(QtGui.QIcon.fromTheme('utilities-system-monitor'))
self.resize(1200, 600)
self.document_type=0
self.count = 0
menu_cascade(self)
self.statusbarAct = QtGui.QAction(self.tr("&Statusbar"), self)
self.statusbarAct.setCheckable(True)
self.statusbarAct.setChecked(True)
self.statusbarAct.setStatusTip(self.tr("Show or hide the statusbar in the current window"))
self.statusbarAct.toggled.connect(self.onToggleStatusBar)
def onOpen(self):
if self.document_type == 1:
my_list=[]
fileName = "VAS"
the_doc = VAS_view(self)
self.count = self.count +1
the_doc.fileName = fileName + str(self.count)
sub = self.mdiArea.addSubWindow(the_doc)
sub.show()
if self.document_type == 2:
fileName = "VCCP"
the_doc = VCCP_view(self)
self.count = self.count + 1
the_doc.fileName = fileName + str(self.count)
sub = self.mdiArea.addSubWindow(the_doc)
sub.show()
if self.document_type == 3:
fileName = 'VCRH'
the_doc = VCRH_Edit(self)
self.count = self.count + 1
the_doc.fileName = fileName + str(self.count)
sub = self.mdiArea.addSubWindow(the_doc)
sub.show()
if self.document_type == 4:
fileName = 'VCRLD'
the_doc = VCRLD_Edit(self)
self.count = self.count + 1
the_doc.fileName = fileName + str(self.count)
sub = self.mdiArea.addSubWindow(the_doc)
sub.show()
if self.document_type == 5:
fileName = 'VProj'
the_doc = VProj_Edit(self)
self.count = self.count + 1
the_doc.fileName = fileName + str(self.count)
sub = self.mdiArea.addSubWindow(the_doc)
sub.show()
if self.document_type == 6:
fileName = 'EditLayers'
the_doc = RPE_Edit(self)
self.count = self.count + 1
the_doc.fileName = fileName + str(self.count)
sub = self.mdiArea.addSubWindow(the_doc)
sub.show()
if self.document_type == 7:
fileName = 'Split_Section'
the_doc = splitSections(self)
self.count = self.count+1
the_doc.fileName = fileName + str(self.count)
sub = self.mdiArea.addSubWindow(the_doc)
sub.show()
if self.document_type == 8:
fileName = 'Tweak_Section'
the_doc = tweakSections(self)
self.count = self.count+1
the_doc.fileName = fileName + str(self.count)
sub = self.mdiArea.addSubWindow(the_doc)
sub.show()
if self.document_type == 9:
fileName = 'Move_Section'
the_doc = moveSections(self)
self.count = self.count+1
the_doc.fileName = fileName + str(self.count)
sub = self.mdiArea.addSubWindow(the_doc)
sub.show()
if self.document_type == 10:
fileName = 'Copy_Section'
the_doc = copySections(self)
self.count = self.count+1
the_doc.fileName = fileName + str(self.count)
sub = self.mdiArea.addSubWindow(the_doc)
sub.show()
def onToggleStatusBar(self):
self.statusBar().setVisible(self.statusbarAct.isChecked())
def onContents(self):
QtWidgets.QMessageBox.information(self, self.tr("Contents"), self.tr("<p>Please refer to...</p>"))
def onAbout(self):
QtWidgets.QMessageBox.information(self, self.tr("About"),
self.tr("<p><strong>{}</strong></p>"
"<p>Version {}</p>"
"<p>Authors: ...</p>").format(self.AppTitle, self.AppVersion)
)
def onLogin(self):
if __my_debug__ == True:
self.login_flag = True
self.my_db_tables = db_tables
self.my_login = login_stuff()
self.my_login.user_name = 'USER_MISS'
self.my_login.user_password = 'password'
self.my_login.database_name = 'GISDEV'
self.my_login.database_type = 0
self.my_login.schema ='USER_MISS'
self.my_login.server_name = 'ORACLEDEV01'
self.my_url = url_builder(self.my_login)
self.my_session = connectToDatabase(self.my_url)
self.document_type = 1
gather_tables(self)
menu_cascade(self)
else:
self.my_login = login_stuff()
self.my_url = None
self.login_flag = False
self.document_type = 0
myresults = LoginForm(self)
myresults.exec()
if myresults.login_flag == True:
self.my_db_table = myresults.db
self.my_url = myresults.my_url
self.my_login = myresults.my_login
self.login_flag = True
self.my_session = connectToDatabase(self.my_url)
menu_cascade(self)
else:
print('Failed')
pass
    def onLogout(self):
        """Tear down the database session and reset all login state.

        NOTE(review): this clears 'self.db', while onLogin stores
        'my_db_table(s)' -- the attribute naming looks inconsistent; confirm
        which attribute callers actually read.
        """
        self.my_url = None
        self.db = None
        self.my_login = None
        self.login_flag = False
        self.document_type = 0
        # NOTE(review): the session is closed before the sub-windows, which
        # may still hold references to it -- confirm the order is intentional.
        self.my_session.close()
        for item in self.mdiArea.subWindowList():
            item.close()
        # Rebuild menus for the logged-out state.
        menu_cascade(self)
    def onExit(self):
        """Close every sub-window's database session, then the main window."""
        for item in self.mdiArea.subWindowList():
            # NOTE(review): subWindowList() yields QMdiSubWindow wrappers;
            # 'my_session' lives on the wrapped widget, so item.my_session may
            # raise AttributeError -- confirm (item.widget().my_session?).
            item.my_session.close()
            item.close()
        self.close()
        pass
    def onVAS(self):
        """Select the VAS document type.

        NOTE(review): unlike the sibling handlers this one does not call
        self.onOpen() -- confirm whether that is intentional.
        """
        self.document_type = 1
    def onVCRHVCRLDVPROJ(self):
        """Select the combined VCRH/VCRLD/VProj document type and open it."""
        self.document_type = 2
        self.onOpen()
    def onVCRH(self):
        """Select the VCRH document type and open it."""
        self.document_type = 3
        self.onOpen()
    def onVCRLD(self):
        """Select the VCRLD document type and open it."""
        self.document_type = 4
        self.onOpen()
    def onVPRJ(self):
        """Select the VProj document type and open it."""
        self.document_type = 5
        self.onOpen()
    def onLog(self):
        """Menu handler for the Log action (stub -- prints a trace only)."""
        print('Made it to Log')
ct(self):
print('Made it to New Project')
    def onRenameProject(self):
        """Menu handler for Rename Project (stub -- prints a trace only)."""
        print('made it to rename project')
    def onCopyProject(self):
        """Menu handler for Copy Project (stub -- prints a trace only)."""
        print('made it to Copy Project')
    def onEditLayers(self):
        """Select the Edit Layers document type and open it."""
        self.document_type = 6
        self.onOpen()
    def onSplitSection(self):
        """Select the Split Section document type and open it."""
        self.document_type = 7
        self.onOpen()
    def onTweakSection(self):
        """Select the Tweak Section document type and open it."""
        self.document_type = 8
        self.onOpen()
    def onMoveSection(self):
        """Select the Move Section document type and open it."""
        self.document_type = 9
        self.onOpen()
    def onCopySection(self):
        """Select the Copy Section document type and open it."""
        self.document_type = 10
        self.onOpen()
def loadDocument(self, filename):
self.statusBar.showMessage(self.tr("Loading..."), 2500)
document = Document(filename, self)
index = self.mdiArea.addDocument(document)
index.show()
self.refreshAct.setEnabled(True)
self.statusBar.showMessage(self.tr("Successfully loaded file"), 2500)
class MdiArea(QtWidgets.QWidget):
    """Container that tracks open Document widgets.

    NOTE(review): this subclasses QWidget but calls tab-widget style APIs
    (widget(), currentIndex(), count(), removeTab(), indexOf(), setTabText())
    that QWidget does not provide -- presumably it was once a QTabWidget
    subclass; confirm against callers before relying on those methods.
    """
    def __init__(self, parent=None):
        super(MdiArea, self).__init__(parent)
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        # Whether the current document has never been saved under a name.
        self._is_untitled = True
        self.index = 0
    def addDocument(self, document):
        """Wrap *document* in a QMdiSubWindow and wire its change signals.

        NOTE(review): the sub-window is returned but not attached to any MDI
        area here -- the caller is expected to manage placement.
        """
        document.fileChanged.connect(self.onFileChanged)
        document.fileLoaded.connect(self.onFileLoaded)
        sub = QtWidgets.QMdiSubWindow()
        sub.setWidget(document)
        return sub
    def currentDocument(self):
        """Return the document widget at the current index."""
        return self.widget(self.currentIndex())
    def documents(self):
        """Yield every managed document widget in index order."""
        for index in range(self.count()):
            yield self.widget(index)
    def closeDocument(self):
        """Remove the currently selected document."""
        index = self.currentIndex()
        self.removeTab(index)
    def setDocumentChanged(self, document, changed):
        """Prefix the document's label with '*' when it has unsaved changes."""
        index = self.indexOf(document)
        label = document.filename
        self.setTabText(index, "{}{}".format('*' if changed else '', label))
    def checkTimestamps(self):
        """Ask every document to compare its on-disk timestamp."""
        for document in self.documents():
            document.checkTimestamp()
    def onFileLoaded(self, document):
        """Mark *document* as clean after a (re)load."""
        self.setDocumentChanged(document, False)
    def onFileChanged(self, document):
        """Mark *document* as dirty when its file changed."""
        self.setDocumentChanged(document, True)
class Document(QtWidgets.QWidget):
    """A single open document widget hosted inside the MDI area.

    Copies the database context (login, url, session, tables) from the owning
    main window and instantiates the editor widget matching the currently
    selected document type.
    """
    # Emitted after the file finished loading / when the file changed.
    fileLoaded = QtCore.Signal(QtWidgets.QWidget)
    fileChanged = QtCore.Signal(QtWidgets.QWidget)
    def __init__(self, filename, my_parent, parent=None):
        """Create a document for *filename*, borrowing context from *my_parent*."""
        super(Document, self).__init__(parent)
        self.document_type = my_parent.document_type
        self.my_db_tables = my_parent.my_db_tables
        self.my_url = my_parent.my_url
        self.my_login = my_parent.my_login
        self.my_session = my_parent.my_session
        self.filename = filename
        self.documentEdit = self.createDocumentEdit()
        self.my_parent = my_parent
        # Keep the UI responsive while the editor widget is constructed.
        QCoreApplication.instance().processEvents()
        self.reload()
    def createDocumentEdit(self):
        """Build and return the editor widget for the active document type.

        Returns None for document types without a dedicated view here.
        """
        # Bug fix: the original body ended in a stray 't' (corrupted source)
        # and never returned the constructed widget; initialise to None so an
        # unknown document_type cannot raise NameError.
        my_document = None
        if self.document_type == 1:
            my_document = VAS_view(self)
        if self.document_type == 2:
            my_document = VCCP_view(self)
        return my_document
    def reload(self):
        """Reload the document from its backing store (no-op placeholder)."""
        pass
    def clearWarning(self):
        """Hide and clear the inline warning label."""
        self.warningLabel.clear()
        self.warningLabel.hide()
    def showWarning(self, message):
        """Show *message* in the inline warning label."""
        self.warningLabel.setText(message)
        self.warningLabel.show()
    def checkTimestamp(self):
        """Warn when the file on disk is newer than the loaded copy.

        NOTE(review): 'self.timestamp' is never initialised in __init__, so
        the first call will raise AttributeError -- confirm where the baseline
        timestamp is meant to be recorded.
        """
        timestamp = os.path.getmtime(self.filename)
        if timestamp > self.timestamp:
            self.showWarning(self.tr("<strong>The file {} changed on disk.</strong> Reload (hit Ctrl+R) to see the changes.").format(self.filename))
            self.fileChanged.emit(self)
        else:
            self.clearWarning()
def parse_args():
    """Parse command line arguments.

    Returns:
        argparse.Namespace: with ``filename`` -- the list of files to open.
    """
    parser = argparse.ArgumentParser(prog=os.path.basename(__file__), description="")
    parser.add_argument('filename', nargs="*", metavar='<file>', help="file")
    # Bug fix: option strings must be passed as separate arguments; the
    # original single string '-V, --version' registered one malformed option
    # instead of a short and a long flag.
    parser.add_argument('-V', '--version', action='version',
                        version='%(prog)s {}'.format(__version__))
    return parser.parse_args()
def main():
    """Application entry point: create the main window and open CLI files.

    Returns the Qt event-loop exit code.
    """
    args = parse_args()
    app = QtWidgets.QApplication(sys.argv)
    window = MainWindow()
    window.show()
    # Open any files given on the command line in their own sub-windows.
    for filename in args.filename:
        window.loadDocument(filename)
    return app.exec()
if __name__ == '__main__':
    # Use sys.exit instead of the site-provided 'exit' builtin so the exit
    # code propagates reliably (the builtin is absent under frozen/embedded
    # interpreters).  'sys' is already imported for sys.argv in main().
    sys.exit(main())
| true | true |
f73717b813f198b4b9298a72667370e86930923f | 3,695 | py | Python | plot_signals_weighted_depth.py | pyensemble/wildwood | b261cbd7d0b425b50647f719ab99c1d89f477d5c | [
"BSD-3-Clause"
] | 22 | 2021-06-24T11:30:03.000Z | 2022-03-09T00:59:30.000Z | plot_signals_weighted_depth.py | pyensemble/wildwood | b261cbd7d0b425b50647f719ab99c1d89f477d5c | [
"BSD-3-Clause"
] | 65 | 2021-03-13T17:50:03.000Z | 2022-02-22T16:50:02.000Z | plot_signals_weighted_depth.py | pyensemble/wildwood | b261cbd7d0b425b50647f719ab99c1d89f477d5c | [
"BSD-3-Clause"
] | 3 | 2021-03-04T18:44:10.000Z | 2022-01-26T17:28:35.000Z |
import logging
from matplotlib.cm import get_cmap
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from wildwood.datasets import get_signal, make_regression
from wildwood.forest import ForestRegressor
from wildwood._binning import Binner
# Pandas display settings for any printed diagnostics.
pd.set_option("display.max_columns", 20)
pd.set_option("display.precision", 2)
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S"
)
colormap = get_cmap("tab20")
# Experiment parameters shared by all signals.
n_samples_train = 5000
n_samples_test = 1000
random_state = 42
noise = 0.03
aggregation = True
n_estimators = 100
# Aggregation step set from the noise level (1 / noise^2).
step = 1 / noise ** 2
signal = "heavisine"
X_train, y_train = make_regression(
    n_samples=n_samples_train, signal=signal, noise=noise, random_state=random_state
)
# Uniform grid on [0, 1] used as the test input.
X_test = np.linspace(0, 1, num=n_samples_test)
#
# reg = ForestRegressor(
#     random_state=random_state,
#     aggregation=aggregation,
#     max_features=1,
#     n_estimators=n_estimators,
#     step=step,
# )
#
# reg.fit(X_train.reshape(n_samples_train, 1), y_train)
# y_pred = reg.predict(X_test.reshape(n_samples_test, 1))
#
# df = reg.get_nodes(0)
# print(df)
# exit(0)
# The four classical benchmark signals rendered below.
signals = ["heavisine", "bumps", "blocks", "doppler"]
def plot_weighted_depth(signal):
    """Fit a ForestRegressor on *signal* data and plot samples, prediction
    and per-tree weighted depths in a three-panel figure.

    NOTE(review): plt.savefig is commented out below, yet the log message
    claims the figure was saved -- confirm which behaviour is intended.
    """
    X_train, y_train = make_regression(
        n_samples=n_samples_train, signal=signal, noise=noise, random_state=random_state
    )
    X_train = X_train.reshape(-1, 1)
    X_test = np.linspace(0, 1, num=n_samples_test).reshape(-1, 1)
    # Bin the test grid the same way the forest sees features.
    binner = Binner().fit(X_train)
    X_test_binned = binner.transform(X_test)
    reg = ForestRegressor(
        random_state=random_state,
        aggregation=aggregation,
        n_estimators=n_estimators,
        step=step,
    )
    reg.fit(X_train, y_train)
    y_pred = reg.predict(X_test)
    # Per-tree weighted depth of each test point (private API).
    weighted_depths = reg._weighted_depth(X_test.reshape(n_samples_test, 1))
    # print("weighted_depths.shape:", weighted_depths.shape)
    # avg_weighted_depth = weighted_depths.mean(axis=0)
    fig, (ax1, ax2, ax3) = plt.subplots(nrows=3, ncols=1, sharex=True, figsize=(6, 5))
    plot_samples = ax1.plot(
        X_train, y_train, color=colormap.colors[1], lw=2, label="Samples"
    )[0]
    # X_test_binned holds bin indices; divide by 255 to map back onto [0, 1].
    plot_signal = ax1.plot(
        X_test_binned / 255,
        get_signal(X_test_binned / 255, signal),
        lw=2,
        color=colormap.colors[0],
        label="Signal",
    )[0]
    plot_prediction = ax2.plot(
        X_test.ravel(), y_pred, lw=2, color=colormap.colors[2], label="Prediction"
    )[0]
    # ax3.plot(
    #     X_test,
    #     weighted_depths[:, 1:],
    #     lw=1,
    #     color=colormap.colors[5],
    #     alpha=0.2,
    #     label="Weighted depths",
    # )
    # One faint line per tree, plus the emphasised average below.
    plot_weighted_depths = ax3.plot(
        X_test, weighted_depths.T, lw=1, color=colormap.colors[5], alpha=0.2
    )[0]
    plot_mean_weighted_depths = ax3.plot(
        X_test,
        weighted_depths.mean(axis=0),
        lw=2,
        color=colormap.colors[4],
        label="Mean weighted depth",
    )[0]
    filename = "weighted_depths_%s.pdf" % signal
    fig.subplots_adjust(hspace=0.1)
    fig.legend(
        (
            plot_signal,
            plot_samples,
            plot_mean_weighted_depths,
            plot_weighted_depths,
            plot_prediction,
        ),
        (
            "Signal",
            "Samples",
            "Average weighted depths",
            "Weighted depths",
            "Prediction",
        ),
        fontsize=12,
        loc="upper center",
        bbox_to_anchor=(0.5, 1.0),
        ncol=3,
    )
    # plt.savefig(filename)
    logging.info("Saved the decision functions in '%s'" % filename)
# Render the figure for each benchmark signal, then show them all at once.
for signal in signals:
    plot_weighted_depth(signal)
plt.show()
| 24.309211 | 88 | 0.637618 |
import logging
from matplotlib.cm import get_cmap
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from wildwood.datasets import get_signal, make_regression
from wildwood.forest import ForestRegressor
from wildwood._binning import Binner
pd.set_option("display.max_columns", 20)
pd.set_option("display.precision", 2)
logging.basicConfig(
level=logging.INFO, format="%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S"
)
colormap = get_cmap("tab20")
n_samples_train = 5000
n_samples_test = 1000
random_state = 42
noise = 0.03
aggregation = True
n_estimators = 100
step = 1 / noise ** 2
signal = "heavisine"
X_train, y_train = make_regression(
n_samples=n_samples_train, signal=signal, noise=noise, random_state=random_state
)
X_test = np.linspace(0, 1, num=n_samples_test)
signals = ["heavisine", "bumps", "blocks", "doppler"]
def plot_weighted_depth(signal):
    """Fit a ForestRegressor on *signal* data and plot samples, prediction
    and per-tree weighted depths in a three-panel figure.

    NOTE(review): logging below claims the figure was saved, but no
    plt.savefig call is present -- confirm intended behaviour.
    """
    X_train, y_train = make_regression(
        n_samples=n_samples_train, signal=signal, noise=noise, random_state=random_state
    )
    X_train = X_train.reshape(-1, 1)
    X_test = np.linspace(0, 1, num=n_samples_test).reshape(-1, 1)
    # Bin the test grid the same way the forest sees features.
    binner = Binner().fit(X_train)
    X_test_binned = binner.transform(X_test)
    reg = ForestRegressor(
        random_state=random_state,
        aggregation=aggregation,
        n_estimators=n_estimators,
        step=step,
    )
    reg.fit(X_train, y_train)
    y_pred = reg.predict(X_test)
    # Per-tree weighted depth of each test point (private API).
    weighted_depths = reg._weighted_depth(X_test.reshape(n_samples_test, 1))
    fig, (ax1, ax2, ax3) = plt.subplots(nrows=3, ncols=1, sharex=True, figsize=(6, 5))
    plot_samples = ax1.plot(
        X_train, y_train, color=colormap.colors[1], lw=2, label="Samples"
    )[0]
    # X_test_binned holds bin indices; divide by 255 to map back onto [0, 1].
    plot_signal = ax1.plot(
        X_test_binned / 255,
        get_signal(X_test_binned / 255, signal),
        lw=2,
        color=colormap.colors[0],
        label="Signal",
    )[0]
    plot_prediction = ax2.plot(
        X_test.ravel(), y_pred, lw=2, color=colormap.colors[2], label="Prediction"
    )[0]
    # One faint line per tree, plus the emphasised average below.
    plot_weighted_depths = ax3.plot(
        X_test, weighted_depths.T, lw=1, color=colormap.colors[5], alpha=0.2
    )[0]
    plot_mean_weighted_depths = ax3.plot(
        X_test,
        weighted_depths.mean(axis=0),
        lw=2,
        color=colormap.colors[4],
        label="Mean weighted depth",
    )[0]
    filename = "weighted_depths_%s.pdf" % signal
    fig.subplots_adjust(hspace=0.1)
    fig.legend(
        (
            plot_signal,
            plot_samples,
            plot_mean_weighted_depths,
            plot_weighted_depths,
            plot_prediction,
        ),
        (
            "Signal",
            "Samples",
            "Average weighted depths",
            "Weighted depths",
            "Prediction",
        ),
        fontsize=12,
        loc="upper center",
        bbox_to_anchor=(0.5, 1.0),
        ncol=3,
    )
    logging.info("Saved the decision functions in '%s'" % filename)
for signal in signals:
plot_weighted_depth(signal)
plt.show()
| true | true |
f737183680b786d27cc2fd5efe9c5e2fe75626a8 | 2,520 | py | Python | setup.py | JarnoHerr/AnaFlow | a7c56cdadf90d652f80bc1e1d38d3687d0365a63 | [
"MIT"
] | null | null | null | setup.py | JarnoHerr/AnaFlow | a7c56cdadf90d652f80bc1e1d38d3687d0365a63 | [
"MIT"
] | null | null | null | setup.py | JarnoHerr/AnaFlow | a7c56cdadf90d652f80bc1e1d38d3687d0365a63 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""AnaFlow - analytical solutions for the groundwater-flow equation."""
import os
from setuptools import setup, find_packages
# Directory containing this setup.py; all metadata files are read from here.
HERE = os.path.abspath(os.path.dirname(__file__))
# Long description shown on PyPI.
with open(os.path.join(HERE, "README.md"), encoding="utf-8") as f:
    README = f.read()
# Runtime / setup / test / doc requirements, one package per line.
with open(os.path.join(HERE, "requirements.txt"), encoding="utf-8") as f:
    REQ = f.read().splitlines()
with open(os.path.join(HERE, "requirements_setup.txt"), encoding="utf-8") as f:
    REQ_SETUP = f.read().splitlines()
with open(os.path.join(HERE, "requirements_test.txt"), encoding="utf-8") as f:
    REQ_TEST = f.read().splitlines()
with open(
    os.path.join(HERE, "docs", "requirements_doc.txt"), encoding="utf-8"
) as f:
    REQ_DOC = f.read().splitlines()
# The "dev" extra pulls in everything needed to build, test and document.
REQ_DEV = REQ_SETUP + REQ_TEST + REQ_DOC
# Short description taken from the first line of the module docstring.
DOCLINE = __doc__.split("\n")[0]
CLASSIFIERS = [
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "Intended Audience :: End Users/Desktop",
    "Intended Audience :: Science/Research",
    "License :: OSI Approved :: MIT License",
    "Natural Language :: English",
    "Operating System :: MacOS",
    "Operating System :: MacOS :: MacOS X",
    "Operating System :: Microsoft",
    "Operating System :: Microsoft :: Windows",
    "Operating System :: POSIX",
    "Operating System :: Unix",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3 :: Only",
    "Topic :: Scientific/Engineering",
    "Topic :: Software Development",
    "Topic :: Utilities",
]
setup(
    name="anaflow",
    description=DOCLINE,
    long_description=README,
    long_description_content_type="text/markdown",
    maintainer="Sebastian Mueller",
    maintainer_email="sebastian.mueller@ufz.de",
    author="Sebastian Mueller",
    author_email="sebastian.mueller@ufz.de",
    url="https://github.com/GeoStat-Framework/AnaFlow",
    license="MIT",
    classifiers=CLASSIFIERS,
    platforms=["Windows", "Linux", "Mac OS-X"],
    include_package_data=True,
    python_requires=">=3.5",
    # Version is derived from the git tag via setuptools_scm.
    use_scm_version={
        "relative_to": __file__,
        "write_to": "anaflow/_version.py",
        "write_to_template": "__version__ = '{version}'",
        "local_scheme": "no-local-version",
        "fallback_version": "0.0.0.dev0",
    },
    install_requires=REQ,
    setup_requires=REQ_SETUP,
    extras_require={"doc": REQ_DOC, "test": REQ_TEST, "dev": REQ_DEV},
    packages=find_packages(exclude=["tests*", "docs*"]),
)
| 34.054054 | 79 | 0.659524 |
import os
from setuptools import setup, find_packages
# Directory containing this setup.py; all metadata files are read from here.
HERE = os.path.abspath(os.path.dirname(__file__))
# Long description shown on PyPI.
with open(os.path.join(HERE, "README.md"), encoding="utf-8") as f:
    README = f.read()
# Runtime / setup / test / doc requirements, one package per line.
with open(os.path.join(HERE, "requirements.txt"), encoding="utf-8") as f:
    REQ = f.read().splitlines()
with open(os.path.join(HERE, "requirements_setup.txt"), encoding="utf-8") as f:
    REQ_SETUP = f.read().splitlines()
with open(os.path.join(HERE, "requirements_test.txt"), encoding="utf-8") as f:
    REQ_TEST = f.read().splitlines()
with open(
    os.path.join(HERE, "docs", "requirements_doc.txt"), encoding="utf-8"
) as f:
    REQ_DOC = f.read().splitlines()
# The "dev" extra pulls in everything needed to build, test and document.
REQ_DEV = REQ_SETUP + REQ_TEST + REQ_DOC
# NOTE(review): relies on the module docstring; if it is absent, __doc__ is
# None and this line raises AttributeError.
DOCLINE = __doc__.split("\n")[0]
CLASSIFIERS = [
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "Intended Audience :: End Users/Desktop",
    "Intended Audience :: Science/Research",
    "License :: OSI Approved :: MIT License",
    "Natural Language :: English",
    "Operating System :: MacOS",
    "Operating System :: MacOS :: MacOS X",
    "Operating System :: Microsoft",
    "Operating System :: Microsoft :: Windows",
    "Operating System :: POSIX",
    "Operating System :: Unix",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3 :: Only",
    "Topic :: Scientific/Engineering",
    "Topic :: Software Development",
    "Topic :: Utilities",
]
setup(
    name="anaflow",
    description=DOCLINE,
    long_description=README,
    long_description_content_type="text/markdown",
    maintainer="Sebastian Mueller",
    maintainer_email="sebastian.mueller@ufz.de",
    author="Sebastian Mueller",
    author_email="sebastian.mueller@ufz.de",
    url="https://github.com/GeoStat-Framework/AnaFlow",
    license="MIT",
    classifiers=CLASSIFIERS,
    platforms=["Windows", "Linux", "Mac OS-X"],
    include_package_data=True,
    python_requires=">=3.5",
    # Version is derived from the git tag via setuptools_scm.
    use_scm_version={
        "relative_to": __file__,
        "write_to": "anaflow/_version.py",
        "write_to_template": "__version__ = '{version}'",
        "local_scheme": "no-local-version",
        "fallback_version": "0.0.0.dev0",
    },
    install_requires=REQ,
    setup_requires=REQ_SETUP,
    extras_require={"doc": REQ_DOC, "test": REQ_TEST, "dev": REQ_DEV},
    packages=find_packages(exclude=["tests*", "docs*"]),
)
| true | true |
f73718f2e536fcce88e31006128c63e2fafa2f68 | 989 | py | Python | mayan/apps/templating/forms.py | darrenflexxu/Mayan-EDMS | 6707365bfacd137e625ddc1b990168012246fa07 | [
"Apache-2.0"
] | null | null | null | mayan/apps/templating/forms.py | darrenflexxu/Mayan-EDMS | 6707365bfacd137e625ddc1b990168012246fa07 | [
"Apache-2.0"
] | 5 | 2021-03-19T22:59:52.000Z | 2022-03-12T00:13:16.000Z | mayan/apps/templating/forms.py | Sumit-Kumar-Jha/mayan | 5b7ddeccf080b9e41cc1074c70e27dfe447be19f | [
"Apache-2.0"
] | 1 | 2020-07-29T21:03:27.000Z | 2020-07-29T21:03:27.000Z | from __future__ import absolute_import, unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from .fields import TemplateField
class DocumentTemplateSandboxForm(forms.Form):
    """Sandbox form for interactively evaluating a document template.

    The 'template' field is added dynamically in __init__ because it needs
    the model and model variable the template will be rendered against.
    """
    # Read-only output area showing the rendered template text.
    result = forms.CharField(
        help_text=_('Resulting text from the evaluated template.'),
        label=_('Result'), required=False, widget=forms.widgets.Textarea(
            attrs={'readonly': 'readonly', 'rows': 5}
        )
    )
    def __init__(self, *args, **kwargs):
        """Accept extra 'model' and 'model_variable' keyword arguments.

        They are popped before delegating so forms.Form does not receive
        unexpected keyword arguments.
        """
        self.model = kwargs.pop('model')
        self.model_variable = kwargs.pop('model_variable')
        super(DocumentTemplateSandboxForm, self).__init__(*args, **kwargs)
        self.fields['template'] = TemplateField(
            initial_help_text=_('The template string to be evaluated.'),
            label=_('Template'), model=self.model,
            model_variable=self.model_variable, required=True
        )
        # Show the editable template above the read-only result.
        self.order_fields(field_order=('template', 'result'))
| 36.62963 | 74 | 0.676441 | from __future__ import absolute_import, unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from .fields import TemplateField
class DocumentTemplateSandboxForm(forms.Form):
    """Sandbox form for interactively evaluating a document template."""
    # Read-only output area showing the rendered template text.
    result = forms.CharField(
        help_text=_('Resulting text from the evaluated template.'),
        label=_('Result'), required=False, widget=forms.widgets.Textarea(
            attrs={'readonly': 'readonly', 'rows': 5}
        )
    )
    def __init__(self, *args, **kwargs):
        """Accept extra 'model' and 'model_variable' keyword arguments,
        popped before delegating to forms.Form."""
        self.model = kwargs.pop('model')
        self.model_variable = kwargs.pop('model_variable')
        super(DocumentTemplateSandboxForm, self).__init__(*args, **kwargs)
        # The template field needs the model context, so it is built here.
        self.fields['template'] = TemplateField(
            initial_help_text=_('The template string to be evaluated.'),
            label=_('Template'), model=self.model,
            model_variable=self.model_variable, required=True
        )
        # Show the editable template above the read-only result.
        self.order_fields(field_order=('template', 'result'))
| true | true |
f73719671222a3d63128620021ee5f964d29f3b8 | 1,541 | py | Python | test/e2e/tests/aws/test_autoscaling.py | BlueWhaleKo/cortex | a60b9f3adf593ef27c234b9622f2516ebd17ce6c | [
"Apache-2.0"
] | null | null | null | test/e2e/tests/aws/test_autoscaling.py | BlueWhaleKo/cortex | a60b9f3adf593ef27c234b9622f2516ebd17ce6c | [
"Apache-2.0"
] | null | null | null | test/e2e/tests/aws/test_autoscaling.py | BlueWhaleKo/cortex | a60b9f3adf593ef27c234b9622f2516ebd17ce6c | [
"Apache-2.0"
] | null | null | null | # Copyright 2022 Cortex Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Callable, Dict
import cortex as cx
import pytest
import e2e.tests
# Parametrization data for test_autoscaling: "primary" names the API under
# test (also used as the pytest id); the whole dict is forwarded to
# e2e.tests.test_autoscaling.
TEST_APIS = [
    {
        "primary": "realtime/sleep",
        "dummy": ["realtime/prime-generator"],
        "query_params": {
            "sleep": "1.0",
        },
    }
]
@pytest.mark.usefixtures("client")
@pytest.mark.parametrize("apis", TEST_APIS, ids=[api["primary"] for api in TEST_APIS])
def test_autoscaling(printer: Callable, config: Dict, client: cx.Client, apis: Dict[str, Any]):
    """Run the e2e autoscaling scenario for each API set in TEST_APIS.

    Skipped entirely when the test config sets global.skip_autoscaling.
    """
    skip_autoscaling_test = config["global"].get("skip_autoscaling", False)
    if skip_autoscaling_test:
        pytest.skip("--skip-autoscaling flag detected, skipping autoscaling tests")
    e2e.tests.test_autoscaling(
        printer,
        client,
        apis,
        autoscaling_config=config["global"]["autoscaling_test_config"],
        deploy_timeout=config["global"]["realtime_deploy_timeout"],
        node_groups=config["aws"]["x86_nodegroups"],
    )
| 32.104167 | 95 | 0.695003 |
from typing import Any, Callable, Dict
import cortex as cx
import pytest
import e2e.tests
# Parametrization data for test_autoscaling; "primary" is also the pytest id.
TEST_APIS = [
    {
        "primary": "realtime/sleep",
        "dummy": ["realtime/prime-generator"],
        "query_params": {
            "sleep": "1.0",
        },
    }
]
@pytest.mark.usefixtures("client")
@pytest.mark.parametrize("apis", TEST_APIS, ids=[api["primary"] for api in TEST_APIS])
def test_autoscaling(printer: Callable, config: Dict, client: cx.Client, apis: Dict[str, Any]):
    """Run the e2e autoscaling scenario for each API set in TEST_APIS;
    skipped when the test config sets global.skip_autoscaling."""
    skip_autoscaling_test = config["global"].get("skip_autoscaling", False)
    if skip_autoscaling_test:
        pytest.skip("--skip-autoscaling flag detected, skipping autoscaling tests")
    e2e.tests.test_autoscaling(
        printer,
        client,
        apis,
        autoscaling_config=config["global"]["autoscaling_test_config"],
        deploy_timeout=config["global"]["realtime_deploy_timeout"],
        node_groups=config["aws"]["x86_nodegroups"],
    )
| true | true |
f737197aefec12c6b11fc71158604b87f0617b80 | 11,519 | py | Python | intersight/model/hyperflex_local_credential_policy_list.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 5 | 2021-12-16T15:13:32.000Z | 2022-03-29T16:09:54.000Z | intersight/model/hyperflex_local_credential_policy_list.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 4 | 2022-01-25T19:05:51.000Z | 2022-03-29T20:18:37.000Z | intersight/model/hyperflex_local_credential_policy_list.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 2 | 2020-07-07T15:01:08.000Z | 2022-01-31T04:27:35.000Z | """
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    """Import related model classes lazily (avoids circular imports at load time)."""
    from intersight.model.hyperflex_local_credential_policy import HyperflexLocalCredentialPolicy
    from intersight.model.hyperflex_local_credential_policy_list_all_of import HyperflexLocalCredentialPolicyListAllOf
    from intersight.model.mo_base_response import MoBaseResponse
    globals()['HyperflexLocalCredentialPolicy'] = HyperflexLocalCredentialPolicy
    globals()['HyperflexLocalCredentialPolicyListAllOf'] = HyperflexLocalCredentialPolicyListAllOf
    globals()['MoBaseResponse'] = MoBaseResponse
class HyperflexLocalCredentialPolicyList(ModelComposed):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # No enum restrictions or field validations for this model.
    allowed_values = {
    }
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'object_type': (str,),  # noqa: E501
            'count': (int,),  # noqa: E501
            'results': ([HyperflexLocalCredentialPolicy], none_type,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # Empty discriminator map -> no subtype dispatch for this model.
        val = {
        }
        if not val:
            return None
        return {'object_type': val}
    # Maps pythonic attribute names to the JSON keys used on the wire.
    attribute_map = {
        'object_type': 'ObjectType',  # noqa: E501
        'count': 'Count',  # noqa: E501
        'results': 'Results',  # noqa: E501
    }
    # Internal bookkeeping attributes that are never serialized.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
        '_composed_instances',
        '_var_name_to_model_instances',
        '_additional_properties_model_instances',
    ])
    @convert_js_args_to_python_args
    def __init__(self, object_type, *args, **kwargs):  # noqa: E501
        """HyperflexLocalCredentialPolicyList - a model defined in OpenAPI
        Args:
            object_type (str): A discriminator value to disambiguate the schema of a HTTP GET response body.
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            count (int): The total number of 'hyperflex.LocalCredentialPolicy' resources matching the request, accross all pages. The 'Count' attribute is included when the HTTP GET request includes the '$inlinecount' parameter.. [optional]  # noqa: E501
            results ([HyperflexLocalCredentialPolicy], none_type): The array of 'hyperflex.LocalCredentialPolicy' resources matching the request.. [optional]  # noqa: E501
        """
        # Pop framework-internal kwargs before composing the model.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        constant_args = {
            '_check_type': _check_type,
            '_path_to_item': _path_to_item,
            '_spec_property_naming': _spec_property_naming,
            '_configuration': _configuration,
            '_visited_composed_classes': self._visited_composed_classes,
        }
        required_args = {
            'object_type': object_type,
        }
        model_args = {}
        model_args.update(required_args)
        model_args.update(kwargs)
        # Resolve the allOf/anyOf/oneOf composition into concrete instances.
        composed_info = validate_get_composed_info(
            constant_args, model_args, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        unused_args = composed_info[3]
        for var_name, var_value in required_args.items():
            setattr(self, var_name, var_value)
        for var_name, var_value in kwargs.items():
            if var_name in unused_args and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        not self._additional_properties_model_instances:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
    @cached_property
    def _composed_schemas():
        # we need this here to make our import statements work
        # we must store _composed_schemas in here so the code is only run
        # when we invoke this method. If we kept this at the class
        # level we would get an error beause the class level
        # code would be run when this module is imported, and these composed
        # classes don't exist yet because their module has not finished
        # loading
        lazy_import()
        return {
          'anyOf': [
          ],
          'allOf': [
              HyperflexLocalCredentialPolicyListAllOf,
              MoBaseResponse,
          ],
          'oneOf': [
          ],
        }
| 48.196653 | 1,678 | 0.643285 |
import re
import sys
from intersight.model_utils import (
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    """Import related model classes lazily (avoids circular imports at load time)."""
    from intersight.model.hyperflex_local_credential_policy import HyperflexLocalCredentialPolicy
    from intersight.model.hyperflex_local_credential_policy_list_all_of import HyperflexLocalCredentialPolicyListAllOf
    from intersight.model.mo_base_response import MoBaseResponse
    globals()['HyperflexLocalCredentialPolicy'] = HyperflexLocalCredentialPolicy
    globals()['HyperflexLocalCredentialPolicyListAllOf'] = HyperflexLocalCredentialPolicyListAllOf
    globals()['MoBaseResponse'] = MoBaseResponse
class HyperflexLocalCredentialPolicyList(ModelComposed):
    """Auto-generated OpenAPI composed model (allOf of
    HyperflexLocalCredentialPolicyListAllOf and MoBaseResponse).

    NOTE(review): this file appears to be openapi-generator output; the
    composition is resolved at runtime via `validate_get_composed_info`.
    """

    # No enum-constrained properties in this model.
    allowed_values = {
    }

    # No value validations (min/max/regex) in this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """Types accepted for properties not declared in `openapi_types`."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)

    _nullable = False

    @cached_property
    def openapi_types():
        """Map of python attribute name to its allowed type tuple.

        Computed lazily so the composed classes are imported first.
        """
        lazy_import()
        return {
            'object_type': (str,),
            'count': (int,),
            'results': ([HyperflexLocalCredentialPolicy], none_type,),
        }

    @cached_property
    def discriminator():
        # No discriminator mapping is defined for this schema, so return None.
        val = {
        }
        if not val:
            return None
        return {'object_type': val}

    # Map of python attribute name to the JSON key used on the wire.
    attribute_map = {
        'object_type': 'ObjectType',
        'count': 'Count',
        'results': 'Results',
    }

    # Framework-internal attribute names that must never be treated as model
    # properties when (de)serializing.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
        '_composed_instances',
        '_var_name_to_model_instances',
        '_additional_properties_model_instances',
    ])

    @convert_js_args_to_python_args
    def __init__(self, object_type, *args, **kwargs):
        """HyperflexLocalCredentialPolicyList - a model defined in OpenAPI.

        Args:
            object_type (str): value serialized under the 'ObjectType' key.

        Keyword Args:
            _check_type, _spec_property_naming, _path_to_item, _configuration,
            _visited_composed_classes: framework-internal options popped below;
            remaining kwargs are model properties.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Generated models accept keyword arguments only.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track visited classes to break cycles while resolving composition.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        constant_args = {
            '_check_type': _check_type,
            '_path_to_item': _path_to_item,
            '_spec_property_naming': _spec_property_naming,
            '_configuration': _configuration,
            '_visited_composed_classes': self._visited_composed_classes,
        }
        required_args = {
            'object_type': object_type,
        }
        model_args = {}
        model_args.update(required_args)
        model_args.update(kwargs)
        # Instantiate the composed (allOf) models and map each variable name
        # to the instance(s) that own it.
        composed_info = validate_get_composed_info(
            constant_args, model_args, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        unused_args = composed_info[3]

        for var_name, var_value in required_args.items():
            setattr(self, var_name, var_value)
        for var_name, var_value in kwargs.items():
            if var_name in unused_args and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        not self._additional_properties_model_instances:
                # discard variable.
                continue
            setattr(self, var_name, var_value)

    @cached_property
    def _composed_schemas():
        # Computed lazily: the composed classes' modules may not have finished
        # loading when this module is imported.
        lazy_import()
        return {
          'anyOf': [
          ],
          'allOf': [
              HyperflexLocalCredentialPolicyListAllOf,
              MoBaseResponse,
          ],
          'oneOf': [
          ],
        }
| true | true |
f73719b8ddd6e7b96a87fa9b7d8a5b4f930cd798 | 60,481 | py | Python | jax/_src/random.py | obkyrush/jax | 8662c5f660678b6320a1a8fc46e917e97c399b57 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-12-25T01:12:42.000Z | 2021-12-25T01:12:42.000Z | jax/_src/random.py | obkyrush/jax | 8662c5f660678b6320a1a8fc46e917e97c399b57 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | jax/_src/random.py | obkyrush/jax | 8662c5f660678b6320a1a8fc46e917e97c399b57 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
from typing import Any, Optional, Sequence, Union
import warnings
import numpy as np
from jax import lax
from jax import core
from jax import numpy as jnp
from jax._src import dtypes
from jax.core import NamedShape
from jax._src.api import jit, vmap
from jax._src.numpy.lax_numpy import _constant_like, _convert_and_clip_integer, _check_arraylike
from jax.lib import xla_bridge
from jax.lib import xla_client
from jax.lib import cuda_prng
from jax.numpy.linalg import cholesky, svd, eigh
from jax.interpreters import ad
from jax.interpreters import batching
from jax.interpreters import xla
from jax._src.util import prod
# Loose type aliases for array-like arguments; refined typing is TODO below.
Array = Any
RealArray = Array
IntegerArray = Array
# TODO: Import or define these to match
# https://github.com/numpy/numpy/blob/main/numpy/typing/_dtype_like.py.
DTypeLikeInt = Any
DTypeLikeFloat = Any

# Map bit width -> unsigned integer dtype, used by the raw-bit samplers below.
_UINT_DTYPES = {8: jnp.uint8, 16: jnp.uint16, 32: jnp.uint32, 64: jnp.uint64}
def PRNGKey(seed: int) -> jnp.ndarray:
  """Create a pseudo-random number generator (PRNG) key given an integer seed.

  Args:
    seed: a 64- or 32-bit integer used as the value of the key.

  Returns:
    A PRNG key, which is modeled as an array of shape (2,) and dtype uint32. The
    key is constructed from a 64-bit seed by effectively bit-casting to a pair
    of uint32 values (or from a 32-bit seed by first padding out with zeros).
  """
  # Avoid overflowerror in X32 mode by first converting ints to int64.
  # This breaks JIT invariance of PRNGKey for large ints, but supports the
  # common use-case of instantiating PRNGKey with Python hashes in X32 mode.
  if isinstance(seed, int):
    seed_arr = jnp.asarray(np.int64(seed))
  else:
    seed_arr = jnp.asarray(seed)
  if seed_arr.shape:
    raise TypeError(f"PRNGKey seed must be a scalar; got {seed!r}.")
  if not np.issubdtype(seed_arr.dtype, np.integer):
    raise TypeError(f"PRNGKey seed must be an integer; got {seed!r}")

  # Split the seed into two uint32 words: high 32 bits -> key[0],
  # low 32 bits -> key[1].
  convert = lambda k: lax.reshape(lax.convert_element_type(k, np.uint32), [1])
  k1 = convert(lax.shift_right_logical(seed_arr, lax._const(seed_arr, 32)))
  k2 = convert(jnp.bitwise_and(seed_arr, np.uint32(0xFFFFFFFF)))
  return lax.concatenate([k1, k2], 0)
def _is_prng_key(key: jnp.ndarray) -> bool:
try:
return key.shape == (2,) and key.dtype == np.uint32
except AttributeError:
return False
### utilities
def _make_rotate_left(dtype):
  """Return a bitwise rotate-left function specialized to integer `dtype`."""
  if not jnp.issubdtype(dtype, np.integer):
    raise TypeError("_rotate_left only accepts integer dtypes.")
  nbits = np.array(jnp.iinfo(dtype).bits, dtype)

  def _rotate_left(x, d):
    # Normalize both operands to the target dtype so the shifts are well-typed.
    if lax.dtype(d) != dtype:
      d = lax.convert_element_type(d, dtype)
    if lax.dtype(x) != dtype:
      x = lax.convert_element_type(x, dtype)
    # (x << d) | (x >> (nbits - d)): bits shifted off the top wrap around.
    return lax.shift_left(x, d) | lax.shift_right_logical(x, nbits - d)
  return _rotate_left
def _bit_stats(bits):
"""This is a debugging function to compute the statistics of bit fields."""
return np.array([list(map(int, np.binary_repr(x, 64))) for x in bits]).mean(0)
### hash function and split
def _threefry2x32_abstract_eval(*args):
  """Abstract-eval rule for the threefry2x32 primitive.

  All four operands (two key words, two count words) must be uint32; the two
  outputs have the broadcast of the operand shapes and dtype uint32.
  """
  if any(a.dtype != jnp.uint32 for a in args):
    raise TypeError("Arguments to threefry2x32 must have uint32 type, got {}"
                    .format(args))
  if all(isinstance(arg, core.ShapedArray) for arg in args):
    shape = lax._broadcasting_shape_rule(*args)
    named_shape = core.join_named_shapes(*(a.named_shape for a in args))
    aval = core.ShapedArray(shape, jnp.dtype(jnp.uint32), named_shape=named_shape)
  else:
    # Fall back to an unshaped aval when some operand shape is unknown.
    aval = core.UnshapedArray(jnp.dtype(jnp.uint32))
  # The primitive produces two output words of the same aval.
  return (aval,) * 2
# Threefry-2x32 operates on 32-bit words.
rotate_left = _make_rotate_left(np.uint32)


def apply_round(v, rot):
  """Apply one Threefry mix round (add, rotate-left by `rot`, xor) to the
  two-word state `v`, returning a new list."""
  v = v[:]  # copy so the caller's list is not mutated
  v[0] = v[0] + v[1]
  v[1] = rotate_left(v[1], rot)
  v[1] = v[0] ^ v[1]
  return v
def rotate_list(xs):
  """Cyclically rotate a list left by one position (first element to the end)."""
  head, tail = xs[:1], xs[1:]
  return tail + head
def rolled_loop_step(i, state):
  """One iteration of the rolled Threefry loop: four mix rounds followed by
  key injection, matching one key-injection step of the unrolled version in
  `_threefry2x32_lowering`."""
  x, ks, rotations = state
  for r in rotations[0]:
    x = apply_round(x, r)
  # Inject the current round keys; (i + 1) mirrors the 1..5 constants added
  # in the unrolled schedule below.
  new_x = [x[0] + ks[0], x[1] + ks[1] + jnp.asarray(i + 1, dtype=np.uint32)]
  # Advance both the key schedule and the rotation schedule.
  return new_x, rotate_list(ks), rotate_list(rotations)
def _threefry2x32_lowering(key1, key2, x1, x2, use_rolled_loops=True):
  """Apply the Threefry 2x32 hash.

  Args:
    key1, key2: uint32 arrays holding the two words of the key.
    x1, x2: uint32 arrays holding the two words of the counts.
    use_rolled_loops: if True, express the rounds as a `fori_loop` over five
      key-injection steps (smaller compiled program); otherwise fully unroll.

  Returns:
    A tuple of two uint32 arrays: the hashed output words.
  """
  x = [x1, x2]

  # Per-round rotation amounts; the two groups alternate between injections.
  rotations = [np.array([13, 15, 26, 6], dtype=np.uint32),
               np.array([17, 29, 16, 24], dtype=np.uint32)]
  # Key schedule: third word is k0 ^ k1 ^ constant (0x1BD11BDA).
  ks = [key1, key2, key1 ^ key2 ^ np.uint32(0x1BD11BDA)]

  x[0] = x[0] + ks[0]
  x[1] = x[1] + ks[1]

  if use_rolled_loops:
    x, _, _ = lax.fori_loop(0, 5, rolled_loop_step, (x, rotate_list(ks), rotations))

  else:
    # Unrolled: five (4-round mix + key injection) steps, rotating through
    # the key schedule and adding the step counter 1..5 to the second word.
    for r in rotations[0]:
      x = apply_round(x, r)
    x[0] = x[0] + ks[1]
    x[1] = x[1] + ks[2] + np.uint32(1)

    for r in rotations[1]:
      x = apply_round(x, r)
    x[0] = x[0] + ks[2]
    x[1] = x[1] + ks[0] + np.uint32(2)

    for r in rotations[0]:
      x = apply_round(x, r)
    x[0] = x[0] + ks[0]
    x[1] = x[1] + ks[1] + np.uint32(3)

    for r in rotations[1]:
      x = apply_round(x, r)
    x[0] = x[0] + ks[1]
    x[1] = x[1] + ks[2] + np.uint32(4)

    for r in rotations[0]:
      x = apply_round(x, r)
    x[0] = x[0] + ks[2]
    x[1] = x[1] + ks[0] + np.uint32(5)

  return tuple(x)
def _threefry2x32_gpu_translation_rule(c, k1, k2, x1, x2):
  """XLA translation rule dispatching threefry2x32 to the CUDA custom kernel."""
  shape = lax.broadcast_shapes(
      c.get_shape(k1).dimensions(), c.get_shape(k2).dimensions(),
      c.get_shape(x1).dimensions(), c.get_shape(x2).dimensions())
  rank = len(shape)
  # Zero-size result: skip the kernel and emit broadcasted zeros directly.
  if 0 in shape:
    zeros = xla_client.ops.Broadcast(
        xla_bridge.constant(c, np.array(0, np.uint32)), shape)
    return xla_client.ops.Tuple(c, [zeros, zeros])
  def _broadcast(x):
    # Right-align dimensions (numpy broadcasting) to the common shape, since
    # the kernel expects all four operands pre-broadcast.
    ndims = c.get_shape(x).rank()
    return xla_client.ops.BroadcastInDim(x, shape,
                                         tuple(range(rank - ndims, rank)))
  return cuda_prng.threefry2x32(
      c, (_broadcast(k1), _broadcast(k2)), (_broadcast(x1), _broadcast(x2)))
# Primitive registration. The default lowering unrolls the hash rounds; the
# CPU-specific lowering uses the rolled fori_loop form; the GPU lowering calls
# the hand-written CUDA kernel when the cuda_prng extension is available.
threefry2x32_p = core.Primitive("threefry2x32")
threefry2x32_p.multiple_results = True
threefry2x32_p.def_impl(partial(xla.apply_primitive, threefry2x32_p))
threefry2x32_p.def_abstract_eval(_threefry2x32_abstract_eval)
batching.defbroadcasting(threefry2x32_p)
xla.translations_with_avals[threefry2x32_p] = xla.lower_fun(
    partial(_threefry2x32_lowering, use_rolled_loops=False),
    multiple_results=True, with_avals=True)
xla.backend_specific_translations['cpu'][threefry2x32_p] = xla.lower_fun(
    partial(_threefry2x32_lowering, use_rolled_loops=True),
    multiple_results=True)
if cuda_prng:
  xla.backend_specific_translations['gpu'][threefry2x32_p] = \
      _threefry2x32_gpu_translation_rule
@jit
def threefry_2x32(keypair, count):
  """Apply the Threefry 2x32 hash.

  Args:
    keypair: a pair of 32bit unsigned integers used for the key.
    count: an array of dtype uint32 used for the counts.

  Returns:
    An array of dtype uint32 with the same shape as `count`.
  """
  key1, key2 = keypair
  if not lax.dtype(key1) == lax.dtype(key2) == lax.dtype(count) == np.uint32:
    msg = "threefry_2x32 requires uint32 arguments, got {}"
    raise TypeError(msg.format([lax.dtype(x) for x in [key1, key2, count]]))

  # The primitive consumes the counts as two equal-length word arrays, so pad
  # odd-sized inputs with one zero and drop the extra output word afterwards.
  odd_size = count.size % 2
  if odd_size:
    x = list(jnp.split(jnp.concatenate([count.ravel(), np.uint32([0])]), 2))
  else:
    x = list(jnp.split(count.ravel(), 2))

  x = threefry2x32_p.bind(key1, key2, x[0], x[1])
  out = jnp.concatenate(x)
  assert out.dtype == np.uint32
  return lax.reshape(out[:-1] if odd_size else out, count.shape)
def split(key: jnp.ndarray, num: int = 2) -> jnp.ndarray:
  """Splits a PRNG key into `num` new keys by adding a leading axis.

  Args:
    key: a PRNGKey (an array with shape (2,) and dtype uint32).
    num: optional, a positive integer indicating the number of keys to produce
      (default 2).

  Returns:
    An array with shape (num, 2) and dtype uint32 representing `num` new keys.
  """
  # `num` must be a concrete Python int: it is a static (hashable) argument
  # of the jitted helper.
  return _split(key, int(num))  # type: ignore
@partial(jit, static_argnums=(1,))
def _split(key, num) -> jnp.ndarray:
  """Hash 2*num counter words under `key` and reshape into `num` (2,) keys."""
  counts = lax.iota(np.uint32, num * 2)
  return lax.reshape(threefry_2x32(key, counts), (num, 2))
def fold_in(key: jnp.ndarray, data: int) -> jnp.ndarray:
  """Folds in data to a PRNG key to form a new PRNG key.

  Args:
    key: a PRNGKey (an array with shape (2,) and dtype uint32).
    data: a 32bit integer representing data to be folded in to the key.

  Returns:
    A new PRNGKey that is a deterministic function of the inputs and is
    statistically safe for producing a stream of new pseudo-random values.
  """
  # Cast up front so the jitted helper always sees a uint32 operand.
  return _fold_in(key, jnp.uint32(data))
@jit
def _fold_in(key, data):
  """Mix `data` into `key` by hashing `key` with a key derived from `data`."""
  return threefry_2x32(key, PRNGKey(data))
@partial(jit, static_argnums=(1, 2))
def _random_bits(key, bit_width, shape):
  """Sample uniform random bits of given width and shape using PRNG key."""
  if not _is_prng_key(key):
    raise TypeError("_random_bits got invalid prng key.")
  if bit_width not in (8, 16, 32, 64):
    raise TypeError("requires 8-, 16-, 32- or 64-bit field width.")
  shape = core.as_named_shape(shape)
  for name, size in shape.named_items:
    # Named (mapped) axes: fold each participant's axis index into the key so
    # every position along the axis draws from an independent stream.
    real_size = lax.psum(1, name)
    if real_size != size:
      raise ValueError(f"The shape of axis {name} was specified as {size}, "
                       f"but it really is {real_size}")
    axis_index = lax.axis_index(name)
    key = fold_in(key, axis_index)
  size = prod(shape.positional)
  # Each threefry count word yields 32 bits; round the word count up.
  max_count = int(np.ceil(bit_width * size / 32))

  nblocks, rem = divmod(max_count, jnp.iinfo(np.uint32).max)
  if not nblocks:
    bits = threefry_2x32(key, lax.iota(np.uint32, rem))
  else:
    # More counts than a single uint32 iota can index: generate full-size
    # blocks under distinct subkeys, plus one final partial block.
    keys = split(key, nblocks + 1)
    subkeys, last_key = keys[:-1], keys[-1]
    blocks = vmap(threefry_2x32, in_axes=(0, None))(subkeys, lax.iota(np.uint32, jnp.iinfo(np.uint32).max))
    last = threefry_2x32(last_key, lax.iota(np.uint32, rem))
    bits = lax.concatenate([blocks.ravel(), last], 0)

  dtype = _UINT_DTYPES[bit_width]
  if bit_width == 64:
    # Pair up 32-bit words into 64-bit values: high word | low word.
    bits = [lax.convert_element_type(x, dtype) for x in jnp.split(bits, 2)]
    bits = lax.shift_left(bits[0], dtype(32)) | bits[1]
  elif bit_width in [8, 16]:
    # this is essentially bits.view(dtype)[:size]
    bits = lax.bitwise_and(
      np.uint32(np.iinfo(dtype).max),
      lax.shift_right_logical(
        lax.broadcast(bits, (1,)),
        lax.mul(
          np.uint32(bit_width),
          lax.broadcasted_iota(np.uint32, (32 // bit_width, 1), 0)
        )
      )
    )
    bits = lax.reshape(bits, (np.uint32(max_count * 32 // bit_width),), (1, 0))
    bits = lax.convert_element_type(bits, dtype)[:size]
  return lax.reshape(bits, shape)
### random samplers
def _check_shape(name, shape: Union[Sequence[int], NamedShape], *param_shapes):
  """Raise ValueError unless `shape` equals the broadcast of `param_shapes`.

  Args:
    name: the sampler's name, used only in the error message.
    shape: the user-requested output shape (positional part is checked).
    *param_shapes: shapes of the distribution parameters; broadcasting them
      together must reproduce `shape` exactly.
  """
  shape = core.as_named_shape(shape)

  if param_shapes:
    shape_ = lax.broadcast_shapes(shape.positional, *param_shapes)
    if shape.positional != shape_:
      msg = ("{} parameter shapes must be broadcast-compatible with shape "
             "argument, and the result of broadcasting the shapes must equal "
             "the shape argument, but got result {} for shape argument {}.")
      raise ValueError(msg.format(name, shape_, shape))
def uniform(key: jnp.ndarray,
            shape: Union[Sequence[int], NamedShape] = (),
            dtype: DTypeLikeFloat = dtypes.float_,
            minval: RealArray = 0.,
            maxval: RealArray = 1.) -> jnp.ndarray:
  """Sample uniform random values in [minval, maxval) with given shape/dtype.

  Args:
    key: a PRNGKey used as the random key.
    shape: optional, a tuple of nonnegative integers representing the result
      shape. Default ().
    dtype: optional, a float dtype for the returned values (default float64 if
      jax_enable_x64 is true, otherwise float32).
    minval: optional, a minimum (inclusive) value broadcast-compatible with shape for the range (default 0).
    maxval: optional, a maximum (exclusive) value broadcast-compatible with shape for the range (default 1).

  Returns:
    A random array with the specified shape and dtype.
  """
  # Validate before canonicalizing so the error shows the user's dtype.
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `uniform` must be a float dtype, "
                     f"got {dtype}")
  dtype = dtypes.canonicalize_dtype(dtype)
  shape = core.as_named_shape(shape)
  # shape and dtype are static arguments of the jitted implementation.
  return _uniform(key, shape, dtype, minval, maxval)  # type: ignore
@partial(jit, static_argnums=(1, 2))
def _uniform(key, shape, dtype, minval, maxval) -> jnp.ndarray:
  """jit-compiled implementation of `uniform` (shape and dtype are static).

  Draws raw bits and bit-twiddles them into floats in [minval, maxval).
  """
  _check_shape("uniform", shape)
  if not jnp.issubdtype(dtype, np.floating):
    raise TypeError("uniform only accepts floating point dtypes.")

  minval = lax.convert_element_type(minval, dtype)
  maxval = lax.convert_element_type(maxval, dtype)
  minval = lax.broadcast_to_rank(minval, shape.positional_rank)
  maxval = lax.broadcast_to_rank(maxval, shape.positional_rank)

  finfo = jnp.finfo(dtype)
  nbits, nmant = finfo.bits, finfo.nmant

  if nbits not in (16, 32, 64):
    # Fix: the old message claimed only 32-/64-bit dtypes were accepted,
    # contradicting the 16-bit case admitted by the check above.
    raise TypeError(
        f"uniform only accepts 16-, 32-, or 64-bit dtypes, got {dtype}.")

  bits = _random_bits(key, nbits, shape)

  # The strategy here is to randomize only the mantissa bits with an exponent of
  # 1 (after applying the bias), then shift and scale to the desired range. The
  # bit-level transformation we use relies on Numpy and XLA having bit-for-bit
  # equivalent float representations, which might not be true on all platforms.
  float_bits = lax.bitwise_or(
      lax.shift_right_logical(bits, np.array(nbits - nmant, lax.dtype(bits))),
      np.array(1., dtype).view(_UINT_DTYPES[nbits]))
  floats = lax.bitcast_convert_type(float_bits, dtype) - np.array(1., dtype)
  # Clamp at minval in case rounding in the affine transform dipped below it.
  return lax.max(
      minval,
      lax.reshape(floats * (maxval - minval) + minval, shape.positional))
def randint(key: jnp.ndarray,
            shape: Sequence[int],
            minval: IntegerArray,
            maxval: IntegerArray,
            dtype: DTypeLikeInt = dtypes.int_):
  """Sample uniform random values in [minval, maxval) with given shape/dtype.

  Args:
    key: a PRNGKey used as the random key.
    shape: a tuple of nonnegative integers representing the shape.
    minval: int or array of ints broadcast-compatible with ``shape``, a minimum
      (inclusive) value for the range.
    maxval: int or array of ints broadcast-compatible with ``shape``, a maximum
      (exclusive) value for the range.
    dtype: optional, an int dtype for the returned values (default int64 if
      jax_enable_x64 is true, otherwise int32).

  Returns:
    A random array with the specified shape and dtype.
  """
  dtype = dtypes.canonicalize_dtype(dtype)
  shape = core.canonicalize_shape(shape)
  # shape and dtype are static arguments of the jitted implementation.
  return _randint(key, shape, minval, maxval, dtype)
@partial(jit, static_argnums=(1, 4))
def _randint(key, shape, minval, maxval, dtype):
  """jit-compiled implementation of `randint` (shape and dtype are static)."""
  _check_shape("randint", shape, np.shape(minval), np.shape(maxval))
  if not jnp.issubdtype(dtype, np.integer):
    raise TypeError(f"randint only accepts integer dtypes, got {dtype}")

  _check_arraylike("randint", minval, maxval)
  minval = jnp.asarray(minval)
  maxval = jnp.asarray(maxval)
  if not jnp.issubdtype(minval.dtype, np.integer):
    minval = minval.astype(int)
  if not jnp.issubdtype(maxval.dtype, np.integer):
    maxval = maxval.astype(int)

  # Flag where maxval is greater than the maximum value of dtype
  # in order to handle cases like randint(key, shape, 0, 256, 'uint8')
  maxval_out_of_range = lax.gt(
    maxval, _convert_and_clip_integer(jnp.array(jnp.iinfo(dtype).max, dtype), maxval.dtype))

  minval = _convert_and_clip_integer(minval, dtype)
  maxval = _convert_and_clip_integer(maxval, dtype)
  minval = lax.broadcast_to_rank(minval, len(shape))
  maxval = lax.broadcast_to_rank(maxval, len(shape))
  nbits = jnp.iinfo(dtype).bits

  if nbits not in (8, 16, 32, 64):
    raise TypeError(f"randint only accepts 8-, 16-, 32-, or 64-bit dtypes, got {dtype}")

  # This algorithm is biased whenever (maxval - minval) is not a power of 2.
  # We generate double the number of random bits required by the dtype so as to
  # reduce that bias.
  k1, k2 = split(key)
  rbits = lambda key: _random_bits(key, nbits, shape)
  higher_bits, lower_bits = rbits(k1), rbits(k2)

  unsigned_dtype = _UINT_DTYPES[nbits]
  span = lax.convert_element_type(maxval - minval, unsigned_dtype)

  # Ensure that span=1 when maxval <= minval, so minval is always returned;
  # https://github.com/google/jax/issues/222
  span = lax.select(maxval <= minval, lax.full_like(span, 1), span)

  # When maxval is out of range, the span has to be one larger.
  # If span is already the maximum representable value, this will wrap to zero,
  # causing remainders below to have no effect, which is the correct semantics.
  span = lax.select(
    maxval_out_of_range & (maxval > minval),
    lax.add(span, lax._const(span, 1)),
    span)

  # To compute a remainder operation on an integer that might have twice as many
  # bits as we can represent in the native unsigned dtype, we compute a
  # multiplier equal to 2**nbits % span. To avoid overflow, we use the identity:
  #  (a * b) % N = [(a % N) * (b % N)] % N
  multiplier = lax.rem(lax._const(span, 2 ** (nbits // 2)), span)
  multiplier = lax.rem(lax.mul(multiplier, multiplier), span)

  random_offset = lax.add(lax.mul(lax.rem(higher_bits, span), multiplier),
                          lax.rem(lower_bits, span))
  random_offset = lax.rem(random_offset, span)
  return lax.add(minval, lax.convert_element_type(random_offset, dtype))
def shuffle(key: jnp.ndarray, x: Array, axis: int = 0) -> jnp.ndarray:
  """Shuffle the elements of an array uniformly at random along an axis.

  .. deprecated::
     Use :func:`jax.random.permutation` instead; calling this emits a
     ``FutureWarning``.

  Args:
    key: a PRNGKey used as the random key.
    x: the array to be shuffled.
    axis: optional, an int axis along which to shuffle (default 0).

  Returns:
    A shuffled version of x.
  """
  msg = ("jax.random.shuffle is deprecated and will be removed in a future release. "
         "Use jax.random.permutation")
  warnings.warn(msg, FutureWarning)
  return _shuffle(key, x, axis)  # type: ignore
def permutation(key: jnp.ndarray, x: Array) -> jnp.ndarray:
  """
  Permute elements of an array along its first axis or return a permuted range.

  If `x` is a multi-dimensional array, it is only shuffled along its
  first index.

  Args:
    key: a PRNGKey used as the random key.
    x: the array or integer range to be shuffled.

  Returns:
    A shuffled version of x or array range.
  """
  if not np.ndim(x):
    # scalar case, must be a concrete integer
    if not np.issubdtype(lax.dtype(x), np.integer):
      raise TypeError("x must be an integer or at least 1-dimensional")
    x = int(x)  # type: ignore[assignment]
    return _shuffle(key, jnp.arange(x), 0)
  elif np.ndim(x) == 1:
    return _shuffle(key, x, 0)
  else:
    assert isinstance(x, jnp.ndarray)
    # Shuffle only the leading axis: permute the row indices, then gather.
    ind = _shuffle(key, jnp.arange(x.shape[0]), 0)  # type: ignore[attribute-error]
    return x[ind]
@partial(jit, static_argnums=(2,))
def _shuffle(key, x, axis) -> jnp.ndarray:
  """Shuffle `x` along `axis` by repeatedly sorting under random keys.

  See the comment below for why sorting is preferred over Fisher-Yates here.
  """
  # On parallel architectures, Fisher-Yates is more expensive than doing
  # multiple sorts. This algorithm is based on one developed and analyzed by
  # tjablin@. We sort according to randomly-generated 32bit keys, but those keys
  # may have collisions. If we repeat the process, using fresh 32bit keys for
  # each sort, then whenever all pairs of elements have been assigned distinct
  # keys at some iteration (or equivalently when the strings formed by
  # concatenating the successive keys for each element are all distinct) then we
  # are guaranteed to have a perfect sample (assuming that either the sort is
  # stable or that any bias is not value-dependent). Since checking uniqueness
  # at runtime may be expensive, we use a heuristic static stop criterion
  # developed by tjablin@. See tensorflow/compiler/tf2xla/random_ops.cc for more
  # info, and for the original implementation of this algorithm. See also
  # Section 2 of http://people.csail.mit.edu/costis/6896sp11/lec5s.pdf for
  # another analysis (where the keys are generated one bit at a time).
  exponent = 3  # see tjablin@'s analysis for explanation of this parameter
  uint32max = jnp.iinfo(np.uint32).max
  num_rounds = int(np.ceil(exponent * np.log(max(1, x.size)) / np.log(uint32max)))

  for _ in range(num_rounds):
    key, subkey = split(key)
    sort_keys = _random_bits(subkey, 32, x.shape)
    _, x = lax.sort_key_val(sort_keys, x, axis)

  return x
def choice(key: jnp.ndarray,
           a: IntegerArray,
           shape: Sequence[int] = (),
           replace: bool = True,
           p=None) -> jnp.ndarray:
  """Generates a random sample from a given 1-D array.

  Args:
    key: a PRNGKey used as the random key.
    a : 1D array or int. If an ndarray, a random sample is generated from
      its elements. If an int, the random sample is generated as if a were
      arange(a).
    shape : tuple of ints, optional. Output shape. If the given shape is,
      e.g., ``(m, n)``, then ``m * n`` samples are drawn. Default is (),
      in which case a single value is returned.
    replace : boolean. Whether the sample is with or without replacement.
      default is True.
    p : 1-D array-like, The probabilities associated with each entry in a.
      If not given the sample assumes a uniform distribution over all
      entries in a.

  Returns:
    An array of shape `shape` containing samples from `a`.
  """
  if not isinstance(shape, Sequence):
    raise TypeError("shape argument of jax.random.choice must be a sequence, "
                    f"got {shape}")
  if np.ndim(a) not in [0, 1]:
    raise ValueError("a must be an integer or 1-dimensional")
  _check_arraylike("choice", a)
  if np.ndim(a) == 0:
    # Scalar `a` means "sample from arange(a)" and must be concrete.
    a = core.concrete_or_error(int, a, "The error occurred in jax.random.choice()")
  else:
    a = jnp.asarray(a)
  n_inputs = int(a) if np.ndim(a) == 0 else len(a)  # type: ignore[arg-type]
  n_draws = prod(shape)
  if n_draws == 0:
    return jnp.zeros(shape, dtype=lax.dtype(a))
  if n_inputs <= 0:
    raise ValueError("a must be greater than 0 unless no samples are taken")
  if not replace and n_draws > n_inputs:
    raise ValueError("Cannot take a larger sample than population when 'replace=False'")

  if p is None:
    if replace:
      ind = randint(key, shape, 0, n_inputs)
      result = ind if np.ndim(a) == 0 else a[ind]  # type: ignore[index]
    else:
      result = permutation(key, a)[:n_draws]
  else:
    # Fix: normalize `p` like `a` above, so an array-like (e.g. a Python
    # list) raises a clear ValueError on shape mismatch rather than an
    # AttributeError on `p.shape`.
    _check_arraylike("choice", p)
    p = jnp.asarray(p)
    if p.shape != (n_inputs,):
      raise ValueError("p must be None or match the shape of a")
    if replace:
      # Inverse-CDF sampling: search the cumulative weights with uniforms
      # scaled by the total mass (so `p` need not be normalized).
      p_cuml = jnp.cumsum(p)
      r = p_cuml[-1] * (1 - uniform(key, shape))
      ind = jnp.searchsorted(p_cuml, r)
      result = ind if np.ndim(a) == 0 else a[ind]  # type: ignore[index]
    else:
      # Gumbel top-k trick: https://timvieira.github.io/blog/post/2019/09/16/algorithms-for-sampling-without-replacement/
      g = -gumbel(key, (n_inputs,)) - jnp.log(p)
      ind = jnp.argsort(g)[:n_draws]
      result = ind if np.ndim(a) == 0 else a[ind]  # type: ignore[index]

  return result.reshape(shape)
def normal(key: jnp.ndarray,
           shape: Union[Sequence[int], NamedShape] = (),
           dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard normal random values with given shape and float dtype.

  Args:
    key: a PRNGKey used as the random key.
    shape: optional, a tuple of nonnegative integers representing the result
      shape. Default ().
    dtype: optional, a float or complex dtype for the returned values (default
      float64 if jax_enable_x64 is true, otherwise float32). For a complex
      dtype, samples are standard complex normal (unit total variance).

  Returns:
    A random array with the specified shape and dtype.
  """
  # np.inexact admits both floating and complex dtypes.
  if not dtypes.issubdtype(dtype, np.inexact):
    raise ValueError(f"dtype argument to `normal` must be a float or complex dtype, "
                     f"got {dtype}")
  dtype = dtypes.canonicalize_dtype(dtype)
  shape = core.as_named_shape(shape)
  return _normal(key, shape, dtype)  # type: ignore
@partial(jit, static_argnums=(1, 2))
def _normal(key, shape, dtype) -> jnp.ndarray:
  """Dispatch: complex dtypes draw independent real/imag parts scaled so the
  total variance is 1; real dtypes go straight to `_normal_real`."""
  if dtypes.issubdtype(dtype, np.complexfloating):
    sqrt2 = np.array(np.sqrt(2), dtype)

    key_re, key_im = split(key)
    real_dtype = np.array(0, dtype).real.dtype
    _re = _normal_real(key_re, shape, real_dtype)
    _im = _normal_real(key_im, shape, real_dtype)
    return (_re + 1j * _im) / sqrt2
  else:
    return _normal_real(key, shape, dtype)  # type: ignore
@partial(jit, static_argnums=(1, 2))
def _normal_real(key, shape, dtype) -> jnp.ndarray:
  """Standard normal via inverse CDF: sqrt(2) * erfinv(U) with U in (-1, 1)."""
  _check_shape("normal", shape)
  # `lo` is nudged strictly above -1 so erf_inv never sees the endpoint.
  lo = np.nextafter(np.array(-1., dtype), np.array(0., dtype), dtype=dtype)
  hi = np.array(1., dtype)
  u = uniform(key, shape, dtype, lo, hi)  # type: ignore[arg-type]
  return np.array(np.sqrt(2), dtype) * lax.erf_inv(u)
def multivariate_normal(key: jnp.ndarray,
                        mean: RealArray,
                        cov: RealArray,
                        shape: Optional[Sequence[int]] = None,
                        dtype: DTypeLikeFloat = dtypes.float_,
                        method: str = 'cholesky') -> jnp.ndarray:
  """Sample multivariate normal random values with given mean and covariance.

  Args:
    key: a PRNGKey used as the random key.
    mean: a mean vector of shape ``(..., n)``.
    cov: a positive definite covariance matrix of shape ``(..., n, n)``. The
      batch shape ``...`` must be broadcast-compatible with that of ``mean``.
    shape: optional, a tuple of nonnegative integers specifying the result
      batch shape; that is, the prefix of the result shape excluding the last
      axis. Must be broadcast-compatible with ``mean.shape[:-1]`` and
      ``cov.shape[:-2]``. The default (None) produces a result batch shape by
      broadcasting together the batch shapes of ``mean`` and ``cov``.
    dtype: optional, a float dtype for the returned values (default float64 if
      jax_enable_x64 is true, otherwise float32).
    method: optional, a method to compute the factor of ``cov``.
      Must be one of 'svd', 'eigh', and 'cholesky'. Default 'cholesky'.

  Returns:
    A random array with the specified dtype and shape given by
    ``shape + mean.shape[-1:]`` if ``shape`` is not None, or else
    ``broadcast_shapes(mean.shape[:-1], cov.shape[:-2]) + mean.shape[-1:]``.
  """
  # `method` is validated eagerly; everything else inside the jitted helper.
  if method not in {'svd', 'eigh', 'cholesky'}:
    raise ValueError("method must be one of {'svd', 'eigh', 'cholesky'}")
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `multivariate_normal` must be a float "
                     f"dtype, got {dtype}")
  dtype = dtypes.canonicalize_dtype(dtype)
  if shape is not None:
    shape = core.canonicalize_shape(shape)
  return _multivariate_normal(key, mean, cov, shape, dtype, method)  # type: ignore
@partial(jit, static_argnums=(3, 4, 5))
def _multivariate_normal(key, mean, cov, shape, dtype, method) -> jnp.ndarray:
  """Sample x = mean + F @ z with z ~ N(0, I) and F a factor of `cov`."""
  if not np.ndim(mean) >= 1:
    msg = "multivariate_normal requires mean.ndim >= 1, got mean.ndim == {}"
    raise ValueError(msg.format(np.ndim(mean)))
  if not np.ndim(cov) >= 2:
    msg = "multivariate_normal requires cov.ndim >= 2, got cov.ndim == {}"
    raise ValueError(msg.format(np.ndim(cov)))
  n = mean.shape[-1]
  if np.shape(cov)[-2:] != (n, n):
    msg = ("multivariate_normal requires cov.shape == (..., n, n) for n={n}, "
           "but got cov.shape == {shape}.")
    raise ValueError(msg.format(n=n, shape=np.shape(cov)))

  if shape is None:
    shape = lax.broadcast_shapes(mean.shape[:-1], cov.shape[:-2])
  else:
    _check_shape("normal", shape, mean.shape[:-1], cov.shape[:-2])

  # Each branch builds a matrix F with F @ F.T == cov (for PSD cov).
  if method == 'svd':
    (u, s, _) = svd(cov)
    factor = u * jnp.sqrt(s)
  elif method == 'eigh':
    (w, v) = eigh(cov)
    factor = v * jnp.sqrt(w)
  else: # 'cholesky'
    factor = cholesky(cov)
  normal_samples = normal(key, shape + mean.shape[-1:], dtype)
  # Batched matrix-vector product over the trailing dimension.
  return mean + jnp.einsum('...ij,...j->...i', factor, normal_samples)
def truncated_normal(key: jnp.ndarray,
                    lower: RealArray,
                    upper: RealArray,
                    shape: Optional[Union[Sequence[int], NamedShape]] = None,
                    dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample truncated standard normal random values with given shape and dtype.

  Args:
    key: a PRNGKey used as the random key.
    lower: a float or array of floats representing the lower bound for
      truncation. Must be broadcast-compatible with ``upper``.
    upper: a float or array of floats representing the  upper bound for
      truncation. Must be broadcast-compatible with ``lower``.
    shape: optional, a tuple of nonnegative integers specifying the result
      shape. Must be broadcast-compatible with ``lower`` and ``upper``. The
      default (None) produces a result shape by broadcasting ``lower`` and
      ``upper``.
    dtype: optional, a float dtype for the returned values (default float64 if
      jax_enable_x64 is true, otherwise float32).

  Returns:
    A random array with the specified dtype and shape given by ``shape`` if
    ``shape`` is not None, or else by broadcasting ``lower`` and ``upper``.
    Returns values in the open interval ``(lower, upper)``.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `truncated_normal` must be a float "
                     f"dtype, got {dtype}")
  dtype = dtypes.canonicalize_dtype(dtype)
  if shape is not None:
    shape = core.as_named_shape(shape)
  # shape and dtype are static arguments of the jitted implementation.
  return _truncated_normal(key, lower, upper, shape, dtype)  # type: ignore
@partial(jit, static_argnums=(3, 4))
def _truncated_normal(key, lower, upper, shape, dtype) -> jnp.ndarray:
  """Inverse-CDF sampler: map uniforms in (erf(l/√2), erf(u/√2)) through
  erf_inv, giving normals restricted to (lower, upper)."""
  if shape is None:
    shape = lax.broadcast_shapes(np.shape(lower), np.shape(upper))
  else:
    _check_shape("truncated_normal", shape, np.shape(lower), np.shape(upper))

  sqrt2 = np.array(np.sqrt(2), dtype)
  lower = lax.convert_element_type(lower, dtype)
  upper = lax.convert_element_type(upper, dtype)
  a = lax.erf(lower / sqrt2)
  b = lax.erf(upper / sqrt2)
  if not jnp.issubdtype(dtype, np.floating):
    raise TypeError("truncated_normal only accepts floating point dtypes.")
  u = uniform(key, shape, dtype, minval=a, maxval=b)
  out = sqrt2 * lax.erf_inv(u)
  # Clamp the value to the open interval (lower, upper) to make sure that
  # rounding (or if we chose `a` for `u`) doesn't push us outside of the range.
  return jnp.clip(
      out,
      lax.nextafter(lax.stop_gradient(lower), np.array(np.inf, dtype=dtype)),
      lax.nextafter(lax.stop_gradient(upper), np.array(-np.inf, dtype=dtype)))
def bernoulli(key: jnp.ndarray,
              p: RealArray = np.float32(0.5),
              shape: Optional[Union[Sequence[int], NamedShape]] = None) -> jnp.ndarray:
  """Sample Bernoulli random values with mean ``p``.

  Args:
    key: a PRNGKey used as the random key.
    p: optional float (or array of floats) giving the success probability;
      must be broadcast-compatible with ``shape``. Defaults to 0.5.
    shape: optional result shape (tuple of nonnegative integers); must be
      broadcast-compatible with ``p.shape``. When None (the default) the
      result has shape ``p.shape``.

  Returns:
    A boolean array with shape ``shape`` if ``shape`` is not None, or else
    ``p.shape``.
  """
  dtype = dtypes.canonicalize_dtype(lax.dtype(p))
  shape = core.as_named_shape(shape) if shape is not None else None
  if not jnp.issubdtype(dtype, np.floating):
    raise TypeError(
        "bernoulli probability `p` must have a floating dtype, got {}.".format(dtype))
  return _bernoulli(key, lax.convert_element_type(p, dtype), shape)  # type: ignore
@partial(jit, static_argnums=(2,))
def _bernoulli(key, p, shape) -> jnp.ndarray:
  # JIT-compiled core of `bernoulli` (`shape` is static): draw uniforms in
  # [0, 1) and threshold against p.
  if shape is None:
    # TODO: Use the named part of `p` as well
    shape = np.shape(p)
  else:
    _check_shape("bernoulli", shape, np.shape(p))
  return uniform(key, shape, lax.dtype(p)) < p
def beta(key: jnp.ndarray,
         a: RealArray,
         b: RealArray,
         shape: Optional[Sequence[int]] = None,
         dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample Beta random values with the given shape and float dtype.

  Args:
    key: a PRNGKey used as the random key.
    a: float or array of floats, the first shape parameter "alpha";
      broadcast-compatible with ``shape``.
    b: float or array of floats, the second shape parameter "beta";
      broadcast-compatible with ``shape``.
    shape: optional result shape (tuple of nonnegative integers), which must
      be broadcast-compatible with ``a`` and ``b``. When None (the default)
      the result shape is the broadcast of ``a`` and ``b``.
    dtype: optional float dtype of the result (float64 when jax_enable_x64 is
      enabled, otherwise float32).

  Returns:
    A random array with the requested dtype, shaped as ``shape`` or, when
    ``shape`` is None, as the broadcast of ``a`` and ``b``.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `beta` must be a float "
                     f"dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = None if shape is None else core.canonicalize_shape(shape)
  return _beta(key, a, b, canonical_shape, canonical_dtype)
def _beta(key, a, b, shape, dtype):
  # Core of `beta`, using the standard construction
  # Beta(a, b) = Ga / (Ga + Gb) with Ga ~ Gamma(a), Gb ~ Gamma(b) drawn
  # under independent subkeys.
  # NOTE(review): unlike the sibling samplers this helper carries no
  # @partial(jit, ...) decorator — presumably it relies on the jitted
  # `gamma` calls below; confirm before adding one.
  if shape is None:
    shape = lax.broadcast_shapes(np.shape(a), np.shape(b))
  else:
    _check_shape("beta", shape, np.shape(a), np.shape(b))
  a = lax.convert_element_type(a, dtype)
  b = lax.convert_element_type(b, dtype)
  key_a, key_b = split(key)
  a = jnp.broadcast_to(a, shape)
  b = jnp.broadcast_to(b, shape)
  gamma_a = gamma(key_a, a, shape, dtype)
  gamma_b = gamma(key_b, b, shape, dtype)
  return gamma_a / (gamma_a + gamma_b)
def cauchy(key: jnp.ndarray,
           shape: Sequence[int] = (),
           dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard Cauchy random values.

  Args:
    key: a PRNGKey used as the random key.
    shape: optional tuple of nonnegative integers giving the result shape.
      Default ``()``.
    dtype: optional float dtype of the result (float64 when jax_enable_x64 is
      enabled, otherwise float32).

  Returns:
    A random array with the requested shape and dtype.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `cauchy` must be a float "
                     f"dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _cauchy(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _cauchy(key, shape, dtype):
  # Inverse-CDF sampling: X = tan(pi * (U - 1/2)), with U bounded away from 0
  # by the dtype's eps to keep tan finite.
  _check_shape("cauchy", shape)
  u = uniform(key, shape, dtype, minval=jnp.finfo(dtype).eps, maxval=1.)
  pi = _constant_like(u, np.pi)
  return lax.tan(lax.mul(pi, lax.sub(u, _constant_like(u, 0.5))))
def dirichlet(key: jnp.ndarray,
              alpha: RealArray,
              shape: Optional[Sequence[int]] = None,
              dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample Dirichlet random values.

  Args:
    key: a PRNGKey used as the random key.
    alpha: array of shape ``(..., n)`` holding the concentration parameters.
    shape: optional batch shape (tuple of nonnegative integers); this is the
      result shape *without* the trailing ``n`` and must be
      broadcast-compatible with ``alpha.shape[:-1]``. When None (the default)
      the result has shape ``alpha.shape``.
    dtype: optional float dtype of the result (float64 when jax_enable_x64 is
      enabled, otherwise float32).

  Returns:
    A random array of shape ``shape + (alpha.shape[-1],)`` when ``shape`` is
    given, or else ``alpha.shape``.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `dirichlet` must be a float "
                     f"dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  batch_shape = None if shape is None else core.canonicalize_shape(shape)
  return _dirichlet(key, alpha, batch_shape, canonical_dtype)
@partial(jit, static_argnums=(2, 3))
def _dirichlet(key, alpha, shape, dtype):
  # Core of `dirichlet`: draw independent Gamma(alpha_i) variates and
  # normalize them along the last axis.
  if not np.ndim(alpha) >= 1:
    msg = "dirichlet requires alpha.ndim >= 1, got alpha.ndim == {}"
    raise ValueError(msg.format(np.ndim(alpha)))
  if shape is None:
    shape = np.shape(alpha)[:-1]
  else:
    _check_shape("dirichlet", shape, np.shape(alpha)[:-1])
  alpha = lax.convert_element_type(alpha, dtype)
  gamma_samples = gamma(key, alpha, shape + np.shape(alpha)[-1:], dtype)
  return gamma_samples / jnp.sum(gamma_samples, axis=-1, keepdims=True)
def exponential(key: jnp.ndarray,
                shape: Sequence[int] = (),
                dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample Exponential(1) random values.

  Args:
    key: a PRNGKey used as the random key.
    shape: optional tuple of nonnegative integers giving the result shape.
      Default ``()``.
    dtype: optional float dtype of the result (float64 when jax_enable_x64 is
      enabled, otherwise float32).

  Returns:
    A random array with the requested shape and dtype.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `exponential` must be a float "
                     f"dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _exponential(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _exponential(key, shape, dtype):
  # Inverse-CDF sampling: X = -log(1 - U) with U ~ Uniform[0, 1).
  _check_shape("exponential", shape)
  u = uniform(key, shape, dtype)
  # taking 1 - u to move the domain of log to (0, 1] instead of [0, 1)
  return lax.neg(lax.log1p(lax.neg(u)))
def _gamma_one(key, alpha):
  # Sample a single Gamma(alpha, 1) variate using Marsaglia & Tsang's
  # squeeze/rejection algorithm.
  # Ref: A simple method for generating gamma variables, George Marsaglia and Wai Wan Tsang
  # The algorithm can also be founded in:
  # https://en.wikipedia.org/wiki/Gamma_distribution#Generating_gamma-distributed_random_variables
  zero = _constant_like(alpha, 0)
  one = _constant_like(alpha, 1)
  minus_one = _constant_like(alpha, -1)
  one_over_two = _constant_like(alpha, 0.5)
  one_over_three = _constant_like(alpha, 1. / 3.)
  squeeze_const = _constant_like(alpha, 0.0331)
  dtype = lax.dtype(alpha)
  key, subkey = split(key)
  # for alpha < 1, we boost alpha to alpha + 1 and get a sample according to
  # Gamma(alpha) ~ Gamma(alpha+1) * Uniform()^(1 / alpha)
  boost = lax.select(lax.ge(alpha, one),
                     one,
                     lax.pow(uniform(subkey, (), dtype=dtype), lax.div(one, alpha)))
  alpha = lax.select(lax.ge(alpha, one), alpha, lax.add(alpha, one))
  d = lax.sub(alpha, one_over_three)
  c = lax.div(one_over_three, lax.sqrt(d))
  def _cond_fn(kXVU):
    # Returns True while the proposal (X, V, U) is rejected: it fails both the
    # cheap squeeze test and the exact log-density test.
    _, X, V, U = kXVU
    # TODO: use lax.cond when its batching rule is supported
    # The reason is to avoid evaluating second condition which involves log+log
    # if the first condition is satisfied
    cond = lax.bitwise_and(lax.ge(U, lax.sub(one, lax.mul(squeeze_const, lax.mul(X, X)))),
                           lax.ge(lax.log(U), lax.add(lax.mul(X, one_over_two),
                                                      lax.mul(d, lax.add(lax.sub(one, V),
                                                                         lax.log(V))))))
    return cond
  def _body_fn(kXVU):
    # Draw a fresh proposal: X = x^2 with x normal (redrawn until v > 0),
    # V = v^3, and an independent uniform U for the acceptance test.
    def _next_kxv(kxv):
      key = kxv[0]
      key, subkey = split(key)
      x = normal(subkey, (), dtype=dtype)
      v = lax.add(one, lax.mul(x, c))
      return key, x, v
    key = kXVU[0]
    key, x_key, U_key = split(key, 3)
    _, x, v = lax.while_loop(lambda kxv: lax.le(kxv[2], zero), _next_kxv, (x_key, zero, minus_one))
    X = lax.mul(x, x)
    V = lax.mul(lax.mul(v, v), v)
    U = uniform(U_key, (), dtype=dtype)
    return key, X, V, U
  # initial state is chosen such that _cond_fn will return True
  _, _, V, _ = lax.while_loop(_cond_fn, _body_fn, (key, zero, one, _constant_like(alpha, 2)))
  z = lax.mul(lax.mul(d, V), boost)
  # Never return exactly zero — presumably to protect downstream log/gradient
  # computations; replace it with the smallest positive value of the dtype.
  return lax.select(lax.eq(z, zero), jnp.finfo(z.dtype).tiny, z)
def _gamma_grad(sample, a):
  # Per-element derivative d(sample)/d(a) for gamma samples, delegated to
  # lax.random_gamma_grad. On CPU the elements are mapped sequentially
  # instead of vmapped.
  samples = jnp.reshape(sample, -1)
  alphas = jnp.reshape(a, -1)
  if xla_bridge.get_backend().platform == 'cpu':
    grads = lax.map(lambda args: lax.random_gamma_grad(*args), (alphas, samples))
  else:
    grads = vmap(lax.random_gamma_grad)(alphas, samples)
  return grads.reshape(np.shape(a))
def _gamma_impl(key, a, use_vmap=False):
  # Eager implementation of the random_gamma primitive: draw one independent
  # Gamma(a_i) sample per element of `a`, each under its own subkey.
  a_shape = jnp.shape(a)
  # split key to match the shape of a
  key_ndim = jnp.ndim(key) - 1
  key = jnp.reshape(key, (-1, 2))
  key = vmap(split, in_axes=(0, None))(key, prod(a_shape[key_ndim:]))
  keys = jnp.reshape(key, (-1, 2))
  alphas = jnp.reshape(a, -1)
  # `use_vmap` vectorizes the per-element sampler; otherwise elements are
  # generated sequentially with lax.map (used for the CPU lowering).
  if use_vmap:
    samples = vmap(_gamma_one)(keys, alphas)
  else:
    samples = lax.map(lambda args: _gamma_one(*args), (keys, alphas))
  return jnp.reshape(samples, a_shape)
def _gamma_batching_rule(batched_args, batch_dims):
  # Batching rule for random_gamma: move each operand's batch dimension to
  # the front (broadcasting unbatched operands) and rebind; output is
  # batched along axis 0.
  k, a = batched_args
  bk, ba = batch_dims
  size = next(t.shape[i] for t, i in zip(batched_args, batch_dims) if i is not None)
  k = batching.bdim_at_front(k, bk, size)
  a = batching.bdim_at_front(a, ba, size)
  return random_gamma_p.bind(k, a), 0
# Register `random_gamma` as a primitive: eager impl, shape rule (output has
# `a`'s shape/dtype), JVP via the implicit gradient above, a generic vmapped
# XLA lowering, a sequential (lax.map) lowering for CPU, and a batching rule.
random_gamma_p = core.Primitive('random_gamma')
random_gamma_p.def_impl(_gamma_impl)
random_gamma_p.def_abstract_eval(lambda key, a: core.raise_to_shaped(a))
ad.defjvp2(random_gamma_p, None, lambda tangent, ans, key, a: tangent * _gamma_grad(ans, a))
xla.translations_with_avals[random_gamma_p] = xla.lower_fun(
    partial(_gamma_impl, use_vmap=True),
    multiple_results=False, with_avals=True)
xla.backend_specific_translations['cpu'][random_gamma_p] = xla.lower_fun(
    partial(_gamma_impl, use_vmap=False),
    multiple_results=False)
batching.primitive_batchers[random_gamma_p] = _gamma_batching_rule
def gamma(key: jnp.ndarray,
          a: RealArray,
          shape: Optional[Sequence[int]] = None,
          dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample Gamma random values with the given shape and float dtype.

  Args:
    key: a PRNGKey used as the random key.
    a: float or array of floats, the shape parameter of the distribution;
      broadcast-compatible with ``shape``.
    shape: optional result shape (tuple of nonnegative integers), which must
      be broadcast-compatible with ``a``. When None (the default) the result
      has shape ``a.shape``.
    dtype: optional float dtype of the result (float64 when jax_enable_x64 is
      enabled, otherwise float32).

  Returns:
    A random array with the requested dtype, shaped as ``shape`` or, when
    ``shape`` is None, as ``a.shape``.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `gamma` must be a float "
                     f"dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = None if shape is None else core.canonicalize_shape(shape)
  return _gamma(key, a, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(2, 3))
def _gamma(key, a, shape, dtype):
  # JIT-compiled core of `gamma` (`shape` and `dtype` static): broadcast `a`
  # to the result shape and bind the random_gamma primitive.
  if shape is None:
    shape = np.shape(a)
  else:
    _check_shape("gamma", shape, np.shape(a))
  a = lax.convert_element_type(a, dtype)
  if np.shape(a) != shape:
    a = jnp.broadcast_to(a, shape)
  return random_gamma_p.bind(key, a)
@partial(jit, static_argnums=(2, 3, 4))
def _poisson_knuth(key, lam, shape, dtype, max_iters):
  # Knuth's algorithm for generating Poisson random variates.
  # Reference:
  # https://en.wikipedia.org/wiki/Poisson_distribution#Generating_Poisson-distributed_random_variables
  # Multiplies uniforms (in log space) until the running log-product drops
  # below -lam; the number of factors needed, minus one, is the sample.
  def body_fn(carry):
    i, k, rng, log_prod = carry
    rng, subkey = split(rng)
    # Count one more arrival for every element still above the threshold.
    k = lax.select(log_prod > -lam, k + 1, k)
    u = uniform(subkey, shape, np.float32)
    return i + 1, k, rng, log_prod + jnp.log(u)
  def cond_fn(carry):
    i, log_prod = carry[0], carry[3]
    return (log_prod > -lam).any() & (i < max_iters)
  k_init = lax.full_like(lam, 0, dtype, shape)
  log_rate_init = lax.full_like(lam, 0, np.float32, shape)
  k = lax.while_loop(cond_fn, body_fn, (0, k_init, key, log_rate_init))[1]
  return (k - 1).astype(dtype)
@partial(jit, static_argnums=(2, 3, 4))
def _poisson_rejection(key, lam, shape, dtype, max_iters):
  # Transformed rejection due to Hormann.
  # Reference:
  # http://citeseer.ist.psu.edu/viewdoc/citations;jsessionid=1BEB35946CC807879F55D42512E5490C?doi=10.1.1.48.3054.
  # Loops until every element has an accepted proposal (or max_iters is hit),
  # keeping the first accepted k per element.
  log_lam = lax.log(lam)
  b = 0.931 + 2.53 * lax.sqrt(lam)
  a = -0.059 + 0.02483 * b
  inv_alpha = 1.1239 + 1.1328 / (b - 3.4)
  v_r = 0.9277 - 3.6224 / (b - 2)
  def body_fn(carry):
    i, k_out, accepted, key = carry
    key, subkey_0, subkey_1 = split(key, 3)
    u = uniform(subkey_0, shape, lam.dtype) - 0.5
    v = uniform(subkey_1, shape, lam.dtype)
    u_shifted = 0.5 - abs(u)
    # Candidate count from the transformed uniform.
    k = lax.floor((2 * a / u_shifted + b) * u + lam + 0.43)
    s = lax.log(v * inv_alpha / (a / (u_shifted * u_shifted) + b))
    t = -lam + k * log_lam - lax.lgamma(k + 1)
    # accept1: cheap squeeze; accept2: exact log-density comparison.
    accept1 = (u_shifted >= 0.07) & (v <= v_r)
    reject = (k < 0) | ((u_shifted < 0.013) & (v > u_shifted))
    accept2 = s <= t
    accept = accept1 | (~reject & accept2)
    k_out = lax.select(accept, k, k_out)
    accepted |= accept
    return i + 1, k_out, accepted, key
  def cond_fn(carry):
    i, k_out, accepted, key = carry
    return (~accepted).any() & (i < max_iters)
  k_init = lax.full_like(lam, -1, lam.dtype, shape)
  accepted = lax.full_like(lam, False, jnp.bool_, shape)
  k = lax.while_loop(cond_fn, body_fn, (0, k_init, accepted, key))[1]
  return k.astype(dtype)
@partial(jit, static_argnums=(2, 3))
def _poisson(key, lam, shape, dtype):
  # The implementation matches TensorFlow and NumPy:
  # https://github.com/tensorflow/tensorflow/blob/v2.2.0-rc3/tensorflow/core/kernels/random_poisson_op.cc
  # https://github.com/numpy/numpy/blob/v1.18.3/numpy/random/src/distributions/distributions.c#L574
  # For lambda < 10, we use the Knuth algorithm; otherwise, we use transformed
  # rejection sampling.
  # Both branches are evaluated (lax.select), so each branch gets a neutral
  # lambda for the elements handled by the other branch.
  use_knuth = lam < 10
  lam_knuth = lax.select(use_knuth, lam, lax.full_like(lam, 0.0))
  # The acceptance probability for rejection sampling maxes out at 89% as
  # λ -> ∞, so pick some arbitrary large value.
  lam_rejection = lax.select(use_knuth, lax.full_like(lam, 1e5), lam)
  max_iters = dtype.type(jnp.iinfo(dtype).max)  # insanely conservative
  result = lax.select(
      use_knuth,
      _poisson_knuth(key, lam_knuth, shape, dtype, max_iters),
      _poisson_rejection(key, lam_rejection, shape, dtype, max_iters),
  )
  # lam == 0 deterministically yields 0.
  return lax.select(lam == 0, jnp.zeros_like(result), result)
def poisson(key: jnp.ndarray,
            lam: RealArray,
            shape: Sequence[int] = (),
            dtype: DTypeLikeInt = dtypes.int_) -> jnp.ndarray:
  """Sample Poisson random values with the given shape and integer dtype.

  Args:
    key: a PRNGKey used as the random key.
    lam: rate parameter (mean of the distribution), must be >= 0.
    shape: optional tuple of nonnegative integers giving the result shape.
      Default ``()``.
    dtype: optional integer dtype of the result (int64 when jax_enable_x64 is
      enabled, otherwise int32).

  Returns:
    A random array with the requested shape and dtype.
  """
  result_dtype = dtypes.canonicalize_dtype(dtype)
  result_shape = core.canonicalize_shape(shape)
  rate = lam if np.shape(lam) == result_shape else jnp.broadcast_to(lam, result_shape)
  # The samplers operate on float32 rates.
  rate = lax.convert_element_type(rate, np.float32)
  return _poisson(key, rate, result_shape, result_dtype)
def gumbel(key: jnp.ndarray,
           shape: Sequence[int] = (),
           dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard Gumbel random values.

  Args:
    key: a PRNGKey used as the random key.
    shape: optional tuple of nonnegative integers giving the result shape.
      Default ``()``.
    dtype: optional float dtype of the result (float64 when jax_enable_x64 is
      enabled, otherwise float32).

  Returns:
    A random array with the requested shape and dtype.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `gumbel` must be a float "
                     f"dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _gumbel(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _gumbel(key, shape, dtype):
  # Inverse-CDF sampling: X = -log(-log(U)), with U bounded away from 0 by
  # the dtype's smallest positive value so the inner log stays finite.
  _check_shape("gumbel", shape)
  return -jnp.log(-jnp.log(
      uniform(key, shape, dtype, minval=jnp.finfo(dtype).tiny, maxval=1.)))
def categorical(key: jnp.ndarray,
                logits: RealArray,
                axis: int = -1,
                shape: Optional[Sequence[int]] = None) -> jnp.ndarray:
  """Sample random values from categorical distributions.

  Args:
    key: a PRNGKey used as the random key.
    logits: Unnormalized log probabilities of the categorical distribution(s) to sample from,
      so that `softmax(logits, axis)` gives the corresponding probabilities.
    axis: Axis along which logits belong to the same categorical distribution.
    shape: Optional, a tuple of nonnegative integers representing the result shape.
      Must be broadcast-compatible with ``np.delete(logits.shape, axis)``.
      The default (None) produces a result shape equal to ``np.delete(logits.shape, axis)``.

  Returns:
    A random array with int dtype and shape given by ``shape`` if ``shape``
    is not None, or else ``np.delete(logits.shape, axis)``.
  """
  if axis >= 0:
    # Normalize to a negative axis so it remains valid after sample
    # dimensions are prepended below.
    axis -= len(logits.shape)
  batch_shape = tuple(np.delete(logits.shape, axis))
  if shape is None:
    shape = batch_shape
  else:
    shape = tuple(shape)
    _check_shape("categorical", shape, batch_shape)
  # Leading dims of `shape` beyond the batch shape are i.i.d. sample dims:
  # draw one Gumbel per category and take the argmax (Gumbel-max trick).
  sample_shape = shape[:len(shape)-len(batch_shape)]
  return jnp.argmax(
      gumbel(key, sample_shape + logits.shape, logits.dtype) +
      lax.expand_dims(logits, tuple(range(len(sample_shape)))),
      axis=axis)
def laplace(key: jnp.ndarray,
            shape: Sequence[int] = (),
            dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard Laplace random values.

  Args:
    key: a PRNGKey used as the random key.
    shape: optional tuple of nonnegative integers giving the result shape.
      Default ``()``.
    dtype: optional float dtype of the result (float64 when jax_enable_x64 is
      enabled, otherwise float32).

  Returns:
    A random array with the requested shape and dtype.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `laplace` must be a float "
                     f"dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _laplace(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _laplace(key, shape, dtype):
  # Inverse-CDF sampling on a symmetric uniform:
  # u ~ Uniform(-1, 1), X = sign(u) * log1p(-|u|).
  _check_shape("laplace", shape)
  u = uniform(
      key, shape, dtype, minval=-1. + jnp.finfo(dtype).epsneg, maxval=1.)
  return lax.mul(lax.sign(u), lax.log1p(lax.neg(lax.abs(u))))
def logistic(key: jnp.ndarray,
             shape: Sequence[int] = (),
             dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard logistic random values.

  Args:
    key: a PRNGKey used as the random key.
    shape: optional tuple of nonnegative integers giving the result shape.
      Default ``()``.
    dtype: optional float dtype of the result (float64 when jax_enable_x64 is
      enabled, otherwise float32).

  Returns:
    A random array with the requested shape and dtype.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `logistic` must be a float "
                     f"dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _logistic(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _logistic(key, shape, dtype):
  # Inverse-CDF sampling: X = logit(U) = log(U / (1 - U)), with U bounded
  # away from 0 by the dtype's eps.
  _check_shape("logistic", shape)
  x = uniform(key, shape, dtype, minval=jnp.finfo(dtype).eps, maxval=1.)
  return lax.log(lax.div(x, lax.sub(lax._const(x, 1), x)))
def pareto(key: jnp.ndarray,
           b: RealArray,
           shape: Optional[Sequence[int]] = None,
           dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample Pareto random values with the given shape and float dtype.

  Args:
    key: a PRNGKey used as the random key.
    b: float or array of floats, the shape parameter of the distribution;
      broadcast-compatible with ``shape``.
    shape: optional result shape (tuple of nonnegative integers), which must
      be broadcast-compatible with ``b``. When None (the default) the result
      has shape ``b.shape``.
    dtype: optional float dtype of the result (float64 when jax_enable_x64 is
      enabled, otherwise float32).

  Returns:
    A random array with the requested dtype, shaped as ``shape`` or, when
    ``shape`` is None, as ``b.shape``.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `pareto` must be a float "
                     f"dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = None if shape is None else core.canonicalize_shape(shape)
  return _pareto(key, b, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(2, 3))
def _pareto(key, b, shape, dtype):
  # JIT-compiled core of `pareto`: X = exp(E / b) with E ~ Exponential(1),
  # i.e. a Pareto(b) variate with minimum value 1.
  if shape is None:
    shape = np.shape(b)
  else:
    # Fix: validate that `shape` is broadcast-compatible with `b`, matching
    # every other sampler in this file; previously `np.shape(b)` was not
    # passed, so incompatible shapes went undetected here.
    _check_shape("pareto", shape, np.shape(b))
  b = lax.convert_element_type(b, dtype)
  e = exponential(key, shape, dtype)
  return lax.exp(e / b)
def t(key: jnp.ndarray,
      df: RealArray,
      shape: Optional[Sequence[int]] = None,
      dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample Student's t random values with given shape and float dtype.

  Args:
    key: a PRNGKey used as the random key.
    df: a float or array of floats broadcast-compatible with ``shape``
      representing the degrees-of-freedom parameter of the distribution.
    shape: optional, a tuple of nonnegative integers specifying the result
      shape. Must be broadcast-compatible with ``df``. The default (None)
      produces a result shape equal to ``df.shape``.
    dtype: optional, a float dtype for the returned values (default float64 if
      jax_enable_x64 is true, otherwise float32).

  Returns:
    A random array with the specified dtype and with shape given by ``shape`` if
    ``shape`` is not None, or else by ``df.shape``.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `t` must be a float "
                     f"dtype, got {dtype}")
  dtype = dtypes.canonicalize_dtype(dtype)
  # Fix: default `shape` is now None (was ()), matching both the docstring
  # ("The default (None) ...") and the sibling samplers (`gamma`, `pareto`).
  # `_t` already handled None; with the old default, non-scalar `df` failed
  # the shape check. Scalar `df` behaves exactly as before.
  if shape is not None:
    shape = core.canonicalize_shape(shape)
  return _t(key, df, shape, dtype)
@partial(jit, static_argnums=(2, 3))
def _t(key, df, shape, dtype):
  # JIT-compiled core of `t`: X = N / sqrt(G / (df/2)) with N standard normal
  # and G ~ Gamma(df/2), written here as n * sqrt(half_df / g).
  if shape is None:
    shape = np.shape(df)
  else:
    _check_shape("t", shape, np.shape(df))
  df = lax.convert_element_type(df, dtype)
  key_n, key_g = split(key)
  n = normal(key_n, shape, dtype)
  two = _constant_like(n, 2)
  half_df = lax.div(df, two)
  # Fix: draw the gamma component with the second subkey. The original code
  # reused `key_n` (leaving `key_g` unused), which made the normal and
  # chi-square components statistically dependent.
  g = gamma(key_g, half_df, shape, dtype)
  return n * jnp.sqrt(half_df / g)
def rademacher(key: jnp.ndarray,
               shape: Sequence[int],
               dtype: DTypeLikeInt = dtypes.int_) -> jnp.ndarray:
  """Sample from the Rademacher distribution (+1 or -1, each with prob. 1/2).

  Args:
    key: a PRNGKey key.
    shape: the shape of the returned samples.
    dtype: the dtype of the returned samples.

  Returns:
    A jnp.array of shape `shape` whose entries are -1 or +1, each with
    probability 50%.
  """
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _rademacher(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _rademacher(key, shape, dtype):
  # Map fair Bernoulli draws {0, 1} onto {-1, +1} and cast to `dtype`.
  coin_flips = bernoulli(key=key, p=0.5, shape=shape)
  return (coin_flips * 2 - 1).astype(dtype)
def maxwell(key: jnp.ndarray,
            shape: Sequence[int] = (),
            dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample from a one-sided Maxwell distribution.

  The scipy counterpart is `scipy.stats.maxwell`. Internally a sample is the
  Euclidean norm of three i.i.d. standard normals.

  Args:
    key: a PRNGKey key.
    shape: the shape of the returned samples.
    dtype: the float dtype used for the samples.

  Returns:
    A jnp.array of samples of shape `shape`.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `maxwell` must be a float "
                     f"dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _maxwell(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _maxwell(key, shape, dtype):
  # One-sided Maxwell via sqrt(X^2 + Y^2 + Z^2) with X, Y, Z ~ N(0, 1):
  # append an axis of three normals and take its norm.
  shape = shape + (3,)
  norm_rvs = normal(key=key, shape=shape, dtype=dtype)
  return jnp.linalg.norm(norm_rvs, axis=-1)
def double_sided_maxwell(key: jnp.ndarray,
                         loc: RealArray,
                         scale: RealArray,
                         shape: Sequence[int] = (),
                         dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample from a double-sided Maxwell distribution.

  A sample is ``loc + scale * sgn(U - 0.5) * M`` where U ~ Uniform and M is a
  one-sided Maxwell variate.

  Args:
    key: a PRNGKey key.
    loc: the location parameter of the distribution.
    scale: the scale parameter of the distribution.
    shape: shape added to the broadcast shape of `loc` and `scale`.
    dtype: the float dtype used for the samples.

  Returns:
    A jnp.array of samples.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `double_sided_maxwell` must be a float"
                     f" dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _double_sided_maxwell(key, loc, scale, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(3, 4))
def _double_sided_maxwell(key, loc, scale, shape, dtype):
  # loc + scale * sign * magnitude: one-sided Maxwell magnitudes combined
  # with independent Rademacher signs.
  params_shapes = lax.broadcast_shapes(np.shape(loc), np.shape(scale))
  if not shape:
    shape = params_shapes
  # NOTE(review): when `shape` is empty it is first set to `params_shapes`
  # and then `params_shapes` is appended again on the next line, duplicating
  # the parameter dimensions for non-scalar loc/scale — confirm whether this
  # doubling is intended.
  shape = shape + params_shapes
  maxwell_key, rademacher_key = split(key)
  maxwell_rvs = maxwell(maxwell_key, shape=shape, dtype=dtype)
  # Generate random signs for the symmetric variates.
  random_sign = rademacher(rademacher_key, shape=shape, dtype=dtype)
  assert random_sign.shape == maxwell_rvs.shape
  return random_sign * maxwell_rvs * scale + loc
def weibull_min(key: jnp.ndarray,
                scale: RealArray,
                concentration: RealArray,
                shape: Sequence[int] = (),
                dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample from a Weibull distribution.

  The scipy counterpart is `scipy.stats.weibull_min`.

  Args:
    key: a PRNGKey key.
    scale: the scale parameter of the distribution.
    concentration: the concentration (shape) parameter of the distribution.
    shape: shape added to the broadcastable shape of the parameters.
    dtype: the float dtype used for the samples.

  Returns:
    A jnp.array of samples.
  """
  if not dtypes.issubdtype(dtype, np.floating):
    raise ValueError(f"dtype argument to `weibull_min` must be a float "
                     f"dtype, got {dtype}")
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _weibull_min(key, scale, concentration, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(3, 4))
def _weibull_min(key, scale, concentration, shape, dtype):
  # Inverse-transform sampling:
  # U ~ Uniform(0, 1), X = scale * (-log(1 - U)) ** (1 / concentration).
  random_uniform = uniform(
      key=key, shape=shape, minval=0, maxval=1, dtype=dtype)
  # Inverse weibull CDF.
  return jnp.power(-jnp.log1p(-random_uniform), 1.0/concentration) * scale
| 37.895363 | 121 | 0.675286 |
from functools import partial
from typing import Any, Optional, Sequence, Union
import warnings
import numpy as np
from jax import lax
from jax import core
from jax import numpy as jnp
from jax._src import dtypes
from jax.core import NamedShape
from jax._src.api import jit, vmap
from jax._src.numpy.lax_numpy import _constant_like, _convert_and_clip_integer, _check_arraylike
from jax.lib import xla_bridge
from jax.lib import xla_client
from jax.lib import cuda_prng
from jax.numpy.linalg import cholesky, svd, eigh
from jax.interpreters import ad
from jax.interpreters import batching
from jax.interpreters import xla
from jax._src.util import prod
# Loose type aliases for annotations in this module; arrays and dtype-likes
# are not constrained beyond `Any` here.
Array = Any
RealArray = Array
IntegerArray = Array
DTypeLikeInt = Any
DTypeLikeFloat = Any
# Map from bit width to the corresponding unsigned integer dtype.
_UINT_DTYPES = {8: jnp.uint8, 16: jnp.uint16, 32: jnp.uint32, 64: jnp.uint64}
def PRNGKey(seed: int) -> jnp.ndarray:
  """Create a PRNG key from a scalar integer seed.

  The key is a uint32 array of shape (2,) holding the high and low 32-bit
  halves of the 64-bit seed.

  Raises:
    TypeError: if `seed` is not a scalar integer.
  """
  if isinstance(seed, int):
    # Widen Python ints through int64 so the high word survives.
    seed_arr = jnp.asarray(np.int64(seed))
  else:
    seed_arr = jnp.asarray(seed)
  if seed_arr.shape:
    raise TypeError(f"PRNGKey seed must be a scalar; got {seed!r}.")
  if not np.issubdtype(seed_arr.dtype, np.integer):
    raise TypeError(f"PRNGKey seed must be an integer; got {seed!r}")
  # Pack the seed into two uint32 words: high half, then low half.
  convert = lambda k: lax.reshape(lax.convert_element_type(k, np.uint32), [1])
  k1 = convert(lax.shift_right_logical(seed_arr, lax._const(seed_arr, 32)))
  k2 = convert(jnp.bitwise_and(seed_arr, np.uint32(0xFFFFFFFF)))
  return lax.concatenate([k1, k2], 0)
def _is_prng_key(key: jnp.ndarray) -> bool:
try:
return key.shape == (2,) and key.dtype == np.uint32
except AttributeError:
return False
t(dtype):
if not jnp.issubdtype(dtype, np.integer):
raise TypeError("_rotate_left only accepts integer dtypes.")
nbits = np.array(jnp.iinfo(dtype).bits, dtype)
def _rotate_left(x, d):
if lax.dtype(d) != dtype:
d = lax.convert_element_type(d, dtype)
if lax.dtype(x) != dtype:
x = lax.convert_element_type(x, dtype)
return lax.shift_left(x, d) | lax.shift_right_logical(x, nbits - d)
return _rotate_left
def _bit_stats(bits):
return np.array([list(map(int, np.binary_repr(x, 64))) for x in bits]).mean(0)
def _threefry2x32_abstract_eval(*args):
  """Abstract evaluation rule for the threefry2x32 primitive.

  All operands must be uint32; the primitive yields two uint32 outputs whose
  shape is the broadcast of the operand shapes.
  """
  # NOTE(review): the original `def` line and the opening `if any(` were
  # garbled in the source; restored from the body and the registration site.
  if any(a.dtype != jnp.uint32 for a in args):
    raise TypeError("Arguments to threefry2x32 must have uint32 type, got {}"
                    .format(args))
  if all(isinstance(arg, core.ShapedArray) for arg in args):
    shape = lax._broadcasting_shape_rule(*args)
    named_shape = core.join_named_shapes(*(a.named_shape for a in args))
    aval = core.ShapedArray(shape, jnp.dtype(jnp.uint32), named_shape=named_shape)
  else:
    aval = core.UnshapedArray(jnp.dtype(jnp.uint32))
  return (aval,) * 2
# Rotate-left specialized to uint32, used by the Threefry rounds below.
rotate_left = _make_rotate_left(np.uint32)
def apply_round(v, rot):
  """One Threefry mixing round on the 2-word state `v`; returns a new list."""
  total = v[0] + v[1]
  mixed = total ^ rotate_left(v[1], rot)
  return [total, mixed]
def rotate_list(xs):
  """Cyclically rotate the list `xs` left by one element."""
  head, tail = xs[:1], xs[1:]
  return tail + head
def rolled_loop_step(i, state):
  # One step of the rolled Threefry loop: apply the four rotation rounds for
  # this step, inject the next pair of key words plus the step counter
  # (i + 1), then rotate the key and rotation schedules for the next step.
  x, ks, rotations = state
  for r in rotations[0]:
    x = apply_round(x, r)
  new_x = [x[0] + ks[0], x[1] + ks[1] + jnp.asarray(i + 1, dtype=np.uint32)]
  return new_x, rotate_list(ks), rotate_list(rotations)
def _threefry2x32_lowering(key1, key2, x1, x2, use_rolled_loops=True):
  """Threefry-2x32 block function expressed with elementwise lax ops.

  Hashes the counter words (x1, x2) under the key (key1, key2). With
  `use_rolled_loops` the 5 x 4 rounds run inside a fori_loop; otherwise they
  are fully unrolled.
  """
  x = [x1, x2]
  rotations = [np.array([13, 15, 26, 6], dtype=np.uint32),
               np.array([17, 29, 16, 24], dtype=np.uint32)]
  # Third key word: xor of the two key words with the Threefry parity constant.
  ks = [key1, key2, key1 ^ key2 ^ np.uint32(0x1BD11BDA)]
  # Initial key injection.
  x[0] = x[0] + ks[0]
  x[1] = x[1] + ks[1]
  if use_rolled_loops:
    x, _, _ = lax.fori_loop(0, 5, rolled_loop_step, (x, rotate_list(ks), rotations))
  else:
    # Unrolled: 5 groups of 4 rounds, each followed by a key injection that
    # also adds the group counter.
    for r in rotations[0]:
      x = apply_round(x, r)
    x[0] = x[0] + ks[1]
    x[1] = x[1] + ks[2] + np.uint32(1)
    for r in rotations[1]:
      x = apply_round(x, r)
    x[0] = x[0] + ks[2]
    x[1] = x[1] + ks[0] + np.uint32(2)
    for r in rotations[0]:
      x = apply_round(x, r)
    x[0] = x[0] + ks[0]
    x[1] = x[1] + ks[1] + np.uint32(3)
    for r in rotations[1]:
      x = apply_round(x, r)
    x[0] = x[0] + ks[1]
    x[1] = x[1] + ks[2] + np.uint32(4)
    for r in rotations[0]:
      x = apply_round(x, r)
    x[0] = x[0] + ks[2]
    x[1] = x[1] + ks[0] + np.uint32(5)
  return tuple(x)
def _threefry2x32_gpu_translation_rule(c, k1, k2, x1, x2):
  # GPU lowering of threefry2x32: broadcast all four operands to a common
  # shape and invoke the cuda_prng kernel.
  shape = lax.broadcast_shapes(
      c.get_shape(k1).dimensions(), c.get_shape(k2).dimensions(),
      c.get_shape(x1).dimensions(), c.get_shape(x2).dimensions())
  rank = len(shape)
  if 0 in shape:
    # Zero-sized output: return empty results without calling the kernel.
    zeros = xla_client.ops.Broadcast(
        xla_bridge.constant(c, np.array(0, np.uint32)), shape)
    return xla_client.ops.Tuple(c, [zeros, zeros])
  def _broadcast(x):
    # Right-align each operand's dims against the broadcast shape.
    ndims = c.get_shape(x).rank()
    return xla_client.ops.BroadcastInDim(x, shape,
                                         tuple(range(rank - ndims, rank)))
  return cuda_prng.threefry2x32(
      c, (_broadcast(k1), _broadcast(k2)), (_broadcast(x1), _broadcast(x2)))
# Register the threefry2x32 primitive: two uint32 results, elementwise
# (broadcasting) batching, a generic unrolled lowering, a rolled-loop variant
# for CPU, and the cuda_prng kernel on GPU when the plugin is present.
threefry2x32_p = core.Primitive("threefry2x32")
threefry2x32_p.multiple_results = True
threefry2x32_p.def_impl(partial(xla.apply_primitive, threefry2x32_p))
threefry2x32_p.def_abstract_eval(_threefry2x32_abstract_eval)
batching.defbroadcasting(threefry2x32_p)
xla.translations_with_avals[threefry2x32_p] = xla.lower_fun(
    partial(_threefry2x32_lowering, use_rolled_loops=False),
    multiple_results=True, with_avals=True)
xla.backend_specific_translations['cpu'][threefry2x32_p] = xla.lower_fun(
    partial(_threefry2x32_lowering, use_rolled_loops=True),
    multiple_results=True)
if cuda_prng:
  xla.backend_specific_translations['gpu'][threefry2x32_p] = \
      _threefry2x32_gpu_translation_rule
@jit
def threefry_2x32(keypair, count):
  """Hash `count` (any shape of uint32 words) under the uint32 key pair,
  producing a pseudo-random uint32 array of the same shape as `count`."""
  key1, key2 = keypair
  if not lax.dtype(key1) == lax.dtype(key2) == lax.dtype(count) == np.uint32:
    msg = "threefry_2x32 requires uint32 arguments, got {}"
    raise TypeError(msg.format([lax.dtype(x) for x in [key1, key2, count]]))
  odd_size = count.size % 2
  # The block function consumes two words at a time: pad odd-sized input
  # with a zero word and drop it again after hashing.
  if odd_size:
    x = list(jnp.split(jnp.concatenate([count.ravel(), np.uint32([0])]), 2))
  else:
    x = list(jnp.split(count.ravel(), 2))
  x = threefry2x32_p.bind(key1, key2, x[0], x[1])
  out = jnp.concatenate(x)
  assert out.dtype == np.uint32
  return lax.reshape(out[:-1] if odd_size else out, count.shape)
def split(key: jnp.ndarray, num: int = 2) -> jnp.ndarray:
  """Split a PRNG key into `num` independent derived keys.

  Args:
    key: a PRNGKey (array of shape (2,), dtype uint32).
    num: number of keys to derive (default 2).

  Returns:
    An array of shape (num, 2) holding the derived keys.
  """
  # Coerce to a Python int so it can serve as a static jit argument.
  num_keys = int(num)
  return _split(key, num_keys)
@partial(jit, static_argnums=(1,))
def _split(key, num) -> jnp.ndarray:
  """Jitted helper for `split`: hash 2*num counter words into num keys."""
  # Each derived key is two uint32 words, so hash a counter of length 2*num
  # and regroup the output pairwise.
  hashed = threefry_2x32(key, lax.iota(np.uint32, 2 * num))
  return lax.reshape(hashed, (num, 2))
def fold_in(key: jnp.ndarray, data: int) -> jnp.ndarray:
  """Fold integer `data` into a PRNG key, returning a new derived key."""
  folded = jnp.uint32(data)
  return _fold_in(key, folded)
@jit
def _fold_in(key, data):
  """Jitted helper for `fold_in`: hash `data` (lifted to a key) under `key`."""
  data_key = PRNGKey(data)
  return threefry_2x32(key, data_key)
@partial(jit, static_argnums=(1, 2))
def _random_bits(key, bit_width, shape):
  """Sample an array of uniform random bits with the given width and shape."""
  if not _is_prng_key(key):
    raise TypeError("_random_bits got invalid prng key.")
  if bit_width not in (8, 16, 32, 64):
    raise TypeError("requires 8-, 16-, 32- or 64-bit field width.")
  shape = core.as_named_shape(shape)
  for name, size in shape.named_items:
    # Named (mapped) axes: fold the axis index into the key so every
    # participant along the axis draws distinct bits.
    real_size = lax.psum(1, name)
    if real_size != size:
      raise ValueError(f"The shape of axis {name} was specified as {size}, "
                       f"but it really is {real_size}")
    axis_index = lax.axis_index(name)
    key = fold_in(key, axis_index)
  size = prod(shape.positional)
  # Number of uint32 words needed to cover `size` values of `bit_width` bits.
  max_count = int(np.ceil(bit_width * size / 32))
  nblocks, rem = divmod(max_count, jnp.iinfo(np.uint32).max)
  if not nblocks:
    bits = threefry_2x32(key, lax.iota(np.uint32, rem))
  else:
    # The uint32 counter would overflow: generate full-size blocks under
    # separate subkeys plus one remainder block.
    keys = split(key, nblocks + 1)
    subkeys, last_key = keys[:-1], keys[-1]
    blocks = vmap(threefry_2x32, in_axes=(0, None))(subkeys, lax.iota(np.uint32, jnp.iinfo(np.uint32).max))
    last = threefry_2x32(last_key, lax.iota(np.uint32, rem))
    bits = lax.concatenate([blocks.ravel(), last], 0)
  dtype = _UINT_DTYPES[bit_width]
  if bit_width == 64:
    # Stitch pairs of 32-bit words into 64-bit values.
    bits = [lax.convert_element_type(x, dtype) for x in jnp.split(bits, 2)]
    bits = lax.shift_left(bits[0], dtype(32)) | bits[1]
  elif bit_width in [8, 16]:
    # Slice each 32-bit word into 32 // bit_width narrower fields by
    # shifting and masking, then flatten field-major.
    bits = lax.bitwise_and(
        np.uint32(np.iinfo(dtype).max),
        lax.shift_right_logical(
            lax.broadcast(bits, (1,)),
            lax.mul(
                np.uint32(bit_width),
                lax.broadcasted_iota(np.uint32, (32 // bit_width, 1), 0)
            )
        )
    )
    bits = lax.reshape(bits, (np.uint32(max_count * 32 // bit_width),), (1, 0))
  # Truncate any padding introduced by whole-word generation.
  bits = lax.convert_element_type(bits, dtype)[:size]
  return lax.reshape(bits, shape)
ion[Sequence[int], NamedShape], *param_shapes):
shape = core.as_named_shape(shape)
if param_shapes:
shape_ = lax.broadcast_shapes(shape.positional, *param_shapes)
if shape.positional != shape_:
msg = ("{} parameter shapes must be broadcast-compatible with shape "
"argument, and the result of broadcasting the shapes must equal "
"the shape argument, but got result {} for shape argument {}.")
raise ValueError(msg.format(name, shape_, shape))
def uniform(key: jnp.ndarray,
            shape: Union[Sequence[int], NamedShape] = (),
            dtype: DTypeLikeFloat = dtypes.float_,
            minval: RealArray = 0.,
            maxval: RealArray = 1.) -> jnp.ndarray:
  """Sample uniform random values in [minval, maxval) with given shape/dtype."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `uniform` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  named_shape = core.as_named_shape(shape)
  return _uniform(key, named_shape, canonical_dtype, minval, maxval)
@partial(jit, static_argnums=(1, 2))
def _uniform(key, shape, dtype, minval, maxval) -> jnp.ndarray:
  """Jitted implementation of `uniform`.

  Draws raw bits, installs them as the mantissa of 1.0 (yielding a float in
  [1, 2)), subtracts 1 to get [0, 1), then rescales to [minval, maxval).
  """
  _check_shape("uniform", shape)
  if not jnp.issubdtype(dtype, np.floating):
    raise TypeError("uniform only accepts floating point dtypes.")
  minval = lax.convert_element_type(minval, dtype)
  maxval = lax.convert_element_type(maxval, dtype)
  minval = lax.broadcast_to_rank(minval, shape.positional_rank)
  maxval = lax.broadcast_to_rank(maxval, shape.positional_rank)
  finfo = jnp.finfo(dtype)
  nbits, nmant = finfo.bits, finfo.nmant
  if nbits not in (16, 32, 64):
    # Bug fix: the check accepts 16-bit floats, but the message previously
    # claimed only "32- or 64-bit dtypes"; make the message agree.
    raise TypeError("uniform only accepts 16-, 32-, or 64-bit dtypes.")
  bits = _random_bits(key, nbits, shape)
  # Keep the top `nmant` random bits as the mantissa of a float in [1, 2).
  float_bits = lax.bitwise_or(
      lax.shift_right_logical(bits, np.array(nbits - nmant, lax.dtype(bits))),
      np.array(1., dtype).view(_UINT_DTYPES[nbits]))
  floats = lax.bitcast_convert_type(float_bits, dtype) - np.array(1., dtype)
  # `max` guards against rounding pushing the result just below minval.
  return lax.max(
      minval,
      lax.reshape(floats * (maxval - minval) + minval, shape.positional))
def randint(key: jnp.ndarray,
            shape: Sequence[int],
            minval: IntegerArray,
            maxval: IntegerArray,
            dtype: DTypeLikeInt = dtypes.int_):
  """Sample uniform random integers in [minval, maxval) with given shape."""
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _randint(key, canonical_shape, minval, maxval, canonical_dtype)
@partial(jit, static_argnums=(1, 4))
def _randint(key, shape, minval, maxval, dtype):
  """Jitted implementation of `randint`.

  Combines two independent bit-draws modulo the span so results stay
  well-distributed even when the span does not divide 2**nbits.
  """
  _check_shape("randint", shape, np.shape(minval), np.shape(maxval))
  if not jnp.issubdtype(dtype, np.integer):
    raise TypeError(f"randint only accepts integer dtypes, got {dtype}")
  _check_arraylike("randint", minval, maxval)
  minval = jnp.asarray(minval)
  maxval = jnp.asarray(maxval)
  if not jnp.issubdtype(minval.dtype, np.integer):
    minval = minval.astype(int)
  if not jnp.issubdtype(maxval.dtype, np.integer):
    maxval = maxval.astype(int)
  # Record whether maxval exceeds the target dtype's range *before* clipping,
  # so the span can be widened by one below to keep dtype-max reachable.
  maxval_out_of_range = lax.gt(
      maxval, _convert_and_clip_integer(jnp.array(jnp.iinfo(dtype).max, dtype), maxval.dtype))
  minval = _convert_and_clip_integer(minval, dtype)
  maxval = _convert_and_clip_integer(maxval, dtype)
  minval = lax.broadcast_to_rank(minval, len(shape))
  maxval = lax.broadcast_to_rank(maxval, len(shape))
  nbits = jnp.iinfo(dtype).bits
  if nbits not in (8, 16, 32, 64):
    raise TypeError(f"randint only accepts 8-, 16-, 32-, or 64-bit dtypes, got {dtype}")
  # Two independent draws of raw bits, combined below.
  k1, k2 = split(key)
  rbits = lambda key: _random_bits(key, nbits, shape)
  higher_bits, lower_bits = rbits(k1), rbits(k2)
  unsigned_dtype = _UINT_DTYPES[nbits]
  span = lax.convert_element_type(maxval - minval, unsigned_dtype)
  # Degenerate span (maxval <= minval): use span 1 so the result is minval.
  span = lax.select(maxval <= minval, lax.full_like(span, 1), span)
  # If maxval was clipped down to dtype-max, widen the span by one so the
  # maximum representable value stays reachable.
  span = lax.select(
      maxval_out_of_range & (maxval > minval),
      lax.add(span, lax._const(span, 1)),
      span)
  # multiplier = 2**nbits mod span, computed via two squarings of
  # 2**(nbits/2) mod span to avoid overflow in the unsigned dtype.
  multiplier = lax.rem(lax._const(span, 2 ** (nbits // 2)), span)
  multiplier = lax.rem(lax.mul(multiplier, multiplier), span)
  # (higher * 2**nbits + lower) mod span, without needing wider integers.
  random_offset = lax.add(lax.mul(lax.rem(higher_bits, span), multiplier),
                          lax.rem(lower_bits, span))
  random_offset = lax.rem(random_offset, span)
  return lax.add(minval, lax.convert_element_type(random_offset, dtype))
def shuffle(key: jnp.ndarray, x: Array, axis: int = 0) -> jnp.ndarray:
  """Deprecated: shuffle `x` along `axis`; use `permutation` instead."""
  deprecation_msg = ("jax.random.shuffle is deprecated and will be removed in a future release. "
                     "Use jax.random.permutation")
  warnings.warn(deprecation_msg, FutureWarning)
  return _shuffle(key, x, axis)
def permutation(key: jnp.ndarray, x: Array) -> jnp.ndarray:
  """Permute a sequence, or return a permuted range.

  A scalar integer `x` is treated as `jnp.arange(x)`; a 1-D array is
  shuffled elementwise; a higher-rank array is shuffled along axis 0.
  """
  ndim = np.ndim(x)
  if ndim == 0:
    if not np.issubdtype(lax.dtype(x), np.integer):
      raise TypeError("x must be an integer or at least 1-dimensional")
    return _shuffle(key, jnp.arange(int(x)), 0)
  if ndim == 1:
    return _shuffle(key, x, 0)
  assert isinstance(x, jnp.ndarray)
  # Shuffle the row indices, then gather: permutes along the leading axis.
  row_order = _shuffle(key, jnp.arange(x.shape[0]), 0)
  return x[row_order]
@partial(jit, static_argnums=(2,))
def _shuffle(key, x, axis) -> jnp.ndarray:
  # Sort-based shuffle: attach random 32-bit sort keys to the data and sort
  # by them. A single round is not perfectly uniform because distinct
  # elements can draw equal sort keys; repeating the round makes any such
  # bias negligible. `num_rounds` grows (slowly) with x.size for that
  # reason — `exponent` tunes the trade-off (presumably bounding the
  # collision-induced bias; see the JAX source history for the derivation).
  exponent = 3
  uint32max = jnp.iinfo(np.uint32).max
  num_rounds = int(np.ceil(exponent * np.log(max(1, x.size)) / np.log(uint32max)))
  for _ in range(num_rounds):
    key, subkey = split(key)
    sort_keys = _random_bits(subkey, 32, x.shape)
    _, x = lax.sort_key_val(sort_keys, x, axis)
  return x
def choice(key: jnp.ndarray,
           a: IntegerArray,
           shape: Sequence[int] = (),
           replace: bool = True,
           p=None) -> jnp.ndarray:
  """Generate a random sample from `a`.

  Args:
    key: a PRNGKey.
    a: a 1-D array of candidates, or an int n meaning arange(n).
    shape: output shape; prod(shape) samples are drawn.
    replace: whether to sample with replacement.
    p: optional 1-D probability weights over the entries of `a`.

  Returns:
    Sampled values (or indices, when `a` is an int) reshaped to `shape`.
  """
  if not isinstance(shape, Sequence):
    raise TypeError("shape argument of jax.random.choice must be a sequence, "
                    f"got {shape}")
  if np.ndim(a) not in [0, 1]:
    raise ValueError("a must be an integer or 1-dimensional")
  _check_arraylike("choice", a)
  if np.ndim(a) == 0:
    # Scalar `a` must be a concrete Python int (it determines shapes below).
    a = core.concrete_or_error(int, a, "The error occurred in jax.random.choice()")
  else:
    a = jnp.asarray(a)
  n_inputs = int(a) if np.ndim(a) == 0 else len(a)  # type: ignore[arg-type]
  n_draws = prod(shape)
  if n_draws == 0:
    return jnp.zeros(shape, dtype=lax.dtype(a))
  if n_inputs <= 0:
    raise ValueError("a must be greater than 0 unless no samples are taken")
  if not replace and n_draws > n_inputs:
    raise ValueError("Cannot take a larger sample than population when 'replace=False'")
  if p is None:
    if replace:
      ind = randint(key, shape, 0, n_inputs)
      result = ind if np.ndim(a) == 0 else a[ind]  # type: ignore[index]
    else:
      # Uniform without replacement: a prefix of a random permutation.
      result = permutation(key, a)[:n_draws]
  else:
    if p.shape != (n_inputs,):
      raise ValueError("p must be None or match the shape of a")
    if replace:
      # Inverse-CDF sampling against the cumulative weights.
      p_cuml = jnp.cumsum(p)
      r = p_cuml[-1] * (1 - uniform(key, shape))
      ind = jnp.searchsorted(p_cuml, r)
      result = ind if np.ndim(a) == 0 else a[ind]  # type: ignore[index]
    else:
      # Gumbel top-k trick: https://timvieira.github.io/blog/post/2019/09/16/algorithms-for-sampling-without-replacement/
      g = -gumbel(key, (n_inputs,)) - jnp.log(p)
      ind = jnp.argsort(g)[:n_draws]
      result = ind if np.ndim(a) == 0 else a[ind]  # type: ignore[index]
  return result.reshape(shape)
def normal(key: jnp.ndarray,
           shape: Union[Sequence[int], NamedShape] = (),
           dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard normal values with given shape and float/complex dtype."""
  if not dtypes.issubdtype(dtype, np.inexact):
    msg = f"dtype argument to `normal` must be a float or complex dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  named_shape = core.as_named_shape(shape)
  return _normal(key, named_shape, canonical_dtype)  # type: ignore
@partial(jit, static_argnums=(1, 2))
def _normal(key, shape, dtype) -> jnp.ndarray:
  """Dispatch: real dtypes sample directly; complex combine two real draws."""
  if not dtypes.issubdtype(dtype, np.complexfloating):
    return _normal_real(key, shape, dtype)  # type: ignore
  # Complex case: independent real and imaginary parts, scaled by 1/sqrt(2)
  # so the complex variate has unit variance.
  real_dtype = np.array(0, dtype).real.dtype
  key_re, key_im = split(key)
  re_part = _normal_real(key_re, shape, real_dtype)
  im_part = _normal_real(key_im, shape, real_dtype)
  return (re_part + 1j * im_part) / np.array(np.sqrt(2), dtype)
@partial(jit, static_argnums=(1, 2))
def _normal_real(key, shape, dtype) -> jnp.ndarray:
  """Sample real standard normals via the inverse error function.

  Draws u ~ Uniform(lo, 1) with lo just above -1 (keeping erf_inv finite)
  and returns sqrt(2) * erf_inv(u), which is standard normal.
  """
  _check_shape("normal", shape)
  lo = np.nextafter(np.array(-1., dtype), np.array(0., dtype), dtype=dtype)
  hi = np.array(1., dtype)
  u = uniform(key, shape, dtype, lo, hi)  # type: ignore[arg-type]
  return np.array(np.sqrt(2), dtype) * lax.erf_inv(u)
def multivariate_normal(key: jnp.ndarray,
                        mean: RealArray,
                        cov: RealArray,
                        shape: Optional[Sequence[int]] = None,
                        dtype: DTypeLikeFloat = dtypes.float_,
                        method: str = 'cholesky') -> jnp.ndarray:
  """Sample multivariate normal values with given mean and covariance.

  `method` selects the covariance factorization: 'cholesky' (fastest),
  'svd', or 'eigh'.
  """
  if method not in {'svd', 'eigh', 'cholesky'}:
    raise ValueError("method must be one of {'svd', 'eigh', 'cholesky'}")
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `multivariate_normal` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = None if shape is None else core.canonicalize_shape(shape)
  return _multivariate_normal(key, mean, cov, canonical_shape, canonical_dtype, method)  # type: ignore
@partial(jit, static_argnums=(3, 4, 5))
def _multivariate_normal(key, mean, cov, shape, dtype, method) -> jnp.ndarray:
  """Jitted MVN sampler: factor the covariance, then affine-map iid normals."""
  if not np.ndim(mean) >= 1:
    msg = "multivariate_normal requires mean.ndim >= 1, got mean.ndim == {}"
    raise ValueError(msg.format(np.ndim(mean)))
  if not np.ndim(cov) >= 2:
    msg = "multivariate_normal requires cov.ndim >= 2, got cov.ndim == {}"
    raise ValueError(msg.format(np.ndim(cov)))
  n = mean.shape[-1]
  if np.shape(cov)[-2:] != (n, n):
    msg = ("multivariate_normal requires cov.shape == (..., n, n) for n={n}, "
           "but got cov.shape == {shape}.")
    raise ValueError(msg.format(n=n, shape=np.shape(cov)))
  if shape is None:
    shape = lax.broadcast_shapes(mean.shape[:-1], cov.shape[:-2])
  else:
    _check_shape("normal", shape, mean.shape[:-1], cov.shape[:-2])
  # Any factor F with F @ F.T == cov works: x = mean + F @ z, z ~ N(0, I).
  if method == 'svd':
    (u, s, _) = svd(cov)
    factor = u * jnp.sqrt(s)
  elif method == 'eigh':
    (w, v) = eigh(cov)
    factor = v * jnp.sqrt(w)
  else: # 'cholesky'
    factor = cholesky(cov)
  normal_samples = normal(key, shape + mean.shape[-1:], dtype)
  return mean + jnp.einsum('...ij,...j->...i', factor, normal_samples)
def truncated_normal(key: jnp.ndarray,
                     lower: RealArray,
                     upper: RealArray,
                     shape: Optional[Union[Sequence[int], NamedShape]] = None,
                     dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard normals truncated to the open interval (lower, upper)."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `truncated_normal` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  named_shape = None if shape is None else core.as_named_shape(shape)
  return _truncated_normal(key, lower, upper, named_shape, canonical_dtype)  # type: ignore
@partial(jit, static_argnums=(3, 4))
def _truncated_normal(key, lower, upper, shape, dtype) -> jnp.ndarray:
  """Inverse-CDF sampling of a truncated standard normal.

  Maps (lower, upper) to (a, b) = (erf(lower/sqrt2), erf(upper/sqrt2)),
  samples u ~ Uniform(a, b), and returns sqrt2 * erf_inv(u).
  """
  # Validate the dtype up front. Previously this check sat after several
  # conversions and erf calls that would themselves fail for a bad dtype,
  # making it effectively unreachable in its intended role.
  if not jnp.issubdtype(dtype, np.floating):
    raise TypeError("truncated_normal only accepts floating point dtypes.")
  if shape is None:
    shape = lax.broadcast_shapes(np.shape(lower), np.shape(upper))
  else:
    _check_shape("truncated_normal", shape, np.shape(lower), np.shape(upper))
  sqrt2 = np.array(np.sqrt(2), dtype)
  lower = lax.convert_element_type(lower, dtype)
  upper = lax.convert_element_type(upper, dtype)
  a = lax.erf(lower / sqrt2)
  b = lax.erf(upper / sqrt2)
  u = uniform(key, shape, dtype, minval=a, maxval=b)
  out = sqrt2 * lax.erf_inv(u)
  # Clamp the value to the open interval (lower, upper) to make sure that
  # rounding (or if we chose `a` for `u`) doesn't push us outside of the range.
  return jnp.clip(
      out,
      lax.nextafter(lax.stop_gradient(lower), np.array(np.inf, dtype=dtype)),
      lax.nextafter(lax.stop_gradient(upper), np.array(-np.inf, dtype=dtype)))
def bernoulli(key: jnp.ndarray,
              p: RealArray = np.float32(0.5),
              shape: Optional[Union[Sequence[int], NamedShape]] = None) -> jnp.ndarray:
  """Sample Bernoulli random values (booleans) with probabilities `p`."""
  dtype = dtypes.canonicalize_dtype(lax.dtype(p))
  named_shape = None if shape is None else core.as_named_shape(shape)
  if not jnp.issubdtype(dtype, np.floating):
    msg = "bernoulli probability `p` must have a floating dtype, got {}."
    raise TypeError(msg.format(dtype))
  prob = lax.convert_element_type(p, dtype)
  return _bernoulli(key, prob, named_shape)
@partial(jit, static_argnums=(2,))
def _bernoulli(key, p, shape) -> jnp.ndarray:
  """Jitted Bernoulli: compare uniform draws against the probabilities."""
  if shape is not None:
    _check_shape("bernoulli", shape, np.shape(p))
  else:
    shape = np.shape(p)
  draws = uniform(key, shape, lax.dtype(p))
  return draws < p
def beta(key: jnp.ndarray,
         a: RealArray,
         b: RealArray,
         shape: Optional[Sequence[int]] = None,
         dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample Beta(a, b) random values with given shape and float dtype."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `beta` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = None if shape is None else core.canonicalize_shape(shape)
  return _beta(key, a, b, canonical_shape, canonical_dtype)
def _beta(key, a, b, shape, dtype):
  """Beta via the gamma ratio: X / (X + Y) with X ~ Gamma(a), Y ~ Gamma(b)."""
  if shape is None:
    shape = lax.broadcast_shapes(np.shape(a), np.shape(b))
  else:
    _check_shape("beta", shape, np.shape(a), np.shape(b))
  key_a, key_b = split(key)
  # Broadcast both parameters to the full output shape before sampling.
  a = jnp.broadcast_to(lax.convert_element_type(a, dtype), shape)
  b = jnp.broadcast_to(lax.convert_element_type(b, dtype), shape)
  gamma_a = gamma(key_a, a, shape, dtype)
  gamma_b = gamma(key_b, b, shape, dtype)
  return gamma_a / (gamma_a + gamma_b)
def cauchy(key: jnp.ndarray,
           shape: Sequence[int] = (),
           dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard Cauchy random values with given shape and float dtype."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `cauchy` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _cauchy(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _cauchy(key, shape, dtype):
  """Standard Cauchy via the inverse CDF: tan(pi * (u - 1/2))."""
  _check_shape("cauchy", shape)
  # eps keeps u off 0, where the inverse CDF diverges.
  u = uniform(key, shape, dtype, minval=jnp.finfo(dtype).eps, maxval=1.)
  centered = lax.sub(u, _constant_like(u, 0.5))
  return lax.tan(lax.mul(_constant_like(u, np.pi), centered))
def dirichlet(key: jnp.ndarray,
              alpha: RealArray,
              shape: Optional[Sequence[int]] = None,
              dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample Dirichlet random values with concentration vector(s) `alpha`."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `dirichlet` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = None if shape is None else core.canonicalize_shape(shape)
  return _dirichlet(key, alpha, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(2, 3))
def _dirichlet(key, alpha, shape, dtype):
  """Dirichlet via gamma draws normalized along the last axis."""
  if not np.ndim(alpha) >= 1:
    msg = "dirichlet requires alpha.ndim >= 1, got alpha.ndim == {}"
    raise ValueError(msg.format(np.ndim(alpha)))
  event_shape = np.shape(alpha)[-1:]
  if shape is None:
    shape = np.shape(alpha)[:-1]
  else:
    _check_shape("dirichlet", shape, np.shape(alpha)[:-1])
  alpha = lax.convert_element_type(alpha, dtype)
  gamma_samples = gamma(key, alpha, shape + event_shape, dtype)
  return gamma_samples / jnp.sum(gamma_samples, axis=-1, keepdims=True)
def exponential(key: jnp.ndarray,
                shape: Sequence[int] = (),
                dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard exponential random values with given shape and dtype."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `exponential` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _exponential(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _exponential(key, shape, dtype):
  """Standard exponential via the inverse CDF: -log(1 - u)."""
  _check_shape("exponential", shape)
  u = uniform(key, shape, dtype)
  # log1p(-u) is accurate for u near 0, where log(1 - u) loses precision.
  return lax.neg(lax.log1p(lax.neg(u)))
def _gamma_one(key, alpha):
  """Sample a single Gamma(alpha, 1) variate via Marsaglia & Tsang's method.

  Ref: "A Simple Method for Generating Gamma Variables", Marsaglia & Tsang,
  ACM TOMS (2000). Rejection sampling with a cheap squeeze test; alpha < 1
  is handled by sampling at alpha + 1 and applying the boost U**(1/alpha).
  """
  # The original first line was corrupted ("nstant_like(alpha, 1)"); `zero`
  # is used below, so both leading constants are restored here.
  zero = _constant_like(alpha, 0)
  one = _constant_like(alpha, 1)
  minus_one = _constant_like(alpha, -1)
  one_over_two = _constant_like(alpha, 0.5)
  one_over_three = _constant_like(alpha, 1. / 3.)
  squeeze_const = _constant_like(alpha, 0.0331)
  dtype = lax.dtype(alpha)
  key, subkey = split(key)
  # For alpha < 1: sample Gamma(alpha + 1) and multiply by U**(1/alpha).
  boost = lax.select(lax.ge(alpha, one),
                     one,
                     lax.pow(uniform(subkey, (), dtype=dtype), lax.div(one, alpha)))
  alpha = lax.select(lax.ge(alpha, one), alpha, lax.add(alpha, one))
  d = lax.sub(alpha, one_over_three)
  c = lax.div(one_over_three, lax.sqrt(d))
  def _cond_fn(kXVU):
    _, X, V, U = kXVU
    # Keep looping while the proposal fails both the cheap squeeze test and
    # the exact log-acceptance test.
    cond = lax.bitwise_and(lax.ge(U, lax.sub(one, lax.mul(squeeze_const, lax.mul(X, X)))),
                           lax.ge(lax.log(U), lax.add(lax.mul(X, one_over_two),
                                                      lax.mul(d, lax.add(lax.sub(one, V),
                                                                         lax.log(V))))))
    return cond
  def _body_fn(kXVU):
    def _next_kxv(kxv):
      key = kxv[0]
      key, subkey = split(key)
      x = normal(subkey, (), dtype=dtype)
      v = lax.add(one, lax.mul(x, c))
      return key, x, v
    key = kXVU[0]
    key, x_key, U_key = split(key, 3)
    # Redraw x until v = 1 + c*x is positive (so v**3 is a valid proposal).
    _, x, v = lax.while_loop(lambda kxv: lax.le(kxv[2], zero), _next_kxv, (x_key, zero, minus_one))
    X = lax.mul(x, x)
    V = lax.mul(lax.mul(v, v), v)
    U = uniform(U_key, (), dtype=dtype)
    return key, X, V, U
  # The initial carry (X=0, V=1, U=2) always fails _cond_fn's acceptance,
  # so the body is guaranteed to run at least once.
  _, _, V, _ = lax.while_loop(_cond_fn, _body_fn, (key, zero, one, _constant_like(alpha, 2)))
  z = lax.mul(lax.mul(d, V), boost)
  # Avoid returning exactly zero (e.g. so downstream logs stay finite).
  return lax.select(lax.eq(z, zero), jnp.finfo(z.dtype).tiny, z)
def _gamma_grad(sample, a):
  """Per-element d(sample)/d(a) for gamma samples (implicit reparameterization)."""
  flat_samples = jnp.reshape(sample, -1)
  flat_alphas = jnp.reshape(a, -1)
  if xla_bridge.get_backend().platform == 'cpu':
    # lax.map avoids vmap overheads on CPU.
    grads = lax.map(lambda args: lax.random_gamma_grad(*args), (flat_alphas, flat_samples))
  else:
    grads = vmap(lax.random_gamma_grad)(flat_alphas, flat_samples)
  return grads.reshape(np.shape(a))
def _gamma_impl(key, a, use_vmap=False):
  """Elementwise gamma sampling: one derived subkey per element of `a`.

  `use_vmap` selects vmap (accelerators) vs lax.map (CPU) over `_gamma_one`.
  """
  a_shape = jnp.shape(a)
  # `key` may carry leading batch dims (e.g. from vmap); everything before
  # the trailing key axis of length 2 is treated as batch.
  key_ndim = jnp.ndim(key) - 1
  key = jnp.reshape(key, (-1, 2))
  # Split each batch key into one subkey per sample it must produce.
  key = vmap(split, in_axes=(0, None))(key, prod(a_shape[key_ndim:]))
  keys = jnp.reshape(key, (-1, 2))
  alphas = jnp.reshape(a, -1)
  if use_vmap:
    samples = vmap(_gamma_one)(keys, alphas)
  else:
    samples = lax.map(lambda args: _gamma_one(*args), (keys, alphas))
  return jnp.reshape(samples, a_shape)
def _gamma_batching_rule(batched_args, batch_dims):
  """Batching rule: move batch dims to the front and re-bind the primitive."""
  key, alpha = batched_args
  key_bdim, alpha_bdim = batch_dims
  # The batch size comes from whichever operand actually carries a batch dim.
  size = next(arg.shape[dim] for arg, dim in zip(batched_args, batch_dims)
              if dim is not None)
  key = batching.bdim_at_front(key, key_bdim, size)
  alpha = batching.bdim_at_front(alpha, alpha_bdim, size)
  return random_gamma_p.bind(key, alpha), 0
# Gamma sampling is wrapped in a primitive so a custom JVP (implicit
# reparameterization gradient) and a batching rule can be attached.
random_gamma_p = core.Primitive('random_gamma')
random_gamma_p.def_impl(_gamma_impl)
random_gamma_p.def_abstract_eval(lambda key, a: core.raise_to_shaped(a))
# d(sample)/d(a) via _gamma_grad; no gradient with respect to the key.
ad.defjvp2(random_gamma_p, None, lambda tangent, ans, key, a: tangent * _gamma_grad(ans, a))
xla.translations_with_avals[random_gamma_p] = xla.lower_fun(
    partial(_gamma_impl, use_vmap=True),
    multiple_results=False, with_avals=True)
# CPU lowering prefers lax.map over vmap for this sampler.
xla.backend_specific_translations['cpu'][random_gamma_p] = xla.lower_fun(
    partial(_gamma_impl, use_vmap=False),
    multiple_results=False)
batching.primitive_batchers[random_gamma_p] = _gamma_batching_rule
def gamma(key: jnp.ndarray,
          a: RealArray,
          shape: Optional[Sequence[int]] = None,
          dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample Gamma(a) random values with given shape and float dtype."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `gamma` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = None if shape is None else core.canonicalize_shape(shape)
  return _gamma(key, a, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(2, 3))
def _gamma(key, a, shape, dtype):
  """Jitted gamma: derive/validate the shape, broadcast `a`, bind primitive."""
  if shape is not None:
    _check_shape("gamma", shape, np.shape(a))
  else:
    shape = np.shape(a)
  a = lax.convert_element_type(a, dtype)
  if np.shape(a) != shape:
    a = jnp.broadcast_to(a, shape)
  return random_gamma_p.bind(key, a)
@partial(jit, static_argnums=(2, 3, 4))
def _poisson_knuth(key, lam, shape, dtype, max_iters):
  # Knuth's multiplicative algorithm, run in log space: multiply uniforms
  # (i.e. sum their logs) until the running product drops below exp(-lam);
  # the number of factors consumed is Poisson(lam) distributed.
  # Reference:
  # https://en.wikipedia.org/wiki/Poisson_distribution#Generating_Poisson-distributed_random_variables
  def body_fn(carry):
    i, k, rng, log_prod = carry
    rng, subkey = split(rng)
    # Elements whose product already fell below -lam stop incrementing.
    k = lax.select(log_prod > -lam, k + 1, k)
    u = uniform(subkey, shape, np.float32)
    return i + 1, k, rng, log_prod + jnp.log(u)
  def cond_fn(carry):
    i, log_prod = carry[0], carry[3]
    # Run until every element has terminated (or the iteration cap hits).
    return (log_prod > -lam).any() & (i < max_iters)
  k_init = lax.full_like(lam, 0, dtype, shape)
  log_rate_init = lax.full_like(lam, 0, np.float32, shape)
  k = lax.while_loop(cond_fn, body_fn, (0, k_init, key, log_rate_init))[1]
  # The loop counts one step past the stopping point.
  return (k - 1).astype(dtype)
@partial(jit, static_argnums=(2, 3, 4))
def _poisson_rejection(key, lam, shape, dtype, max_iters):
  # Transformed rejection due to Hormann.
  # Reference:
  # http://citeseer.ist.psu.edu/viewdoc/citations;jsessionid=1BEB35946CC807879F55D42512E5490C?doi=10.1.1.48.3054.
  log_lam = lax.log(lam)
  # Constants of the transformed-rejection hat function (from the paper).
  b = 0.931 + 2.53 * lax.sqrt(lam)
  a = -0.059 + 0.02483 * b
  inv_alpha = 1.1239 + 1.1328 / (b - 3.4)
  v_r = 0.9277 - 3.6224 / (b - 2)
  def body_fn(carry):
    i, k_out, accepted, key = carry
    key, subkey_0, subkey_1 = split(key, 3)
    # Propose k via the piecewise-linear transformed inverse CDF.
    u = uniform(subkey_0, shape, lam.dtype) - 0.5
    v = uniform(subkey_1, shape, lam.dtype)
    u_shifted = 0.5 - abs(u)
    k = lax.floor((2 * a / u_shifted + b) * u + lam + 0.43)
    s = lax.log(v * inv_alpha / (a / (u_shifted * u_shifted) + b))
    t = -lam + k * log_lam - lax.lgamma(k + 1)
    # Fast accept (squeeze), fast reject, then the exact log-density test.
    accept1 = (u_shifted >= 0.07) & (v <= v_r)
    reject = (k < 0) | ((u_shifted < 0.013) & (v > u_shifted))
    accept2 = s <= t
    accept = accept1 | (~reject & accept2)
    # Record newly accepted proposals; already-accepted elements keep theirs.
    k_out = lax.select(accept, k, k_out)
    accepted |= accept
    return i + 1, k_out, accepted, key
  def cond_fn(carry):
    i, k_out, accepted, key = carry
    # Loop until every element has an accepted proposal (or max_iters).
    return (~accepted).any() & (i < max_iters)
  k_init = lax.full_like(lam, -1, lam.dtype, shape)
  accepted = lax.full_like(lam, False, jnp.bool_, shape)
  k = lax.while_loop(cond_fn, body_fn, (0, k_init, accepted, key))[1]
  return k.astype(dtype)
@partial(jit, static_argnums=(2, 3))
def _poisson(key, lam, shape, dtype):
  # The implementation matches TensorFlow and NumPy:
  # https://github.com/tensorflow/tensorflow/blob/v2.2.0-rc3/tensorflow/core/kernels/random_poisson_op.cc
  # https://github.com/numpy/numpy/blob/v1.18.3/numpy/random/src/distributions/distributions.c#L574
  # For lambda < 10, we use the Knuth algorithm; otherwise, we use transformed
  # rejection sampling.
  use_knuth = lam < 10
  # Both branches execute for all elements (lax.select, not lax.cond), so
  # the inactive branch is fed a harmless rate.
  lam_knuth = lax.select(use_knuth, lam, lax.full_like(lam, 0.0))
  # The acceptance probability for rejection sampling maxes out at 89% as
  # λ -> ∞, so pick some arbitrary large value.
  lam_rejection = lax.select(use_knuth, lax.full_like(lam, 1e5), lam)
  max_iters = dtype.type(jnp.iinfo(dtype).max)  # insanely conservative
  result = lax.select(
      use_knuth,
      _poisson_knuth(key, lam_knuth, shape, dtype, max_iters),
      _poisson_rejection(key, lam_rejection, shape, dtype, max_iters),
  )
  # A rate of exactly zero deterministically yields zero counts.
  return lax.select(lam == 0, jnp.zeros_like(result), result)
def poisson(key: jnp.ndarray,
            lam: RealArray,
            shape: Sequence[int] = (),
            dtype: DTypeLikeInt = dtypes.int_) -> jnp.ndarray:
  """Sample Poisson random values with rates `lam`, given shape and int dtype."""
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  # Rates are broadcast to the output shape and computed in float32.
  if np.shape(lam) != canonical_shape:
    lam = jnp.broadcast_to(lam, canonical_shape)
  rate = lax.convert_element_type(lam, np.float32)
  return _poisson(key, rate, canonical_shape, canonical_dtype)
def gumbel(key: jnp.ndarray,
           shape: Sequence[int] = (),
           dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard Gumbel random values with given shape and float dtype."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `gumbel` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _gumbel(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _gumbel(key, shape, dtype):
  """Standard Gumbel via -log(-log(u)); `tiny` keeps u strictly positive."""
  _check_shape("gumbel", shape)
  u = uniform(key, shape, dtype, minval=jnp.finfo(dtype).tiny, maxval=1.)
  return -jnp.log(-jnp.log(u))
def categorical(key: jnp.ndarray,
                logits: RealArray,
                axis: int = -1,
                shape: Optional[Sequence[int]] = None) -> jnp.ndarray:
  """Sample categorical indices from (unnormalized) `logits` via Gumbel-max.

  Args:
    key: a PRNGKey.
    logits: unnormalized log-probabilities; categories lie along `axis`.
    axis: axis of `logits` holding the categories.
    shape: optional output shape; must end with logits' batch shape.

  Returns:
    Integer indices into `axis`.
  """
  # Normalize `axis` to a negative index so it still names the category
  # axis after sample dimensions are prepended below.
  if axis >= 0:
    axis -= len(logits.shape)
  batch_shape = tuple(np.delete(logits.shape, axis))
  if shape is None:
    shape = batch_shape
  else:
    shape = tuple(shape)
    _check_shape("categorical", shape, batch_shape)
  # Leading dims of `shape` beyond the batch shape are extra sample dims.
  sample_shape = shape[:len(shape)-len(batch_shape)]
  # Gumbel-max trick: argmax(logits + Gumbel noise) ~ Categorical(softmax(logits)).
  return jnp.argmax(
      gumbel(key, sample_shape + logits.shape, logits.dtype) +
      lax.expand_dims(logits, tuple(range(len(sample_shape)))),
      axis=axis)
def laplace(key: jnp.ndarray,
            shape: Sequence[int] = (),
            dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard Laplace random values with given shape and float dtype."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `laplace` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _laplace(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _laplace(key, shape, dtype):
  """Standard Laplace via the inverse CDF of u ~ Uniform(-1, 1)."""
  _check_shape("laplace", shape)
  # epsneg keeps u strictly inside (-1, 1) so log1p stays finite.
  u = uniform(
      key, shape, dtype, minval=-1. + jnp.finfo(dtype).epsneg, maxval=1.)
  magnitude = lax.log1p(lax.neg(lax.abs(u)))
  return lax.mul(lax.sign(u), magnitude)
def logistic(key: jnp.ndarray,
             shape: Sequence[int] = (),
             dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample standard logistic random values with given shape and float dtype."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `logistic` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _logistic(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _logistic(key, shape, dtype):
  """Standard logistic via the logit of a uniform: log(x / (1 - x))."""
  _check_shape("logistic", shape)
  x = uniform(key, shape, dtype, minval=jnp.finfo(dtype).eps, maxval=1.)
  odds = lax.div(x, lax.sub(lax._const(x, 1), x))
  return lax.log(odds)
def pareto(key: jnp.ndarray,
           b: RealArray,
           shape: Optional[Sequence[int]] = None,
           dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample Pareto random values with index `b`, given shape and float dtype."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `pareto` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = None if shape is None else core.canonicalize_shape(shape)
  return _pareto(key, b, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(2, 3))
def _pareto(key, b, shape, dtype):
  """Pareto(b) via exp(E / b) with E a standard exponential variate."""
  if shape is None:
    shape = np.shape(b)
  else:
    # Consistency/robustness fix: validate b's shape against the requested
    # shape, as _beta/_gamma/_t do. Previously only `shape` itself was
    # checked, so a `b` broadcasting to a larger shape slipped through.
    _check_shape("pareto", shape, np.shape(b))
  b = lax.convert_element_type(b, dtype)
  e = exponential(key, shape, dtype)
  return lax.exp(e / b)
def t(key: jnp.ndarray,
      df: RealArray,
      shape: Sequence[int] = (),
      dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample Student's t random values with `df` degrees of freedom."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `t` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _t(key, df, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(2, 3))
def _t(key, df, shape, dtype):
  """Student's t as N * sqrt(half_df / G), G ~ Gamma(half_df).

  Equivalent to the textbook N / sqrt(G / (df/2)) construction.
  """
  if shape is None:
    shape = np.shape(df)
  else:
    _check_shape("t", shape, np.shape(df))
  df = lax.convert_element_type(df, dtype)
  key_n, key_g = split(key)
  n = normal(key_n, shape, dtype)
  two = _constant_like(n, 2)
  half_df = lax.div(df, two)
  # Bug fix: the gamma draw previously reused `key_n`, correlating the
  # normal and gamma variates; it must consume the second subkey.
  g = gamma(key_g, half_df, shape, dtype)
  return n * jnp.sqrt(half_df / g)
def rademacher(key: jnp.ndarray,
               shape: Sequence[int],
               dtype: DTypeLikeInt = dtypes.int_) -> jnp.ndarray:
  """Sample values uniformly from {-1, +1} with given shape and int dtype."""
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _rademacher(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _rademacher(key, shape, dtype):
  """Map fair coin flips {0, 1} onto {-1, +1}."""
  coin_flips = bernoulli(key=key, p=0.5, shape=shape)
  signs = 2 * coin_flips - 1
  return signs.astype(dtype)
def maxwell(key: jnp.ndarray,
            shape: Sequence[int] = (),
            dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample one-sided Maxwell values: sqrt(X^2 + Y^2 + Z^2), X,Y,Z ~ N(0,1)."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `maxwell` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _maxwell(key, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(1, 2))
def _maxwell(key, shape, dtype):
  """Maxwell variates as the Euclidean norm of three iid standard normals."""
  gaussians = normal(key=key, shape=shape + (3,), dtype=dtype)
  return jnp.linalg.norm(gaussians, axis=-1)
def double_sided_maxwell(key: jnp.ndarray,
                         loc: RealArray,
                         scale: RealArray,
                         shape: Sequence[int] = (),
                         dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample from a double-sided Maxwell distribution with given loc/scale."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `double_sided_maxwell` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _double_sided_maxwell(key, loc, scale, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(3, 4))
def _double_sided_maxwell(key, loc, scale, shape, dtype):
  """Symmetrize one-sided Maxwell draws with random signs, then shift/scale."""
  params_shapes = lax.broadcast_shapes(np.shape(loc), np.shape(scale))
  if not shape:
    shape = params_shapes
  # NOTE(review): when `shape` is empty this appends the parameter shape
  # twice (shape == params_shapes + params_shapes); looks intentional
  # ("`shape` draws per parameter") but worth confirming against callers.
  shape = shape + params_shapes
  maxwell_key, rademacher_key = split(key)
  maxwell_rvs = maxwell(maxwell_key, shape=shape, dtype=dtype)
  # Generate random signs for the symmetric variates.
  random_sign = rademacher(rademacher_key, shape=shape, dtype=dtype)
  assert random_sign.shape == maxwell_rvs.shape
  return random_sign * maxwell_rvs * scale + loc
def weibull_min(key: jnp.ndarray,
                scale: RealArray,
                concentration: RealArray,
                shape: Sequence[int] = (),
                dtype: DTypeLikeFloat = dtypes.float_) -> jnp.ndarray:
  """Sample from a Weibull minimum distribution with given scale/concentration."""
  if not dtypes.issubdtype(dtype, np.floating):
    msg = f"dtype argument to `weibull_min` must be a float dtype, got {dtype}"
    raise ValueError(msg)
  canonical_dtype = dtypes.canonicalize_dtype(dtype)
  canonical_shape = core.canonicalize_shape(shape)
  return _weibull_min(key, scale, concentration, canonical_shape, canonical_dtype)
@partial(jit, static_argnums=(3, 4))
def _weibull_min(key, scale, concentration, shape, dtype):
  """Weibull via the inverse CDF: scale * (-log(1 - u)) ** (1/concentration)."""
  u = uniform(key=key, shape=shape, minval=0, maxval=1, dtype=dtype)
  # Inverse weibull CDF.
  inv_cdf = jnp.power(-jnp.log1p(-u), 1.0 / concentration)
  return inv_cdf * scale
| true | true |
f7371b5b2a5e5017c741a7f83e5e0f886f7ce7f7 | 1,772 | py | Python | app/models/m2m.py | zhangadrian/mini-shop-server | 3a1f29784b707d14f2435322cdc2efdaab4ec85e | [
"MIT"
] | 1 | 2020-09-25T09:23:06.000Z | 2020-09-25T09:23:06.000Z | app/models/m2m.py | zhangadrian/mini-shop-server | 3a1f29784b707d14f2435322cdc2efdaab4ec85e | [
"MIT"
] | null | null | null | app/models/m2m.py | zhangadrian/mini-shop-server | 3a1f29784b707d14f2435322cdc2efdaab4ec85e | [
"MIT"
] | null | null | null | # _*_ coding: utf-8 _*_
"""
Created by Allen7D on 2018/6/17.
"""
from sqlalchemy import Column, Integer, String, ForeignKey
from app.models.base import Base
from app.models.image import Image
__author__ = 'Allen7D'
class Theme2Product(Base):
    # Association table for the many-to-many relation between themes and
    # the products they showcase; the pair of foreign keys forms the PK.
    __tablename__ = 'theme_product'
    theme_id = Column(Integer, ForeignKey('theme.id'), primary_key=True, comment='主题外键')
    product_id = Column(Integer, ForeignKey('product.id'), primary_key=True, comment='商品外键')
class Product2Image(Base):
    # Association between a product and its gallery images, with an explicit
    # display order per image.
    __tablename__ = 'product_image'
    id = Column(Integer, primary_key=True, autoincrement=True)
    img_id = Column(Integer, ForeignKey('image.id'), nullable=False, comment='外键,关联图片表')
    order = Column(Integer, nullable=False, comment='图片排序序号')
    product_id = Column(Integer, ForeignKey('product.id'), nullable=False, comment='外键, 商品id')
    def keys(self):
        # Serialization keys: hide the raw ids and ordering column and expose
        # the resolved `img_url` instead (presumably consumed by a dict(...)
        # conversion in Base — TODO confirm against app.models.base).
        self.hide('id', 'img_id', 'product_id', 'order').append('img_url')
        return self.fields
    @property
    def img_url(self):
        # Resolve the image foreign key to its URL via the Image model.
        return Image.get_img_by_id(id=self.img_id).url
class Product2Property(Base):
    # Name/value detail properties attached to a product (one row per
    # property).
    __tablename__ = 'product_property'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(30), comment='详情属性名称')
    detail = Column(String(255), nullable=False, comment='详情属性')
    product_id = Column(Integer, ForeignKey('product.id'), nullable=False, comment='外键, 商品id')
class Order2Product(Base):
    # Line item of an order: composite (order_id, product_id) primary key
    # plus the purchased quantity.
    __tablename__ = 'order_product'
    order_id = Column(Integer, primary_key=True, comment='联合主键,订单id')
    product_id = Column(Integer, primary_key=True, comment='联合主键,商品id')
    count = Column(Integer, nullable=False, comment='商品数量')
    def __init__(self, order_id=None, product_id=None, count=None):
        # Explicit constructor so line items can be built field-by-field
        # before being attached to a session.
        self.order_id = order_id
        self.product_id = product_id
        self.count = count
        super(Order2Product, self).__init__()
| 32.814815 | 91 | 0.744921 |
from sqlalchemy import Column, Integer, String, ForeignKey
from app.models.base import Base
from app.models.image import Image
__author__ = 'Allen7D'
class Theme2Product(Base):
__tablename__ = 'theme_product'
theme_id = Column(Integer, ForeignKey('theme.id'), primary_key=True, comment='主题外键')
product_id = Column(Integer, ForeignKey('product.id'), primary_key=True, comment='商品外键')
class Product2Image(Base):
__tablename__ = 'product_image'
id = Column(Integer, primary_key=True, autoincrement=True)
img_id = Column(Integer, ForeignKey('image.id'), nullable=False, comment='外键,关联图片表')
order = Column(Integer, nullable=False, comment='图片排序序号')
product_id = Column(Integer, ForeignKey('product.id'), nullable=False, comment='外键, 商品id')
def keys(self):
self.hide('id', 'img_id', 'product_id', 'order').append('img_url')
return self.fields
@property
def img_url(self):
return Image.get_img_by_id(id=self.img_id).url
class Product2Property(Base):
__tablename__ = 'product_property'
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String(30), comment='详情属性名称')
detail = Column(String(255), nullable=False, comment='详情属性')
product_id = Column(Integer, ForeignKey('product.id'), nullable=False, comment='外键, 商品id')
class Order2Product(Base):
__tablename__ = 'order_product'
order_id = Column(Integer, primary_key=True, comment='联合主键,订单id')
product_id = Column(Integer, primary_key=True, comment='联合主键,商品id')
count = Column(Integer, nullable=False, comment='商品数量')
def __init__(self, order_id=None, product_id=None, count=None):
self.order_id = order_id
self.product_id = product_id
self.count = count
super(Order2Product, self).__init__()
| true | true |
f7371ccf38a7e9235c8bc7e0fc8da0c8f2647d3a | 8,520 | py | Python | tests/test_generic.py | kevinlai219/Mezzanine-Django | a92cde236448e19cdbf853b5cb34f99a60972147 | [
"BSD-2-Clause"
] | 3,053 | 2015-01-01T08:16:01.000Z | 2022-03-31T12:06:03.000Z | tests/test_generic.py | kevinlai219/Mezzanine-Django | a92cde236448e19cdbf853b5cb34f99a60972147 | [
"BSD-2-Clause"
] | 818 | 2015-01-03T05:44:45.000Z | 2022-03-08T00:06:23.000Z | tests/test_generic.py | kevinlai219/Mezzanine-Django | a92cde236448e19cdbf853b5cb34f99a60972147 | [
"BSD-2-Clause"
] | 1,352 | 2015-01-01T06:17:14.000Z | 2022-03-26T16:21:54.000Z | from unittest import skipUnless
from django.contrib.contenttypes.models import ContentType
from django.template import Context, Template
from django.urls import reverse
from mezzanine.blog.models import BlogPost
from mezzanine.conf import settings
from mezzanine.core.models import CONTENT_STATUS_PUBLISHED
from mezzanine.generic.forms import KeywordsWidget, RatingForm
from mezzanine.generic.models import AssignedKeyword, Keyword, ThreadedComment
from mezzanine.generic.views import comment
from mezzanine.pages.models import RichTextPage
from mezzanine.utils.tests import TestCase
class GenericTests(TestCase):
@skipUnless("mezzanine.blog" in settings.INSTALLED_APPS, "blog app required")
def test_rating(self):
"""
Test that ratings can be posted and avarage/count are calculated.
"""
blog_post = BlogPost.objects.create(
title="Ratings", user=self._user, status=CONTENT_STATUS_PUBLISHED
)
if settings.RATINGS_ACCOUNT_REQUIRED:
self.client.login(username=self._username, password=self._password)
data = RatingForm(None, blog_post).initial
for value in settings.RATINGS_RANGE:
data["value"] = value
response = self.client.post(reverse("rating"), data=data)
response.delete_cookie("mezzanine-rating")
blog_post = BlogPost.objects.get(id=blog_post.id)
count = len(settings.RATINGS_RANGE)
_sum = sum(settings.RATINGS_RANGE)
average = _sum / count
if settings.RATINGS_ACCOUNT_REQUIRED:
self.assertEqual(blog_post.rating_count, 1)
self.assertEqual(blog_post.rating_sum, settings.RATINGS_RANGE[-1])
self.assertEqual(blog_post.rating_average, settings.RATINGS_RANGE[-1] / 1)
else:
self.assertEqual(blog_post.rating_count, count)
self.assertEqual(blog_post.rating_sum, _sum)
self.assertEqual(blog_post.rating_average, average)
@skipUnless("mezzanine.blog" in settings.INSTALLED_APPS, "blog app required")
def test_comment_ratings(self):
"""
Test that a generic relation defined on one of Mezzanine's generic
models (in this case ratings of comments) correctly sets its
extra fields.
"""
blog_post = BlogPost.objects.create(title="Post with comments", user=self._user)
content_type = ContentType.objects.get_for_model(blog_post)
kwargs = {
"content_type": content_type,
"object_pk": blog_post.id,
"site_id": settings.SITE_ID,
"comment": "First!!!11",
}
comment = ThreadedComment.objects.create(**kwargs)
comment.rating.create(value=settings.RATINGS_RANGE[0])
comment.rating.create(value=settings.RATINGS_RANGE[-1])
comment = ThreadedComment.objects.get(pk=comment.pk)
self.assertEqual(len(comment.rating.all()), comment.rating_count)
self.assertEqual(
comment.rating_average,
(settings.RATINGS_RANGE[0] + settings.RATINGS_RANGE[-1]) / 2,
)
@skipUnless("mezzanine.blog" in settings.INSTALLED_APPS, "blog app required")
def test_comment_queries(self):
"""
Test that rendering comments executes the same number of
queries, regardless of the number of nested replies.
"""
blog_post = BlogPost.objects.create(title="Post", user=self._user)
content_type = ContentType.objects.get_for_model(blog_post)
kwargs = {
"content_type": content_type,
"object_pk": blog_post.id,
"site_id": settings.SITE_ID,
}
template = "{% load comment_tags %}{% comment_thread blog_post %}"
context = {
"blog_post": blog_post,
"posted_comment_form": None,
"unposted_comment_form": None,
}
if settings.COMMENTS_ACCOUNT_REQUIRED:
self.queries_used_for_template(template, **context)
before = self.queries_used_for_template(template, **context)
self.assertTrue(before > 0)
self.create_recursive_objects(ThreadedComment, "replied_to", **kwargs)
after = self.queries_used_for_template(template, **context)
self.assertEqual(before, after)
@skipUnless("mezzanine.pages" in settings.INSTALLED_APPS, "pages app required")
def test_keywords(self):
"""
Test that the keywords_string field is correctly populated.
"""
page = RichTextPage.objects.create(title="test keywords")
keywords = {"how", "now", "brown", "cow"}
Keyword.objects.all().delete()
for keyword in keywords:
keyword_id = Keyword.objects.get_or_create(title=keyword)[0].id
page.keywords.get_or_create(keyword_id=keyword_id)
page = RichTextPage.objects.get(id=page.id)
self.assertEqual(keywords, set(page.keywords_string.split()))
# Test removal.
first = Keyword.objects.all()[0]
keywords.remove(first.title)
first.delete()
page = RichTextPage.objects.get(id=page.id)
self.assertEqual(keywords, set(page.keywords_string.split()))
page.delete()
def test_delete_unused(self):
"""
Only ``Keyword`` instances without any assignments should be deleted.
"""
assigned_keyword = Keyword.objects.create(title="assigned")
Keyword.objects.create(title="unassigned")
AssignedKeyword.objects.create(
keyword_id=assigned_keyword.id, content_object=RichTextPage(pk=1)
)
Keyword.objects.delete_unused(keyword_ids=[assigned_keyword.id])
self.assertEqual(Keyword.objects.count(), 2)
Keyword.objects.delete_unused()
self.assertEqual(Keyword.objects.count(), 1)
self.assertEqual(Keyword.objects.all()[0].id, assigned_keyword.id)
def test_comment_form_returns_400_when_missing_data(self):
"""
Assert 400 status code response when expected data is missing from
the comment form. This simulates typical malicious bot behavior.
"""
request = self._request_factory.post(reverse("comment"))
if settings.COMMENTS_ACCOUNT_REQUIRED:
request.user = self._user
request.session = {}
response = comment(request)
self.assertEqual(response.status_code, 400)
def test_multiple_comment_forms(self):
template = Template(
"""
{% load comment_tags %}
{% comments_for post1 %}
{% comments_for post2 %}
"""
)
request = self._request_factory.get(reverse("comment"))
request.user = self._user
context = {
"post1": BlogPost.objects.create(title="Post #1", user=self._user),
"post2": BlogPost.objects.create(title="Post #2", user=self._user),
"request": request,
}
result = template.render(Context(context))
self.assertInHTML(
'<input id="id_object_pk" name="object_pk" '
'type="hidden" value="%d" />' % context["post2"].pk,
result,
)
def test_keywords_widget(self):
"""
Test that Keywords widget is returning proper value
for form rendering and its support for different data types.
"""
keyword_widget = KeywordsWidget()
keywords = {"how", "now", "brown"}
Keyword.objects.all().delete()
keyword_id_list = []
for keyword in keywords:
keyword_id = Keyword.objects.get_or_create(title=keyword)[0].id
keyword_id_list.append(keyword_id)
keyword_id_string = ",".join(map(str, keyword_id_list))
values_from_string = keyword_widget.decompress(keyword_id_string)
self.assertIn("how", values_from_string[1])
self.assertIn("now", values_from_string[1])
self.assertIn("brown", values_from_string[1])
for keyword_id in keyword_id_list:
AssignedKeyword.objects.create(
keyword_id=keyword_id, content_object=RichTextPage(pk=1)
)
assigned_keywords = AssignedKeyword.objects.all()
values_from_relation = keyword_widget.decompress(assigned_keywords)
self.assertIn("how", values_from_relation[1])
self.assertIn("now", values_from_relation[1])
self.assertIn("brown", values_from_relation[1])
self.assertEqual(("", ""), keyword_widget.decompress(None))
| 40.571429 | 88 | 0.653404 | from unittest import skipUnless
from django.contrib.contenttypes.models import ContentType
from django.template import Context, Template
from django.urls import reverse
from mezzanine.blog.models import BlogPost
from mezzanine.conf import settings
from mezzanine.core.models import CONTENT_STATUS_PUBLISHED
from mezzanine.generic.forms import KeywordsWidget, RatingForm
from mezzanine.generic.models import AssignedKeyword, Keyword, ThreadedComment
from mezzanine.generic.views import comment
from mezzanine.pages.models import RichTextPage
from mezzanine.utils.tests import TestCase
class GenericTests(TestCase):
@skipUnless("mezzanine.blog" in settings.INSTALLED_APPS, "blog app required")
def test_rating(self):
blog_post = BlogPost.objects.create(
title="Ratings", user=self._user, status=CONTENT_STATUS_PUBLISHED
)
if settings.RATINGS_ACCOUNT_REQUIRED:
self.client.login(username=self._username, password=self._password)
data = RatingForm(None, blog_post).initial
for value in settings.RATINGS_RANGE:
data["value"] = value
response = self.client.post(reverse("rating"), data=data)
response.delete_cookie("mezzanine-rating")
blog_post = BlogPost.objects.get(id=blog_post.id)
count = len(settings.RATINGS_RANGE)
_sum = sum(settings.RATINGS_RANGE)
average = _sum / count
if settings.RATINGS_ACCOUNT_REQUIRED:
self.assertEqual(blog_post.rating_count, 1)
self.assertEqual(blog_post.rating_sum, settings.RATINGS_RANGE[-1])
self.assertEqual(blog_post.rating_average, settings.RATINGS_RANGE[-1] / 1)
else:
self.assertEqual(blog_post.rating_count, count)
self.assertEqual(blog_post.rating_sum, _sum)
self.assertEqual(blog_post.rating_average, average)
@skipUnless("mezzanine.blog" in settings.INSTALLED_APPS, "blog app required")
def test_comment_ratings(self):
blog_post = BlogPost.objects.create(title="Post with comments", user=self._user)
content_type = ContentType.objects.get_for_model(blog_post)
kwargs = {
"content_type": content_type,
"object_pk": blog_post.id,
"site_id": settings.SITE_ID,
"comment": "First!!!11",
}
comment = ThreadedComment.objects.create(**kwargs)
comment.rating.create(value=settings.RATINGS_RANGE[0])
comment.rating.create(value=settings.RATINGS_RANGE[-1])
comment = ThreadedComment.objects.get(pk=comment.pk)
self.assertEqual(len(comment.rating.all()), comment.rating_count)
self.assertEqual(
comment.rating_average,
(settings.RATINGS_RANGE[0] + settings.RATINGS_RANGE[-1]) / 2,
)
@skipUnless("mezzanine.blog" in settings.INSTALLED_APPS, "blog app required")
def test_comment_queries(self):
blog_post = BlogPost.objects.create(title="Post", user=self._user)
content_type = ContentType.objects.get_for_model(blog_post)
kwargs = {
"content_type": content_type,
"object_pk": blog_post.id,
"site_id": settings.SITE_ID,
}
template = "{% load comment_tags %}{% comment_thread blog_post %}"
context = {
"blog_post": blog_post,
"posted_comment_form": None,
"unposted_comment_form": None,
}
if settings.COMMENTS_ACCOUNT_REQUIRED:
self.queries_used_for_template(template, **context)
before = self.queries_used_for_template(template, **context)
self.assertTrue(before > 0)
self.create_recursive_objects(ThreadedComment, "replied_to", **kwargs)
after = self.queries_used_for_template(template, **context)
self.assertEqual(before, after)
@skipUnless("mezzanine.pages" in settings.INSTALLED_APPS, "pages app required")
def test_keywords(self):
page = RichTextPage.objects.create(title="test keywords")
keywords = {"how", "now", "brown", "cow"}
Keyword.objects.all().delete()
for keyword in keywords:
keyword_id = Keyword.objects.get_or_create(title=keyword)[0].id
page.keywords.get_or_create(keyword_id=keyword_id)
page = RichTextPage.objects.get(id=page.id)
self.assertEqual(keywords, set(page.keywords_string.split()))
first = Keyword.objects.all()[0]
keywords.remove(first.title)
first.delete()
page = RichTextPage.objects.get(id=page.id)
self.assertEqual(keywords, set(page.keywords_string.split()))
page.delete()
def test_delete_unused(self):
assigned_keyword = Keyword.objects.create(title="assigned")
Keyword.objects.create(title="unassigned")
AssignedKeyword.objects.create(
keyword_id=assigned_keyword.id, content_object=RichTextPage(pk=1)
)
Keyword.objects.delete_unused(keyword_ids=[assigned_keyword.id])
self.assertEqual(Keyword.objects.count(), 2)
Keyword.objects.delete_unused()
self.assertEqual(Keyword.objects.count(), 1)
self.assertEqual(Keyword.objects.all()[0].id, assigned_keyword.id)
def test_comment_form_returns_400_when_missing_data(self):
request = self._request_factory.post(reverse("comment"))
if settings.COMMENTS_ACCOUNT_REQUIRED:
request.user = self._user
request.session = {}
response = comment(request)
self.assertEqual(response.status_code, 400)
def test_multiple_comment_forms(self):
template = Template(
"""
{% load comment_tags %}
{% comments_for post1 %}
{% comments_for post2 %}
"""
)
request = self._request_factory.get(reverse("comment"))
request.user = self._user
context = {
"post1": BlogPost.objects.create(title="Post #1", user=self._user),
"post2": BlogPost.objects.create(title="Post #2", user=self._user),
"request": request,
}
result = template.render(Context(context))
self.assertInHTML(
'<input id="id_object_pk" name="object_pk" '
'type="hidden" value="%d" />' % context["post2"].pk,
result,
)
def test_keywords_widget(self):
keyword_widget = KeywordsWidget()
keywords = {"how", "now", "brown"}
Keyword.objects.all().delete()
keyword_id_list = []
for keyword in keywords:
keyword_id = Keyword.objects.get_or_create(title=keyword)[0].id
keyword_id_list.append(keyword_id)
keyword_id_string = ",".join(map(str, keyword_id_list))
values_from_string = keyword_widget.decompress(keyword_id_string)
self.assertIn("how", values_from_string[1])
self.assertIn("now", values_from_string[1])
self.assertIn("brown", values_from_string[1])
for keyword_id in keyword_id_list:
AssignedKeyword.objects.create(
keyword_id=keyword_id, content_object=RichTextPage(pk=1)
)
assigned_keywords = AssignedKeyword.objects.all()
values_from_relation = keyword_widget.decompress(assigned_keywords)
self.assertIn("how", values_from_relation[1])
self.assertIn("now", values_from_relation[1])
self.assertIn("brown", values_from_relation[1])
self.assertEqual(("", ""), keyword_widget.decompress(None))
| true | true |
f7371d98ef6f603bc1192d540e496dcf869ac1ed | 4,335 | py | Python | experiments/ashvin/icml2020/hand/adaptive/clamp_easy1.py | Asap7772/railrl_evalsawyer | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | [
"MIT"
] | null | null | null | experiments/ashvin/icml2020/hand/adaptive/clamp_easy1.py | Asap7772/railrl_evalsawyer | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | [
"MIT"
] | null | null | null | experiments/ashvin/icml2020/hand/adaptive/clamp_easy1.py | Asap7772/railrl_evalsawyer | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | [
"MIT"
] | null | null | null | """
AWR + SAC from demo experiment
"""
from rlkit.demos.source.dict_to_mdp_path_loader import DictToMDPPathLoader
from rlkit.launchers.experiments.awac.awac_rl import experiment, process_args
import rlkit.misc.hyperparameter as hyp
from rlkit.launchers.arglauncher import run_variants
from rlkit.torch.sac.policies import GaussianPolicy
from rlkit.torch.networks import Clamp
if __name__ == "__main__":
variant = dict(
num_epochs=5001,
num_eval_steps_per_epoch=1000,
num_trains_per_train_loop=1000,
num_expl_steps_per_train_loop=1000,
min_num_steps_before_training=1000,
max_path_length=1000,
batch_size=1024,
replay_buffer_size=int(1E6),
layer_size=256,
policy_class=GaussianPolicy,
policy_kwargs=dict(
hidden_sizes=[256, 256, 256, 256],
max_log_std=0,
min_log_std=-6,
std_architecture="values",
# num_gaussians=1,
),
algorithm="SAC",
version="normal",
collection_mode='batch',
trainer_kwargs=dict(
discount=0.99,
soft_target_tau=5e-3,
target_update_period=1,
policy_lr=3E-4,
qf_lr=3E-4,
reward_scale=1,
beta=1,
use_automatic_entropy_tuning=False,
alpha=0,
compute_bc=False,
bc_num_pretrain_steps=0,
q_num_pretrain1_steps=0,
q_num_pretrain2_steps=25000,
policy_weight_decay=1e-4,
q_weight_decay=0,
bc_loss_type="mse",
rl_weight=1.0,
use_awr_update=True,
use_reparam_update=False,
reparam_weight=0.0,
awr_weight=0.0,
bc_weight=1.0,
post_bc_pretrain_hyperparams=dict(
bc_weight=0.0,
compute_bc=False,
),
reward_transform_kwargs=None, # r' = r + 1
terminal_transform_kwargs=None, # t = 0
),
launcher_config=dict(
num_exps_per_instance=1,
region='us-west-2',
),
path_loader_class=DictToMDPPathLoader,
path_loader_kwargs=dict(
obs_key="state_observation",
demo_paths=[
# dict(
# path="demos/icml2020/hand/pen2_sparse.npy",
# obs_dict=True,
# is_demo=True,
# ),
# dict(
# path="demos/icml2020/hand/pen_bc5.npy",
# obs_dict=False,
# is_demo=False,
# train_split=0.9,
# ),
],
),
add_env_demos=True,
add_env_offpolicy_data=True,
# logger_variant=dict(
# tensorboard=True,
# ),
load_demos=True,
pretrain_policy=True,
pretrain_rl=True,
# save_pretrained_algorithm=True,
# snapshot_mode="all",
)
search_space = {
'env': ["pen-sparse-v0", "door-sparse-v0"],
# 'env': ["relocate-sparse-v0", ],
'trainer_kwargs.bc_loss_type': ["mle"],
'trainer_kwargs.awr_loss_type': ["mle"],
'seedid': range(3),
'trainer_kwargs.beta': [0.3, 0.5],
'trainer_kwargs.reparam_weight': [0.0, ],
'trainer_kwargs.awr_weight': [1.0],
'trainer_kwargs.bc_weight': [1.0, ],
'policy_kwargs.std_architecture': ["values", ],
# 'trainer_kwargs.compute_bc': [True, ],
'trainer_kwargs.awr_use_mle_for_vf': [True, ],
'trainer_kwargs.awr_sample_actions': [False, ],
'trainer_kwargs.awr_min_q': [True, ],
'trainer_kwargs.q_weight_decay': [0],
'trainer_kwargs.reward_transform_kwargs': [None, ],
'trainer_kwargs.terminal_transform_kwargs': [dict(m=0, b=0), ],
'qf_kwargs.output_activation': [Clamp(max=0)],
'trainer_kwargs.train_bc_on_rl_buffer':[True],
# 'policy_kwargs.num_gaussians': [1, ],
}
sweeper = hyp.DeterministicHyperparameterSweeper(
search_space, default_parameters=variant,
)
variants = []
for variant in sweeper.iterate_hyperparameters():
variants.append(variant)
run_variants(experiment, variants, process_args)
| 30.528169 | 77 | 0.570934 |
from rlkit.demos.source.dict_to_mdp_path_loader import DictToMDPPathLoader
from rlkit.launchers.experiments.awac.awac_rl import experiment, process_args
import rlkit.misc.hyperparameter as hyp
from rlkit.launchers.arglauncher import run_variants
from rlkit.torch.sac.policies import GaussianPolicy
from rlkit.torch.networks import Clamp
if __name__ == "__main__":
variant = dict(
num_epochs=5001,
num_eval_steps_per_epoch=1000,
num_trains_per_train_loop=1000,
num_expl_steps_per_train_loop=1000,
min_num_steps_before_training=1000,
max_path_length=1000,
batch_size=1024,
replay_buffer_size=int(1E6),
layer_size=256,
policy_class=GaussianPolicy,
policy_kwargs=dict(
hidden_sizes=[256, 256, 256, 256],
max_log_std=0,
min_log_std=-6,
std_architecture="values",
),
algorithm="SAC",
version="normal",
collection_mode='batch',
trainer_kwargs=dict(
discount=0.99,
soft_target_tau=5e-3,
target_update_period=1,
policy_lr=3E-4,
qf_lr=3E-4,
reward_scale=1,
beta=1,
use_automatic_entropy_tuning=False,
alpha=0,
compute_bc=False,
bc_num_pretrain_steps=0,
q_num_pretrain1_steps=0,
q_num_pretrain2_steps=25000,
policy_weight_decay=1e-4,
q_weight_decay=0,
bc_loss_type="mse",
rl_weight=1.0,
use_awr_update=True,
use_reparam_update=False,
reparam_weight=0.0,
awr_weight=0.0,
bc_weight=1.0,
post_bc_pretrain_hyperparams=dict(
bc_weight=0.0,
compute_bc=False,
),
reward_transform_kwargs=None,
terminal_transform_kwargs=None, # t = 0
),
launcher_config=dict(
num_exps_per_instance=1,
region='us-west-2',
),
path_loader_class=DictToMDPPathLoader,
path_loader_kwargs=dict(
obs_key="state_observation",
demo_paths=[
# dict(
# path="demos/icml2020/hand/pen2_sparse.npy",
# obs_dict=True,
# is_demo=True,
# ),
# dict(
# path="demos/icml2020/hand/pen_bc5.npy",
# obs_dict=False,
# is_demo=False,
# train_split=0.9,
# ),
],
),
add_env_demos=True,
add_env_offpolicy_data=True,
# logger_variant=dict(
# tensorboard=True,
# ),
load_demos=True,
pretrain_policy=True,
pretrain_rl=True,
# save_pretrained_algorithm=True,
# snapshot_mode="all",
)
search_space = {
'env': ["pen-sparse-v0", "door-sparse-v0"],
# 'env': ["relocate-sparse-v0", ],
'trainer_kwargs.bc_loss_type': ["mle"],
'trainer_kwargs.awr_loss_type': ["mle"],
'seedid': range(3),
'trainer_kwargs.beta': [0.3, 0.5],
'trainer_kwargs.reparam_weight': [0.0, ],
'trainer_kwargs.awr_weight': [1.0],
'trainer_kwargs.bc_weight': [1.0, ],
'policy_kwargs.std_architecture': ["values", ],
# 'trainer_kwargs.compute_bc': [True, ],
'trainer_kwargs.awr_use_mle_for_vf': [True, ],
'trainer_kwargs.awr_sample_actions': [False, ],
'trainer_kwargs.awr_min_q': [True, ],
'trainer_kwargs.q_weight_decay': [0],
'trainer_kwargs.reward_transform_kwargs': [None, ],
'trainer_kwargs.terminal_transform_kwargs': [dict(m=0, b=0), ],
'qf_kwargs.output_activation': [Clamp(max=0)],
'trainer_kwargs.train_bc_on_rl_buffer':[True],
# 'policy_kwargs.num_gaussians': [1, ],
}
sweeper = hyp.DeterministicHyperparameterSweeper(
search_space, default_parameters=variant,
)
variants = []
for variant in sweeper.iterate_hyperparameters():
variants.append(variant)
run_variants(experiment, variants, process_args)
| true | true |
f7371e7a24db2a163c30e05bd54a1875e8e24373 | 5,655 | py | Python | kernel/base_kernel.py | omarsou/kernel_method_kaggle_challenge | 0f2e85166112b231699d9c9f7e3ae894e5ff7766 | [
"Apache-2.0"
] | 1 | 2021-03-26T08:40:07.000Z | 2021-03-26T08:40:07.000Z | kernel/base_kernel.py | omarsou/kernel_method_kaggle_challenge | 0f2e85166112b231699d9c9f7e3ae894e5ff7766 | [
"Apache-2.0"
] | null | null | null | kernel/base_kernel.py | omarsou/kernel_method_kaggle_challenge | 0f2e85166112b231699d9c9f7e3ae894e5ff7766 | [
"Apache-2.0"
] | null | null | null | import numpy as np
import pickle
class Kernel:
def __init__(self):
self.train_phi = None
self.K_matrix = None
self.test_phi = None
self.X_train = None
pass
def build_gram_matrix(self, X):
raise NotImplementedError("Method build_gram_matrix not implemented.")
def test(self, x):
raise NotImplementedError("Method test not implemented.")
def save_kernel(self, path):
with open(path, "wb") as f:
pickle.dump(self, f)
@staticmethod
def load_kernel(path):
with open(path, "rb") as f:
kernel_class = pickle.load(f)
return kernel_class
class KernelIPExplicit(Kernel):
def __init__(self):
super().__init__()
def build_gram_matrix(self, X):
n = X.shape[0]
output = np.zeros((n, n))
self.train_phi = list()
for i in range(n):
item = X.loc[i, X.columns[1]]
self.train_phi.append(self.make_phi(item))
for i in range(n):
for j in range(i, n):
value = self.inner_product_phi(self.train_phi[i], self.train_phi[j])
output[i, j] = output[j, i] = value
self.K_matrix = output
def test(self, indice_x):
n = len(self.train_phi)
output = np.zeros(n)
for i in range(n):
output[i] = self.inner_product_phi(self.train_phi[i], self.test_phi[indice_x])
return output
def make_test_phi(self, X):
n = X.shape[0]
self.test_phi = []
for i in range(n):
item = X.loc[i, X.columns[1]]
self.test_phi.append(self.make_phi(item, train=False))
return
def make_phi(self, item, train=True):
raise NotImplementedError("Method make_phi not implemented.")
def inner_product_phi(self, phi1, phi2):
raise NotImplementedError("Method inner_product_phi not implemented.")
class KernelIPImplicit(Kernel):
def __init__(self):
super().__init__()
def build_gram_matrix(self, X):
n = X.shape[0]
self.X_train = X
output = np.zeros((n, n))
for i in range(n):
for j in range(i, n):
value1, value2 = X.loc[i, X.columns[1]], X.loc[j, X.columns[1]]
output[i, j] = output[j, i] = self.K(value1, value2)
self.K_matrix = output
def test(self, x):
X = self.X_train
n = X.shape[0]
output = np.zeros(n)
for i in range(n):
output[i] = self.K(X.loc[i, X.columns[1]], x)
def K(self, item1, item2):
raise NotImplementedError("Method K not implemented")
class SumKernel:
def __init__(self):
self.train_phi = list()
self.K_matrix = None
self.test_phi = None
self.X_train = None
pass
def build_gram_matrix(self, X):
raise NotImplementedError("Method build_gram_matrix_sum not implemented.")
def build_gram_matrix_one(self, X, param):
raise NotImplementedError("Method build_gram_matrix not implemented.")
def test(self, x):
raise NotImplementedError("Method test not implemented.")
def save_kernel(self, path):
with open(path, "wb") as f:
pickle.dump(self, f)
@staticmethod
def load_kernel(path):
with open(path, "rb") as f:
kernel_class = pickle.load(f)
return kernel_class
class SumKernelIPExplicitError(BaseException):
pass
class SumKernelIPExplicit(SumKernel):
def __init__(self, lst_params):
super().__init__()
if not isinstance(lst_params, list):
raise SumKernelIPExplicitError("If you want to use only one param, you should use the individual param "
"class method.")
self.lst_params = lst_params
def build_gram_matrix(self, X):
n = X.shape[0]
output = np.zeros((n, n))
for params in self.lst_params:
intermediate_output, train_phi = self.build_gram_matrix_one(X, params)
self.train_phi.append(train_phi)
output += intermediate_output
self.K_matrix = output
def build_gram_matrix_one(self, X, params):
n = X.shape[0]
output = np.zeros((n, n))
train_phi = list()
for i in range(n):
item = X.loc[i, X.columns[1]]
train_phi.append(self.make_phi(item, True, params))
for i in range(n):
for j in range(i, n):
value = self.inner_product_phi(train_phi[i], train_phi[j])
output[i, j] = output[j, i] = value
return output, train_phi
def test(self, indice_x):
n = len(self.train_phi[0])
output = np.zeros(n)
for idx, params in enumerate(self.lst_params):
current_output = 0
for i in range(n):
current_output += self.inner_product_phi(self.train_phi[idx][i], self.test_phi[idx][indice_x])
return output
def make_test_phi(self, X):
n = X.shape[0]
self.test_phi = []
for params in self.lst_params:
current_test_phi = list()
for i in range(n):
item = X.loc[i, X.columns[1]]
current_test_phi.append(self.make_phi(item, train=False, params=params))
self.test_phi.append(current_test_phi)
return
def make_phi(self, item, train=True, params=None):
raise NotImplementedError("Method make_phi not implemented.")
def inner_product_phi(self, phi1, phi2):
raise NotImplementedError("Method inner_product_phi not implemented.")
| 30.567568 | 116 | 0.589036 | import numpy as np
import pickle
class Kernel:
def __init__(self):
self.train_phi = None
self.K_matrix = None
self.test_phi = None
self.X_train = None
pass
def build_gram_matrix(self, X):
raise NotImplementedError("Method build_gram_matrix not implemented.")
def test(self, x):
raise NotImplementedError("Method test not implemented.")
def save_kernel(self, path):
with open(path, "wb") as f:
pickle.dump(self, f)
@staticmethod
def load_kernel(path):
with open(path, "rb") as f:
kernel_class = pickle.load(f)
return kernel_class
class KernelIPExplicit(Kernel):
def __init__(self):
super().__init__()
def build_gram_matrix(self, X):
n = X.shape[0]
output = np.zeros((n, n))
self.train_phi = list()
for i in range(n):
item = X.loc[i, X.columns[1]]
self.train_phi.append(self.make_phi(item))
for i in range(n):
for j in range(i, n):
value = self.inner_product_phi(self.train_phi[i], self.train_phi[j])
output[i, j] = output[j, i] = value
self.K_matrix = output
def test(self, indice_x):
n = len(self.train_phi)
output = np.zeros(n)
for i in range(n):
output[i] = self.inner_product_phi(self.train_phi[i], self.test_phi[indice_x])
return output
def make_test_phi(self, X):
n = X.shape[0]
self.test_phi = []
for i in range(n):
item = X.loc[i, X.columns[1]]
self.test_phi.append(self.make_phi(item, train=False))
return
def make_phi(self, item, train=True):
raise NotImplementedError("Method make_phi not implemented.")
def inner_product_phi(self, phi1, phi2):
raise NotImplementedError("Method inner_product_phi not implemented.")
class KernelIPImplicit(Kernel):
def __init__(self):
super().__init__()
def build_gram_matrix(self, X):
n = X.shape[0]
self.X_train = X
output = np.zeros((n, n))
for i in range(n):
for j in range(i, n):
value1, value2 = X.loc[i, X.columns[1]], X.loc[j, X.columns[1]]
output[i, j] = output[j, i] = self.K(value1, value2)
self.K_matrix = output
def test(self, x):
X = self.X_train
n = X.shape[0]
output = np.zeros(n)
for i in range(n):
output[i] = self.K(X.loc[i, X.columns[1]], x)
def K(self, item1, item2):
raise NotImplementedError("Method K not implemented")
class SumKernel:
def __init__(self):
self.train_phi = list()
self.K_matrix = None
self.test_phi = None
self.X_train = None
pass
def build_gram_matrix(self, X):
raise NotImplementedError("Method build_gram_matrix_sum not implemented.")
def build_gram_matrix_one(self, X, param):
raise NotImplementedError("Method build_gram_matrix not implemented.")
def test(self, x):
raise NotImplementedError("Method test not implemented.")
def save_kernel(self, path):
with open(path, "wb") as f:
pickle.dump(self, f)
@staticmethod
def load_kernel(path):
with open(path, "rb") as f:
kernel_class = pickle.load(f)
return kernel_class
class SumKernelIPExplicitError(BaseException):
pass
class SumKernelIPExplicit(SumKernel):
    """Sum kernel whose components are computed from explicit feature maps.

    Each entry of ``lst_params`` defines one kernel component; the total
    kernel value is the sum of the component inner products.
    """

    def __init__(self, lst_params):
        """
        :param lst_params: list of per-component parameter objects.
        :raises SumKernelIPExplicitError: if ``lst_params`` is not a list.
        """
        super().__init__()
        if not isinstance(lst_params, list):
            raise SumKernelIPExplicitError("If you want to use only one param, you should use the individual param "
                                           "class method.")
        self.lst_params = lst_params

    def build_gram_matrix(self, X):
        """Build the full training Gram matrix by summing every component."""
        n = X.shape[0]
        output = np.zeros((n, n))
        for params in self.lst_params:
            intermediate_output, train_phi = self.build_gram_matrix_one(X, params)
            self.train_phi.append(train_phi)
            output += intermediate_output
        self.K_matrix = output

    def build_gram_matrix_one(self, X, params):
        """Build one component's Gram matrix and its per-sample feature maps.

        :return: tuple ``(n x n Gram matrix, list of feature maps)``.
        """
        n = X.shape[0]
        output = np.zeros((n, n))
        train_phi = list()
        for i in range(n):
            item = X.loc[i, X.columns[1]]
            train_phi.append(self.make_phi(item, True, params))
        for i in range(n):
            # The Gram matrix is symmetric: compute the upper triangle only.
            for j in range(i, n):
                value = self.inner_product_phi(train_phi[i], train_phi[j])
                output[i, j] = output[j, i] = value
        return output, train_phi

    def test(self, indice_x):
        """Kernel values between test sample ``indice_x`` and every training sample.

        :param indice_x: index of the test sample inside ``self.test_phi``.
        :return: array of length ``n_train``, summed over all components.
        """
        n = len(self.train_phi[0])
        output = np.zeros(n)
        for idx, params in enumerate(self.lst_params):
            for i in range(n):
                # Bug fix: accumulate into the per-training-sample slot. The
                # previous version summed into a local scalar that was then
                # discarded, so the method always returned zeros.
                output[i] += self.inner_product_phi(self.train_phi[idx][i], self.test_phi[idx][indice_x])
        return output

    def make_test_phi(self, X):
        """Precompute feature maps for the test set, one list per component."""
        n = X.shape[0]
        self.test_phi = []
        for params in self.lst_params:
            current_test_phi = list()
            for i in range(n):
                item = X.loc[i, X.columns[1]]
                current_test_phi.append(self.make_phi(item, train=False, params=params))
            self.test_phi.append(current_test_phi)
        return

    def make_phi(self, item, train=True, params=None):
        """Abstract hook: build the explicit feature map of ``item``."""
        raise NotImplementedError("Method make_phi not implemented.")

    def inner_product_phi(self, phi1, phi2):
        """Abstract hook: inner product between two feature maps."""
        raise NotImplementedError("Method inner_product_phi not implemented.")
| true | true |
f7371ec75a914d7e5a9387e66d158edac391f0f3 | 89 | py | Python | app/main.py | dimigor/musicQuest | 60580ee9b5d95dfe17bf4a9d0c8cb1dad2877605 | [
"MIT"
] | null | null | null | app/main.py | dimigor/musicQuest | 60580ee9b5d95dfe17bf4a9d0c8cb1dad2877605 | [
"MIT"
] | 7 | 2019-02-19T18:34:26.000Z | 2021-06-01T23:24:39.000Z | app/main.py | dimigor/musicQuest | 60580ee9b5d95dfe17bf4a9d0c8cb1dad2877605 | [
"MIT"
] | null | null | null | """Main module"""
def main():
    """Application entry point; currently performs no work."""
    return None


if __name__ == '__main__':
    main()
| 11.125 | 26 | 0.516854 |
def main():
    """Main application entry point (no-op)."""


if __name__ == '__main__':
    main()
| true | true |
f7371eebe3802355cc2ccb6ed6ade7e8b307e78a | 7,641 | py | Python | data_voc.py | Nebula4869/real-time-object-detection-YOLOv4 | a7b692999210747fd49cec2c35f2b7d8d5b7eecc | [
"MIT"
] | 2 | 2020-06-19T07:15:45.000Z | 2020-10-25T15:31:55.000Z | data_voc.py | Nebula4869/TensorFlow_YOLOv4 | a7b692999210747fd49cec2c35f2b7d8d5b7eecc | [
"MIT"
] | null | null | null | data_voc.py | Nebula4869/TensorFlow_YOLOv4 | a7b692999210747fd49cec2c35f2b7d8d5b7eecc | [
"MIT"
] | 2 | 2020-08-13T09:20:36.000Z | 2020-10-10T11:51:11.000Z | import numpy as np
import random
import xml
import cv2
import os
def read_file(file_name):
    """Read every meaningful line of ``file_name``.

    :return: list of stripped, non-empty lines, or ``None`` when the file
        does not exist.
    """
    if not os.path.isfile(file_name):
        return None
    with open(file_name, 'r') as handle:
        # Drop trailing newlines and surrounding whitespace, skip blanks.
        stripped = (raw.strip('\n').strip() for raw in handle.readlines())
        return [entry for entry in stripped if entry]
def word2id(names_file):
    """Map each class name listed in ``names_file`` to its line index.

    :return: dict ``{name: id}``.
    """
    contents = read_file(names_file)
    return {str(name): index for index, name in enumerate(contents)}
def parse_voc_xml(file_name, names_dict):
    """Parse one VOC annotation file into normalized detection labels.

    :param file_name: path to the VOC ``.xml`` annotation file.
    :param names_dict: mapping from class name to integer id.
    :return: list of ``[id, x, y, w, h]`` entries (center/size normalized to
        ``[0, 1]``), or ``None`` when the file does not exist.
    """
    # Fix: the file only does `import xml`, which does not make the
    # `xml.dom.minidom` submodule available; import it explicitly so this
    # function works regardless of what other modules imported first.
    from xml.dom import minidom

    result = []
    if not os.path.isfile(file_name):
        return None
    doc = minidom.parse(file_name)
    root = doc.documentElement
    size = root.getElementsByTagName('size')[0]
    width = int(size.getElementsByTagName('width')[0].childNodes[0].data)
    height = int(size.getElementsByTagName('height')[0].childNodes[0].data)
    objs = root.getElementsByTagName('object')
    for obj in objs:
        name = obj.getElementsByTagName('name')[0].childNodes[0].data
        name_id = names_dict[name]
        bndbox = obj.getElementsByTagName('bndbox')[0]
        xmin = int(float(bndbox.getElementsByTagName('xmin')[0].childNodes[0].data))
        ymin = int(float(bndbox.getElementsByTagName('ymin')[0].childNodes[0].data))
        xmax = int(float(bndbox.getElementsByTagName('xmax')[0].childNodes[0].data))
        ymax = int(float(bndbox.getElementsByTagName('ymax')[0].childNodes[0].data))
        # Convert corner coordinates to normalized center x/y and width/height.
        x = (xmax + xmin) / 2.0 / width
        w = (xmax - xmin) / width
        y = (ymax + ymin) / 2.0 / height
        h = (ymax - ymin) / height
        result.append([name_id, x, y, w, h])
    return result
class Data:
    """VOC detection data loader: yields batches of images together with
    YOLO-style label tensors at three output strides (8, 16 and 32)."""
    def __init__(self, voc_root_dir, voc_dir_ls, voc_names, class_num, batch_size, anchors, multi_scale_img=True, width=608, height=608):
        """
        :param voc_root_dir: root directory holding the VOC datasets.
        :param voc_dir_ls: list of VOC sub-dataset directory names.
        :param voc_names: path to the class-names file.
        :param class_num: number of classes.
        :param batch_size: images per batch.
        :param anchors: flat anchor sizes in pixels, reshaped to [9, 2].
        :param multi_scale_img: randomly rescale the input size every 10 batches.
        :param width: network input width.
        :param height: network input height.
        """
        self.data_dirs = [os.path.join(os.path.join(voc_root_dir, voc_dir), "JPEGImages") for voc_dir in voc_dir_ls]    # image folders
        self.class_num = class_num  # number of classes
        self.batch_size = batch_size
        self.anchors = np.asarray(anchors).astype(np.float32).reshape([-1, 2]) / [width, height]    # [9, 2], normalized by input size
        print("anchors:\n", self.anchors)
        self.multi_scale_img = multi_scale_img  # enable multi-scale image resizing
        self.imgs_path = []
        self.labels_path = []
        self.num_batch = 0  # number of batches produced so far
        self.num_imgs = 0  # total number of images
        self.width = width
        self.height = height
        self.names_dict = word2id(voc_names)  # class name -> id dictionary
        # Collect image/label paths and counters.
        self.__init_args()
    # Collect image and matching annotation file paths.
    def __init_args(self):
        print("message:开始初始化路径")
        # init imgs path
        for voc_dir in self.data_dirs:
            for img_name in os.listdir(voc_dir):
                img_path = os.path.join(voc_dir, img_name)
                label_path = img_path.replace("JPEGImages", "Annotations")
                label_path = label_path.replace(img_name.split('.')[-1], "xml")
                if not os.path.isfile(img_path):
                    print("warning:VOC 图片文件'"+str(img_path)+"'不存在")
                    continue
                if not os.path.isfile(label_path):
                    print("warning:VOC 标签文件'"+str(label_path)+"'不存在")
                    continue
                self.imgs_path.append(img_path)
                self.labels_path.append(label_path)
                self.num_imgs += 1
        print("message:VOC 数据初始化完成,一共有 "+str(self.num_imgs)+" 张图片")
        if self.num_imgs <= 0:
            raise ValueError("没有可训练的图片, 程序退出")
        return
    # Read one image.
    def read_img(self, img_file):
        """
        Read ``img_file`` and resize it to the current network input size.
        :return: float32 RGB image scaled to [0, 1], or None if missing.
        """
        if not os.path.exists(img_file):
            return None
        img = cv2.imread(img_file)
        img = cv2.resize(img, (self.width, self.height))
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        img = img.astype(np.float32)
        img = img/255.0
        return img
    # Read one annotation file.
    def read_label(self, label_file, names_dict):
        """
        Parse ``label_file`` and build the label tensors for the three strides.
        :return: (label_y1, label_y2, label_y3), or (None, None, None) on failure.
        """
        contents = parse_voc_xml(label_file, names_dict)
        if not contents:
            return None, None, None
        label_y1 = np.zeros((self.height // 32, self.width // 32, 3, 5 + self.class_num), np.float32)
        label_y2 = np.zeros((self.height // 16, self.width // 16, 3, 5 + self.class_num), np.float32)
        label_y3 = np.zeros((self.height // 8, self.width // 8, 3, 5 + self.class_num), np.float32)
        y_true = [label_y3, label_y2, label_y1]
        ratio = {0: 8, 1: 16, 2: 32}
        for label in contents:
            label_id = int(label[0])
            box = np.asarray(label[1: 5]).astype(np.float32)    # the label stores normalized x, y, w, h
            best_giou = 0
            best_index = 0
            for i in range(len(self.anchors)):
                min_wh = np.minimum(box[2:4], self.anchors[i])
                max_wh = np.maximum(box[2:4], self.anchors[i])
                giou = (min_wh[0] * min_wh[1]) / (max_wh[0] * max_wh[1])
                if giou > best_giou:
                    best_giou = giou
                    best_index = i
            # Anchors 0-2 -> stride 8, 3-5 -> stride 16, 6-8 -> stride 32.
            x = int(np.floor(box[0] * self.width / ratio[best_index // 3]))
            y = int(np.floor(box[1] * self.height / ratio[best_index // 3]))
            k = best_index % 3
            y_true[best_index // 3][y, x, k, 0:4] = box
            y_true[best_index // 3][y, x, k, 4:5] = 1.0
            y_true[best_index // 3][y, x, k, 5 + label_id] = 1.0
        return label_y1, label_y2, label_y3
    # Load one batch of data.
    def __get_data(self):
        """
        Load one batch of images and labels.
        :return: imgs, label_y1, label_y2, label_y3
        """
        # Randomize the input size once every 10 batches.
        if self.multi_scale_img and (self.num_batch % 10 == 0):
            random_size = random.randint(10, 19) * 32
            self.width = self.height = random_size
        imgs = []
        labels_y1, labels_y2, labels_y3 = [], [], []
        count = 0
        while count < self.batch_size:
            curr_index = random.randint(0, self.num_imgs - 1)
            img_name = self.imgs_path[curr_index]
            label_name = self.labels_path[curr_index]
            img = self.read_img(img_name)
            label_y1, label_y2, label_y3 = self.read_label(label_name, self.names_dict)
            if img is None:
                print("VOC 文件'" + img_name + "'读取异常")
                continue
            if label_y1 is None:
                print("VOC 文件'" + label_name + "'读取异常")
                continue
            imgs.append(img)
            labels_y1.append(label_y1)
            labels_y2.append(label_y2)
            labels_y3.append(label_y3)
            count += 1
        self.num_batch += 1
        imgs = np.asarray(imgs)
        labels_y1 = np.asarray(labels_y1)
        labels_y2 = np.asarray(labels_y2)
        labels_y3 = np.asarray(labels_y3)
        return imgs, labels_y1, labels_y2, labels_y3
    # Iterator protocol.
    def __next__(self):
        """
        Fetch the next batch of data.
        """
        return self.__get_data()
| 33.366812 | 137 | 0.55477 | import numpy as np
import random
import xml
import cv2
import os
def read_file(file_name):
    """Read ``file_name`` and return its stripped, non-empty lines.

    :return: list of lines, or None when the file does not exist.
    """
    if not os.path.isfile(file_name):
        return None
    result = []
    with open(file_name, 'r') as f:
        for line in f.readlines():
            # Drop the trailing newline and surrounding whitespace.
            line = line.strip('\n').strip()
            if len(line) == 0:
                continue
            result.append(line)
    return result
def word2id(names_file):
    """Build a mapping from class name to integer id (line index).

    :return: dict ``{name: id}``.
    """
    id_dict = {}
    contents = read_file(names_file)
    for i in range(len(contents)):
        id_dict[str(contents[i])] = i
    return id_dict
def parse_voc_xml(file_name, names_dict):
    """Parse one VOC ``.xml`` annotation file.

    :param names_dict: mapping from class name to integer id.
    :return: list of ``[id, x, y, w, h]`` entries (center/size normalized to
        [0, 1]), or None when the file does not exist.
    """
    result = []
    if not os.path.isfile(file_name):
        return None
    # NOTE(review): this relies on ``xml.dom.minidom`` being importable even
    # though only ``import xml`` appears at the top of the file — confirm the
    # submodule is imported elsewhere before this runs.
    doc = xml.dom.minidom.parse(file_name)
    root = doc.documentElement
    size = root.getElementsByTagName('size')[0]
    width = int(size.getElementsByTagName('width')[0].childNodes[0].data)
    height = int(size.getElementsByTagName('height')[0].childNodes[0].data)
    objs = root.getElementsByTagName('object')
    for obj in objs:
        name = obj.getElementsByTagName('name')[0].childNodes[0].data
        name_id = names_dict[name]
        bndbox = obj.getElementsByTagName('bndbox')[0]
        xmin = int(float(bndbox.getElementsByTagName('xmin')[0].childNodes[0].data))
        ymin = int(float(bndbox.getElementsByTagName('ymin')[0].childNodes[0].data))
        xmax = int(float(bndbox.getElementsByTagName('xmax')[0].childNodes[0].data))
        ymax = int(float(bndbox.getElementsByTagName('ymax')[0].childNodes[0].data))
        # Convert corner coordinates to normalized center x/y and width/height.
        x = (xmax + xmin) / 2.0 / width
        w = (xmax - xmin) / width
        y = (ymax + ymin) / 2.0 / height
        h = (ymax - ymin) / height
        result.append([name_id, x, y, w, h])
    return result
class Data:
    """VOC detection data loader: yields batches of images together with
    YOLO-style label tensors at three output strides (8, 16 and 32)."""
    def __init__(self, voc_root_dir, voc_dir_ls, voc_names, class_num, batch_size, anchors, multi_scale_img=True, width=608, height=608):
        """
        :param voc_root_dir: root directory holding the VOC datasets.
        :param voc_dir_ls: list of VOC sub-dataset directory names.
        :param voc_names: path to the class-names file.
        :param class_num: number of classes.
        :param batch_size: images per batch.
        :param anchors: flat anchor sizes in pixels, reshaped to [9, 2].
        :param multi_scale_img: randomly rescale the input size every 10 batches.
        :param width: network input width.
        :param height: network input height.
        """
        self.data_dirs = [os.path.join(os.path.join(voc_root_dir, voc_dir), "JPEGImages") for voc_dir in voc_dir_ls]  # image folders
        self.class_num = class_num  # number of classes
        self.batch_size = batch_size
        self.anchors = np.asarray(anchors).astype(np.float32).reshape([-1, 2]) / [width, height]  # [9, 2], normalized by input size
        print("anchors:\n", self.anchors)
        self.multi_scale_img = multi_scale_img  # enable multi-scale image resizing
        self.imgs_path = []
        self.labels_path = []
        self.num_batch = 0  # number of batches produced so far
        self.num_imgs = 0  # total number of images
        self.width = width
        self.height = height
        self.names_dict = word2id(voc_names)  # class name -> id dictionary
        # Collect image/label paths and counters.
        self.__init_args()
    # Collect image and matching annotation file paths.
    def __init_args(self):
        print("message:开始初始化路径")
        for voc_dir in self.data_dirs:
            for img_name in os.listdir(voc_dir):
                img_path = os.path.join(voc_dir, img_name)
                label_path = img_path.replace("JPEGImages", "Annotations")
                label_path = label_path.replace(img_name.split('.')[-1], "xml")
                if not os.path.isfile(img_path):
                    print("warning:VOC 图片文件'"+str(img_path)+"'不存在")
                    continue
                if not os.path.isfile(label_path):
                    print("warning:VOC 标签文件'"+str(label_path)+"'不存在")
                    continue
                self.imgs_path.append(img_path)
                self.labels_path.append(label_path)
                self.num_imgs += 1
        print("message:VOC 数据初始化完成,一共有 "+str(self.num_imgs)+" 张图片")
        if self.num_imgs <= 0:
            raise ValueError("没有可训练的图片, 程序退出")
        return
    # Read one image.
    def read_img(self, img_file):
        """Read ``img_file`` resized to the current input size.

        :return: float32 RGB image scaled to [0, 1], or None if missing.
        """
        if not os.path.exists(img_file):
            return None
        img = cv2.imread(img_file)
        img = cv2.resize(img, (self.width, self.height))
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        img = img.astype(np.float32)
        img = img/255.0
        return img
    # Read one annotation file.
    def read_label(self, label_file, names_dict):
        """Parse ``label_file`` into the three stride label tensors.

        :return: (label_y1, label_y2, label_y3) or (None, None, None).
        """
        contents = parse_voc_xml(label_file, names_dict)
        if not contents:
            return None, None, None
        label_y1 = np.zeros((self.height // 32, self.width // 32, 3, 5 + self.class_num), np.float32)
        label_y2 = np.zeros((self.height // 16, self.width // 16, 3, 5 + self.class_num), np.float32)
        label_y3 = np.zeros((self.height // 8, self.width // 8, 3, 5 + self.class_num), np.float32)
        y_true = [label_y3, label_y2, label_y1]
        ratio = {0: 8, 1: 16, 2: 32}
        for label in contents:
            label_id = int(label[0])
            box = np.asarray(label[1: 5]).astype(np.float32)  # normalized x, y, w, h
            best_giou = 0
            best_index = 0
            for i in range(len(self.anchors)):
                min_wh = np.minimum(box[2:4], self.anchors[i])
                max_wh = np.maximum(box[2:4], self.anchors[i])
                giou = (min_wh[0] * min_wh[1]) / (max_wh[0] * max_wh[1])
                if giou > best_giou:
                    best_giou = giou
                    best_index = i
            # Anchors 0-2 -> stride 8, 3-5 -> stride 16, 6-8 -> stride 32.
            x = int(np.floor(box[0] * self.width / ratio[best_index // 3]))
            y = int(np.floor(box[1] * self.height / ratio[best_index // 3]))
            k = best_index % 3
            y_true[best_index // 3][y, x, k, 0:4] = box
            y_true[best_index // 3][y, x, k, 4:5] = 1.0
            y_true[best_index // 3][y, x, k, 5 + label_id] = 1.0
        return label_y1, label_y2, label_y3
    # Load one batch of data.
    def __get_data(self):
        """Load one batch of images and label tensors."""
        # Randomize the input size once every 10 batches.
        if self.multi_scale_img and (self.num_batch % 10 == 0):
            random_size = random.randint(10, 19) * 32
            self.width = self.height = random_size
        imgs = []
        labels_y1, labels_y2, labels_y3 = [], [], []
        count = 0
        while count < self.batch_size:
            curr_index = random.randint(0, self.num_imgs - 1)
            img_name = self.imgs_path[curr_index]
            label_name = self.labels_path[curr_index]
            img = self.read_img(img_name)
            label_y1, label_y2, label_y3 = self.read_label(label_name, self.names_dict)
            if img is None:
                print("VOC 文件'" + img_name + "'读取异常")
                continue
            if label_y1 is None:
                print("VOC 文件'" + label_name + "'读取异常")
                continue
            imgs.append(img)
            labels_y1.append(label_y1)
            labels_y2.append(label_y2)
            labels_y3.append(label_y3)
            count += 1
        self.num_batch += 1
        imgs = np.asarray(imgs)
        labels_y1 = np.asarray(labels_y1)
        labels_y2 = np.asarray(labels_y2)
        labels_y3 = np.asarray(labels_y3)
        return imgs, labels_y1, labels_y2, labels_y3
    # Iterator protocol.
    def __next__(self):
        """Fetch the next batch of data."""
        return self.__get_data()
| true | true |
f7371f829f82414f3de9e91c766e37ed961a216c | 9,605 | py | Python | pennylane/__init__.py | gtr8/pennylane | fd055108ac73f02e55a44d19a62891dcd7726d4a | [
"Apache-2.0"
] | null | null | null | pennylane/__init__.py | gtr8/pennylane | fd055108ac73f02e55a44d19a62891dcd7726d4a | [
"Apache-2.0"
] | null | null | null | pennylane/__init__.py | gtr8/pennylane | fd055108ac73f02e55a44d19a62891dcd7726d4a | [
"Apache-2.0"
] | null | null | null | # Copyright 2018-2021 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This is the top level module from which all basic functions and classes of
PennyLane can be directly imported.
"""
from importlib import reload
import numpy as _np
import pkg_resources
from semantic_version import Spec, Version
import pennylane.init
import pennylane.fourier
import pennylane.kernels
import pennylane.math
import pennylane.operation
import pennylane.qaoa as qaoa
import pennylane.qnn
import pennylane.templates
from pennylane._device import Device, DeviceError
from pennylane._grad import grad, jacobian, finite_diff
from pennylane._qubit_device import QubitDevice
from pennylane._version import __version__
from pennylane.about import about
from pennylane.circuit_graph import CircuitGraph
from pennylane.configuration import Configuration
from pennylane.io import *
from pennylane.measure import density_matrix, expval, probs, sample, state, var
from pennylane.ops import *
from pennylane.optimize import *
from pennylane.qnode import QNode, qnode
from pennylane.templates import broadcast, layer, template
from pennylane.transforms import (
adjoint,
draw,
ControlledOperation,
ctrl,
measurement_grouping,
metric_tensor,
specs,
qfunc_transform,
single_tape_transform,
quantum_monte_carlo,
)
from pennylane.utils import inv
from pennylane.vqe import ExpvalCost, Hamiltonian, VQECost
# QueuingContext and collections needs to be imported after all other pennylane imports
from .collections import QNodeCollection, apply, dot, map, sum
from .queuing import QueuingContext
import pennylane.grouping # pylint:disable=wrong-import-order
# Look for an existing configuration file in the current working directory.
default_config = Configuration("config.toml")
class QuantumFunctionError(Exception):
    """Raised when a quantum function defines an illegal operation."""
def _get_device_entrypoints():
    """Returns a dictionary mapping the device short name to the
    loadable entrypoint"""
    entry_points = {}
    for entry in pkg_resources.iter_entry_points("pennylane.plugins"):
        entry_points[entry.name] = entry
    return entry_points
def refresh_devices():
    """Scan installed PennyLane plugins to refresh the device list.

    Side effect only: updates the module-level ``plugin_devices`` dictionary.
    """
    # We wish to retain the behaviour of a global plugin_devices dictionary,
    # as re-importing pkg_resources can be a very slow operation on systems
    # with a large number of installed packages.
    global plugin_devices  # pylint:disable=global-statement
    reload(pkg_resources)
    plugin_devices = _get_device_entrypoints()
# Initial scan of installed devices; refresh_devices() updates this later.
plugin_devices = _get_device_entrypoints()
# get chemistry plugin
class NestedAttrError:
    """Stand-in for the ``qchem`` module when PennyLane-QChem is missing.

    Any attempt to print an instance of this class, or to access one of its
    attributes, raises an ImportError that directs the user to the
    PennyLane-QChem installation instructions.
    """

    error_msg = (
        "PennyLane-QChem not installed. \n\nTo access the qchem "
        "module, you can install PennyLane-QChem via pip:"
        "\n\npip install pennylane-qchem"
        "\n\nFor more details, see the quantum chemistry documentation:"
        "\nhttps://pennylane.readthedocs.io/en/stable/introduction/chemistry.html"
    )

    def __str__(self):
        raise ImportError(self.error_msg) from None

    def __repr__(self):
        raise ImportError(self.error_msg) from None

    def __getattr__(self, name):
        raise ImportError(self.error_msg) from None
# Default to the error-raising mock; replaced below when the plugin is installed.
qchem = NestedAttrError()
for entry in pkg_resources.iter_entry_points("pennylane.qchem"):
    if entry.name == "OpenFermion":
        qchem = entry.load()
def device(name, *args, **kwargs):
    r"""device(name, wires=1, *args, **kwargs)
    Load a :class:`~.Device` and return the instance.

    This function is used to load a particular quantum device,
    which can then be used to construct QNodes.

    PennyLane comes with support for the following devices:

    * :mod:`'default.qubit' <pennylane.devices.default_qubit>`: a simple
      state simulator of qubit-based quantum circuit architectures.

    * :mod:`'default.gaussian' <pennylane.devices.default_gaussian>`: a simple simulator
      of Gaussian states and operations on continuous-variable circuit architectures.

    * :mod:`'default.qubit.tf' <pennylane.devices.default_qubit_tf>`: a state simulator
      of qubit-based quantum circuit architectures written in TensorFlow, which allows
      automatic differentiation through the simulation.

    * :mod:`'default.qubit.autograd' <pennylane.devices.default_qubit_autograd>`: a state simulator
      of qubit-based quantum circuit architectures which allows
      automatic differentiation through the simulation via python's autograd library.

    Additional devices are supported through plugins — see
    the `available plugins <https://pennylane.ai/plugins.html>`_ for more
    details.

    All devices must be loaded by specifying their **short-name** as listed above,
    followed by the **wires** (subsystems) you wish to initialize. The *wires*
    argument can be an integer, in which case the wires of the device are addressed
    by consecutive integers:

    .. code-block:: python

        dev = qml.device('default.qubit', wires=5)

        def circuit():
           qml.Hadamard(wires=1)
           qml.Hadamard(wires=[0])
           qml.CNOT(wires=[3, 4])
           ...

    The *wires* argument can also be a sequence of unique numbers or strings, specifying custom wire labels
    that the user employs to address the wires:

    .. code-block:: python

        dev = qml.device('default.qubit', wires=['ancilla', 'q11', 'q12', -1, 1])

        def circuit():
           qml.Hadamard(wires='q11')
           qml.Hadamard(wires=['ancilla'])
           qml.CNOT(wires=['q12', -1] )
           ...

    Most devices accept a ``shots`` argument which specifies how many circuit executions
    are used to estimate stochastic return values. In particular, ``qml.sample()`` measurements
    will return as many samples as specified in the shots argument. The shots argument can be
    changed on a per-call basis using the built-in ``shots`` keyword argument.

    .. code-block:: python

        dev = qml.device('default.qubit', wires=1, shots=10)

        @qml.qnode(dev)
        def circuit(a):
          qml.RX(a, wires=0)
          return qml.sample(qml.PauliZ(wires=0))

    >>> circuit(0.8)  # 10 samples are returned
    [ 1  1  1 -1 -1  1  1  1  1  1]
    >>> circuit(0.8, shots=3)  # default is overwritten for this call
    [1 1 1]
    >>> circuit(0.8)  # back to default of 10 samples
    [ 1  1  1 -1 -1  1  1  1  1  1]

    Some devices may accept additional arguments. For instance,
    ``default.gaussian`` accepts the keyword argument ``hbar``, to set
    the convention used in the commutation relation :math:`[\x,\p]=i\hbar`
    (by default set to 2).

    Please refer to the documentation for the individual devices to see any
    additional arguments that might be required or supported.

    Args:
        name (str): the name of the device to load
        wires (int): the number of wires (subsystems) to initialise
            the device with

    Keyword Args:
        config (pennylane.Configuration): a PennyLane configuration object
            that contains global and/or device specific configurations.
    """
    if name not in plugin_devices:
        # Device does not exist in the loaded device list.
        # Attempt to refresh the devices, in case the user
        # installed the plugin during the current Python session.
        refresh_devices()
    if name in plugin_devices:
        options = {}
        # load global configuration settings if available
        config = kwargs.get("config", default_config)
        if config:
            # combine configuration options with keyword arguments.
            # Keyword arguments take preference, followed by device options,
            # followed by plugin options, followed by global options.
            options.update(config["main"])
            options.update(config[name.split(".")[0] + ".global"])
            options.update(config[name])
        kwargs.pop("config", None)
        options.update(kwargs)
        # loads the device class
        plugin_device_class = plugin_devices[name].load()
        if Version(version()) not in Spec(plugin_device_class.pennylane_requires):
            raise DeviceError(
                "The {} plugin requires PennyLane versions {}, however PennyLane "
                "version {} is installed.".format(
                    name, plugin_device_class.pennylane_requires, __version__
                )
            )
        # load device
        return plugin_device_class(*args, **options)
    raise DeviceError("Device does not exist. Make sure the required plugin is installed.")
def version():
    """Returns the PennyLane version number as a string."""
    return __version__
| 36.109023 | 107 | 0.705778 |
from importlib import reload
import numpy as _np
import pkg_resources
from semantic_version import Spec, Version
import pennylane.init
import pennylane.fourier
import pennylane.kernels
import pennylane.math
import pennylane.operation
import pennylane.qaoa as qaoa
import pennylane.qnn
import pennylane.templates
from pennylane._device import Device, DeviceError
from pennylane._grad import grad, jacobian, finite_diff
from pennylane._qubit_device import QubitDevice
from pennylane._version import __version__
from pennylane.about import about
from pennylane.circuit_graph import CircuitGraph
from pennylane.configuration import Configuration
from pennylane.io import *
from pennylane.measure import density_matrix, expval, probs, sample, state, var
from pennylane.ops import *
from pennylane.optimize import *
from pennylane.qnode import QNode, qnode
from pennylane.templates import broadcast, layer, template
from pennylane.transforms import (
adjoint,
draw,
ControlledOperation,
ctrl,
measurement_grouping,
metric_tensor,
specs,
qfunc_transform,
single_tape_transform,
quantum_monte_carlo,
)
from pennylane.utils import inv
from pennylane.vqe import ExpvalCost, Hamiltonian, VQECost
from .collections import QNodeCollection, apply, dot, map, sum
from .queuing import QueuingContext
import pennylane.grouping
# Look for an existing configuration file in the current working directory.
default_config = Configuration("config.toml")
class QuantumFunctionError(Exception):
    """Exception raised when an illegal operation is defined in a quantum function."""
def _get_device_entrypoints():
    """Return a dictionary mapping each device short name to its loadable entry point."""
    return {entry.name: entry for entry in pkg_resources.iter_entry_points("pennylane.plugins")}
def refresh_devices():
    """Scan installed PennyLane plugins to refresh the device list.

    Side effect only: updates the module-level ``plugin_devices`` dictionary.
    """
    global plugin_devices
    reload(pkg_resources)
    plugin_devices = _get_device_entrypoints()
# Initial scan of installed devices; refresh_devices() updates this later.
plugin_devices = _get_device_entrypoints()
class NestedAttrError:
    """Mock of the ``qchem`` module used when PennyLane-QChem is not installed.

    Printing an instance or accessing any attribute raises an ImportError
    that points the user at the installation instructions.
    """
    error_msg = (
        "PennyLane-QChem not installed. \n\nTo access the qchem "
        "module, you can install PennyLane-QChem via pip:"
        "\n\npip install pennylane-qchem"
        "\n\nFor more details, see the quantum chemistry documentation:"
        "\nhttps://pennylane.readthedocs.io/en/stable/introduction/chemistry.html"
    )
    def __str__(self):
        raise ImportError(self.error_msg) from None
    def __getattr__(self, name):
        raise ImportError(self.error_msg) from None
    __repr__ = __str__
# Default to the error-raising mock; replaced below when the plugin is installed.
qchem = NestedAttrError()
for entry in pkg_resources.iter_entry_points("pennylane.qchem"):
    if entry.name == "OpenFermion":
        qchem = entry.load()
def device(name, *args, **kwargs):
    r"""Load a :class:`~.Device` by its short name and return the instance.

    :param name: the short name of the device to load.
    :param args: positional arguments forwarded to the device constructor
        (typically the number of wires).
    :param kwargs: keyword arguments forwarded to the device constructor; the
        special ``config`` keyword supplies a PennyLane configuration object.
    :raises DeviceError: if the device is unknown, or the plugin's required
        PennyLane version does not match the installed one.
    """
    if name not in plugin_devices:
        # The plugin may have been installed during this session; rescan.
        refresh_devices()
    if name in plugin_devices:
        options = {}
        # Load global configuration settings if available.
        config = kwargs.get("config", default_config)
        if config:
            # Keyword arguments take preference over device, plugin and
            # global configuration options (applied in that reverse order).
            options.update(config["main"])
            options.update(config[name.split(".")[0] + ".global"])
            options.update(config[name])
        kwargs.pop("config", None)
        options.update(kwargs)
        # Load the device class from its entry point.
        plugin_device_class = plugin_devices[name].load()
        if Version(version()) not in Spec(plugin_device_class.pennylane_requires):
            raise DeviceError(
                "The {} plugin requires PennyLane versions {}, however PennyLane "
                "version {} is installed.".format(
                    name, plugin_device_class.pennylane_requires, __version__
                )
            )
        return plugin_device_class(*args, **options)
    raise DeviceError("Device does not exist. Make sure the required plugin is installed.")
def version():
    """Return the PennyLane version number as a string."""
    return __version__
| true | true |
f7372048e09ce1663e14828ff0ae6631ff232e70 | 7,984 | py | Python | mlrun/frameworks/onnx/model_handler.py | hayesgb/mlrun | 9a8b469b37d7d74f2d04dc956b2966f88fe4e890 | [
"Apache-2.0"
] | null | null | null | mlrun/frameworks/onnx/model_handler.py | hayesgb/mlrun | 9a8b469b37d7d74f2d04dc956b2966f88fe4e890 | [
"Apache-2.0"
] | null | null | null | mlrun/frameworks/onnx/model_handler.py | hayesgb/mlrun | 9a8b469b37d7d74f2d04dc956b2966f88fe4e890 | [
"Apache-2.0"
] | null | null | null | import os
from typing import Any, Dict, List, Union
import onnx
import onnxoptimizer
import mlrun
from mlrun.artifacts import Artifact
from mlrun.frameworks._common import ModelHandler
class ONNXModelHandler(ModelHandler):
    """
    Class for handling an ONNX model, enabling loading and saving it during runs.
    """
    def __init__(
        self,
        model_name: str,
        model_path: str = None,
        model: onnx.ModelProto = None,
        context: mlrun.MLClientCtx = None,
    ):
        """
        Initialize the handler. The model can be set here so it won't require loading. Notice that if the model path
        given is of a previously logged model (store model object path), all of the other configurations will be loaded
        automatically as they were logged with the model, hence they are optional.

        :param model_name: The model name for saving and logging the model.
        :param model_path: Path to the model's directory to load it from. The onnx file must start with the given model
                           name and the directory must contain the onnx file. The model path can be also passed as a
                           model object path in the following format:
                           'store://models/<PROJECT_NAME>/<MODEL_NAME>:<VERSION>'.
        :param model:      Model to handle or None in case a loading parameters were supplied.
        :param context:    MLRun context to work with for logging the model.

        :raise MLRunInvalidArgumentError: There was no model or model directory supplied.
        """
        # Setup the base handler class:
        super(ONNXModelHandler, self).__init__(
            model_name=model_name, model_path=model_path, model=model, context=context,
        )
    # TODO: output_path won't work well with logging artifacts. Need to look into changing the logic of 'log_artifact'.
    def save(
        self, output_path: str = None, *args, **kwargs
    ) -> Union[Dict[str, Artifact], None]:
        """
        Save the handled model at the given output path. If a MLRun context is available, the saved model files will be
        logged and returned as artifacts.

        :param output_path: The full path to the directory to save the handled model at. If not given, the context
                            stored will be used to save the model in the defaulted artifacts location.

        :return: The saved model artifacts dictionary if context is available and None otherwise.
        """
        super(ONNXModelHandler, self).save(output_path=output_path)
        # Setup the returning model artifacts list:
        artifacts = {}  # type: Dict[str, Artifact]
        model_file = None  # type: str
        # Set the output path:
        if output_path is None:
            output_path = os.path.join(self._context.artifact_path, self._model_name)
        # Save the model:
        model_file = "{}.onnx".format(self._model_name)
        onnx.save(self._model, model_file)
        # Update the paths and log artifacts if context is available:
        self._model_file = model_file
        if self._context is not None:
            artifacts[
                self._get_model_file_artifact_name()
            ] = self._context.log_artifact(
                model_file,
                local_path=model_file,
                artifact_path=output_path,
                db_key=False,
            )
        return artifacts if self._context is not None else None
    def load(self, *args, **kwargs):
        """
        Load the specified model in this handler.
        """
        super(ONNXModelHandler, self).load()
        # Check that the model is well formed (runs on the file path before parsing):
        onnx.checker.check_model(self._model_file)
        # Load the ONNX model:
        self._model = onnx.load(self._model_file)
    def log(
        self,
        labels: Dict[str, Union[str, int, float]] = None,
        parameters: Dict[str, Union[str, int, float]] = None,
        extra_data: Dict[str, Any] = None,
        artifacts: Dict[str, Artifact] = None,
    ):
        """
        Log the model held by this handler into the MLRun context provided.

        :param labels:     Labels to log the model with.
        :param parameters: Parameters to log with the model.
        :param extra_data: Extra data to log with the model.
        :param artifacts:  Artifacts to log the model with. Will be added to the extra data.

        :raise MLRunInvalidArgumentError: In case a context is missing or there is no model in this handler.
        """
        super(ONNXModelHandler, self).log(
            labels=labels,
            parameters=parameters,
            extra_data=extra_data,
            artifacts=artifacts,
        )
        # Set default values:
        labels = {} if labels is None else labels
        parameters = {} if parameters is None else parameters
        extra_data = {} if extra_data is None else extra_data
        artifacts = {} if artifacts is None else artifacts
        # Save the model:
        model_artifacts = self.save()
        # Log the model:
        self._context.log_model(
            self._model_name,
            db_key=self._model_name,
            model_file=self._model_file,
            framework="onnx",
            labels=labels,
            parameters=parameters,
            metrics=self._context.results,
            extra_data={**model_artifacts, **artifacts, **extra_data},
        )
    def optimize(self, optimizations: List[str] = None, fixed_point: bool = False):
        """
        Use ONNX optimizer to optimize the ONNX model. The optimizations supported can be seen by calling
        'onnxoptimizer.get_available_passes()'

        :param optimizations: List of possible optimizations. If None, all of the optimizations will be used. Defaulted
                              to None.
        :param fixed_point:   Optimize the weights using fixed point. Defaulted to False.
        """
        # Set the ONNX optimizations list:
        onnx_optimizations = onnxoptimizer.get_fuse_and_elimination_passes()
        if optimizations is None:
            # Set to all optimizations:
            optimizations = onnx_optimizations
        # Optimize the model:
        self._model = onnxoptimizer.optimize(
            self._model, passes=optimizations, fixed_point=fixed_point
        )
    def to_onnx(self, *args, **kwargs) -> onnx.ModelProto:
        """
        Convert the model in this handler to an ONNX model. In this case the handled ONNX model will simply be returned.

        :return: The current handled ONNX model as there is nothing to convert.
        """
        return self._model
    def _collect_files_from_store_object(self):
        """
        If the model path given is of a store object, collect the needed model files into this handler for later loading
        the model.
        """
        # Get the artifact and model file along with its extra data:
        (
            self._model_file,
            self._model_artifact,
            self._extra_data,
        ) = mlrun.artifacts.get_model(self._model_path)
        # Get the model file:
        # NOTE(review): a '.pkl' model file is assumed to carry the real onnx
        # file in its extra data — confirm against the model logging flow.
        if self._model_file.endswith(".pkl"):
            self._model_file = self._extra_data[
                self._get_model_file_artifact_name()
            ].local()
    def _collect_files_from_local_path(self):
        """
        If the model path given is of a local path, search for the needed model files and collect them into this handler
        for later loading the model.

        :raise MLRunNotFoundError: If the onnx file was not found.
        """
        self._model_file = os.path.join(
            self._model_path, "{}.onnx".format(self._model_name)
        )
        if not os.path.exists(self._model_file):
            raise mlrun.errors.MLRunNotFoundError(
                "The model file '{}.onnx' was not found within the given 'model_path': "
                "'{}'".format(self._model_name, self._model_path)
            )
| 39.330049 | 120 | 0.626628 | import os
from typing import Any, Dict, List, Union
import onnx
import onnxoptimizer
import mlrun
from mlrun.artifacts import Artifact
from mlrun.frameworks._common import ModelHandler
class ONNXModelHandler(ModelHandler):
def __init__(
self,
model_name: str,
model_path: str = None,
model: onnx.ModelProto = None,
context: mlrun.MLClientCtx = None,
):
super(ONNXModelHandler, self).__init__(
model_name=model_name, model_path=model_path, model=model, context=context,
)
def save(
self, output_path: str = None, *args, **kwargs
) -> Union[Dict[str, Artifact], None]:
super(ONNXModelHandler, self).save(output_path=output_path)
# Setup the returning model artifacts list:
artifacts = {} # type: Dict[str, Artifact]
model_file = None # type: str
# Set the output path:
if output_path is None:
output_path = os.path.join(self._context.artifact_path, self._model_name)
# Save the model:
model_file = "{}.onnx".format(self._model_name)
onnx.save(self._model, model_file)
# Update the paths and log artifacts if context is available:
self._model_file = model_file
if self._context is not None:
artifacts[
self._get_model_file_artifact_name()
] = self._context.log_artifact(
model_file,
local_path=model_file,
artifact_path=output_path,
db_key=False,
)
return artifacts if self._context is not None else None
def load(self, *args, **kwargs):
super(ONNXModelHandler, self).load()
# Check that the model is well formed:
onnx.checker.check_model(self._model_file)
# Load the ONNX model:
self._model = onnx.load(self._model_file)
def log(
self,
labels: Dict[str, Union[str, int, float]] = None,
parameters: Dict[str, Union[str, int, float]] = None,
extra_data: Dict[str, Any] = None,
artifacts: Dict[str, Artifact] = None,
):
super(ONNXModelHandler, self).log(
labels=labels,
parameters=parameters,
extra_data=extra_data,
artifacts=artifacts,
)
# Set default values:
labels = {} if labels is None else labels
parameters = {} if parameters is None else parameters
extra_data = {} if extra_data is None else extra_data
artifacts = {} if artifacts is None else artifacts
# Save the model:
model_artifacts = self.save()
# Log the model:
self._context.log_model(
self._model_name,
db_key=self._model_name,
model_file=self._model_file,
framework="onnx",
labels=labels,
parameters=parameters,
metrics=self._context.results,
extra_data={**model_artifacts, **artifacts, **extra_data},
)
def optimize(self, optimizations: List[str] = None, fixed_point: bool = False):
# Set the ONNX optimizations list:
onnx_optimizations = onnxoptimizer.get_fuse_and_elimination_passes()
if optimizations is None:
# Set to all optimizations:
optimizations = onnx_optimizations
# Optimize the model:
self._model = onnxoptimizer.optimize(
self._model, passes=optimizations, fixed_point=fixed_point
)
def to_onnx(self, *args, **kwargs) -> onnx.ModelProto:
return self._model
def _collect_files_from_store_object(self):
# Get the artifact and model file along with its extra data:
(
self._model_file,
self._model_artifact,
self._extra_data,
) = mlrun.artifacts.get_model(self._model_path)
# Get the model file:
if self._model_file.endswith(".pkl"):
self._model_file = self._extra_data[
self._get_model_file_artifact_name()
].local()
def _collect_files_from_local_path(self):
self._model_file = os.path.join(
self._model_path, "{}.onnx".format(self._model_name)
)
if not os.path.exists(self._model_file):
raise mlrun.errors.MLRunNotFoundError(
"The model file '{}.onnx' was not found within the given 'model_path': "
"'{}'".format(self._model_name, self._model_path)
)
| true | true |
f73720a8ba08ff93f66d33a63fc95d8dff2faeef | 2,623 | py | Python | jsonHelpers.py | Multifactored/6447FuzzerProject | f94094223813606121dc71b4d3b2c477e8b3a1bf | [
"MIT"
] | 1 | 2020-12-17T10:06:14.000Z | 2020-12-17T10:06:14.000Z | jsonHelpers.py | Multifactored/6447FuzzerProject | f94094223813606121dc71b4d3b2c477e8b3a1bf | [
"MIT"
] | null | null | null | jsonHelpers.py | Multifactored/6447FuzzerProject | f94094223813606121dc71b4d3b2c477e8b3a1bf | [
"MIT"
] | 1 | 2021-01-13T11:42:21.000Z | 2021-01-13T11:42:21.000Z | # pylint: disable=W0614
import sys
import json
import random
from helper import *
from itertools import combinations
import copy
def brokenJson(jsonInput: dict, key_set: list, maxPower: int):
''' similar to generateStr() except this has a higher chance of having the characters {}" '''
choices = r'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890::::::{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"'
maxPower = 2 ** maxPower
output = "{" + "\"" + generateStr(0) + "\": "+ ''.join(random.choice(choices) for i in range(maxPower)) + "}"
return output
def jsonInJson(jsonInput: dict, key_set: list):
''' Uses sample JSON input and creates a huge JSON with itself as the value with the original keys '''
output = {}
jsonStr = json.dumps(jsonInput)
for key in jsonInput.keys():
output[key] = jsonStr * 20
return output
def jsonRandomTyped(jsonInput: dict, key_set: list):
''' Mutates values of each combination in the input to random values according to input value types '''
output = []
for i in range(6):
for subset in key_set:
mutatedJson = copy.deepcopy(jsonInput)
for key in subset:
# find value type and generate random value according to that
val = jsonInput[key]
val = valGenerateTyped(val, i + 2)
mutatedJson[key] = val
output.append(mutatedJson)
return output
def fuzzJSON(sampleInputFile, binary, lock):
print("Fuzzing the JSON formatted sample input...\n", end="")
key_set = []
sampleInputFile.seek(0)
jsonInput = sampleInputFile.read()
jsonInput = json.loads(jsonInput)
choices = list(jsonInput.keys())
for i in range(1, len(choices) + 1):
for combs in combinations(choices, i):
key_set.append(combs)
for i in range(10):
for j in range(10):
if sendInputAndCheck(binary, brokenJson(jsonInput, key_set, j), lock):
return True, "Found vulnerability in JSON!"
for i in range(10):
badJson = generateWeirdJson(i).replace('\\"',"\"")
if sendInputAndCheck(binary, badJson, lock):
return True, "Found vulnerability in JSON!"
if sendInputAndCheck(binary, json.dumps(jsonInJson(jsonInput, key_set)), lock):
return True, "Found vulnerability in JSON!"
mutations = jsonRandomTyped(jsonInput, key_set)
for i in mutations:
if sendInputAndCheck(binary, json.dumps(i), lock):
return True, "Found vulnerability in JSON!"
return False
| 32.382716 | 157 | 0.627144 |
import sys
import json
import random
from helper import *
from itertools import combinations
import copy
def brokenJson(jsonInput: dict, key_set: list, maxPower: int):
choices = r'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890::::::{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"{}"'
maxPower = 2 ** maxPower
output = "{" + "\"" + generateStr(0) + "\": "+ ''.join(random.choice(choices) for i in range(maxPower)) + "}"
return output
def jsonInJson(jsonInput: dict, key_set: list):
output = {}
jsonStr = json.dumps(jsonInput)
for key in jsonInput.keys():
output[key] = jsonStr * 20
return output
def jsonRandomTyped(jsonInput: dict, key_set: list):
output = []
for i in range(6):
for subset in key_set:
mutatedJson = copy.deepcopy(jsonInput)
for key in subset:
val = jsonInput[key]
val = valGenerateTyped(val, i + 2)
mutatedJson[key] = val
output.append(mutatedJson)
return output
def fuzzJSON(sampleInputFile, binary, lock):
print("Fuzzing the JSON formatted sample input...\n", end="")
key_set = []
sampleInputFile.seek(0)
jsonInput = sampleInputFile.read()
jsonInput = json.loads(jsonInput)
choices = list(jsonInput.keys())
for i in range(1, len(choices) + 1):
for combs in combinations(choices, i):
key_set.append(combs)
for i in range(10):
for j in range(10):
if sendInputAndCheck(binary, brokenJson(jsonInput, key_set, j), lock):
return True, "Found vulnerability in JSON!"
for i in range(10):
badJson = generateWeirdJson(i).replace('\\"',"\"")
if sendInputAndCheck(binary, badJson, lock):
return True, "Found vulnerability in JSON!"
if sendInputAndCheck(binary, json.dumps(jsonInJson(jsonInput, key_set)), lock):
return True, "Found vulnerability in JSON!"
mutations = jsonRandomTyped(jsonInput, key_set)
for i in mutations:
if sendInputAndCheck(binary, json.dumps(i), lock):
return True, "Found vulnerability in JSON!"
return False
| true | true |
f7372209f7ffe645807e944b391334e5a5b29271 | 222 | py | Python | csf_tz/fleet_management/doctype/vehicle_checklist/test_vehicle_checklist.py | Craftint/CSF_TZ | b5cb2d59d8f4e958ad7d4cb89421cfbec992abc5 | [
"MIT"
] | 4 | 2021-09-24T12:30:32.000Z | 2022-03-19T14:55:34.000Z | csf_tz/fleet_management/doctype/vehicle_checklist/test_vehicle_checklist.py | Craftint/CSF_TZ | b5cb2d59d8f4e958ad7d4cb89421cfbec992abc5 | [
"MIT"
] | null | null | null | csf_tz/fleet_management/doctype/vehicle_checklist/test_vehicle_checklist.py | Craftint/CSF_TZ | b5cb2d59d8f4e958ad7d4cb89421cfbec992abc5 | [
"MIT"
] | 7 | 2021-09-24T12:30:33.000Z | 2022-03-21T11:34:02.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Bravo Logistics and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestVehicleChecklist(unittest.TestCase):
pass
| 20.181818 | 54 | 0.779279 |
from __future__ import unicode_literals
import frappe
import unittest
class TestVehicleChecklist(unittest.TestCase):
pass
| true | true |
f7372291bf9ae614b10a1fd0a65f34965b237e12 | 5,996 | py | Python | baleen/utils/logger.py | ByteCubed/baleen | bb2ae323a3ab3a066a4a289401847e1251abc55d | [
"MIT"
] | 79 | 2016-05-31T00:33:07.000Z | 2022-01-17T07:33:41.000Z | baleen/utils/logger.py | ByteCubed/baleen | bb2ae323a3ab3a066a4a289401847e1251abc55d | [
"MIT"
] | 73 | 2016-02-18T19:52:42.000Z | 2016-10-08T21:48:55.000Z | baleen/utils/logger.py | ByteCubed/baleen | bb2ae323a3ab3a066a4a289401847e1251abc55d | [
"MIT"
] | 24 | 2015-11-18T11:38:30.000Z | 2017-05-19T14:07:18.000Z | # baleen.utils.logger
# Logging utility for Baleen
#
# Author: Benjamin Bengfort <benjamin@bengfort.com>
# Created: Mon Sep 22 15:47:34 2014 -0400
#
# Copyright (C) 2014 Bengfort.com
# For license information, see LICENSE.txt
#
# ID: logger.py [caaaaca] benjamin@bengfort.com $
"""
Logging utility for Baleen
"""
##########################################################################
## Imports
##########################################################################
import logging
import getpass
import warnings
import logging.config
from baleen.config import settings
from baleen.utils.timez import COMMON_DATETIME
##########################################################################
## Logging configuration
##########################################################################
configuration = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(name)s %(levelname)s [%(asctime)s] -- %(message)s',
'datefmt': COMMON_DATETIME,
}
},
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'logging.NullHandler',
},
'console': {
'level': 'WARNING',
'class': 'logging.StreamHandler',
'formatter': 'simple',
},
'logfile': {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': settings.logfile,
'maxBytes': '536870912', # 512 MB
'formatter': 'simple',
},
'mongolog': {
'level': 'INFO',
'class': 'baleen.utils.mongolog.MongoHandler',
}
},
'loggers': {
'baleen': {
'level': settings.loglevel,
'handlers': ['logfile'],
'propagagte': True,
},
'baleen.ingest': {
'level': 'INFO',
'handlers': ['logfile', 'mongolog'],
'propagate': False,
}
},
}
logging.config.dictConfigClass(configuration).configure()
if not settings.debug: logging.captureWarnings(True)
##########################################################################
## Logger utility
##########################################################################
class WrappedLogger(object):
"""
Wraps the Python logging module's logger object to ensure that all baleen
logging happens with the correct configuration as well as any extra
information that might be required by the log file (for example, the user
on the machine, hostname, IP address lookup, etc).
Subclasses must specify their logger as a class variable so all instances
have access to the same logging object.
"""
logger = None
def __init__(self, **kwargs):
self.raise_warnings = kwargs.pop('raise_warnings', settings.debug)
self.logger = kwargs.pop('logger', self.logger)
if not self.logger or not hasattr(self.logger, 'log'):
raise TypeError(
"Subclasses must specify a logger, not {}"
.format(type(self.logger))
)
self.extras = kwargs
def log(self, level, message, *args, **kwargs):
"""
This is the primary method to override to ensure logging with extra
options gets correctly specified.
"""
extra = self.extras.copy()
extra.update(kwargs.pop('extra', {}))
kwargs['extra'] = extra
self.logger.log(level, message, *args, **kwargs)
def debug(self, message, *args, **kwargs):
return self.log(logging.DEBUG, message, *args, **kwargs)
def info(self, message, *args, **kwargs):
return self.log(logging.INFO, message, *args, **kwargs)
def warning(self, message, *args, **kwargs):
"""
Specialized warnings system. If a warning subclass is passed into
the keyword arguments and raise_warnings is True - the warnning will
be passed to the warnings module.
"""
warncls = kwargs.pop('warning', None)
if warncls and self.raise_warnings:
warnings.warn(message, warncls)
return self.log(logging.WARNING, message, *args, **kwargs)
# Alias warn to warning
warn = warning
def error(self, message, *args, **kwargs):
return self.log(logging.ERROR, message, *args, **kwargs)
def critical(self, message, *args, **kwargs):
return self.log(logging.CRITICAL, message, *args, **kwargs)
##########################################################################
## The Ingestion Logger Class
##########################################################################
class IngestLogger(WrappedLogger):
"""
Performs logging for the baleen process with the log options above.
"""
logger = logging.getLogger('baleen.ingest')
def __init__(self, **kwargs):
self._user = kwargs.pop('user', None)
super(IngestLogger, self).__init__(**kwargs)
@property
def user(self):
if not self._user:
self._user = getpass.getuser()
return self._user
def log(self, level, message, *args, **kwargs):
"""
Provide current user as extra context to the logger
"""
extra = kwargs.pop('extra', {})
extra.update({
'user': self.user
})
kwargs['extra'] = extra
super(IngestLogger, self).log(level, message, *args, **kwargs)
##########################################################################
## Logging Mixin
##########################################################################
class LoggingMixin(object):
"""
Mix in to classes that need their own logging object!
"""
@property
def logger(self):
"""
Instantiates and returns a IngestLogger instance
"""
if not hasattr(self, '_logger') or not self._logger:
self._logger = IngestLogger()
return self._logger
| 29.24878 | 77 | 0.518012 | true | true | |
f73722ddcc2a5d42d935ea244ae58c6bc70ef916 | 3,302 | py | Python | fuzzers/005-tilegrid/cfg_int/top.py | rw1nkler/prjxray | aff076b47dcf6d653eb3ce791b41fd6cf4343edd | [
"ISC"
] | 583 | 2017-12-21T11:06:13.000Z | 2022-02-20T21:27:33.000Z | fuzzers/005-tilegrid/cfg_int/top.py | rw1nkler/prjxray | aff076b47dcf6d653eb3ce791b41fd6cf4343edd | [
"ISC"
] | 1,212 | 2017-12-22T15:05:06.000Z | 2022-02-19T13:04:59.000Z | fuzzers/005-tilegrid/cfg_int/top.py | mfkiwl/prjxray-xilinx-7-bitstream-fortmat | 5349556bc2c230801d6df0cf11bccb9cfd171639 | [
"ISC"
] | 134 | 2017-12-21T10:16:50.000Z | 2022-02-16T06:42:04.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017-2020 The Project X-Ray Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
import os
import random
random.seed(int(os.getenv("SEED"), 16))
from prjxray import util
from prjxray.db import Database
def gen_sites():
db = Database(util.get_db_root(), util.get_part())
grid = db.grid()
for tile_name in sorted(grid.tiles()):
loc = grid.loc_of_tilename(tile_name)
gridinfo = grid.gridinfo_at_loc(loc)
if gridinfo.tile_type != 'CFG_CENTER_MID':
continue
sites = {}
for site_name, site_type in gridinfo.sites.items():
if site_type not in sites:
sites[site_type] = []
sites[site_type].append(site_name)
for site_type in sites:
sites[site_type].sort()
int_grid_x = loc.grid_x + 3
int_tile_type = 'INT_L'
int_tile_locs = []
for dy in range(-9, 12):
# Skip the VBREAK tile.
if dy != 6:
int_tile_locs.append((int_grid_x, loc.grid_y + dy), )
int_tiles = []
for int_tile_loc in int_tile_locs:
int_gridinfo = grid.gridinfo_at_loc(int_tile_loc)
assert int_gridinfo.tile_type == int_tile_type, (
int_gridinfo.tile_type, int_tile_type)
int_tiles.append(grid.tilename_at_loc(int_tile_loc))
yield tile_name, sites, int_tiles
def write_params(params):
pinstr = 'tile,val\n'
for tile, (val) in sorted(params.items()):
pinstr += '%s,%s\n' % (tile, val)
open('params.csv', 'w').write(pinstr)
def run():
print('''
module top();
''')
sites = list(gen_sites())
# Only on CFG_CENTER_MID expected.
assert len(sites) == 1
tile_name, sites, int_tiles = sites[0]
assert len(sites['ICAP']) == 2, len(sites['ICAP'])
# int_tiles[6]:
# IMUX43 -> ICAP1_I31 = 0
# IMUX42 -> ICAP1_I30 = toggle 0/1
# int_tiles[7]:
# IMUX43 -> ICAP1_I15 = 0
# IMUX42 -> ICAP1_I14 = toggle 0/1
# int_tiles[8]:
# IMUX43 -> ICAP1_CSIB = 0
# IMUX42 -> ICAP1_RDWRB = toggle 0/1
ICAP1_I30 = random.randint(0, 1)
ICAP1_I14 = random.randint(0, 1)
ICAP1_RDWRB = random.randint(0, 1)
params = {}
params[int_tiles[6]] = ICAP1_I30
params[int_tiles[7]] = ICAP1_I14
params[int_tiles[8]] = ICAP1_RDWRB
print(
"""
wire [31:0] icap_i_{site};
wire icap_rdwrd_{site};
wire icap_csib_{site};
assign icap_i_{site}[31] = 0;
assign icap_i_{site}[30] = {ICAP1_I30};
assign icap_i_{site}[15] = 0;
assign icap_i_{site}[14] = {ICAP1_I14};
assign icap_csib_{site} = 0;
assign icap_rdwrb_{site} = {ICAP1_RDWRB};
(* KEEP, DONT_TOUCH, LOC = "{site}" *)
ICAPE2 icap_{site} (
.I(icap_i_{site}),
.RDWRB(icap_rdwrb_{site}),
.CSIB(icap_csib_{site})
);
""".format(
site=sites['ICAP'][1],
ICAP1_I30=ICAP1_I30,
ICAP1_I14=ICAP1_I14,
ICAP1_RDWRB=ICAP1_RDWRB))
print("endmodule")
write_params(params)
if __name__ == '__main__':
run()
| 25.596899 | 69 | 0.596002 |
import os
import random
random.seed(int(os.getenv("SEED"), 16))
from prjxray import util
from prjxray.db import Database
def gen_sites():
db = Database(util.get_db_root(), util.get_part())
grid = db.grid()
for tile_name in sorted(grid.tiles()):
loc = grid.loc_of_tilename(tile_name)
gridinfo = grid.gridinfo_at_loc(loc)
if gridinfo.tile_type != 'CFG_CENTER_MID':
continue
sites = {}
for site_name, site_type in gridinfo.sites.items():
if site_type not in sites:
sites[site_type] = []
sites[site_type].append(site_name)
for site_type in sites:
sites[site_type].sort()
int_grid_x = loc.grid_x + 3
int_tile_type = 'INT_L'
int_tile_locs = []
for dy in range(-9, 12):
if dy != 6:
int_tile_locs.append((int_grid_x, loc.grid_y + dy), )
int_tiles = []
for int_tile_loc in int_tile_locs:
int_gridinfo = grid.gridinfo_at_loc(int_tile_loc)
assert int_gridinfo.tile_type == int_tile_type, (
int_gridinfo.tile_type, int_tile_type)
int_tiles.append(grid.tilename_at_loc(int_tile_loc))
yield tile_name, sites, int_tiles
def write_params(params):
pinstr = 'tile,val\n'
for tile, (val) in sorted(params.items()):
pinstr += '%s,%s\n' % (tile, val)
open('params.csv', 'w').write(pinstr)
def run():
print('''
module top();
''')
sites = list(gen_sites())
assert len(sites) == 1
tile_name, sites, int_tiles = sites[0]
assert len(sites['ICAP']) == 2, len(sites['ICAP'])
ICAP1_I30 = random.randint(0, 1)
ICAP1_I14 = random.randint(0, 1)
ICAP1_RDWRB = random.randint(0, 1)
params = {}
params[int_tiles[6]] = ICAP1_I30
params[int_tiles[7]] = ICAP1_I14
params[int_tiles[8]] = ICAP1_RDWRB
print(
"""
wire [31:0] icap_i_{site};
wire icap_rdwrd_{site};
wire icap_csib_{site};
assign icap_i_{site}[31] = 0;
assign icap_i_{site}[30] = {ICAP1_I30};
assign icap_i_{site}[15] = 0;
assign icap_i_{site}[14] = {ICAP1_I14};
assign icap_csib_{site} = 0;
assign icap_rdwrb_{site} = {ICAP1_RDWRB};
(* KEEP, DONT_TOUCH, LOC = "{site}" *)
ICAPE2 icap_{site} (
.I(icap_i_{site}),
.RDWRB(icap_rdwrb_{site}),
.CSIB(icap_csib_{site})
);
""".format(
site=sites['ICAP'][1],
ICAP1_I30=ICAP1_I30,
ICAP1_I14=ICAP1_I14,
ICAP1_RDWRB=ICAP1_RDWRB))
print("endmodule")
write_params(params)
if __name__ == '__main__':
run()
| true | true |
f7372330a1b687246159fe0e9fb2a671ee65fb4d | 5,350 | py | Python | anthemtool/io/writer.py | xyrin88/anthemtool | a2fec1ac0b596765c9d1f620d9167353a1b39582 | [
"MIT"
] | 14 | 2019-04-01T03:30:33.000Z | 2022-01-14T22:17:11.000Z | anthemtool/io/writer.py | NicknineTheEagle/anthemtool | a2fec1ac0b596765c9d1f620d9167353a1b39582 | [
"MIT"
] | 6 | 2019-04-14T07:01:09.000Z | 2021-11-25T18:58:40.000Z | anthemtool/io/writer.py | NicknineTheEagle/anthemtool | a2fec1ac0b596765c9d1f620d9167353a1b39582 | [
"MIT"
] | 4 | 2019-09-13T20:03:18.000Z | 2021-07-21T11:02:06.000Z | import logging
from struct import unpack
from typing import Optional, Dict
from anthemtool.cas.cas import Cas
from anthemtool.io.providers.base import Decompressor
from anthemtool.util import PathUtil
LOG = logging.getLogger(__name__)
class CasWriter:
"""
Writer for CAS file entries.
"""
DECOMPRESSION_LOOKUP = {
0x70: 'null',
0x71: 'null',
0x1170: 'oodle',
}
def __init__(self, decompressors: Dict[str, Decompressor]) -> None:
"""
Initialize instance.
"""
self.decompressors = decompressors
def write(self, cas: Cas, offset: int, path: str, compressed_file_size: int,
file_size: Optional[int] = None) -> None:
"""
Write the given entry to the output path.
"""
# LOG.debug(
# "Writing cas=%s offset=0x%x size=0x%x outsize=0x%x to %s",
# cas, offset, compressed_file_size, file_size or 0x0, path
# )
PathUtil.ensure_base_path_exists(path)
result_size = 0
payload_size = 0
# Get CAS file handle
handle = cas.handle
handle.seek(offset)
# Open output file for writing
with open(path, "wb") as dst:
# Read until we reached the given compressed size
while payload_size < compressed_file_size:
size = unpack(">I", handle.read(4))[0]
magic = unpack(">H", handle.read(2))[0]
compressed_size = unpack(">H", handle.read(2))[0]
# LOG.debug(
# "Writing part size=0x%x outsize=0x%x magic=0x%x",
# size, compressed_size, magic
# )
# Determine how to read based on the magic
if magic == 0x1170:
# Oodle compression, read the compressed size
payload = handle.read(compressed_size)
elif magic in (0x70, 0x71):
# We are not sure about these, but they appear to be parts that
# reside in the CAS file uncompressed.
# Size and compressed size seems to be the same for parts with magic 0x70
if magic == 0x70 and size != compressed_size:
raise Exception("Expected size=0x{:x} and outsize=0x{:x} to match".format(
size, compressed_size
))
# For magic 0x71, compressed size seems to be always zero
if magic == 0x71 and compressed_size != 0x00:
raise Exception(
"Expected outsize=0x{:x} to be zero".format(compressed_size)
)
# Read uncompressed size
payload = handle.read(size)
else:
# Other compression algorithms are not supported
raise Exception(
"Unsupported compression magic=0x{:x} size=0x{:x} outsize=0x{:x} (path={:s}"
" cas={} offset=0x{:x}, size=0x{:x}, outsize=0x{:x})".format(
magic, compressed_size, size, path, cas,
offset, compressed_file_size, file_size or 0x0
)
)
# Invoke the appropriate decompressor
decompressor = self.get_decompressor(magic)
data = decompressor.decompress(payload, compressed_size, size)
# Increment counters to keep track of progress
result_size += len(data)
payload_size += len(payload) + 8
# Write the result to the disk
dst.write(data)
# LOG.debug(
# "File part written payload_size=0x%x total_payload_size=0x%x "
# "data_size=0x%x total_data_size=0x%x",
# len(payload), payload_size, len(data), result_size
# )
# LOG.debug(
# "Finished decompression payload_size=0x%x data_size=0x%x", payload_size, result_size
# )
# Make sure we read the exact amount of compressed bytes
if payload_size != compressed_file_size:
raise Exception(
"Decompression failed, size requested=0x{:x} but got=0x{:x}".format(
compressed_file_size, payload_size
)
)
# If we have a file_size, make sure it matches the length of the result
if file_size is not None and result_size != file_size:
raise Exception(
"Decompression failed, outsize requested=0x{:x} but got=0x{:x}".format(
file_size, result_size
)
)
def get_decompressor(self, magic: int) -> Decompressor:
"""
Get the decompressor for the given magic.
"""
if magic not in self.DECOMPRESSION_LOOKUP.keys():
raise Exception("No decompression mapping defined for magic 0x{:x}".format(magic))
key = self.DECOMPRESSION_LOOKUP[magic]
if key not in self.decompressors.keys():
raise Exception("No decompression implementation found for key {}".format(key))
return self.decompressors[key]
| 36.394558 | 100 | 0.538505 | import logging
from struct import unpack
from typing import Optional, Dict
from anthemtool.cas.cas import Cas
from anthemtool.io.providers.base import Decompressor
from anthemtool.util import PathUtil
LOG = logging.getLogger(__name__)
class CasWriter:
DECOMPRESSION_LOOKUP = {
0x70: 'null',
0x71: 'null',
0x1170: 'oodle',
}
def __init__(self, decompressors: Dict[str, Decompressor]) -> None:
self.decompressors = decompressors
def write(self, cas: Cas, offset: int, path: str, compressed_file_size: int,
file_size: Optional[int] = None) -> None:
PathUtil.ensure_base_path_exists(path)
result_size = 0
payload_size = 0
handle = cas.handle
handle.seek(offset)
with open(path, "wb") as dst:
while payload_size < compressed_file_size:
size = unpack(">I", handle.read(4))[0]
magic = unpack(">H", handle.read(2))[0]
compressed_size = unpack(">H", handle.read(2))[0]
if magic == 0x1170:
payload = handle.read(compressed_size)
elif magic in (0x70, 0x71):
if magic == 0x70 and size != compressed_size:
raise Exception("Expected size=0x{:x} and outsize=0x{:x} to match".format(
size, compressed_size
))
if magic == 0x71 and compressed_size != 0x00:
raise Exception(
"Expected outsize=0x{:x} to be zero".format(compressed_size)
)
payload = handle.read(size)
else:
raise Exception(
"Unsupported compression magic=0x{:x} size=0x{:x} outsize=0x{:x} (path={:s}"
" cas={} offset=0x{:x}, size=0x{:x}, outsize=0x{:x})".format(
magic, compressed_size, size, path, cas,
offset, compressed_file_size, file_size or 0x0
)
)
decompressor = self.get_decompressor(magic)
data = decompressor.decompress(payload, compressed_size, size)
result_size += len(data)
payload_size += len(payload) + 8
dst.write(data)
if payload_size != compressed_file_size:
raise Exception(
"Decompression failed, size requested=0x{:x} but got=0x{:x}".format(
compressed_file_size, payload_size
)
)
if file_size is not None and result_size != file_size:
raise Exception(
"Decompression failed, outsize requested=0x{:x} but got=0x{:x}".format(
file_size, result_size
)
)
def get_decompressor(self, magic: int) -> Decompressor:
if magic not in self.DECOMPRESSION_LOOKUP.keys():
raise Exception("No decompression mapping defined for magic 0x{:x}".format(magic))
key = self.DECOMPRESSION_LOOKUP[magic]
if key not in self.decompressors.keys():
raise Exception("No decompression implementation found for key {}".format(key))
return self.decompressors[key]
| true | true |
f73725366c46e1b0dca88e3d1b09147a23966eaf | 10,354 | py | Python | tensorflow/python/data/experimental/kernel_tests/prefetch_to_device_test.py | knightvishal/tensorflow | 5d3dd19b7146d954fc1b4e9e44e9881e75d363c1 | [
"Apache-2.0"
] | 52 | 2018-11-12T06:39:35.000Z | 2022-03-08T05:31:27.000Z | tensorflow/python/data/experimental/kernel_tests/prefetch_to_device_test.py | knightvishal/tensorflow | 5d3dd19b7146d954fc1b4e9e44e9881e75d363c1 | [
"Apache-2.0"
] | 2 | 2018-12-04T08:35:40.000Z | 2020-10-22T16:17:39.000Z | tensorflow/python/data/experimental/kernel_tests/prefetch_to_device_test.py | knightvishal/tensorflow | 5d3dd19b7146d954fc1b4e9e44e9881e75d363c1 | [
"Apache-2.0"
] | 17 | 2019-03-11T01:17:16.000Z | 2022-02-21T00:44:47.000Z | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.experimental.prefetch_to_device()`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.data.experimental.ops import prefetching_ops
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
class PrefetchToDeviceTest(test_base.DatasetTestBase):
def testPrefetchToDevice(self):
host_dataset = dataset_ops.Dataset.range(10)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/cpu:1"))
# NOTE(mrry): This device block creates the "host" dataset and iterator on
# /cpu:0, and ensures that the prefetching is across devices. In typical use
# this would not be necessary, because the GPU device would not support any
# of the dataset-related ops.
with ops.device("/cpu:0"):
iterator = device_dataset.make_one_shot_iterator()
self.assertEqual(host_dataset.output_types, device_dataset.output_types)
self.assertEqual(host_dataset.output_types, iterator.output_types)
self.assertEqual(host_dataset.output_shapes, device_dataset.output_shapes)
self.assertEqual(host_dataset.output_shapes, iterator.output_shapes)
self.assertEqual(host_dataset.output_classes, device_dataset.output_classes)
self.assertEqual(host_dataset.output_classes, iterator.output_classes)
next_element = iterator.get_next()
self.assertEqual(dtypes.int64, next_element.dtype)
self.assertEqual([], next_element.shape)
worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
with self.test_session(config=worker_config) as sess:
for i in range(10):
self.assertEqual(i, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchToSameDevice(self):
host_dataset = dataset_ops.Dataset.range(10)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device(
"/job:localhost/replica:0/task:0/device:CPU:0"))
# NOTE(mrry): This device block creates the "host" dataset and iterator on
# /cpu:0, and ensures that the prefetching is across devices. In typical use
# this would not be necessary, because the GPU device would not support any
# of the dataset-related ops.
with ops.device("/cpu:0"):
iterator = device_dataset.make_one_shot_iterator()
self.assertEqual(host_dataset.output_types, device_dataset.output_types)
self.assertEqual(host_dataset.output_types, iterator.output_types)
self.assertEqual(host_dataset.output_shapes, device_dataset.output_shapes)
self.assertEqual(host_dataset.output_shapes, iterator.output_shapes)
self.assertEqual(host_dataset.output_classes, device_dataset.output_classes)
self.assertEqual(host_dataset.output_classes, iterator.output_classes)
next_element = iterator.get_next()
self.assertEqual(dtypes.int64, next_element.dtype)
self.assertEqual([], next_element.shape)
with self.cached_session() as sess:
for i in range(10):
self.assertEqual(i, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchDictToDevice(self):
host_dataset = dataset_ops.Dataset.range(10).map(lambda x: {"a": x})
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/cpu:1"))
# NOTE(mrry): This device block creates the "host" dataset and iterator on
# /cpu:0, and ensures that the prefetching is across devices. In typical use
# this would not be necessary, because the GPU device would not support any
# of the dataset-related ops.
with ops.device("/cpu:0"):
iterator = device_dataset.make_one_shot_iterator()
self.assertEqual(host_dataset.output_types, device_dataset.output_types)
self.assertEqual(host_dataset.output_types, iterator.output_types)
self.assertEqual(host_dataset.output_shapes, device_dataset.output_shapes)
self.assertEqual(host_dataset.output_shapes, iterator.output_shapes)
self.assertEqual(host_dataset.output_classes, device_dataset.output_classes)
self.assertEqual(host_dataset.output_classes, iterator.output_classes)
next_element = iterator.get_next()
self.assertEqual(dtypes.int64, next_element["a"].dtype)
self.assertEqual([], next_element["a"].shape)
worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
with self.test_session(config=worker_config) as sess:
for i in range(10):
self.assertEqual({"a": i}, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchSparseTensorsToDevice(self):
def make_tensor(i):
return sparse_tensor.SparseTensorValue(
indices=[[0, 0]], values=(i*[1]), dense_shape=[2, 2])
host_dataset = dataset_ops.Dataset.range(10).map(make_tensor)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/cpu:1"))
# NOTE(mrry): This device block creates the "host" dataset and iterator on
# /cpu:0, and ensures that the prefetching is across devices. In typical use
# this would not be necessary, because the GPU device would not support any
# of the dataset-related ops.
with ops.device("/cpu:0"):
iterator = device_dataset.make_one_shot_iterator()
self.assertEqual(host_dataset.output_types, device_dataset.output_types)
self.assertEqual(host_dataset.output_types, iterator.output_types)
self.assertEqual(host_dataset.output_shapes, device_dataset.output_shapes)
self.assertEqual(host_dataset.output_shapes, iterator.output_shapes)
self.assertEqual(host_dataset.output_classes, device_dataset.output_classes)
self.assertEqual(host_dataset.output_classes, iterator.output_classes)
next_element = iterator.get_next()
self.assertEqual(dtypes.int64, next_element.dtype)
worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
with self.test_session(config=worker_config) as sess:
for i in range(10):
actual = sess.run(next_element)
self.assertAllEqual([i], actual.values)
self.assertAllEqual([[0, 0]], actual.indices)
self.assertAllEqual([2, 2], actual.dense_shape)
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchToDeviceGpu(self):
if not test_util.is_gpu_available():
self.skipTest("No GPU available")
host_dataset = dataset_ops.Dataset.range(10)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/gpu:0"))
iterator = device_dataset.make_one_shot_iterator()
next_element = iterator.get_next()
with self.cached_session() as sess:
for i in range(10):
self.assertEqual(i, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchToDeviceWithReInit(self):
host_dataset = dataset_ops.Dataset.range(10)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/cpu:1"))
# NOTE(mrry): This device block creates the "host" dataset and iterator on
# /cpu:0, and ensures that the prefetching is across devices. In typical use
# this would not be necessary, because the GPU device would not support any
# of the dataset-related ops.
with ops.device("/cpu:0"):
iterator = device_dataset.make_initializable_iterator()
self.assertEqual(host_dataset.output_types, device_dataset.output_types)
self.assertEqual(host_dataset.output_types, iterator.output_types)
self.assertEqual(host_dataset.output_shapes, device_dataset.output_shapes)
self.assertEqual(host_dataset.output_shapes, iterator.output_shapes)
self.assertEqual(host_dataset.output_classes, device_dataset.output_classes)
self.assertEqual(host_dataset.output_classes, iterator.output_classes)
next_element = iterator.get_next()
self.assertEqual(dtypes.int64, next_element.dtype)
self.assertEqual([], next_element.shape)
worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
with self.test_session(config=worker_config) as sess:
sess.run(iterator.initializer)
for i in range(5):
self.assertEqual(i, sess.run(next_element))
sess.run(iterator.initializer)
for i in range(10):
self.assertEqual(i, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchToDeviceGpuWithReInit(self):
if not test_util.is_gpu_available():
self.skipTest("No GPU available")
host_dataset = dataset_ops.Dataset.range(10)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/gpu:0"))
iterator = device_dataset.make_initializable_iterator()
next_element = iterator.get_next()
with self.cached_session() as sess:
sess.run(iterator.initializer)
for i in range(5):
self.assertEqual(i, sess.run(next_element))
sess.run(iterator.initializer)
for i in range(10):
self.assertEqual(i, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
if __name__ == "__main__":
test.main()
| 44.059574 | 80 | 0.745026 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.data.experimental.ops import prefetching_ops
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
class PrefetchToDeviceTest(test_base.DatasetTestBase):
def testPrefetchToDevice(self):
host_dataset = dataset_ops.Dataset.range(10)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/cpu:1"))
with ops.device("/cpu:0"):
iterator = device_dataset.make_one_shot_iterator()
self.assertEqual(host_dataset.output_types, device_dataset.output_types)
self.assertEqual(host_dataset.output_types, iterator.output_types)
self.assertEqual(host_dataset.output_shapes, device_dataset.output_shapes)
self.assertEqual(host_dataset.output_shapes, iterator.output_shapes)
self.assertEqual(host_dataset.output_classes, device_dataset.output_classes)
self.assertEqual(host_dataset.output_classes, iterator.output_classes)
next_element = iterator.get_next()
self.assertEqual(dtypes.int64, next_element.dtype)
self.assertEqual([], next_element.shape)
worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
with self.test_session(config=worker_config) as sess:
for i in range(10):
self.assertEqual(i, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchToSameDevice(self):
host_dataset = dataset_ops.Dataset.range(10)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device(
"/job:localhost/replica:0/task:0/device:CPU:0"))
with ops.device("/cpu:0"):
iterator = device_dataset.make_one_shot_iterator()
self.assertEqual(host_dataset.output_types, device_dataset.output_types)
self.assertEqual(host_dataset.output_types, iterator.output_types)
self.assertEqual(host_dataset.output_shapes, device_dataset.output_shapes)
self.assertEqual(host_dataset.output_shapes, iterator.output_shapes)
self.assertEqual(host_dataset.output_classes, device_dataset.output_classes)
self.assertEqual(host_dataset.output_classes, iterator.output_classes)
next_element = iterator.get_next()
self.assertEqual(dtypes.int64, next_element.dtype)
self.assertEqual([], next_element.shape)
with self.cached_session() as sess:
for i in range(10):
self.assertEqual(i, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchDictToDevice(self):
host_dataset = dataset_ops.Dataset.range(10).map(lambda x: {"a": x})
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/cpu:1"))
with ops.device("/cpu:0"):
iterator = device_dataset.make_one_shot_iterator()
self.assertEqual(host_dataset.output_types, device_dataset.output_types)
self.assertEqual(host_dataset.output_types, iterator.output_types)
self.assertEqual(host_dataset.output_shapes, device_dataset.output_shapes)
self.assertEqual(host_dataset.output_shapes, iterator.output_shapes)
self.assertEqual(host_dataset.output_classes, device_dataset.output_classes)
self.assertEqual(host_dataset.output_classes, iterator.output_classes)
next_element = iterator.get_next()
self.assertEqual(dtypes.int64, next_element["a"].dtype)
self.assertEqual([], next_element["a"].shape)
worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
with self.test_session(config=worker_config) as sess:
for i in range(10):
self.assertEqual({"a": i}, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchSparseTensorsToDevice(self):
def make_tensor(i):
return sparse_tensor.SparseTensorValue(
indices=[[0, 0]], values=(i*[1]), dense_shape=[2, 2])
host_dataset = dataset_ops.Dataset.range(10).map(make_tensor)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/cpu:1"))
with ops.device("/cpu:0"):
iterator = device_dataset.make_one_shot_iterator()
self.assertEqual(host_dataset.output_types, device_dataset.output_types)
self.assertEqual(host_dataset.output_types, iterator.output_types)
self.assertEqual(host_dataset.output_shapes, device_dataset.output_shapes)
self.assertEqual(host_dataset.output_shapes, iterator.output_shapes)
self.assertEqual(host_dataset.output_classes, device_dataset.output_classes)
self.assertEqual(host_dataset.output_classes, iterator.output_classes)
next_element = iterator.get_next()
self.assertEqual(dtypes.int64, next_element.dtype)
worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
with self.test_session(config=worker_config) as sess:
for i in range(10):
actual = sess.run(next_element)
self.assertAllEqual([i], actual.values)
self.assertAllEqual([[0, 0]], actual.indices)
self.assertAllEqual([2, 2], actual.dense_shape)
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchToDeviceGpu(self):
if not test_util.is_gpu_available():
self.skipTest("No GPU available")
host_dataset = dataset_ops.Dataset.range(10)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/gpu:0"))
iterator = device_dataset.make_one_shot_iterator()
next_element = iterator.get_next()
with self.cached_session() as sess:
for i in range(10):
self.assertEqual(i, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchToDeviceWithReInit(self):
host_dataset = dataset_ops.Dataset.range(10)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/cpu:1"))
with ops.device("/cpu:0"):
iterator = device_dataset.make_initializable_iterator()
self.assertEqual(host_dataset.output_types, device_dataset.output_types)
self.assertEqual(host_dataset.output_types, iterator.output_types)
self.assertEqual(host_dataset.output_shapes, device_dataset.output_shapes)
self.assertEqual(host_dataset.output_shapes, iterator.output_shapes)
self.assertEqual(host_dataset.output_classes, device_dataset.output_classes)
self.assertEqual(host_dataset.output_classes, iterator.output_classes)
next_element = iterator.get_next()
self.assertEqual(dtypes.int64, next_element.dtype)
self.assertEqual([], next_element.shape)
worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
with self.test_session(config=worker_config) as sess:
sess.run(iterator.initializer)
for i in range(5):
self.assertEqual(i, sess.run(next_element))
sess.run(iterator.initializer)
for i in range(10):
self.assertEqual(i, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testPrefetchToDeviceGpuWithReInit(self):
if not test_util.is_gpu_available():
self.skipTest("No GPU available")
host_dataset = dataset_ops.Dataset.range(10)
device_dataset = host_dataset.apply(
prefetching_ops.prefetch_to_device("/gpu:0"))
iterator = device_dataset.make_initializable_iterator()
next_element = iterator.get_next()
with self.cached_session() as sess:
sess.run(iterator.initializer)
for i in range(5):
self.assertEqual(i, sess.run(next_element))
sess.run(iterator.initializer)
for i in range(10):
self.assertEqual(i, sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
if __name__ == "__main__":
test.main()
| true | true |
f737255c92e3fa92498148c274c10a991fa13458 | 319 | py | Python | tuna_service_sdk/client.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | 5 | 2019-07-31T04:11:05.000Z | 2021-01-07T03:23:20.000Z | tuna_service_sdk/client.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | null | null | null | tuna_service_sdk/client.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import tuna_service_sdk.api.test_plan.test_plan_client
class Client(object):
def __init__(self, server_ip="", server_port=0, service_name=""):
self.test_plan = tuna_service_sdk.api.test_plan.test_plan_client.TestPlanClient(server_ip, server_port, service_name)
| 26.583333 | 125 | 0.714734 |
import tuna_service_sdk.api.test_plan.test_plan_client
class Client(object):
def __init__(self, server_ip="", server_port=0, service_name=""):
self.test_plan = tuna_service_sdk.api.test_plan.test_plan_client.TestPlanClient(server_ip, server_port, service_name)
| true | true |
f737263703d80fae6b4fd80e4e96de3d1c6c3b3e | 20 | py | Python | go/dlgo/mcts/__init__.py | huynq55/alpha-zero-general | 7c7b8a9a09b79178157ec6b6d379a071c9f0994a | [
"MIT"
] | 1 | 2021-04-20T23:01:22.000Z | 2021-04-20T23:01:22.000Z | go/dlgo/mcts/__init__.py | huynq55/alpha-zero-general | 7c7b8a9a09b79178157ec6b6d379a071c9f0994a | [
"MIT"
] | null | null | null | go/dlgo/mcts/__init__.py | huynq55/alpha-zero-general | 7c7b8a9a09b79178157ec6b6d379a071c9f0994a | [
"MIT"
] | 1 | 2020-06-11T21:55:31.000Z | 2020-06-11T21:55:31.000Z | from .mcts import *
| 10 | 19 | 0.7 | from .mcts import *
| true | true |
f73726db5f586c778ad0f7586b07281ffda1c0db | 3,676 | py | Python | sdks/python/apache_beam/examples/cookbook/group_with_coder_test.py | kjmrknsn/beam | 6a6adc8433deff10a5594bbf77cc9148ce0a951a | [
"Apache-2.0"
] | null | null | null | sdks/python/apache_beam/examples/cookbook/group_with_coder_test.py | kjmrknsn/beam | 6a6adc8433deff10a5594bbf77cc9148ce0a951a | [
"Apache-2.0"
] | null | null | null | sdks/python/apache_beam/examples/cookbook/group_with_coder_test.py | kjmrknsn/beam | 6a6adc8433deff10a5594bbf77cc9148ce0a951a | [
"Apache-2.0"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Test for the custom coders example."""
# pytype: skip-file
from __future__ import absolute_import
import logging
import tempfile
import unittest
from apache_beam.examples.cookbook import group_with_coder
from apache_beam.testing.util import open_shards
# Patch group_with_coder.PlayerCoder.decode(). To test that the PlayerCoder was
# used, we do not strip the prepended 'x:' string when decoding a Player object.
group_with_coder.PlayerCoder.decode = lambda self, s: group_with_coder.Player( # type: ignore[assignment]
s.decode('utf-8'))
class GroupWithCoderTest(unittest.TestCase):
SAMPLE_RECORDS = [
'joe,10', 'fred,3', 'mary,7',
'joe,20', 'fred,6', 'ann,5',
'joe,30', 'ann,10', 'mary,1']
def create_temp_file(self, records):
with tempfile.NamedTemporaryFile(delete=False) as f:
for record in records:
f.write(b'%s\n' % record.encode('utf-8'))
return f.name
def test_basics_with_type_check(self):
# Run the workflow with pipeline_type_check option. This will make sure
# the typehints associated with all transforms will have non-default values
# and therefore any custom coders will be used. In our case we want to make
# sure the coder for the Player class will be used.
temp_path = self.create_temp_file(self.SAMPLE_RECORDS)
group_with_coder.run(
['--input=%s*' % temp_path,
'--output=%s.result' % temp_path],
save_main_session=False)
# Parse result file and compare.
results = []
with open_shards(temp_path + '.result-*-of-*') as result_file:
for line in result_file:
name, points = line.split(',')
results.append((name, int(points)))
logging.info('result: %s', results)
self.assertEqual(
sorted(results),
sorted([('x:ann', 15), ('x:fred', 9), ('x:joe', 60), ('x:mary', 8)]))
def test_basics_without_type_check(self):
# Run the workflow without pipeline_type_check option. This will make sure
# the typehints associated with all transforms will have default values and
# therefore any custom coders will not be used. The default coder (pickler)
# will be used instead.
temp_path = self.create_temp_file(self.SAMPLE_RECORDS)
group_with_coder.run(
['--no_pipeline_type_check',
'--input=%s*' % temp_path,
'--output=%s.result' % temp_path],
save_main_session=False)
# Parse result file and compare.
results = []
with open_shards(temp_path + '.result-*-of-*') as result_file:
for line in result_file:
name, points = line.split(',')
results.append((name, int(points)))
logging.info('result: %s', results)
self.assertEqual(
sorted(results),
sorted([('ann', 15), ('fred', 9), ('joe', 60), ('mary', 8)]))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
| 37.896907 | 106 | 0.691785 |
from __future__ import absolute_import
import logging
import tempfile
import unittest
from apache_beam.examples.cookbook import group_with_coder
from apache_beam.testing.util import open_shards
group_with_coder.PlayerCoder.decode = lambda self, s: group_with_coder.Player(
s.decode('utf-8'))
class GroupWithCoderTest(unittest.TestCase):
SAMPLE_RECORDS = [
'joe,10', 'fred,3', 'mary,7',
'joe,20', 'fred,6', 'ann,5',
'joe,30', 'ann,10', 'mary,1']
def create_temp_file(self, records):
with tempfile.NamedTemporaryFile(delete=False) as f:
for record in records:
f.write(b'%s\n' % record.encode('utf-8'))
return f.name
def test_basics_with_type_check(self):
temp_path = self.create_temp_file(self.SAMPLE_RECORDS)
group_with_coder.run(
['--input=%s*' % temp_path,
'--output=%s.result' % temp_path],
save_main_session=False)
results = []
with open_shards(temp_path + '.result-*-of-*') as result_file:
for line in result_file:
name, points = line.split(',')
results.append((name, int(points)))
logging.info('result: %s', results)
self.assertEqual(
sorted(results),
sorted([('x:ann', 15), ('x:fred', 9), ('x:joe', 60), ('x:mary', 8)]))
def test_basics_without_type_check(self):
temp_path = self.create_temp_file(self.SAMPLE_RECORDS)
group_with_coder.run(
['--no_pipeline_type_check',
'--input=%s*' % temp_path,
'--output=%s.result' % temp_path],
save_main_session=False)
results = []
with open_shards(temp_path + '.result-*-of-*') as result_file:
for line in result_file:
name, points = line.split(',')
results.append((name, int(points)))
logging.info('result: %s', results)
self.assertEqual(
sorted(results),
sorted([('ann', 15), ('fred', 9), ('joe', 60), ('mary', 8)]))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
| true | true |
f73729af353c6576a883d5caa43c247c40fa9b99 | 302 | py | Python | h2o-py/tests/testdir_misc/pyunit_https_import.py | PawarPawan/h2o-v3 | cf569a538c9e2ec16ba9fc1a75d14beda8f40c18 | [
"Apache-2.0"
] | null | null | null | h2o-py/tests/testdir_misc/pyunit_https_import.py | PawarPawan/h2o-v3 | cf569a538c9e2ec16ba9fc1a75d14beda8f40c18 | [
"Apache-2.0"
] | null | null | null | h2o-py/tests/testdir_misc/pyunit_https_import.py | PawarPawan/h2o-v3 | cf569a538c9e2ec16ba9fc1a75d14beda8f40c18 | [
"Apache-2.0"
] | null | null | null | import sys
sys.path.insert(1, "../../")
import h2o
def https_import(ip,port):
url = "https://s3.amazonaws.com/h2o-public-test-data/smalldata/prostate/prostate.csv.zip"
aa = h2o.import_file(path=url)
aa.show()
if __name__ == "__main__":
h2o.run_test(sys.argv, https_import)
| 20.133333 | 93 | 0.662252 | import sys
sys.path.insert(1, "../../")
import h2o
def https_import(ip,port):
url = "https://s3.amazonaws.com/h2o-public-test-data/smalldata/prostate/prostate.csv.zip"
aa = h2o.import_file(path=url)
aa.show()
if __name__ == "__main__":
h2o.run_test(sys.argv, https_import)
| true | true |
f73729b48f8dc18cff3a5092348a5588e7113e79 | 635 | py | Python | bin/rst2xml.py | ammarkhann/FinalSeniorCode | cd9a7ff34fc43f230bf1f3631115fe00e57ec98a | [
"MIT"
] | null | null | null | bin/rst2xml.py | ammarkhann/FinalSeniorCode | cd9a7ff34fc43f230bf1f3631115fe00e57ec98a | [
"MIT"
] | null | null | null | bin/rst2xml.py | ammarkhann/FinalSeniorCode | cd9a7ff34fc43f230bf1f3631115fe00e57ec98a | [
"MIT"
] | null | null | null | #!/Users/ammarkhan/Desktop/seniorproject/bin/python
# $Id: rst2xml.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing Docutils XML.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates Docutils-native XML from standalone '
'reStructuredText sources. ' + default_description)
publish_cmdline(writer_name='xml', description=description)
| 26.458333 | 70 | 0.744882 |
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates Docutils-native XML from standalone '
'reStructuredText sources. ' + default_description)
publish_cmdline(writer_name='xml', description=description)
| true | true |
f7372b8d838f5df538e72c052a6c4aaac3a98b94 | 2,181 | py | Python | hacker/qqpy/bin/painter.py | sanyueyuxincao/web-crawling | dc134bd6e23572a3ebfd851d0ffb6dd84cb16c1f | [
"MIT"
] | null | null | null | hacker/qqpy/bin/painter.py | sanyueyuxincao/web-crawling | dc134bd6e23572a3ebfd851d0ffb6dd84cb16c1f | [
"MIT"
] | null | null | null | hacker/qqpy/bin/painter.py | sanyueyuxincao/web-crawling | dc134bd6e23572a3ebfd851d0ffb6dd84cb16c1f | [
"MIT"
] | 2 | 2019-05-19T08:12:45.000Z | 2021-08-28T07:16:42.000Z | #!/Users/het/Desktop/hacker/qqpy/bin/python3.6
#
# The Python Imaging Library
# $Id$
#
# this demo script illustrates pasting into an already displayed
# photoimage. note that the current version of Tk updates the whole
# image every time we paste, so to get decent performance, we split
# the image into a set of tiles.
#
import sys
if sys.version_info[0] > 2:
import tkinter
else:
import Tkinter as tkinter
from PIL import Image, ImageTk
#
# painter widget
class PaintCanvas(tkinter.Canvas):
def __init__(self, master, image):
tkinter.Canvas.__init__(self, master,
width=image.size[0], height=image.size[1])
# fill the canvas
self.tile = {}
self.tilesize = tilesize = 32
xsize, ysize = image.size
for x in range(0, xsize, tilesize):
for y in range(0, ysize, tilesize):
box = x, y, min(xsize, x+tilesize), min(ysize, y+tilesize)
tile = ImageTk.PhotoImage(image.crop(box))
self.create_image(x, y, image=tile, anchor=tkinter.NW)
self.tile[(x, y)] = box, tile
self.image = image
self.bind("<B1-Motion>", self.paint)
def paint(self, event):
xy = event.x - 10, event.y - 10, event.x + 10, event.y + 10
im = self.image.crop(xy)
# process the image in some fashion
im = im.convert("L")
self.image.paste(im, xy)
self.repair(xy)
def repair(self, box):
# update canvas
dx = box[0] % self.tilesize
dy = box[1] % self.tilesize
for x in range(box[0]-dx, box[2]+1, self.tilesize):
for y in range(box[1]-dy, box[3]+1, self.tilesize):
try:
xy, tile = self.tile[(x, y)]
tile.paste(self.image.crop(xy))
except KeyError:
pass # outside the image
self.update_idletasks()
#
# main
if len(sys.argv) != 2:
print("Usage: painter file")
sys.exit(1)
root = tkinter.Tk()
im = Image.open(sys.argv[1])
if im.mode != "RGB":
im = im.convert("RGB")
PaintCanvas(root, im).pack()
root.mainloop()
| 25.658824 | 74 | 0.573132 |
import sys
if sys.version_info[0] > 2:
import tkinter
else:
import Tkinter as tkinter
from PIL import Image, ImageTk
class PaintCanvas(tkinter.Canvas):
def __init__(self, master, image):
tkinter.Canvas.__init__(self, master,
width=image.size[0], height=image.size[1])
self.tile = {}
self.tilesize = tilesize = 32
xsize, ysize = image.size
for x in range(0, xsize, tilesize):
for y in range(0, ysize, tilesize):
box = x, y, min(xsize, x+tilesize), min(ysize, y+tilesize)
tile = ImageTk.PhotoImage(image.crop(box))
self.create_image(x, y, image=tile, anchor=tkinter.NW)
self.tile[(x, y)] = box, tile
self.image = image
self.bind("<B1-Motion>", self.paint)
def paint(self, event):
xy = event.x - 10, event.y - 10, event.x + 10, event.y + 10
im = self.image.crop(xy)
im = im.convert("L")
self.image.paste(im, xy)
self.repair(xy)
def repair(self, box):
dx = box[0] % self.tilesize
dy = box[1] % self.tilesize
for x in range(box[0]-dx, box[2]+1, self.tilesize):
for y in range(box[1]-dy, box[3]+1, self.tilesize):
try:
xy, tile = self.tile[(x, y)]
tile.paste(self.image.crop(xy))
except KeyError:
pass
self.update_idletasks()
if len(sys.argv) != 2:
print("Usage: painter file")
sys.exit(1)
root = tkinter.Tk()
im = Image.open(sys.argv[1])
if im.mode != "RGB":
im = im.convert("RGB")
PaintCanvas(root, im).pack()
root.mainloop()
| true | true |
f7372c03657c52d022a2eeeff1c08b2ab274c993 | 829 | py | Python | squirrely_feed/aggregor/migrations/0001_initial.py | Circles24/squirrely-feed | 7de0504b39ffb937af4a653a64b8b0df98f2a75d | [
"MIT"
] | 4 | 2020-07-13T08:24:02.000Z | 2022-03-26T10:21:48.000Z | squirrely_feed/aggregor/migrations/0001_initial.py | Circles24/squirrely-feed | 7de0504b39ffb937af4a653a64b8b0df98f2a75d | [
"MIT"
] | null | null | null | squirrely_feed/aggregor/migrations/0001_initial.py | Circles24/squirrely-feed | 7de0504b39ffb937af4a653a64b8b0df98f2a75d | [
"MIT"
] | null | null | null | # Generated by Django 3.1.3 on 2020-11-18 06:50
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Article',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author', models.CharField(max_length=200, null=True)),
('title', models.CharField(max_length=500)),
('body', models.TextField()),
('source', models.CharField(max_length=200)),
('url', models.CharField(max_length=200)),
('created_at', models.DateTimeField(default=datetime.datetime.now)),
],
),
]
| 29.607143 | 114 | 0.574186 |
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Article',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author', models.CharField(max_length=200, null=True)),
('title', models.CharField(max_length=500)),
('body', models.TextField()),
('source', models.CharField(max_length=200)),
('url', models.CharField(max_length=200)),
('created_at', models.DateTimeField(default=datetime.datetime.now)),
],
),
]
| true | true |
f7372c26e075cc73dcde9614e08d881c17b7d965 | 57,288 | py | Python | page.py | Alekseybykov126/newtest | 1fab9756609bbb496ed2cd2363941f8039b27711 | [
"Apache-2.0"
] | null | null | null | page.py | Alekseybykov126/newtest | 1fab9756609bbb496ed2cd2363941f8039b27711 | [
"Apache-2.0"
] | null | null | null | page.py | Alekseybykov126/newtest | 1fab9756609bbb496ed2cd2363941f8039b27711 | [
"Apache-2.0"
] | null | null | null | from lib2to3.pgen2 import driver
#import requests
#import self as self
from selenium.webdriver.chrome import options
from selenium.webdriver.common.action_chains import ActionChains
from selenium import webdriver
from bs4 import BeautifulSoup
from selenium import webdriver
#from selenium.webdriver.firefox.webdriver import Webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from datetime import datetime
import logging
from selenium.webdriver.support.select import Select
import pytest
from selenium.webdriver.remote.command import Command
import time
from selenium import webdriver
import json
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
import logging
import urllib.request
import os
from time import sleep
from sys import exit
import random
SLEEP_SHORT = 4
SLEEP_MEDIUM = 15
SLEEP_LONG = 20
class BasePage(object):
def __init__(self, driver):
self.driver = driver
class ResultPage(BasePage):
    """Read-only checks: parse driver.page_source with BeautifulSoup and
    return elements/attributes for assertions in tests."""

    def find_link(self, tag_, class_):
        """Find the confirming element: first <tag_> with CSS class *class_*.

        Returns a bs4 Tag, or None when nothing matches.
        """
        # NOTE(review): xpath is built but unused because the wait below is
        # commented out.
        path: str = './/ttag[@class="cclass"]'
        path_1 = path.replace('ttag', tag_)
        xpath = path_1.replace('cclass', class_)
        # page.waitForElementVisible(xpath, 7)
        time.sleep(SLEEP_SHORT)
        table = self.driver.page_source
        soup = BeautifulSoup(table, 'html.parser')
        # print('Это суп', soup)
        ls = soup.find(tag_, class_)
        # print(ls)
        return ls

    def find_all_link(self, tag_, class_):
        """Return the list of all <tag_> elements with CSS class *class_*."""
        page = MainPage(self.driver)
        path = './/ttag[@class="cclass"]'
        path_1 = path.replace('ttag', tag_)
        xpath = path_1.replace('cclass', class_)
        page.waitForElementVisible(xpath, 7)
        time.sleep(SLEEP_SHORT)
        table = self.driver.page_source
        soup = BeautifulSoup(table, 'html.parser')
        ts = soup.find_all(tag_, class_)
        # print(ts)
        return ts

    def find_x(self, tag_, class_):  # slides
        """Return the 'clip_name' attribute of the <a> child of the match."""
        # print(self, tag_, class_) - removed: printed a noisy line
        table = self.driver.page_source
        soup = BeautifulSoup(table, 'html.parser')
        ts = soup.find(tag_, class_)
        ls = ts.find('a').get('clip_name')  # this is the child; test_case_1 passes the parent
        # print(ls)
        return ls

    def find_y(self, tag_, class_):  # year 1 (left slider handle)
        # NOTE(review): .get() returns an attribute value (str or None), which
        # has no .getText(); this chain looks broken - verify before use.
        print(self, tag_, class_)
        table = self.driver.page_source
        soup = BeautifulSoup(table, 'html.parser')
        ts = soup.find(tag_, class_)
        ls = ts.find('button').get('filter__item filter__item_year').getText()
        # print(ls) '//button[text()="Outliers"]'
        return ls

    def find_n(self, tag_, class_):  # year 2 (right slider handle)
        # NOTE(review): same suspicious .get(...).getText() chain as find_y.
        print(self, tag_, class_)
        table = self.driver.page_source
        soup = BeautifulSoup(table, 'html.parser')
        ts = soup.find(tag_, class_)
        ls = ts.find('button').get('years-to').getText()
        # print(ls)
        return ls

    def find_tag(self, tag_):
        """Return the first element with tag name *tag_* (long pre-wait)."""
        # print(self, tag_, class_)
        time.sleep(SLEEP_LONG)
        table = self.driver.page_source
        soup = BeautifulSoup(table, 'html.parser')
        # print('Это суп', soup)
        ls = soup.find(tag_)
        # print(ls)
        return ls

    def find_all_tag(self, tag_):
        """Return all elements with tag name *tag_* (short pre-wait)."""
        # print(self, tag_, class_)
        time.sleep(SLEEP_SHORT)
        table = self.driver.page_source
        soup = BeautifulSoup(table, 'html.parser')
        # print('Это суп', soup)
        ls = soup.find_all(tag_)
        # print(ls)
        return ls

    def simple_find(self, xpath, number):
        """Return the *number*-th WebElement matching *xpath* (no wait)."""
        ls = self.driver.find_elements_by_xpath(xpath)[number]
        # print(ls)
        return ls

    def visible_xpath(self, xpath):
        """Return an (unevaluated) presence expected-condition for *xpath*."""
        time.sleep(SLEEP_SHORT)
        return EC.presence_of_element_located((By.XPATH, xpath))
class Admin(BasePage):
    """Admin-panel / mailbox actions, dispatched by Russian-named table keys."""

    def click_f(self, name, stap):
        """Look up *name* in the click table and perform that click.

        name: key in self.adminx; stap: step number, used only for logging.
        NOTE(review): the dispatch table is rebuilt on every call.
        """
        page = MainPage(self.driver)
        step = str(stap)
        self.adminx = {
            'Клик_In_Progress_статус': {'func': page.click_xpath, 'path': '//*[@id="issue_status_id"]/option[4]'},
            'Клик_удалить_фильм_Малыш': {'func': page.click_xpath, 'path': './/a[text()="Прокат фильма (QA version)"]'},
            # 'Клик_подтвердить': {'func':page.click_a}
            'Клик_Принять': {'func': page.click_xpath, 'path': './/*[@id="issue-form"]/input[6]'},
            'Клик_Ответить': {'func': page.click_xpath, 'path': './/span[@class="b-letter__foot__tab"]'},
            'Клик_Отправить_письмо': {'func': page.click_xpath,'path': '//*[@id="b-toolbar__right"]/div[3]/div[2]/div/div[1]/div[1]/div/div[1]/span'},
            'Клик_Входящие_mail_ru': {'func': page.click_xpath, 'path': '//*[@id="b-nav_folders"]/div/div[1]'},
            'Клик_Чекбокс_Входящие_mail_ru': {'func': page.click_xpath, 'path': './/div[@class="b-checkbox__box"]'},
            'Клик_Удалить_письма_из_mail_ru': {'func': page.click_xpath,'path': '//*[@id="b-toolbar__right"]/div[2]/div/div[2]/div[2]/div/div[1]/span'},
            'Клик_список_статус': {'func': page.click_id, 'path': 'issue_status_id'},
            'Админка_клик_найти': {'func': page.click_id, 'path': 'id-submit-search'},
            'Админка_клик_чекбокс_1': {'func': page.click_id, 'path': 'action-toggle'},
            'Админка_Действие': {'func': page.click_name, 'path': 'action'},
            'Админка_Выбор_Удалить_пользователя': {'func': page.click_css,'path': '#action_block > label > select > option:nth-child(14)'},
            'Админка_Выполнить': {'func': page.click_name, 'path': 'index'},
            # 'Админка_подтвердить':{'func':page.click_css, 'path':'#content-main > form > input[type="submit"]'},
            'Админка_подтвердить': {'func': page.click_xpath, 'path': '//input[@value="Да, я уверен"]'},
            'Админка_большая_красная_кнопка': {'func': page.click_css, 'path': 'body > div > section > a'},
            'Клик_первое_письмо': {'func': page.click_xpath, 'path': './/a[@class="js-href b-datalist__item__link"]'},
            'Клик_второе_письмо': {'func': page.click_s_xpath, 'path': './/a[@class="js-href b-datalist__item__link"]','index': 1},
            'Переключение_1_в_iframe': {'func': page.click_switch_to, 'path': 'iframe', 'index': 1},
            'Возврат_фокуса_из_iframe': {'func': page.driver.switch_to.default_content, 'path': 'None'},
            'Клик_Closed_статус': {'func': page.double, 'path_1': '//*[@id="issue_status_id"]/option[7]','path_2': './/option[@value="5"]'},
            # 'Профили_посетителей': {'func': page.click_xpath, 'path': './/a[text()="Профили посетителей"]'},
            'Профили_посетителей': {'func': page.click_xpath, 'path': './/a[@href="/admin/tvzavr_admin/customer/"]'},
        }
        # '': {'func': '', 'path': ''}
        self.args = self.adminx[name]
        self.func = self.args['func']  # callable taken from the table
        self.func(self.args)  # perform the action with its locator args
        page.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен')
#
#
# # these clicks have not been added to the table yet (((
#
class CardFilm(BasePage):
    """Film-card page actions, dispatched by Russian-named table keys."""

    def click_f(self, name, stap):
        """Look up *name* in the click table and perform that click.

        name: key in self.cardx; stap: step number, used only for logging.
        Fix: the 'Клик_иконки_фильма_в_избранном' entry used the key 'funk'
        instead of 'func', so dispatching it raised KeyError below.
        """
        page = MainPage(self.driver)
        step = str(stap)
        self.cardx = {
            'Клик_кнопки_просмотр_от_руб': {'func': page.click_xpath,'path': './/button[@class="clip-player__action button"]'},
            'Клик_кнопки_напрокат_SD-10р': {'func': page.click_xpath,'path': './/button[@class="tariffs__buy js-payment-info"]'},
            'Клик_кнопки_напрокат_SD-11.88р': {'func': page.click_xpath, 'path': './/button[@data-tariff-id="575"]'},
            'Клик_Личный_счёт': {'func': page.click_xpath,'path': './/a[@class="tabs__link js-tabs-link"][contains(., "Личный счёт")]'},
            'Клик_Оплатить_личный_счет': {'func': page.click_xpath,'path': './/button[@class="payment-cloudpayments__card button button_stretched js-buy-button"]'},
            'Клик_кнопки_В_избранное': {'func': page.click_xpath, 'path': './/button[@class="clip__action"]'},
            'Клик_кнопки_Убрать_из_избранного': {'func': page.click_xpath,'path': './/button[@class="clip__action clip__action_active"]'},
            # 'Клик_кнопки_Прокат': {'func': page.click_xpath, 'path': './/span[@class="tvz-currency tvz-currency-RUB"]'},
            'Клик_иконки_найденного_фильма': {'func': page.click_xpath, 'path': './/a[@href="/film/lunnyi-kamen/"]'},
            'Клик_первого_фильма': {'func': page.click_xpath, 'path': './/a[@class="card card_clip"]'},
            'Клик_иконки_избранного_фильма': {'func': page.click_xpath,'path': './/div[@class="clip-card__title tvz-overflow"]'},
            # BUGFIX: was {'funk': ...} - a typo that made self.args['func'] raise KeyError
            'Клик_иконки_фильма_в_избранном': {'func': page.click_xpath, 'path': './/a[@class="card card_clip"]'},
            'Клик_Play': {'func': page.click_xpath, 'path': '//div[@class="tvz-button tvz-bpb2"]'},
            'Клик_пауза': {'func': page.click_xpath, 'path': '//div[@class="tvz-button tvz-button_play"]'},
            'Клик_вкладки_Описание': {'func': page.click_xpath,'path': '//h2[@class="clip__subheading"][contains(., "Описание")]'},
            # 'Клик_вкладки_Комментарии': {'func': page.click_xpath, 'path': '//*[@id="page-content"]/div[2]/div[2]/div[4]/div/ul/li[4]/a'},
            'Клик_вкладки_Серии': {'func': page.click_xpath,'path': '//h2[@class="clip__subheading"][contains(., "Серии")]'},
            'Клик_2_ой_Серии_1_го_сезона': {'func': page.click_xpath,'path': '//div[@class="series-card__title"][contains(., "2 серия")]'},
            # NOTE: this key really ends with a space; callers rely on it
            'Клик_на_вкладку_Отзывы ': {'func': page.click_xpath, 'path': './/a[@data-target="clip-comments"]'},
            'Клик_на_вкладку_Трейлеры': {'func': page.click_xpath, 'path': '//a[@data-target="clip-trailers"]'},
            'Клик_на_вкладку_Награды': {'func': page.click_xpath, 'path': '//a[@data-target="clip-awards"]'},
            'Клик_на_вкладку_описание': {'func': page.click_xpath, 'path': '//a[@data-target="clip-info"]'},
            # '': {'func': '', 'path': ''}
        }
        self.args = self.cardx[name]
        self.func = self.args['func']  # callable taken from the table
        self.func(self.args)  # perform the click with its locator args
        page.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен')
class Profile(BasePage):
    """User-profile / registration actions, dispatched by Russian-named keys."""

    def click_f(self, name, step_int):
        """Look up *name* in the click table and perform that click.

        name: key in self.profx; step_int: step number, used only for logging.
        NOTE(review): `result` is created but never used in this method.
        """
        result = ResultPage(self.driver)
        page = MainPage(self.driver)
        step = str(step_int)
        self.profx = {
            'Клик_Зарегистрироваться': {'func': page.click_id, 'path': 'email-registration-submit'},
            'Клик_поиска_Лупа': {'func': page.click_id, 'path': 'header-search-button'},
            # subscriptions entry in the profile drop-down menu
            'Клик_Подписки': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Подписки")]'},
            'Клик_Выйти': {'func': page.click_xpath,'path': './/button[@class="profile-menu__logout js-profile-logout"]'},
            'Клик_Пополнить': {'func': page.click_xpath,'path': './/button[@class="cabinet-balance__replenish button button_stretched js-replenishment"]'},
            'Клик_Личный_счет': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Счет")]'},
            'Клик_Личный_счет_нового_пользователя': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link profile-menu__link_notified"][contains(., "Счет")]'},
            'Клик_Регистрация': {'func': page.click_xpath, 'path': './/a[text()="Регистрация"]'},
            'Клик_phone_Зарегистрироваться': {'func': page.click_css, 'path': '#register-submit'},
            'Клик_значок_пользователя': {'func': page.click_xpath,'path': './/button[@class="header__profile js-profile-menu"]'},
            'Клик_значок_нового_пользователя': {'func': page.click_xpath,'path': './/button[@class="header__profile header__profile_notified js-profile-menu"]'},
            'Клик_мои_фильмы': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Мои фильмы")]'},
            'Клик_крестик_всплывшего_окна_тройка': {'func': page.click_xpath,'path': './/button[@class="modal__close"]'},
            'Клик_Настройки_профиля': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Настройки")]'},
            'Клик_переход_в_настройки': {'func': page.click_xpath,'path': './/a[@class="tabs__link js-tabs-link"][contains(., "Настройки")]'},
            'Клик_день_рождения': {'func': page.click_id, 'path': 'birthday-day'},
            'Ввод_дня_рождения': {'func': page.click_css, 'path': '#birthday-day > option:nth-child(5)'},
            'Клик_месяц_рождения': {'func': page.click_id, 'path': 'birthday-month'},
            'Ввод_месяца_рождения': {'func': page.click_css, 'path': '#birthday-month > option:nth-child(5)'},
            'Клик_год_рождения': {'func': page.click_id, 'path': 'birthday-year'},
            'Ввод_года_рождения': {'func': page.click_xpath, 'path': './/option[@value="1990"]'},
            'Клик_выбран_пол': {'func': page.click_xpath,'path': './/span[@class="toggle__label"][contains(., "Мужской пол")]'},
            'Клик_Снятие_галочки_с_подписки': {'func': page.click_xpath,'path': './/span[text()="Да, я хочу получать подписку с обновлениями, акциями и подарками"]'},
            'Клик_Снятие_галочки_с_продолжения_просмотра': {'func': page.click_xpath,'path': './/span[text()="Продолжать просмотр с места остановки"]'},
            'Клик_Сохранить': {'func': page.click_xpath,'path': './/button[@class="cabinet-settings__button button button_stretched"][contains(., "Сохранить")]'},
            'Клик_Избранное': {'func': page.click_xpath, 'path': './/a[text()="Избранное"]'},
        }
        # Profile
        self.args = self.profx[name]
        self.func = self.args['func']  # callable taken from the table
        self.func(self.args)  # perform the click with its locator args
        page.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен')
class MainPage(BasePage):
def click_f(self, name, stap):
    """Look up *name* in the main-page click table and perform that click.

    name: key in self.pagex; stap: step number, used only for logging.
    NOTE(review): the dispatch table is rebuilt on every call.
    """
    step = str(stap)
    self.pagex = {
        'Клик_прокрутки_слайда_вправо': {'func': self.click_xpath,'path': './/button[@class="slider__navigation slider__navigation_next js-slider-navigation js-slider-navigation-next"]'},
        'Клик_прокрутки_слайда_влево': {'func': self.click_xpath,'path': './/button[@class="slider__navigation slider__navigation_prev js-slider-navigation js-slider-navigation-prev"]'},
        'Клик_поиска_Лупа': {'func': self.click_css, 'path': '#header-search-button'},
        'Клик_кнопки_крестик': {'func': self.click_xpath, 'path': './/button[@class="modal__close"]'},
        'Клик_Новинки': {'func': self.click_xpath, 'path': './/a[@href="/novinki/"]'},
        'Показать_еще': {'func': self.click_xpath,'path': './/button[@class="catalog__more button js-catalog-more"]'},
        # Subscriptions
        'Клик_Подписки': {'func': self.click_xpath, 'path': './/a[@title="Подписка tvzavr"]'},
        'Клик_Подписка_Отключи_рекламу': {'func': self.click_xpath,'path': './/a[@class="tabs__link js-tabs-link"][contains(., "«Отключи рекламу на tvzavr!»")]'},
        'Клик_купить_за_99р': {'func': self.click_xpath,'path': './/button[@class="subscriptions__button button button_dark js-payment-info"]'},
        'Клик_Бесплатно': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Бесплатно")]'},
        # Catalog and its sections
        'Клик_Каталог': {'func': self.click_xpath, 'path': './/a[@class="header__link"][contains(., "Каталог")]'},
        'Клик_Фильмы_в_каталоге': {'func': self.click_xpath,'path': './/button[@class="filter__category js-filter-category"][contains(., "Фильмы")]'},
        'Клик_Мультфильмы_в_каталоге': {'func': self.click_xpath,'path': './/button[@class="filter__category js-filter-category"][contains(., "Мультфильмы")]'},
        'Клик_Сериалы_в_каталоге': {'func': self.click_xpath,'path': './/button[@class="filter__category js-filter-category"][contains(., "Сериалы")]'},
        'Клик_Годы_выпуска': {'func': self.click_xpath,'path': './/button[@class="filter__subcategory js-filter-subcategory"][contains(., "Годы выпуска")]'},
        # NOTE(review): matching on an inline style value is very brittle
        'Выставление_год_левый': {'func': self.click_xpath, 'path': './/div[@style="left: 22.7642%;"]'},
        'Клик_Родительский_контроль': {'func': self.click_xpath, 'path': './/span[text()="Родительский контроль"]'},
        'Клик_Бесплатные': {'func': self.click_xpath, 'path': './/span[text()="Бесплатные"]'},
        # Countries
        'Клик_страны': {'func': self.click_xpath,'path': './/button[@class="filter__subcategory js-filter-subcategory"][contains(., "Страны")]'},
        'Клик_США': {'func': self.click_xpath, 'path': './/li[@data-filter-id="515"]'},
        'Клик_Германия': {'func': self.click_xpath, 'path': './/li[@data-tag-name="Германия"]'},
        'Клик_Южная_Корея': {'func': self.click_xpath, 'path': './/li[@data-filter-id="8789"]'},
        'Клик_Япония': {'func': self.click_xpath, 'path': './/li[@data-filter-id="3467"]'},
        'Клик_Испания': {'func': self.click_xpath, 'path': './/li[@data-filter-id="2600"]'},
        'Клик_Турция': {'func': self.click_xpath, 'path': './/li[@data-filter-id="5287"]'},
        'Клик_Россия': {'func': self.click_xpath, 'path': './/li[@data-filter-id="122"]'},
        # Genres
        'Клик_Жанры': {'func': self.click_xpath,'path': './/button[@class="filter__subcategory js-filter-subcategory"][contains(., "Жанры")]'},
        'Клик_боевик_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="690"]'},
        'Клик_комедия_жанр': {'func': self.click_xpath, 'path': './/li[@data-tag-name="Комедия"]'},
        'Клик_азиатский_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="21136"]'},
        'Клик_Советский_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="7320"]'},
        'Клик_приключения_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="702"]'},
        'Клик_Детектив_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="693"]'},
        'Клик_применить_фильтр': {'func': self.click_xpath,'path': './/button[@class="filter__apply button js-filter-apply"]'},
        'Клик_кнопки_просмотр_от_руб': {'func': self.click_xpath,'path': './/button[@class="clip-player__action button"]'},
        'Клик_Сериалы': {'func': self.click_xpath, 'path': './/a[text()="Сериалы"]'},
        # Match TV
        'Вход': {'func': self.click_xpath, 'path': './/button[@class="reset-button pm-gate__button"]'},
        'Вход2': {'func': self.click_xpath, 'path': './/button[@data-action="click->pm-auth#login"]'},
        # click "login" on the popup
        'Далее': {'func': self.click_xpath, 'path': './/button[@type="submit"]'},
        'Войти': {'func': self.click_xpath, 'path': './/button[@type="submit"]'},
        # Mail
        'Клик_Вход_через_Mailru': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_mr js-social-link"]'},
        'Клик_Войти_и_разрешить_Mailru': {'func': self.click_xpath,'path': './/button[@class="ui-button-main"][contains(., "Войти и разрешить")]'},
        # Facebook
        'Клик_Вход_через_FB': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_fb js-social-link"]'},
        # login-social__link login-social__link_fb js-login-social-link
        'Клик_Вход_через_VK': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_vk js-social-link"]'},
        'Клик_Вход_через_OK': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_ok js-social-link"]'},
        'Клик_Вход_через_G': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_gp js-social-link"]'},
        'Клик_Вход_FB': {'func': self.click_id, 'path': 'loginbutton'},
        'Клик_Вход_VK': {'func': self.click_id, 'path': 'install_allow'},
        'Клик_Вход_ОК': {'func': self.click_xpath, 'path': './/input[@class="button-pro __wide form-actions_yes"]'},
        'Снятие_галочки_чекбокса_запомнить_меня': {'func': self.click_xpath, 'path': './/span[@class="irc-vis"]'},
        'Клик_кнопки_Далее_Google': {'func': self.click_xpath,'path': './/span[@class="RveJvd snByac"][contains(., "Далее")]'},
        'Клик_1_Далее_Google': {'func': self.click_xpath,'path': './/span[@class="RveJvd snByac"][contains(., "Далее")]'},
        'Войти_auth': {'func': self.click_id, 'path': 'authorization-submit'},
        'По_номеру_телефона': {'func': self.click_xpath, 'path': './/a[@data-target="register-phone-tab"]'},
        # Collections
        'Клик_Подборки': {'func': self.click_xpath, 'path': './/a[@class="header__link"][contains(., "Подборки")]'},
        'Клик_Коллекции': {'func': self.click_xpath,'path': './/a[@class="filter__category"][contains(., "Коллекции")]'},
        'Клик_Подборки_партнеров': {'func': self.click_xpath,'path': './/a[@class="filter__category"][contains(., "Подборки партнеров")]'},
        'Клик_Детям': {'func': self.click_xpath, 'path': './/a[@class="header__link"][contains(., "Детям")]'},
        'Клик_Спецпроекты': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Спецпроекты")]'},
        'Клик_Кино_равного_доступа': {'func': self.click_xpath,'path': './/div[@class="card__title"][contains(., "Кино равного доступа")]'},
        'Проект, где ваши дети снимаются в кино': {'func': self.click_xpath,'path': './/div[@class="card__title"][contains(., "Проект, где ваши дети снимаются в кино")]'},
        'Клик_TVZ': {'func': self.click_xpath,'path': './/div[@class="card__title"][contains(., "Кино равного доступа")]'},  #
        'Обратная_связь': {'func': self.click_xpath,'path': './/button[@class="footer__link"][contains(., "Обратная связь")]'},
        'Клик_Отправить_сообщение': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Подборки")]'},
        'Клик_Отправить_сообщение_обратная связь': {'func': self.click_xpath,'path': './/button[@class="feedback__submit button button_stretched"]'},
        'Редактировать': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Редактировать")]'},
        'Клик_первого_фильма': {'func': self.click_xpath, 'path': './/div[@class="owl-item active"]'},
        'Клик_постер_первого_фильма': {'func': self.click_xpath, 'path': './/a[@class="card card_clip"]'},
        'Клик_постер_сериала_соседка_ты_дома': {'func': self.click_xpath,'path': '//a[@href="/film/sosedka-ty-doma/"]'},
        'Клик_стрелка_Вниз': {'func': self.click_tag, 'path': 'body', 'send': Keys.DOWN},
        'Переход_вниз_страницы': {'func': self.click_tag, 'path': 'body', 'send': Keys.END},
    }
    # MainPage
    self.args = self.pagex[name]
    self.func = self.args['func']  # callable taken from the table
    self.func(self.args)  # perform the click with its locator args
    self.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен')
def send_f(self, name, text, stap):
    """Look up *name* in the input table and type *text* into that field.

    name: key in self.pages; text: value to type; stap: step number for logging.
    NOTE(review): the log message says 'Клик' although this method types text.
    """
    step = str(stap)
    self.pages = {
        # Match TV
        # 'Ввод_пароля': {'func': self.send_css, 'path': 'password', 'text': text},
        # 'Ввод_номера_телефона': {'func': self.send_name, 'path': 'phone', 'text': text},
        # self.driver.find_element_by_xpath('.//input[@type="password"]').send_keys('Alekseybykov126')
        # 'Ввод_номера_телефона': {'func': self.send_, 'path': 'phone', 'text': text},
        'Ввод_в_строку_поиска': {'func': self.send_id, 'path': 'search-field', 'text': text},
        'Ввод_2_в_строку_поиска': {'func': self.send_id, 'path': 'header-search-field', 'text': text},
        # tvzavr registration
        # 'Ввод_логина': {'func': self.send_name, 'path': 'email', 'text':text},
        'Ввод_логина': {'func': self.send_name, 'path': 'email-registration__address', 'text': text},
        'Ввод_пароля': {'func': self.send_css, 'path': '#register-email-password', 'text': text},
        # tvzavr login
        'Ввод_логина_вход': {'func': self.send_name, 'path': 'login', 'text': text},
        'Ввод_пароля_вход': {'func': self.send_css, 'path': '#auth-password', 'text': text},
        # Google
        'Ввод_логин_Google': {'func': self.send_id, 'path': 'identifierId', 'text': text},
        'Ввод_пароль_Google': {'func': self.send_name, 'path': 'password', 'text': text},
        # Mail
        'Ввод_логин_Mailru': {'func': self.send_name, 'path': 'Login', 'text': text},
        'Ввод_пароля_Mailru': {'func': self.send_css, 'path': '#password', 'text': text},
        # Facebook
        'Ввод_пароля_FB': {'func': self.send_css, 'path': '#pass', 'text': text},
        # VK
        'Ввод_пароля_VK': {'func': self.send_css, 'path': '#login_submit > div > div > input:nth-child(9)','text': text},
        # Odnoklassniki
        'Ввод_логина_OK': {'func': self.send_name, 'path': 'fr.email', 'text': text},
        'Ввод_пароля_OK': {'func': self.send_css, 'path': '#field_password', 'text': text},
        #'Ввод_сообщения_скайп': {'func': self.send_id, 'path': '#.public-DraftStyleDefault-block', 'text': text},
        # 'Ввод_номера_телефона_reg': {'func': self.send_name, 'path': 'phone', 'text':text},
        'Ввод_СМС_пароля_reg': {'func': self.send_name, 'path': 'code', 'text': text},
        'feedback_имя_пользователя': {'func': self.send_id, 'path': 'feedback-name', 'text': text},
        'feedback_e_mail_пользователя': {'func': self.send_id, 'path': 'feedback-email', 'text': text},
        'feedback_сообщение_пользователя': {'func': self.send_id, 'path': 'feedback-decription', 'text': text},
        'Ввод_ответа_пользователю': {'func': self.send_id, 'path': 'issue_notes', 'text': text},
        'Ввод_текста_ответа_пользователя': {'func': self.send_id, 'path': 'tinymce', 'text': text},
        # Admin panel
        # 'Ввод_имени_в_Redmine': {'func': self.send_id, 'path': 'username', },
        # 'Ввод_номера_телефона_auth': {'func': self.send_css, 'path': '#auth-login', 'text':text},
        'Ввод_из_СМС_пароля_auth': {'func': self.send_css, 'path': '#auth-password', 'text': text},
        # 'Ввод_сообщения_в_skype': {'func': self.send_css, 'path': '.public-DraftStyleDefault-block', 'text':text},
        'Ввод_суммы_пополнения_счета': {'func': self.send_name, 'path': 'price', 'text': text},
        'Ввод_профиль_old_пароля': {'func': self.send_id, 'path': 'cabinet-password-old', 'text': text},
        'Ввод_профиль_new_пароля': {'func': self.send_id, 'path': 'cabinet-password-new', 'text': text},
        'Ввод_профиль_rep_пароля': {'func': self.send_id, 'path': 'cabinet-password-repeat', 'text': text},
        'Ввод_псевдонима': {'func': self.send_id, 'path': 'name', 'text': text},
        'Админка_Ввод_в_поиск': {'func': self.send_name, 'path': 'q_q', 'text': text},
        'Ввод_номера_карты_тройка': {'func': self.send_id, 'path': 'troika-binding-textbox', 'text': text},
        # Card payment
        # 'Ввод_номер_карты': {'func': self.send_name, 'path': }
    }
    self.args = self.pages[name]
    self.func = self.args['func']  # callable taken from the table
    self.func(self.args)  # type the text into the chosen field
    self.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен')
def click_button(self, bc):
    """Click the <button> whose visible text equals *bc* (e.g. full-screen skip)."""
    locator = './/button[text()="%s"]' % bc
    self.driver.find_element_by_xpath(locator).click()
def click_div(self, dep):
    """Click the top-menu <div> whose visible text equals *dep*."""
    locator = './/div[text()="%s"]' % dep
    self.waitForElementClickable(locator, 30)
    time.sleep(SLEEP_SHORT)
    self.driver.find_element_by_xpath(locator).click()
def click_li(self, dep):
    """Click the tab <li> whose visible text equals *dep*."""
    locator = './/li[text()="%s"]' % dep
    self.waitForElementClickable(locator, 30)
    time.sleep(SLEEP_SHORT)
    self.driver.find_element_by_xpath(locator).click()
def click_a(self, dep):
    """Click the link <a> whose visible text equals *dep* (10 s wait, no sleep)."""
    locator = './/a[text()="%s"]' % dep
    self.waitForElementClickable(locator, 10)
    self.driver.find_element_by_xpath(locator).click()
def click_span(self, dep):
    """Click the <span> whose visible text equals *dep*."""
    locator = './/span[text()="%s"]' % dep
    self.waitForElementClickable(locator, 30)
    time.sleep(SLEEP_SHORT)
    self.driver.find_element_by_xpath(locator).click()
def click_id(self, args):
    """Click the element whose HTML id is args['path']."""
    element_id = args['path']
    self.waitForIDVisible(element_id, 10)
    self.driver.find_element_by_id(element_id).click()
def click_name(self, args):
    """Click the element whose name attribute is args['path']."""
    element_name = args['path']
    self.waitForNameVisible(element_name, 30)
    self.driver.find_element_by_name(element_name).click()
def click_xpath(self, args):
    """Wait until the element at XPath args['path'] is clickable, then click."""
    locator = args['path']
    self.waitForElementClickable(locator, 30)
    self.driver.find_element_by_xpath(locator).click()
def click_css(self, args):
    """Click the element matching the CSS selector args['path'] (no wait)."""
    selector = args['path']
    print('css = ', selector)
    # self.waitForElementClickable(selector, 30)
    self.driver.find_element_by_css_selector(selector).click()
def click_switch_to(self, args):
    """Switch driver focus into a frame.

    args: dict with 'path' (tag name, e.g. 'iframe') and 'index' (which match).
    Fix: selenium's switch_to.frame() returns None, so the original chained
    .click() always raised AttributeError; the switch itself is the action.
    """
    frame_tag = args['path']
    index = args['index']
    target = self.driver.find_elements_by_tag_name(frame_tag)[index]
    self.driver.switch_to.frame(target)
def click_s_xpath(self, args):
    """Click the args['index']-th element matching XPath args['path']."""
    locator, position = args['path'], args['index']
    self.waitForElementClickable(locator, 30)
    self.driver.find_elements_by_xpath(locator)[position].click()
def double(self, args):
    """Perform two sequential XPath clicks.

    args: dict with 'path_1' and 'path_2' XPath strings.
    Fix: click_xpath() expects a dict with a 'path' key; the original passed
    the raw strings, so args['path'] inside click_xpath raised TypeError.
    """
    self.click_xpath({'path': args['path_1']})
    self.click_xpath({'path': args['path_2']})
def click_tag(self, args):
    """Send the keystroke args['send'] to the first <args['path']> element."""
    target = self.driver.find_element_by_tag_name(args['path'])
    target.send_keys(args['send'])
def tester_vis_xpath(self, xpath):
    """Raise unless the element at *xpath* appears in the DOM within 5 s."""
    timeout = 5
    self.waitForElementVisible(xpath, timeout)
    self.driver.find_element_by_xpath(xpath)
def tester_click_xpath(self, xpath):
    """Raise unless the element at *xpath* becomes clickable within 25 s."""
    timeout = 25
    self.waitForElementClickable(xpath, timeout)
    self.driver.find_element_by_xpath(xpath)
# Player buttons
def click_play(self):
    """Press the video player's Play control."""
    play_xpath = '//*[@id="clip-player"]/div[16]'
    self.waitForElementClickable(play_xpath, 35)
    self.driver.find_element_by_xpath(play_xpath).click()
def click_stop(self):
    """Pause playback by clicking the player's play/pause control."""
    stop_xpath = '//*[@id="clip-player"]/div[4]/div'
    # alternative CSS locator kept for reference; currently unused
    css_sel = '#clip-player > div.tvz-button.tvz-button_play > div'
    time.sleep(SLEEP_SHORT)
    self.driver.find_element_by_xpath(stop_xpath).click()
def click_enter(self):
    """Click the header's "Вход" (login) button.

    Fix: click_xpath() expects a dict with a 'path' key; the original passed
    a bare string, so args['path'] inside click_xpath raised TypeError.
    """
    self.click_xpath({'path': './/button[@class="header__login"]'})
def waitForElementPresent(self, xpath, timer):
    """Block up to *timer* seconds until *xpath* is present in the DOM."""
    condition = EC.presence_of_element_located((By.XPATH, xpath))
    WebDriverWait(self.driver, timer).until(condition)
def waitForElementClickable(self, xpath, timer):
    """Block up to *timer* seconds until *xpath* is clickable."""
    condition = EC.element_to_be_clickable((By.XPATH, xpath))
    WebDriverWait(self.driver, timer).until(condition)
def waitForElementVisible(self, xpath, timer):
    """Block up to *timer* seconds until *xpath* is present.

    NOTE: despite the name, this checks presence in the DOM, not visibility
    (it uses presence_of_element_located, same as waitForElementPresent).
    """
    condition = EC.presence_of_element_located((By.XPATH, xpath))
    WebDriverWait(self.driver, timer).until(condition)
def waitForNameVisible(self, name, timer):
    """Block up to *timer* seconds until an element with this name attribute is present."""
    condition = EC.presence_of_element_located((By.NAME, name))
    WebDriverWait(self.driver, timer).until(condition)
def waitForIDVisible(self, id, timer):
    """Block up to *timer* seconds until an element with this HTML id is present.

    NOTE: the parameter shadows the builtin id(); name kept for callers.
    """
    condition = EC.presence_of_element_located((By.ID, id))
    WebDriverWait(self.driver, timer).until(condition)
# OPERATIONS
def send_id(self, args):
    """Type args['text'] into the element whose HTML id is args['path']."""
    element_id, text = args['path'], args['text']
    self.waitForIDVisible(element_id, 30)
    self.driver.find_element_by_id(element_id).send_keys(text)
def send_name(self, args):
    """Type args['text'] into the element whose name attribute is args['path']."""
    element_name, text = args['path'], args['text']
    self.waitForNameVisible(element_name, 30)
    self.driver.find_element_by_name(element_name).send_keys(text)
def send_css(self, args):
    """Type args['text'] into the element matching CSS selector args['path'] (no wait)."""
    selector, text = args['path'], args['text']
    self.driver.find_element_by_css_selector(selector).send_keys(text)
def input(self, dclass, data):
    """Type *data* into the <input> whose class attribute equals *dclass*.

    NOTE: the method name shadows the builtin input(); kept for callers.
    """
    locator = './/input[@class="%s"]' % dclass
    # self.waitForElementClickable(locator, 80)
    time.sleep(SLEEP_SHORT)
    self.driver.find_element_by_xpath(locator).send_keys(data)
def rand_mail(self, lit):
    """Build a unique throwaway e-mail address from the current timestamp.

    Returns (email, stamp) where email is
    'tvzavrtest<stamp><lit>@rrbbxvdr.rz' and stamp is the timestamp fragment.
    """
    now = str(datetime.today()).replace('-', '')
    pieces = now.split(':')
    stamp = pieces[0].replace(' ', '') + pieces[1]
    random_mail = 'tvzavrtest' + stamp + lit + '@rrbbxvdr.rz'
    return (random_mail, stamp)
# def rand_number(self, lit):
# d = str(datetime.today())
# ds = d.replace('-', '')
# d = ds.split(':')[0]
# d_2 = ds.split(':')[1]
# d_3 = d.replace(' ', '')
# rand = d_3 + d_2
# self.loger_info(rand)
# random_number = rand + lit
# return (random_number, rand)
def code_phone(self, phone):
    """Request an SMS confirmation code for *phone* and return the code.

    Hits the tvzavr test SMS endpoint and extracts the code from the
    response text by position.
    """
    self.loger_info('Получение кода на телефон: ' + phone)
    # url = 'http://www.tvzavr.ru/api/3.1/sms/send_confirm_code?phone=' + str(phone) + '&entity=empty&prv=smsfake_tvz'
    url = ('http://www.tvzavr.ru/api/3.1/sms/send_confirm_code?phone='
           + str(phone) + '&entity=empty&prv=smstest_tvz')  # this provider worked
    self.loger_info(url)
    response_text = requests.get(url).text
    self.loger_info('code_phone ' + response_text)
    s_code = response_text.split(':')[3].split('"')[1]
    self.loger_info(s_code)
    return s_code
# Checks that the expected elements are present on the main page.
def elem(self):
    """Assert that the header contains the expected navigation links.

    NOTE(review): ResultPage.find_link/find_all_link are called unbound with
    a MainPage instance as self - they only use self.driver, so this works.
    """
    self.loger('Проверка элементов страницы')
    # Check for the "Новинки" link
    res_txt = str(ResultPage.find_link(self, "a", "header__link"))
    self.loger(res_txt)
    # "Новинки" is the expected caption
    assert ('Новинки') in res_txt
    self.loger('Наличие ссылки "Новинки" подтверждено')
    # Check for the "Подписки" link (and the rest, in the combined list)
    res_txt = str(ResultPage.find_all_link(self, "a", "header__link"))
    assert ('Подписки') in res_txt
    self.loger('Наличие ссылки "Подписки" подтверждено')
    # "Бесплатно" link
    assert ('Бесплатно') in res_txt
    self.loger('Наличие ссылки "Бесплатно" подтверждено')
    # "Подборки" link
    assert ('Подборки') in res_txt
    self.loger('Наличие ссылки "Подборки" подтверждено')
    # "Каталог" link
    assert ('Каталог') in res_txt
    self.loger('Наличие ссылки "Каталог" подтверждено')
    # "Детям" link
    assert ('Детям') in res_txt
    self.loger('Наличие ссылки "Детям" подтверждено')
    # "Вход" (login) button
    res_txt = str(ResultPage.find_link(self, "button", "header__login"))
    assert ('Вход') in res_txt
    self.loger(res_txt)
    self.loger('Наличие ссылки "Вход" подтверждено')
def loger(self, text):
    """Log *text* at INFO level and echo it to stdout."""
    fmt = u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s'
    logging.basicConfig(format=fmt, level=logging.DEBUG)
    logging.info(text)
    print(text)
def loger_info(self, text):
    """Log *text* at INFO level and echo it to stdout (same as loger)."""
    fmt = u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s'
    logging.basicConfig(format=fmt, level=logging.DEBUG)
    logging.info(text)
    print(text)
def loger_error(self, text):
    """Log *text* at ERROR level and echo it to stdout."""
    logging.basicConfig(format=u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s',
                        level=logging.DEBUG)
    logging.error(text)
    print(text)

def send_sms(self, phone, message):
    """Send a confirmation SMS to *phone* with *message* via app.mysms.com.

    Fix: only the ``def send_sms`` line had been commented out, leaving its
    body dangling after loger_error (a syntax/structure error); the header
    is restored so the body is a proper method again.
    """
    # The original also had commented-out code creating a dedicated Chrome
    # driver with a saved profile; this version reuses self.driver.
    self.driver.get("https://app.mysms.com/")
    self.driver.implicitly_wait(10)
    time.sleep(SLEEP_MEDIUM)
    self.driver.find_element_by_xpath('.//div[@class="gwt-Label"]').click()  # New message
    time.sleep(SLEEP_SHORT)
    self.driver.find_element_by_xpath('.//input[@class="recipientTextBox"]').send_keys(phone)
    time.sleep(SLEEP_SHORT)
    self.driver.find_element_by_xpath('.//div[@class="textarea"]').send_keys(message)
    time.sleep(SLEEP_SHORT)
    self.driver.find_element_by_xpath('.//button[@class="styledButton sendButton sim dropdown"]').click()
    logging.info("Клик 'Отправить' произведен, СМС подтверждения отправлено")
    self.driver.close()
    return
def login_google(self, emailgo, passok):
    """Walk the Google sign-in dialog: e-mail, Next, password, Next."""
    time.sleep(SLEEP_SHORT)
    self.send_f('Ввод_логин_Google', emailgo, 1)
    time.sleep(2)
    self.click_f('Клик_кнопки_Далее_Google', 6)
    time.sleep(2)
    self.send_f('Ввод_пароль_Google', passok, 1)
    time.sleep(2)
    self.click_f('Клик_кнопки_Далее_Google', 6)
    time.sleep(2)
# def login_match(self, emailt, passw):
# time.sleep(SLEEP_SHORT)
# self.send_f('Ввод_name_логина', emailt, 2)
# self.send_f('Ввод_пароля_tvz', passw, 3)
# self.click_f('Клик_Войти_auth', 4)
# time.sleep(SLEEP_MEDIUM)
# return 06.08.2019
def login_matchtv(self, num_phone, passw):
    """Authorize on Match TV through the phone-number login popup."""
    self.click_f('Вход', 1)
    time.sleep(1)
    self.click_f('Вход2', 2)
    time.sleep(2)
    phone_field = self.driver.find_element_by_xpath('.//input[@type="tel"]')
    phone_field.send_keys(num_phone)
    self.loger_info('Введён номер телефона: +7 ' + num_phone)
    time.sleep(1)
    self.click_f('Далее', 3)
    time.sleep(1)
    password_field = self.driver.find_element_by_xpath('.//input[@type="password"]')
    password_field.send_keys(passw)
    self.loger_info('Введён пароль: ' + passw)
    time.sleep(1)
    self.click_f('Войти', 4)
    time.sleep(1)
# def login_tvzavr(self, email, passw, sex):
# result = ResultPage(self.driver)
# prof = Profile(self.driver)
# self.waitForElementVisible('.//button[@class="header__login"]', 7)
# resic = result.find_link("button", "header__login")
# if "Вход" not in resic:
# if sex == 'male':
# prof.click_f('Клик_Аватарка_М', 1)
# else:
# prof.click_f('Клик_Аватарка_Ж', 1)
#
# prof.click_f('Клик_Выйти', 1)
# else:
# # Шаг 1 Нажать в шапке на кнопку "Вход".')
# self.click_f('Клик_Вход', 1)
# time.sleep(SLEEP_SHORT)
# # Шаг 2
# self.send_f('Ввод_name_логина', email, 2)
# self.send_f('Ввод_пароля_Google', passw, 3)
# self.click_f('Клик_Войти_auth', 4)
# time.sleep(SLEEP_LONG)
# try:
# self.click_f('Клик_кнопки_крестик', 6)
# except:
# print('нет акции мицубиси')
# Шаг 5
# if sex == 'male': 1.08.2019
# prof.click_f('Клик_Аватарка_М', 7)
# else:
# prof.click_f('Клик_Аватарка_Ж', 8)
# time.sleep(SLEEP_MEDIUM)
# return
def login_mailru(self, emailru, passw):
    """Log in to mail.ru through its login form.

    :param emailru: mail.ru login (e-mail address).
    :param passw: mail.ru account password.
    """
    self.send_id('mailbox:login', emailru)
    # Fix: the original log message was missing the space before
    # 'произведен' (it concatenated directly onto the e-mail).
    self.loger_info('Ввод логина на mail.ru ' + emailru + ' произведен')
    time.sleep(SLEEP_SHORT)
    self.send_id('mailbox:password', passw)
    self.loger_info('Ввод пароля на mail.ru произведен')
    time.sleep(SLEEP_SHORT)
    self.driver.find_element_by_xpath('.//input[@class="o-control"]').click()
    self.loger_info('Клик кнопки "Войти" на mail.ru произведен')
    time.sleep(SLEEP_MEDIUM)
def registration(self, email, passw):
    """Register a new user with e-mail + password and verify the login.

    If a user is already logged in (no "Вход" button in the header),
    logs them out first, then walks through the registration form and
    finally asserts that the profile menu shows the registered e-mail.

    :param email: e-mail address to register with.
    :param passw: password for the new account.
    :raises AssertionError: if the profile menu does not contain ``email``.
    """
    result = ResultPage(self.driver)
    prof = Profile(self.driver)
    resic = result.find_link("button", "header__login tvz-unauthorized")
    if "Вход" not in resic:
        # Someone is already logged in -- log them out first.
        prof.click_f('Клик_Аватарка_М', 1)
        self.loger_info('Шаг 0 Клик на аватарку пользователя произведен')
        self.driver.find_element_by_xpath('.//button[@class="profile-menu__logout js-profile-logout"]').click()
        time.sleep(SLEEP_SHORT)
    else:
        self.click_enter()
        self.loger_info('Шаг 1 Клик "Вход" произведен')
    # Step 2: switch to the registration tab.
    self.click_a('Регистрация')
    self.loger_info('Шаг 2 Клик "Регистрация" произведен')
    time.sleep(SLEEP_SHORT)
    # Step 3: type the e-mail.
    self.login('email', email)
    # Fix: the original used a bare print() here, inconsistent with the
    # loger_info() calls used for every other step message.
    self.loger_info('Шаг 3 Ввод логина ' + email + ' произведен')
    # Step 4: type the password.
    self.driver.find_element_by_css_selector('#register-email-password').send_keys(passw)
    self.loger_info('Шаг 4 Ввод пароля произведен')
    time.sleep(SLEEP_SHORT)
    # Step 5: submit the registration form.
    self.driver.find_element_by_id('register-email-submit').click()
    self.loger_info('Шаг 5 Клик "Зарегистрироваться" произведен')
    time.sleep(7)
    prof.click_f('Клик_Аватарка_М', 5)
    self.loger_info('Шаг 6 Клик на аватарку пользователя произведен')
    self.waitForElementVisible('.//div[@class="profile-menu__name __username"]', 7)
    # Verify authorization: the profile menu must show the e-mail.
    resic = str(result.find_link("div", "profile-menu__name __username"))
    assert email in resic
    self.loger_info('Авторизация зарегистрированного пользователя с е-майлом ' + email + ' подтверждена')
    time.sleep(SLEEP_SHORT)
def input_card(self, number, month, year, name_card, cvv):
    """Fill in the CloudPayments bank-card payment form.

    Types the card number, selects expiry month and year from the two
    dropdowns, types the cardholder name and the CVV code. The final
    "Pay" click and the result-message check are deliberately disabled
    (commented out below).

    :param number: card number to type.
    :param month: expiry month, must match an ``<option value=...>``.
    :param year: expiry year, must match an ``<option value=...>``.
    :param name_card: cardholder name, e.g. "Ivanov Ivan".
    :param cvv: card verification code.
    """
    # Fix: removed the unused local `result = ResultPage(self.driver)`
    # and hoisted the duplicated field/dropdown locator strings.
    field_xpath = './/input[@class="payment-cloudpayments__field textbox js-input"]'
    dropdown_xpath = './/select[@class="dropdown js-input"]'
    # Card number is the first generic text field.
    self.driver.find_elements_by_xpath(field_xpath)[0].send_keys(number)
    self.loger_info('Шаг 10 Ввод номера карты произведен ' + number)
    time.sleep(3)
    # Expiry month: first dropdown, option selected by value.
    self.driver.find_elements_by_xpath(dropdown_xpath)[0].click()
    time.sleep(1)
    self.driver.find_element_by_xpath('.//option[@value="%s"]' % month).click()
    self.loger_info('Ввод месяца карты произведен')
    time.sleep(3)
    # Expiry year: second dropdown.
    self.driver.find_elements_by_xpath(dropdown_xpath)[1].click()
    self.driver.find_element_by_xpath('.//option[@value="%s"]' % year).click()
    self.loger_info('Ввод года карты произведен')
    time.sleep(3)
    # Cardholder name is the second generic text field.
    self.driver.find_elements_by_xpath(field_xpath)[1].send_keys(name_card)
    self.loger_info('Ввод имени держателя карты произведен')
    time.sleep(3)
    # CVV code has its own dedicated field class.
    self.driver.find_element_by_xpath(
        './/input[@class="payment-cloudpayments__field payment-cloudpayments__field_cvc textbox js-input"]').send_keys(
        cvv)
    self.loger_info('Ввод CVV код карты произведен')
    time.sleep(4)
    # The "Pay" click and the alert-message check stay disabled on purpose:
    # self.driver.find_element_by_xpath('.//span[@class="toggle__label"]').click()
    # self.loger_info('Снятие галочки в чек-боксе"Сохранить данные карты" произведено')
    # self.driver.find_element_by_xpath('.//button[@class="payment-cloudpayments__pay button button_stretched js-buy-button"]').click()
    # self.loger_info('Клик "Оплатить" произведен')
    # message = str(result.find_link("section", "tvz-alerts tvz-animation-fadeOut"))
    # self.loger_info('message:' + message)
def delete_mails(self, emailgo, passgo):
    """Delete the topmost letter from the Gmail inbox.

    Assumes the driver is already on the Gmail inbox page -- the
    navigation and login steps below are commented out.

    :param emailgo: Google account e-mail (unused while login is disabled).
    :param passgo: Google account password (unused while login is disabled).
    """
    # self.driver.get('https://mail.google.com')
    self.loger_info('Шаг 5 Переход на gmail.com произведен')
    # self.login_google(emailgo, passgo)
    time.sleep(SLEEP_SHORT)
    # self.driver.get('https://mail.google.com/mail/u/0/#inbox')
    # Select the letter: tick its checkbox (Gmail's obfuscated class names).
    self.click_xpath('.//div[@class="J-J5-Ji J-JN-M-I-Jm"]')
    # self.driver.find_element_by_id(':3d').click()
    self.loger_info('Поставлена галочка чекбокс - выбор письма')
    # NOTE(review): positional Gmail xpath -- fragile, breaks whenever
    # the Gmail UI changes; confirm it still targets the Delete button.
    self.click_xpath('//*[@id=":5"]/div/div[1]/div[1]/div/div/div[2]/div[3]')
    # self.click_xpath('.//div[@class="T-I J-J5-Ji nX T-I-ax7 T-I-Js-Gs mA"]')
    self.loger_info('Клик кнопки "Удалить" письмо на gmail.com произведен')
    time.sleep(SLEEP_SHORT)
@property
def consol_jenkins(self):
    """Fetch the console output of the latest Jenkins "1_Regress" build.

    Logs in to Jenkins with the built-in admin account, opens the
    newest build row, reads the console-output text, closes the
    browser window and returns the text.

    NOTE(review): a property with heavy side effects (navigation,
    login, window close) is surprising to callers -- consider making
    this a regular method in a follow-up.

    :return: console output text of the latest build.
    """
    print('Запуск проверки консоли')
    # Fix: removed the leftover `options.add_argument('--incognito')`.
    # `options` is the imported selenium *module* here (the
    # ChromeOptions creation is commented out below), so the call
    # raised AttributeError at runtime. Also removed the unused locals
    # `page = MainPage(...)` and `result = ResultPage(...)`.
    # options = webdriver.ChromeOptions()
    # self.driver = webdriver.Chrome(options=options)
    # self.driver = webdriver.Chrome("C:\chromedriver\chromedriver.exe")
    self.driver.get("http://192.168.2.31:8080/jenkins/job/1_Regress/")
    self.driver.maximize_window()
    self.driver.implicitly_wait(10)
    self.send_name('j_username', 'admin')
    self.send_name('j_password', 'admin')
    self.click_xpath('.//div[@class="Checkbox-indicator"]')
    self.click_name('Submit')
    self.driver.implicitly_wait(5)
    # The first build-row cell is the most recent build.
    self.driver.find_elements_by_xpath('.//td[@class="build-row-cell"]')[0].click()
    self.click_a('Вывод консоли')
    res = self.driver.find_element_by_xpath('.//pre[@class="console-output"]').text
    self.driver.close()
    return res
def mail_send_web(self, login, passw):
    """Open the mail.ru login page in a new tab and type the login.

    Opens a blank tab, switches to it, loads the mail.ru login form,
    clicks the login field, types a hard-coded login and clicks "next".

    NOTE(review): ``login``/``passw`` are accepted but not used -- the
    login value is hard-coded below; confirm before relying on them.

    :param login: mail.ru login (currently unused).
    :param passw: mail.ru password (currently unused).
    """
    self.driver.execute_script("window.open('','_blank');")
    time.sleep(2)
    self.driver.switch_to.window(self.driver.window_handles[1])
    time.sleep(2)
    self.driver.get("https://e.mail.ru/login")
    self.driver.maximize_window()
    self.driver.implicitly_wait(10)
    self.loger_info(' Переход в mail произведен')
    time.sleep(2)
    login_field_xpath = ('.//*[@id="root"]/div/div[3]/div/div/div/form/div[2]'
                         '/div[2]/div[1]/div/div/div/div/div/div[1]')
    # Fix: find_element_by_xpath returns a single WebElement, so the
    # original `find_element_by_xpath(...)[0].click()` raised
    # TypeError ('WebElement' object is not subscriptable).
    self.driver.find_element_by_xpath(login_field_xpath).click()
    self.driver.find_element_by_xpath(login_field_xpath).send_keys('testmailtvzavr15')
    time.sleep(2)
    # Click the "next" button.
    self.driver.find_element_by_xpath('.//span[@class="c01104 c0179 c01102 c0177"]').click()
    time.sleep(3)
def scype_send_web(self, login, passw):
    """Open Skype Web in a new tab and log in, then open a chat.

    Opens a blank tab, logs in to web.skype.com and clicks into a
    dialog. The actual message-sending steps at the bottom are all
    disabled (commented out).

    NOTE(review): ``login``/``passw`` are accepted but ignored -- the
    phone number and password are hard-coded below. Credentials in
    source code are a security risk; move them to configuration.

    :param login: Skype login (currently unused).
    :param passw: Skype password (currently unused).
    """
    self.driver.execute_script("window.open('','_blank');")
    time.sleep(2)
    self.driver.switch_to.window(self.driver.window_handles[1])
    time.sleep(2)
    result = ResultPage(self.driver)
    # options = webdriver.ChromeOptions()
    # options.add_argument("--disable-notifications")
    # options.add_argument('--incognito')  # launch the browser in incognito mode
    # self.driver = webdriver.Chrome(options=options)
    #self.driver = webdriver.Chrome("C:\chromedriver\chromedriver.exe")
    self.driver.get("https://web.skype.com/ru/")
    self.driver.maximize_window()
    self.driver.implicitly_wait(10)
    self.loger_info(' Переход в Skype произведен')
    time.sleep(2)
    self.driver.find_element_by_name("loginfmt").send_keys('79776410337')
    time.sleep(2)
    self.driver.find_element_by_xpath('.//input[@id="idSIButton9"]').click()  # click "next"
    time.sleep(3)
    self.driver.find_element_by_name("passwd").send_keys('Aleh1260337')
    self.loger_info('Ввод Skype пароля произведен')
    time.sleep(3)
    self.driver.find_element_by_xpath('.//input[@id="idSIButton9"]').click()  # click "sign in"
    self.loger_info('Клик Вход произведен')
    time.sleep(3)
    # NOTE(review): 'rx-vlv-6' looks like a generated id -- presumably
    # it targets a specific dialog; verify it is stable between runs.
    self.driver.find_element_by_xpath('.//div[@id="rx-vlv-6"]').click()  # click the dialog
    self.loger_info('Шаг 100. Переход в чат Деплоймент произведен')
    time.sleep(10)
    #self.driver.find_element_by_css_selector(".public-DraftStyleDefault-block").click()  # click the input field
    time.sleep(15)
    # The message-sending steps below are disabled (earlier attempts):
    # self.driver.find_element_by_xpath('/html/body/div[1]/div/div[1]/div[2]/div/div[1]/div/div[2]/div/div[2]/div/div/div/div/div/div[2]/div[2]/div/div/div[1]/div/div/div/div/div[2]/div[3]').send_keys('text')
    # self.send_f('Ввод_сообщения_скайп', 'text', 15)
    # self.driver.find_element_by_xpath('/html/body/div[1]/div/div[1]/div[2]/div/div[1]/div/div[2]/div/div[2]/div/div/div/div/div/div[2]/div[2]/div/div/div[1]/div/div/div/div/div[2]').send_keys('text')
    #self.driver.find_element_by_css_selector('[id="#.public-DraftStyleDefault-block"]').send_keys('text')
    # self.click_xpath('//*[@id="swxContent1"]/swx-navigation/div/div/div/label/div/div/div[2]/div[2]/div/swx-button/button')
    # self.loger_info('Отправка текста в чат Деплоймент произведена')
    # self.driver.close()
# def delete_uzer(self, name): # ФУНКЦИЯ УДАЛЕНИЯ ПОЛЬЗОВАТЕЛЯ ИЗ АДМИНКИ
# self.driver.execute_script("window.open('','_blank');")
# time.sleep(2)
# self.driver.switch_to.window(self.driver.window_handles[1])
# time.sleep(2)
# self.driver.get("https://www.tvzavr.ru:8080/admin/")
# time.sleep(2)
# # 'Открытие страницы админки
# self.driver.maximize_window()
# time.sleep(3)
# self.driver.implicitly_wait(7)
# self.driver.find_element_by_xpath(
# './/a[@href="https://accounts.google.com/o/oauth2/auth?client_id=245544346256-4luf263ioa376hp89q5k08otplt9dvdh.apps.googleusercontent.com&scope=openid%20profile%20email%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fplus.login&redirect_uri=http://www.tvzavr.ru:8080/oauth2callback/&response_type=code"]').click()
# time.sleep(3)
# # Логинимся через Google
#
# emailgo = 'bykov.a@tvzavr.ru'
# passok = 'tmW9HZvaksgc'
#
# self.send_f('Ввод_логин_Google', emailgo, 1)
# time.sleep(2)
# self.click_f('Клик_кнопки_Далее_Google', 6)
# time.sleep(2)
# self.send_f('Ввод_пароль_Google', passok, 1)
# time.sleep(2)
# self.click_f('Клик_кнопки_Далее_Google', 6)
# time.sleep(6)
# # Вошли в админку
#
# self.driver.find_element_by_xpath(
# './/a[@href="/admin/tvzavr_admin/customer/"]').click() # Клик на "Профили посетителей"
# time.sleep(3)
# self.send_f('Админка_Ввод_в_поиск', name, 16) # Ввод имени пользователя
# time.sleep(3)
#
# self.driver.find_element_by_xpath('.//input[@value="Найти"]').click() # Клик найти
# time.sleep(2)
#
# self.driver.find_element_by_xpath(
# './/input[@id="action-toggle"]').click() # Клик по чекбоксу(ставит галочку)
# time.sleep(2)
#
# self.driver.find_element_by_xpath('.//select[@name="action"]').click() # Клик на поле "Действие"
# time.sleep(2)
# self.driver.find_element_by_css_selector(
# '#action_block > label > select > option:nth-child(14)').click() # Выбор "Удалить"
# time.sleep(2)
# self.driver.find_element_by_xpath('.//*[@id="action_block"]/button').click() # Клик на "Выполнить"
# time.sleep(3)
# self.driver.find_element_by_xpath('.//*[@id="content"]/form/div[2]/input[4]').click() # Клик на "Да, уверен"
# time.sleep(2)
# self.driver.switch_to.window(self.driver.window_handles[-1])
# time.sleep(2)
# def delete_comments(self, name): # ФУНКЦИЯ УДАЛЕНИЯ комментариев ПОЛЬЗОВАТЕЛЯ
# self.driver.execute_script("window.open('','_blank');")
# time.sleep(2)
# self.driver.switch_to.window(self.driver.window_handles[1])
# time.sleep(2)
# self.driver.get("https://www.tvzavr.ru:8080/admin/")
# time.sleep(2)
# # 'Открытие страницы админки
# self.driver.maximize_window()
# time.sleep(3)
# self.driver.implicitly_wait(7)
# self.driver.find_element_by_xpath(
# './/a[@href="https://accounts.google.com/o/oauth2/auth?client_id=245544346256-4luf263ioa376hp89q5k08otplt9dvdh.apps.googleusercontent.com&scope=openid%20profile%20email%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fplus.login&redirect_uri=http://www.tvzavr.ru:8080/oauth2callback/&response_type=code"]').click()
# time.sleep(3)
# # Логинимся через Google
#
# emailgo = 'bykov.a@tvzavr.ru'
# passok = 'tmW9HZvaksgc'
#
# self.send_f('Ввод_логин_Google', emailgo, 1)
# time.sleep(2)
# self.click_f('Клик_кнопки_Далее_Google', 6)
# time.sleep(2)
# self.send_f('Ввод_пароль_Google', passok, 1)
# time.sleep(2)
# self.click_f('Клик_кнопки_Далее_Google', 6)
# time.sleep(6)
# # Вошли в админку
#
# self.driver.find_element_by_xpath('.//a[@href="/admin/tvzavr_admin/customer/"]').click() # Клик на "Профили посетителей"
# time.sleep(3)
# self.send_f('Админка_Ввод_в_поиск', name, 16) # Ввод имени пользователя
# time.sleep(3)
#
# self.driver.find_element_by_xpath('.//input[@value="Найти"]').click() # Клик найти
# time.sleep(2)
#
# self.driver.find_element_by_xpath(
# './/input[@id="action-toggle"]').click() # Клик по чекбоксу(ставит галочку)
# time.sleep(2)
#
# self.driver.find_element_by_xpath('.//select[@name="action"]').click() # Клик на поле "Действие"
# time.sleep(2)
# self.driver.find_element_by_xpath('.//option[@value="remove_comments"]').click()
# # self.driver.find_element_by_css_selector('#action_block > label > select > option:nth-child(14)').click() # Выбор "Удалить"
# time.sleep(2)
# self.driver.find_element_by_xpath('.//*[@id="action_block"]/button').click() # Клик на "Выполнить"
# time.sleep(3)
# self.driver.find_element_by_xpath(
# './/input[@id="action-toggle"]').click() # Клик по чекбоксу(ставит галочку)
# time.sleep(2)
# # self.driver.find_element_by_xpath('.//*[@id="content"]/form/div[2]/input[4]').click() # Клик на "Да, уверен"
# # time.sleep(2)
# self.driver.switch_to.window(self.driver.window_handles[-1])
# time.sleep(2)
| 53.892756 | 323 | 0.614788 | from lib2to3.pgen2 import driver
from selenium.webdriver.chrome import options
from selenium.webdriver.common.action_chains import ActionChains
from selenium import webdriver
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from datetime import datetime
import logging
from selenium.webdriver.support.select import Select
import pytest
from selenium.webdriver.remote.command import Command
import time
from selenium import webdriver
import json
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
import logging
import urllib.request
import os
from time import sleep
from sys import exit
import random
SLEEP_SHORT = 4
SLEEP_MEDIUM = 15
SLEEP_LONG = 20
class BasePage(object):
def __init__(self, driver):
self.driver = driver
class ResultPage(BasePage):
def find_link(self, tag_, class_):
path: str = './/ttag[@class="cclass"]'
path_1 = path.replace('ttag', tag_)
xpath = path_1.replace('cclass', class_)
time.sleep(SLEEP_SHORT)
table = self.driver.page_source
soup = BeautifulSoup(table, 'html.parser')
ls = soup.find(tag_, class_)
return ls
def find_all_link(self, tag_, class_):
page = MainPage(self.driver)
path = './/ttag[@class="cclass"]'
path_1 = path.replace('ttag', tag_)
xpath = path_1.replace('cclass', class_)
page.waitForElementVisible(xpath, 7)
time.sleep(SLEEP_SHORT)
table = self.driver.page_source
soup = BeautifulSoup(table, 'html.parser')
ts = soup.find_all(tag_, class_)
return ts
def find_x(self, tag_, class_):
table = self.driver.page_source
soup = BeautifulSoup(table, 'html.parser')
ts = soup.find(tag_, class_)
ls = ts.find('a').get('clip_name')
return ls
def find_y(self, tag_, class_):
print(self, tag_, class_)
table = self.driver.page_source
soup = BeautifulSoup(table, 'html.parser')
ts = soup.find(tag_, class_)
ls = ts.find('button').get('filter__item filter__item_year').getText()
return ls
def find_n(self, tag_, class_):
print(self, tag_, class_)
table = self.driver.page_source
soup = BeautifulSoup(table, 'html.parser')
ts = soup.find(tag_, class_)
ls = ts.find('button').get('years-to').getText()
return ls
def find_tag(self, tag_):
time.sleep(SLEEP_LONG)
table = self.driver.page_source
soup = BeautifulSoup(table, 'html.parser')
ls = soup.find(tag_)
return ls
def find_all_tag(self, tag_):
time.sleep(SLEEP_SHORT)
table = self.driver.page_source
soup = BeautifulSoup(table, 'html.parser')
ls = soup.find_all(tag_)
return ls
def simple_find(self, xpath, number):
ls = self.driver.find_elements_by_xpath(xpath)[number]
return ls
def visible_xpath(self, xpath):
time.sleep(SLEEP_SHORT)
return EC.presence_of_element_located((By.XPATH, xpath))
class Admin(BasePage):
def click_f(self, name, stap):
page = MainPage(self.driver)
step = str(stap)
self.adminx = {
'Клик_In_Progress_статус': {'func': page.click_xpath, 'path': '//*[@id="issue_status_id"]/option[4]'},
'Клик_удалить_фильм_Малыш': {'func': page.click_xpath, 'path': './/a[text()="Прокат фильма (QA version)"]'},
'Клик_Принять': {'func': page.click_xpath, 'path': './/*[@id="issue-form"]/input[6]'},
'Клик_Ответить': {'func': page.click_xpath, 'path': './/span[@class="b-letter__foot__tab"]'},
'Клик_Отправить_письмо': {'func': page.click_xpath,'path': '//*[@id="b-toolbar__right"]/div[3]/div[2]/div/div[1]/div[1]/div/div[1]/span'},
'Клик_Входящие_mail_ru': {'func': page.click_xpath, 'path': '//*[@id="b-nav_folders"]/div/div[1]'},
'Клик_Чекбокс_Входящие_mail_ru': {'func': page.click_xpath, 'path': './/div[@class="b-checkbox__box"]'},
'Клик_Удалить_письма_из_mail_ru': {'func': page.click_xpath,'path': '//*[@id="b-toolbar__right"]/div[2]/div/div[2]/div[2]/div/div[1]/span'},
'Клик_список_статус': {'func': page.click_id, 'path': 'issue_status_id'},
'Админка_клик_найти': {'func': page.click_id, 'path': 'id-submit-search'},
'Админка_клик_чекбокс_1': {'func': page.click_id, 'path': 'action-toggle'},
'Админка_Действие': {'func': page.click_name, 'path': 'action'},
'Админка_Выбор_Удалить_пользователя': {'func': page.click_css,'path': '#action_block > label > select > option:nth-child(14)'},
'Админка_Выполнить': {'func': page.click_name, 'path': 'index'},
'Админка_подтвердить': {'func': page.click_xpath, 'path': '//input[@value="Да, я уверен"]'},
'Админка_большая_красная_кнопка': {'func': page.click_css, 'path': 'body > div > section > a'},
'Клик_первое_письмо': {'func': page.click_xpath, 'path': './/a[@class="js-href b-datalist__item__link"]'},
'Клик_второе_письмо': {'func': page.click_s_xpath, 'path': './/a[@class="js-href b-datalist__item__link"]','index': 1},
'Переключение_1_в_iframe': {'func': page.click_switch_to, 'path': 'iframe', 'index': 1},
'Возврат_фокуса_из_iframe': {'func': page.driver.switch_to.default_content, 'path': 'None'},
'Клик_Closed_статус': {'func': page.double, 'path_1': '//*[@id="issue_status_id"]/option[7]','path_2': './/option[@value="5"]'},
'Профили_посетителей': {'func': page.click_xpath, 'path': './/a[@href="/admin/tvzavr_admin/customer/"]'},
}
self.args = self.adminx[name]
self.func = self.args['func']
self.func(self.args)
page.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен')
click_f(self, name, stap):
page = MainPage(self.driver)
step = str(stap)
self.cardx = {
'Клик_кнопки_просмотр_от_руб': {'func': page.click_xpath,'path': './/button[@class="clip-player__action button"]'},
'Клик_кнопки_напрокат_SD-10р': {'func': page.click_xpath,'path': './/button[@class="tariffs__buy js-payment-info"]'},
'Клик_кнопки_напрокат_SD-11.88р': {'func': page.click_xpath, 'path': './/button[@data-tariff-id="575"]'},
'Клик_Личный_счёт': {'func': page.click_xpath,'path': './/a[@class="tabs__link js-tabs-link"][contains(., "Личный счёт")]'},
'Клик_Оплатить_личный_счет': {'func': page.click_xpath,'path': './/button[@class="payment-cloudpayments__card button button_stretched js-buy-button"]'},
'Клик_кнопки_В_избранное': {'func': page.click_xpath, 'path': './/button[@class="clip__action"]'},
'Клик_кнопки_Убрать_из_избранного': {'func': page.click_xpath,'path': './/button[@class="clip__action clip__action_active"]'},
'Клик_иконки_найденного_фильма': {'func': page.click_xpath, 'path': './/a[@href="/film/lunnyi-kamen/"]'},
'Клик_первого_фильма': {'func': page.click_xpath, 'path': './/a[@class="card card_clip"]'},
'Клик_иконки_избранного_фильма': {'func': page.click_xpath,'path': './/div[@class="clip-card__title tvz-overflow"]'},
'Клик_иконки_фильма_в_избранном': {'funk': page.click_xpath, 'path': './/a[@class="card card_clip"]'},
'Клик_Play': {'func': page.click_xpath, 'path': '//div[@class="tvz-button tvz-bpb2"]'},
'Клик_пауза': {'func': page.click_xpath, 'path': '//div[@class="tvz-button tvz-button_play"]'},
'Клик_вкладки_Описание': {'func': page.click_xpath,'path': '//h2[@class="clip__subheading"][contains(., "Описание")]'},
'Клик_вкладки_Серии': {'func': page.click_xpath,'path': '//h2[@class="clip__subheading"][contains(., "Серии")]'},
'Клик_2_ой_Серии_1_го_сезона': {'func': page.click_xpath,'path': '//div[@class="series-card__title"][contains(., "2 серия")]'},
'Клик_на_вкладку_Отзывы ': {'func': page.click_xpath, 'path': './/a[@data-target="clip-comments"]'},
'Клик_на_вкладку_Трейлеры': {'func': page.click_xpath, 'path': '//a[@data-target="clip-trailers"]'},
'Клик_на_вкладку_Награды': {'func': page.click_xpath, 'path': '//a[@data-target="clip-awards"]'},
'Клик_на_вкладку_описание': {'func': page.click_xpath, 'path': '//a[@data-target="clip-info"]'},
}
self.args = self.cardx[name]
self.func = self.args['func']
self.func(self.args)
page.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен')
class Profile(BasePage):
def click_f(self, name, step_int):
result = ResultPage(self.driver)
page = MainPage(self.driver)
step = str(step_int)
self.profx = {
'Клик_Зарегистрироваться': {'func': page.click_id, 'path': 'email-registration-submit'},
'Клик_поиска_Лупа': {'func': page.click_id, 'path': 'header-search-button'},
'Клик_Подписки': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Подписки")]'},
'Клик_Выйти': {'func': page.click_xpath,'path': './/button[@class="profile-menu__logout js-profile-logout"]'},
'Клик_Пополнить': {'func': page.click_xpath,'path': './/button[@class="cabinet-balance__replenish button button_stretched js-replenishment"]'},
'Клик_Личный_счет': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Счет")]'},
'Клик_Личный_счет_нового_пользователя': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link profile-menu__link_notified"][contains(., "Счет")]'},
'Клик_Регистрация': {'func': page.click_xpath, 'path': './/a[text()="Регистрация"]'},
'Клик_phone_Зарегистрироваться': {'func': page.click_css, 'path': '#register-submit'},
'Клик_значок_пользователя': {'func': page.click_xpath,'path': './/button[@class="header__profile js-profile-menu"]'},
'Клик_значок_нового_пользователя': {'func': page.click_xpath,'path': './/button[@class="header__profile header__profile_notified js-profile-menu"]'},
'Клик_мои_фильмы': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Мои фильмы")]'},
'Клик_крестик_всплывшего_окна_тройка': {'func': page.click_xpath,'path': './/button[@class="modal__close"]'},
'Клик_Настройки_профиля': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Настройки")]'},
'Клик_переход_в_настройки': {'func': page.click_xpath,'path': './/a[@class="tabs__link js-tabs-link"][contains(., "Настройки")]'},
'Клик_день_рождения': {'func': page.click_id, 'path': 'birthday-day'},
'Ввод_дня_рождения': {'func': page.click_css, 'path': '#birthday-day > option:nth-child(5)'},
'Клик_месяц_рождения': {'func': page.click_id, 'path': 'birthday-month'},
'Ввод_месяца_рождения': {'func': page.click_css, 'path': '#birthday-month > option:nth-child(5)'},
'Клик_год_рождения': {'func': page.click_id, 'path': 'birthday-year'},
'Ввод_года_рождения': {'func': page.click_xpath, 'path': './/option[@value="1990"]'},
'Клик_выбран_пол': {'func': page.click_xpath,'path': './/span[@class="toggle__label"][contains(., "Мужской пол")]'},
'Клик_Снятие_галочки_с_подписки': {'func': page.click_xpath,'path': './/span[text()="Да, я хочу получать подписку с обновлениями, акциями и подарками"]'},
'Клик_Снятие_галочки_с_продолжения_просмотра': {'func': page.click_xpath,'path': './/span[text()="Продолжать просмотр с места остановки"]'},
'Клик_Сохранить': {'func': page.click_xpath,'path': './/button[@class="cabinet-settings__button button button_stretched"][contains(., "Сохранить")]'},
'Клик_Избранное': {'func': page.click_xpath, 'path': './/a[text()="Избранное"]'},
}
self.args = self.profx[name]
self.func = self.args['func']
self.func(self.args)
page.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен')
class MainPage(BasePage):
def click_f(self, name, stap):
step = str(stap)
self.pagex = {
'Клик_прокрутки_слайда_вправо': {'func': self.click_xpath,'path': './/button[@class="slider__navigation slider__navigation_next js-slider-navigation js-slider-navigation-next"]'},
'Клик_прокрутки_слайда_влево': {'func': self.click_xpath,'path': './/button[@class="slider__navigation slider__navigation_prev js-slider-navigation js-slider-navigation-prev"]'},
'Клик_поиска_Лупа': {'func': self.click_css, 'path': '#header-search-button'},
'Клик_кнопки_крестик': {'func': self.click_xpath, 'path': './/button[@class="modal__close"]'},
'Клик_Новинки': {'func': self.click_xpath, 'path': './/a[@href="/novinki/"]'},
'Показать_еще': {'func': self.click_xpath,'path': './/button[@class="catalog__more button js-catalog-more"]'},
'Клик_Подписки': {'func': self.click_xpath, 'path': './/a[@title="Подписка tvzavr"]'},
'Клик_Подписка_Отключи_рекламу': {'func': self.click_xpath,'path': './/a[@class="tabs__link js-tabs-link"][contains(., "«Отключи рекламу на tvzavr!»")]'},
'Клик_купить_за_99р': {'func': self.click_xpath,'path': './/button[@class="subscriptions__button button button_dark js-payment-info"]'},
'Клик_Бесплатно': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Бесплатно")]'},
'Клик_Каталог': {'func': self.click_xpath, 'path': './/a[@class="header__link"][contains(., "Каталог")]'},
'Клик_Фильмы_в_каталоге': {'func': self.click_xpath,'path': './/button[@class="filter__category js-filter-category"][contains(., "Фильмы")]'},
'Клик_Мультфильмы_в_каталоге': {'func': self.click_xpath,'path': './/button[@class="filter__category js-filter-category"][contains(., "Мультфильмы")]'},
'Клик_Сериалы_в_каталоге': {'func': self.click_xpath,'path': './/button[@class="filter__category js-filter-category"][contains(., "Сериалы")]'},
'Клик_Годы_выпуска': {'func': self.click_xpath,'path': './/button[@class="filter__subcategory js-filter-subcategory"][contains(., "Годы выпуска")]'},
'Выставление_год_левый': {'func': self.click_xpath, 'path': './/div[@style="left: 22.7642%;"]'},
'Клик_Родительский_контроль': {'func': self.click_xpath, 'path': './/span[text()="Родительский контроль"]'},
'Клик_Бесплатные': {'func': self.click_xpath, 'path': './/span[text()="Бесплатные"]'},
'Клик_страны': {'func': self.click_xpath,'path': './/button[@class="filter__subcategory js-filter-subcategory"][contains(., "Страны")]'},
'Клик_США': {'func': self.click_xpath, 'path': './/li[@data-filter-id="515"]'},
'Клик_Германия': {'func': self.click_xpath, 'path': './/li[@data-tag-name="Германия"]'},
'Клик_Южная_Корея': {'func': self.click_xpath, 'path': './/li[@data-filter-id="8789"]'},
'Клик_Япония': {'func': self.click_xpath, 'path': './/li[@data-filter-id="3467"]'},
'Клик_Испания': {'func': self.click_xpath, 'path': './/li[@data-filter-id="2600"]'},
'Клик_Турция': {'func': self.click_xpath, 'path': './/li[@data-filter-id="5287"]'},
'Клик_Россия': {'func': self.click_xpath, 'path': './/li[@data-filter-id="122"]'},
'Клик_Жанры': {'func': self.click_xpath,'path': './/button[@class="filter__subcategory js-filter-subcategory"][contains(., "Жанры")]'},
'Клик_боевик_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="690"]'},
'Клик_комедия_жанр': {'func': self.click_xpath, 'path': './/li[@data-tag-name="Комедия"]'},
'Клик_азиатский_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="21136"]'},
'Клик_Советский_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="7320"]'},
'Клик_приключения_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="702"]'},
'Клик_Детектив_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="693"]'},
'Клик_применить_фильтр': {'func': self.click_xpath,'path': './/button[@class="filter__apply button js-filter-apply"]'},
'Клик_кнопки_просмотр_от_руб': {'func': self.click_xpath,'path': './/button[@class="clip-player__action button"]'},
'Клик_Сериалы': {'func': self.click_xpath, 'path': './/a[text()="Сериалы"]'},
'Вход': {'func': self.click_xpath, 'path': './/button[@class="reset-button pm-gate__button"]'},
'Вход2': {'func': self.click_xpath, 'path': './/button[@data-action="click->pm-auth#login"]'},
'Далее': {'func': self.click_xpath, 'path': './/button[@type="submit"]'},
'Войти': {'func': self.click_xpath, 'path': './/button[@type="submit"]'},
'Клик_Вход_через_Mailru': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_mr js-social-link"]'},
'Клик_Войти_и_разрешить_Mailru': {'func': self.click_xpath,'path': './/button[@class="ui-button-main"][contains(., "Войти и разрешить")]'},
'Клик_Вход_через_FB': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_fb js-social-link"]'},
'Клик_Вход_через_VK': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_vk js-social-link"]'},
'Клик_Вход_через_OK': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_ok js-social-link"]'},
'Клик_Вход_через_G': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_gp js-social-link"]'},
'Клик_Вход_FB': {'func': self.click_id, 'path': 'loginbutton'},
'Клик_Вход_VK': {'func': self.click_id, 'path': 'install_allow'},
'Клик_Вход_ОК': {'func': self.click_xpath, 'path': './/input[@class="button-pro __wide form-actions_yes"]'},
'Снятие_галочки_чекбокса_запомнить_меня': {'func': self.click_xpath, 'path': './/span[@class="irc-vis"]'},
'Клик_кнопки_Далее_Google': {'func': self.click_xpath,'path': './/span[@class="RveJvd snByac"][contains(., "Далее")]'},
'Клик_1_Далее_Google': {'func': self.click_xpath,'path': './/span[@class="RveJvd snByac"][contains(., "Далее")]'},
'Войти_auth': {'func': self.click_id, 'path': 'authorization-submit'},
'По_номеру_телефона': {'func': self.click_xpath, 'path': './/a[@data-target="register-phone-tab"]'},
'Клик_Подборки': {'func': self.click_xpath, 'path': './/a[@class="header__link"][contains(., "Подборки")]'},
'Клик_Коллекции': {'func': self.click_xpath,'path': './/a[@class="filter__category"][contains(., "Коллекции")]'},
'Клик_Подборки_партнеров': {'func': self.click_xpath,'path': './/a[@class="filter__category"][contains(., "Подборки партнеров")]'},
'Клик_Детям': {'func': self.click_xpath, 'path': './/a[@class="header__link"][contains(., "Детям")]'},
'Клик_Спецпроекты': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Спецпроекты")]'},
'Клик_Кино_равного_доступа': {'func': self.click_xpath,'path': './/div[@class="card__title"][contains(., "Кино равного доступа")]'},
'Проект, где ваши дети снимаются в кино': {'func': self.click_xpath,'path': './/div[@class="card__title"][contains(., "Проект, где ваши дети снимаются в кино")]'},
'Клик_TVZ': {'func': self.click_xpath,'path': './/div[@class="card__title"][contains(., "Кино равного доступа")]'},
'Обратная_связь': {'func': self.click_xpath,'path': './/button[@class="footer__link"][contains(., "Обратная связь")]'},
'Клик_Отправить_сообщение': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Подборки")]'},
'Клик_Отправить_сообщение_обратная связь': {'func': self.click_xpath,'path': './/button[@class="feedback__submit button button_stretched"]'},
'Редактировать': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Редактировать")]'},
'Клик_первого_фильма': {'func': self.click_xpath, 'path': './/div[@class="owl-item active"]'},
'Клик_постер_первого_фильма': {'func': self.click_xpath, 'path': './/a[@class="card card_clip"]'},
'Клик_постер_сериала_соседка_ты_дома': {'func': self.click_xpath,'path': '//a[@href="/film/sosedka-ty-doma/"]'},
'Клик_стрелка_Вниз': {'func': self.click_tag, 'path': 'body', 'send': Keys.DOWN},
'Переход_вниз_страницы': {'func': self.click_tag, 'path': 'body', 'send': Keys.END},
}
self.args = self.pagex[name]
self.func = self.args['func']
self.func(self.args)
self.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен')
    def send_f(self, name, text, stap):
        """Dispatch a text-input action identified by its Russian *name*.

        Looks *name* up in a table mapping action names to an argument dict
        ({'func': bound typing helper, 'path': locator, 'text': text}),
        invokes the helper with that dict, and logs the step.

        Parameters:
            name (str): key identifying the target input field (see table).
            text (str): text to type into the field.
            stap: step number, used only in the log message.

        Raises KeyError if *name* is not a known action.
        """
        step = str(stap)
        # Action table: 'func' selects the locator strategy (by id / name /
        # css), 'path' is the locator value the helper expects.
        self.pages = {
            'Ввод_в_строку_поиска': {'func': self.send_id, 'path': 'search-field', 'text': text},
            'Ввод_2_в_строку_поиска': {'func': self.send_id, 'path': 'header-search-field', 'text': text},
            'Ввод_логина': {'func': self.send_name, 'path': 'email-registration__address', 'text': text},
            'Ввод_пароля': {'func': self.send_css, 'path': '#register-email-password', 'text': text},
            'Ввод_логина_вход': {'func': self.send_name, 'path': 'login', 'text': text},
            'Ввод_пароля_вход': {'func': self.send_css, 'path': '#auth-password', 'text': text},
            'Ввод_логин_Google': {'func': self.send_id, 'path': 'identifierId', 'text': text},
            'Ввод_пароль_Google': {'func': self.send_name, 'path': 'password', 'text': text},
            'Ввод_логин_Mailru': {'func': self.send_name, 'path': 'Login', 'text': text},
            'Ввод_пароля_Mailru': {'func': self.send_css, 'path': '#password', 'text': text},
            'Ввод_пароля_FB': {'func': self.send_css, 'path': '#pass', 'text': text},
            'Ввод_пароля_VK': {'func': self.send_css, 'path': '#login_submit > div > div > input:nth-child(9)','text': text},
            'Ввод_логина_OK': {'func': self.send_name, 'path': 'fr.email', 'text': text},
            'Ввод_пароля_OK': {'func': self.send_css, 'path': '#field_password', 'text': text},
            'Ввод_СМС_пароля_reg': {'func': self.send_name, 'path': 'code', 'text': text},
            'feedback_имя_пользователя': {'func': self.send_id, 'path': 'feedback-name', 'text': text},
            'feedback_e_mail_пользователя': {'func': self.send_id, 'path': 'feedback-email', 'text': text},
            'feedback_сообщение_пользователя': {'func': self.send_id, 'path': 'feedback-decription', 'text': text},
            'Ввод_ответа_пользователю': {'func': self.send_id, 'path': 'issue_notes', 'text': text},
            'Ввод_текста_ответа_пользователя': {'func': self.send_id, 'path': 'tinymce', 'text': text},
            'Ввод_из_СМС_пароля_auth': {'func': self.send_css, 'path': '#auth-password', 'text': text},
            'Ввод_суммы_пополнения_счета': {'func': self.send_name, 'path': 'price', 'text': text},
            'Ввод_профиль_old_пароля': {'func': self.send_id, 'path': 'cabinet-password-old', 'text': text},
            'Ввод_профиль_new_пароля': {'func': self.send_id, 'path': 'cabinet-password-new', 'text': text},
            'Ввод_профиль_rep_пароля': {'func': self.send_id, 'path': 'cabinet-password-repeat', 'text': text},
            'Ввод_псевдонима': {'func': self.send_id, 'path': 'name', 'text': text},
            'Админка_Ввод_в_поиск': {'func': self.send_name, 'path': 'q_q', 'text': text},
            'Ввод_номера_карты_тройка': {'func': self.send_id, 'path': 'troika-binding-textbox', 'text': text},
        }
        self.args = self.pages[name]
        self.func = self.args['func']
        self.func(self.args)
        # NOTE(review): the message says 'Клик' (click) although this method
        # performs text input — likely copy-pasted from the click dispatcher;
        # confirm the intended wording before relying on the log.
        self.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен')
def click_button(self, bc):
button_xpath = ('.//button[text()="%s"]' % bc)
self.driver.find_element_by_xpath(button_xpath).click()
def click_div(self, dep):
up_xpath = ('.//div[text()="%s"]' % dep)
self.waitForElementClickable(up_xpath, 30)
time.sleep(SLEEP_SHORT)
self.driver.find_element_by_xpath(up_xpath).click()
return
def click_li(self, dep):
li_xpath = ('.//li[text()="%s"]' % dep)
self.waitForElementClickable(li_xpath, 30)
time.sleep(SLEEP_SHORT)
self.driver.find_element_by_xpath(li_xpath).click()
def click_a(self, dep):
a_xpath = ('.//a[text()="%s"]' % dep)
self.waitForElementClickable(a_xpath, 10)
self.driver.find_element_by_xpath(a_xpath).click()
def click_span(self, dep):
span_xpath = ('.//span[text()="%s"]' % dep)
self.waitForElementClickable(span_xpath, 30)
time.sleep(SLEEP_SHORT)
self.driver.find_element_by_xpath(span_xpath).click()
def click_id(self, args):
dep = args['path']
self.waitForIDVisible(dep, 10)
self.driver.find_element_by_id(dep).click()
def click_name(self, args):
dep = args['path']
self.waitForNameVisible(dep, 30)
self.driver.find_element_by_name(dep).click()
def click_xpath(self, args):
xpath = args['path']
self.waitForElementClickable(xpath, 30)
self.driver.find_element_by_xpath(xpath).click()
def click_css(self, args):
css = args['path']
print('css = ', css)
self.driver.find_element_by_css_selector(css).click()
def click_switch_to(self, args):
frame = args['path']
index = args['index']
self.driver.switch_to.frame(self.driver.find_elements_by_tag_name(frame)[index]).click()
def click_s_xpath(self, args):
xpath = args['path']
index = args['index']
self.waitForElementClickable(xpath, 30)
self.driver.find_elements_by_xpath(xpath)[index].click()
def double(self, args):
self.click_xpath(args['path_1'])
self.click_xpath(args['path_2'])
def click_tag(self, args):
self.driver.find_element_by_tag_name(args['path']).send_keys(args['send'])
ester_vis_xpath(self, xpath):
self.waitForElementVisible(xpath, 5)
self.driver.find_element_by_xpath(xpath)
def tester_click_xpath(self, xpath):
self.waitForElementClickable(xpath, 25)
self.driver.find_element_by_xpath(xpath)
def click_play(self):
xpath = '//*[@id="clip-player"]/div[16]'
self.waitForElementClickable(xpath, 35)
self.driver.find_element_by_xpath(xpath).click()
(self):
xpath = '//*[@id="clip-player"]/div[4]/div'
css_sel = '#clip-player > div.tvz-button.tvz-button_play > div'
time.sleep(SLEEP_SHORT)
self.driver.find_element_by_xpath(xpath).click()
def click_enter(self):
self.click_xpath('.//button[@class="header__login"]')
def waitForElementPresent(self, xpath, timer):
WebDriverWait(self.driver, timer).until(EC.presence_of_element_located((By.XPATH, xpath)))
def waitForElementClickable(self, xpath, timer):
WebDriverWait(self.driver, timer).until(EC.element_to_be_clickable((By.XPATH, xpath)))
    def waitForElementVisible(self, xpath, timer):
        # NOTE(review): despite the name this waits only for *presence* in the
        # DOM, not visibility (EC.visibility_of_element_located would check
        # that) — confirm whether callers rely on the weaker check before
        # changing it.
        WebDriverWait(self.driver, timer).until(EC.presence_of_element_located((By.XPATH, xpath)))
def waitForNameVisible(self, name, timer):
WebDriverWait(self.driver, timer).until(EC.presence_of_element_located((By.NAME, name)))
    def waitForIDVisible(self, id, timer):
        # NOTE(review): waits for *presence*, not visibility, despite the
        # name; also the parameter shadows the builtin ``id`` (kept to avoid
        # breaking keyword callers).
        WebDriverWait(self.driver, timer).until(EC.presence_of_element_located((By.ID, id)))
def send_id(self, args):
d_id = args['path']
txt = args['text']
self.waitForIDVisible(d_id, 30)
self.driver.find_element_by_id(d_id).send_keys(txt)
return
def send_name(self, args):
d_name = args['path']
txt = args['text']
self.waitForNameVisible(d_name, 30)
self.driver.find_element_by_name(d_name).send_keys(txt)
return
def send_css(self, args):
d_name = args['path']
txt = args['text']
self.driver.find_element_by_css_selector(d_name).send_keys(txt)
return
def input(self, dclass, data):
li_xpath = ('.//input[@class="%s"]' % dclass)
time.sleep(SLEEP_SHORT)
self.driver.find_element_by_xpath(li_xpath).send_keys(data)
def rand_mail(self, lit):
d = str(datetime.today())
ds = d.replace('-', '')
d = ds.split(':')[0]
d_2 = ds.split(':')[1]
d_3 = d.replace(' ', '')
rand = d_3 + d_2
random_mail = 'tvzavrtest' + rand + lit + '@rrbbxvdr.rz'
return (random_mail, rand)
def code_phone(self, phone):
self.loger_info('Получение кода на телефон: ' + phone)
url = 'http://www.tvzavr.ru/api/3.1/sms/send_confirm_code?phone=' + str(
phone) + '&entity=empty&prv=smstest_tvz'
self.loger_info(url)
code = (requests.get(url)).text
self.loger_info('code_phone ' + code)
r_code = code.split(':')[3]
s_code = r_code.split('"')[1]
self.loger_info(s_code)
return s_code
    # Checks that the expected navigation elements are present on the page.
    def elem(self):
        """Assert that the main header/footer links are present on the page.

        NOTE(review): uses plain ``assert`` statements, which are stripped
        under ``python -O``; also ResultPage.find_link is called unbound with
        *self* of this class (duck-typed) — confirm it only needs self.driver.
        """
        self.loger('Проверка элементов страницы')
        # Presence of the "Новинки" (New releases) link
        res_txt = str(ResultPage.find_link(self, "a", "header__link"))
        self.loger(res_txt)
        assert ('Новинки') in res_txt
        self.loger('Наличие ссылки "Новинки" подтверждено')
        res_txt = str(ResultPage.find_all_link(self, "a", "header__link"))
        # "Подписки" (Subscriptions) is the label checked for
        assert ('Подписки') in res_txt
        self.loger('Наличие ссылки "Подписки" подтверждено')
        # Presence of the "Бесплатно" (Free) link
        assert ('Бесплатно') in res_txt
        self.loger('Наличие ссылки "Бесплатно" подтверждено')
        assert ('Подборки') in res_txt
        self.loger('Наличие ссылки "Подборки" подтверждено')
        # "Каталог" (Catalog) is the label checked for
        assert ('Каталог') in res_txt
        self.loger('Наличие ссылки "Каталог" подтверждено')
        # Presence of the "Детям" (For kids) link
        assert ('Детям') in res_txt
        self.loger('Наличие ссылки "Детям" подтверждено')
        res_txt = str(ResultPage.find_link(self, "button", "header__login"))
        # "Вход" (Login) is the label checked for
        assert ('Вход') in res_txt
        self.loger(res_txt)
        self.loger('Наличие ссылки "Вход" подтверждено')
def loger(self, text):
logging.basicConfig(format=u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG)
logging.info(text)
print(text)
def loger_info(self, text):
logging.basicConfig(format=u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG)
logging.info(text)
print(text)
def loger_error(self, text):
logging.basicConfig(format=u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG)
logging.error(text)
print(text)
# def send_sms(self, phone, message): # Функция отправки смс
# logging.info("Вызов функции отправки СМС")
# # chrome_options = webdriver.ChromeOptions()
# # chrome_options.add_argument("user-data-dir=C:\\Users\\user\\AppData\\Local\\Google\\Chrome\\User Data") # Запуск браузера с сохраненным профилем
# self.driver = webdriver.Chrome("C:\chromedriver\chromedriver.exe")
self.driver.get("https://app.mysms.com/")
self.driver.implicitly_wait(10)
time.sleep(SLEEP_MEDIUM)
self.driver.find_element_by_xpath('.//div[@class="gwt-Label"]').click() # Новое сообщение
time.sleep(SLEEP_SHORT)
self.driver.find_element_by_xpath('.//input[@class="recipientTextBox"]').send_keys(phone)
time.sleep(SLEEP_SHORT)
self.driver.find_element_by_xpath('.//div[@class="textarea"]').send_keys(message)
time.sleep(SLEEP_SHORT)
self.driver.find_element_by_xpath('.//button[@class="styledButton sendButton sim dropdown"]').click()
logging.info("Клик 'Отправить' произведен, СМС подтверждения отправлено")
self.driver.close()
return
def login_google(self, emailgo, passok):
time.sleep(SLEEP_SHORT)
self.send_f('Ввод_логин_Google', emailgo, 1)
time.sleep(2)
self.click_f('Клик_кнопки_Далее_Google', 6)
time.sleep(2)
self.send_f('Ввод_пароль_Google', passok, 1)
time.sleep(2)
self.click_f('Клик_кнопки_Далее_Google', 6)
time.sleep(2)
return
# def login_match(self, emailt, passw):
# time.sleep(SLEEP_SHORT)
# self.send_f('Ввод_name_логина', emailt, 2)
# self.send_f('Ввод_пароля_tvz', passw, 3)
# self.click_f('Клик_Войти_auth', 4)
# time.sleep(SLEEP_MEDIUM)
# return 06.08.2019
def login_matchtv(self, num_phone, passw):
self.click_f('Вход', 1)
time.sleep(1)
self.click_f('Вход2', 2)
time.sleep(2)
self.driver.find_element_by_xpath('.//input[@type="tel"]').send_keys(num_phone)
self.loger_info('Введён номер телефона: +7 ' + num_phone)
time.sleep(1)
self.click_f('Далее', 3)
time.sleep(1)
self.driver.find_element_by_xpath('.//input[@type="password"]').send_keys(passw)
self.loger_info('Введён пароль: ' + passw)
time.sleep(1)
self.click_f('Войти', 4)
time.sleep(1)
return
# def login_tvzavr(self, email, passw, sex):
# result = ResultPage(self.driver)
# prof = Profile(self.driver)
# self.waitForElementVisible('.//button[@class="header__login"]', 7)
# resic = result.find_link("button", "header__login")
# if "Вход" not in resic:
# if sex == 'male':
# prof.click_f('Клик_Аватарка_М', 1)
# else:
# prof.click_f('Клик_Аватарка_Ж', 1)
#
# prof.click_f('Клик_Выйти', 1)
# else:
# # Шаг 1 Нажать в шапке на кнопку "Вход".')
# self.click_f('Клик_Вход', 1)
# time.sleep(SLEEP_SHORT)
# # Шаг 2
# self.send_f('Ввод_name_логина', email, 2)
# self.send_f('Ввод_пароля_Google', passw, 3)
# self.click_f('Клик_Войти_auth', 4)
# time.sleep(SLEEP_LONG)
# try:
# self.click_f('Клик_кнопки_крестик', 6)
# except:
# print('нет акции мицубиси')
# Шаг 5
# if sex == 'male': 1.08.2019
# prof.click_f('Клик_Аватарка_М', 7)
# else:
# prof.click_f('Клик_Аватарка_Ж', 8)
# time.sleep(SLEEP_MEDIUM)
# return
def login_mailru(self, emailru, passw):
self.send_id('mailbox:login', emailru)
self.loger_info('Ввод логина на mail.ru ' + emailru + 'произведен')
time.sleep(SLEEP_SHORT)
self.send_id('mailbox:password', passw)
self.loger_info('Ввод пароля на mail.ru произведен')
time.sleep(SLEEP_SHORT)
self.driver.find_element_by_xpath('.//input[@class="o-control"]').click()
self.loger_info('Клик кнопки "Войти" на mail.ru произведен')
time.sleep(SLEEP_MEDIUM)
    def registration(self, email, passw):
        """Register a new user with *email*/*passw* and verify the login succeeded.

        Logs out first if a user is already authenticated, then fills the
        registration form and asserts the profile menu shows *email*.
        NOTE(review): ``self.login`` is defined elsewhere in the file —
        presumably types into a named field; confirm its signature.
        """
        result = ResultPage(self.driver)
        prof = Profile(self.driver)
        resic = result.find_link("button", "header__login tvz-unauthorized")
        # If the header does not show "Вход" (Login), someone is signed in — log out.
        if "Вход" not in resic:
            prof.click_f('Клик_Аватарка_М', 1)
            self.loger_info('Шаг 0 Клик на аватарку пользователя произведен')
            self.driver.find_element_by_xpath('.//button[@class="profile-menu__logout js-profile-logout"]').click()
            time.sleep(SLEEP_SHORT)
        else:
            self.click_enter()
        self.loger_info('Шаг 1 Клик "Вход" произведен')
        # Step 2: open the registration form
        self.click_a('Регистрация')
        self.loger_info('Шаг 2 Клик "Регистрация" произведен')
        time.sleep(SLEEP_SHORT)
        # Step 3: enter the e-mail
        self.login('email', email)
        print('Шаг 3 Ввод логина', email, 'произведен')
        # Step 4: enter the password
        self.driver.find_element_by_css_selector('#register-email-password').send_keys(passw)
        # page.login('password', passw)
        self.loger_info('Шаг 4 Ввод пароля произведен')
        time.sleep(SLEEP_SHORT)
        # Step 5: submit the registration form
        self.driver.find_element_by_id('register-email-submit').click()
        self.loger_info('Шаг 5 Клик "Зарегистрироваться" произведен')
        time.sleep(7)
        prof.click_f('Клик_Аватарка_М', 5)
        self.loger_info('Шаг 6 Клик на аватарку пользователя произведен')
        self.waitForElementVisible('.//div[@class="profile-menu__name __username"]', 7)
        # Verify the user is authorized: the profile menu must show the e-mail
        resic = str(result.find_link("div", "profile-menu__name __username"))
        # *email* is the label checked for
        assert (email) in resic
        self.loger_info('Авторизация зарегистрированного пользователя с е-майлом ' + email + ' подтверждена')
        time.sleep(SLEEP_SHORT)
    def input_card(self, number, month, year, name_card, cvv):
        """Fill the CloudPayments card form (number, expiry, holder, CVV).

        The final "Pay" click and the save-card checkbox handling are kept
        commented out, so this only fills the form.
        """
        result = ResultPage(self.driver)
        # The two generic text inputs are, in DOM order: [0] card number, [1] holder name.
        self.driver.find_elements_by_xpath('.//input[@class="payment-cloudpayments__field textbox js-input"]')[
            0].send_keys(number)
        self.loger_info('Шаг 10 Ввод номера карты произведен ' + number)
        time.sleep(3)
        self.driver.find_elements_by_xpath('.//select[@class="dropdown js-input"]')[0].click()
        time.sleep(1)
        self.driver.find_element_by_xpath('.//option[@value="%s"]' % month).click()
        self.loger_info('Ввод месяца карты произведен')
        time.sleep(3)
        self.driver.find_elements_by_xpath('.//select[@class="dropdown js-input"]')[1].click()
        self.driver.find_element_by_xpath('.//option[@value="%s"]' % year).click()
        self.loger_info('Ввод года карты произведен')
        time.sleep(3)
        # Fill the "Card holder" field, e.g. Ivanov Ivan
        self.driver.find_elements_by_xpath('.//input[@class="payment-cloudpayments__field textbox js-input"]')[
            1].send_keys(name_card)
        self.loger_info('Ввод имени держателя карты произведен')
        time.sleep(3)
        # Fill the "CVV code" field, e.g. 526
        self.driver.find_element_by_xpath(
            './/input[@class="payment-cloudpayments__field payment-cloudpayments__field_cvc textbox js-input"]').send_keys(
            cvv)
        self.loger_info('Ввод CVV код карты произведен')
        time.sleep(4)
        # Untick the "Save card data" checkbox (disabled)
        # self.driver.find_element_by_xpath('.//span[@class="toggle__label"]').click()
        # self.loger_info('Снятие галочки в чек-боксе"Сохранить данные карты" произведено')
        # Press the "Pay" button (disabled)
        #self.driver.find_element_by_xpath('.//button[@class="payment-cloudpayments__pay button button_stretched js-buy-button"]').click()
        #self.loger_info('Клик "Оплатить" произведен')
        #time.sleep(1)
        # message = str(result.find_link("section", "tvz-alerts tvz-animation-fadeOut"))
        # self.loger_info('Сообщение внизу формы оплаты:')
        # self.loger_info('message:' + message)
def delete_mails(self, emailgo, passgo):
# self.driver.get('https://mail.google.com')
self.loger_info('Шаг 5 Переход на gmail.com произведен')
# self.login_google(emailgo, passgo)
time.sleep(SLEEP_SHORT)
# self.driver.get('https://mail.google.com/mail/u/0/
# Удаление письма из почты
self.click_xpath('.//div[@class="J-J5-Ji J-JN-M-I-Jm"]')
# self.driver.find_element_by_id(':3d').click()
self.loger_info('Поставлена галочка чекбокс - выбор письма')
self.click_xpath('//*[@id=":5"]/div/div[1]/div[1]/div/div/div[2]/div[3]')
# self.click_xpath('.//div[@class="T-I J-J5-Ji nX T-I-ax7 T-I-Js-Gs mA"]')
self.loger_info('Клик кнопки "Удалить" письмо на gmail.com произведен')
time.sleep(SLEEP_SHORT)
@property
def consol_jenkins(self):
print('Запуск проверки консоли')
# p = subprocess.call('ffmpeg.exe -framerate 10 -f image2 -i "Frame%03d.jpg" -r 10 -s 620x380 Video.avi', shell=True)
# options = webdriver.ChromeOptions()
#self.driver = webdriver.Chrome("C:\chromedriver\chromedriver.exe", chrome_options=options)
options.add_argument('--incognito') # Запуск браузера в режиме инкогнито
# self.driver = webdriver.Chrome(options=options)
#self.driver = webdriver.Chrome("C:\chromedriver\chromedriver.exe")
self.driver.get("http://192.168.2.31:8080/jenkins/job/1_Regress/")
self.driver.maximize_window()
self.driver.implicitly_wait(10)
page = MainPage(self.driver)
result = ResultPage(self.driver)
self.send_name('j_username', 'admin')
self.send_name('j_password', 'admin')
self.click_xpath('.//div[@class="Checkbox-indicator"]')
self.click_name('Submit')
self.driver.implicitly_wait(5)
self.driver.find_elements_by_xpath('.//td[@class="build-row-cell"]')[0].click()
self.click_a('Вывод консоли')
res = self.driver.find_element_by_xpath('.//pre[@class="console-output"]').text
self.driver.close()
return res
def mail_send_web(self, login, passw):
self.driver.execute_script("window.open('','_blank');")
time.sleep(2)
self.driver.switch_to.window(self.driver.window_handles[1])
time.sleep(2)
result = ResultPage(self.driver)
self.driver.get("https://e.mail.ru/login")
self.driver.maximize_window()
self.driver.implicitly_wait(10)
self.loger_info(' Переход в mail произведен')
time.sleep(2)
#
# # self.driver.find_element_by_name("Login").send_keys('testmailtvzavr15')
self.driver.find_element_by_xpath('.//*[@id="root"]/div/div[3]/div/div/div/form/div[2]/div[2]/div[1]/div/div/div/div/div/div[1]')[0].click()
self.driver.find_element_by_xpath('.//*[@id="root"]/div/div[3]/div/div/div/form/div[2]/div[2]/div[1]/div/div/div/div/div/div[1]').send_keys('testmailtvzavr15')
time.sleep(2)
#
self.driver.find_element_by_xpath('.//span[@class="c01104 c0179 c01102 c0177"]').click() # Клик далее
time.sleep(3)
    def scype_send_web(self, login, passw):
        """Open web.skype.com in a new tab, sign in, and open the "Deployment" chat.

        SECURITY NOTE(review): the phone number and password are hard-coded
        below and the *login*/*passw* parameters are unused — move credentials
        to configuration. The message-sending steps are still commented out.
        """
        self.driver.execute_script("window.open('','_blank');")
        time.sleep(2)
        self.driver.switch_to.window(self.driver.window_handles[1])
        time.sleep(2)
        result = ResultPage(self.driver)
        # options = webdriver.ChromeOptions()
        # options.add_argument("--disable-notifications")
        # options.add_argument('--incognito')  # launch the browser in incognito mode
        # self.driver = webdriver.Chrome(options=options)
        #self.driver = webdriver.Chrome("C:\chromedriver\chromedriver.exe")
        self.driver.get("https://web.skype.com/ru/")
        self.driver.maximize_window()
        self.driver.implicitly_wait(10)
        self.loger_info(' Переход в Skype произведен')
        time.sleep(2)
        self.driver.find_element_by_name("loginfmt").send_keys('79776410337')
        time.sleep(2)
        self.driver.find_element_by_xpath('.//input[@id="idSIButton9"]').click()  # click "Next"
        time.sleep(3)
        self.driver.find_element_by_name("passwd").send_keys('Aleh1260337')
        self.loger_info('Ввод Skype пароля произведен')
        time.sleep(3)
        self.driver.find_element_by_xpath('.//input[@id="idSIButton9"]').click()  # click "Sign in"
        self.loger_info('Клик Вход произведен')
        time.sleep(3)
        self.driver.find_element_by_xpath('.//div[@id="rx-vlv-6"]').click()  # click the dialog
        self.loger_info('Шаг 100. Переход в чат Деплоймент произведен')
        time.sleep(10)
        #self.driver.find_element_by_css_selector(".public-DraftStyleDefault-block").click()  # Клик по полю ввода
        time.sleep(15)
        #print('тут2222')
        #time.sleep(2)
        # self.driver.find_element_by_xpath('/html/body/div[1]/div/div[1]/div[2]/div/div[1]/div/div[2]/div/div[2]/div/div/div/div/div/div[2]/div[2]/div/div/div[1]/div/div/div/div/div[2]/div[3]').send_keys('text')
        # self.send_f('Ввод_сообщения_скайп', 'text', 15)
        # self.driver.find_element_by_xpath('/html/body/div[1]/div/div[1]/div[2]/div/div[1]/div/div[2]/div/div[2]/div/div/div/div/div/div[2]/div[2]/div/div/div[1]/div/div/div/div/div[2]').send_keys('text')
        #self.driver.find_element_by_css_selector('[id="#.public-DraftStyleDefault-block"]').send_keys('text')
        #self.page.loger('тут')
        # self.click_xpath('//*[@id="swxContent1"]/swx-navigation/div/div/div/label/div/div/div[2]/div[2]/div/swx-button/button')
        # self.loger_info('Отправка текста в чат Деплоймент произведена')
        # self.driver.close()
# def delete_uzer(self, name): # ФУНКЦИЯ УДАЛЕНИЯ ПОЛЬЗОВАТЕЛЯ ИЗ АДМИНКИ
# self.driver.execute_script("window.open('','_blank');")
# time.sleep(2)
# self.driver.switch_to.window(self.driver.window_handles[1])
# time.sleep(2)
# self.driver.get("https://www.tvzavr.ru:8080/admin/")
# time.sleep(2)
# # 'Открытие страницы админки
# self.driver.find_element_by_xpath(
# './/a[@href="https://accounts.google.com/o/oauth2/auth?client_id=245544346256-4luf263ioa376hp89q5k08otplt9dvdh.apps.googleusercontent.com&scope=openid%20profile%20email%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fplus.login&redirect_uri=http://www.tvzavr.ru:8080/oauth2callback/&response_type=code"]').click()
# time.sleep(3)
# # Логинимся через Google
#
# emailgo = 'bykov.a@tvzavr.ru'
# passok = 'tmW9HZvaksgc'
#
# self.send_f('Ввод_логин_Google', emailgo, 1)
# time.sleep(2)
# self.click_f('Клик_кнопки_Далее_Google', 6)
# time.sleep(2)
# self.send_f('Ввод_пароль_Google', passok, 1)
# time.sleep(2)
# self.click_f('Клик_кнопки_Далее_Google', 6)
# time.sleep(6)
# # Вошли в админку
#
# self.driver.find_element_by_xpath('.//a[@href="/admin/tvzavr_admin/customer/"]').click() # Клик на "Профили посетителей"
# time.sleep(3)
# self.send_f('Админка_Ввод_в_поиск', name, 16) # Ввод имени пользователя
# time.sleep(3)
#
# self.driver.find_element_by_xpath('.//input[@value="Найти"]').click() # Клик найти
# time.sleep(2)
#
# self.driver.find_element_by_xpath(
# './/input[@id="action-toggle"]').click() # Клик по чекбоксу(ставит галочку)
# time.sleep(2)
#
# self.driver.find_element_by_xpath('.//select[@name="action"]').click() # Клик на поле "Действие"
# time.sleep(2)
# self.driver.find_element_by_xpath('.//option[@value="remove_comments"]').click()
# # self.driver.find_element_by_css_selector('
# time.sleep(2)
# self.driver.find_element_by_xpath('.//*[@id="action_block"]/button').click() # Клик на "Выполнить"
# time.sleep(3)
# self.driver.find_element_by_xpath(
# './/input[@id="action-toggle"]').click() # Клик по чекбоксу(ставит галочку)
# time.sleep(2)
# # self.driver.find_element_by_xpath('.//*[@id="content"]/form/div[2]/input[4]').click() # Клик на "Да, уверен"
# # time.sleep(2)
# self.driver.switch_to.window(self.driver.window_handles[-1])
# time.sleep(2)
| true | true |
f7372d0c1d753f3bbf5cda998c2c4fa9a7597570 | 601 | py | Python | mundo1/D28.py | KayanOkagawa/Cursoemvideo-Python3-Exercicios | 10c8386102cc0928f8f090070eb3218deb3d60fe | [
"MIT"
] | null | null | null | mundo1/D28.py | KayanOkagawa/Cursoemvideo-Python3-Exercicios | 10c8386102cc0928f8f090070eb3218deb3d60fe | [
"MIT"
] | null | null | null | mundo1/D28.py | KayanOkagawa/Cursoemvideo-Python3-Exercicios | 10c8386102cc0928f8f090070eb3218deb3d60fe | [
"MIT"
] | null | null | null | #-*-coding:utf8;-*-
#qpy:console
from random import randint
print('\n')
print('-' * 5, 'ADIVINHE O NÚMERO', '-' * 5)
print('\n')
opção = input('Quer jogar comigo: ').lower().strip()
print('\n')
if opção == 'sim':
print('Muito bom! Adivinhe o número que estou pensando entre 0 a 5')
print('\n')
n = randint(0,5)
resposta = int(input('Digite o número: '))
if resposta == n:
print('Parabens! Você acertou!')
else:
print('Você errou!')
elif opção == 'não' or opção == 'nao':
print('Ok!')
else:
print('Não entendi.')
print('\n')
print('Fim')
| 21.464286 | 72 | 0.565724 |
from random import randint
print('\n')
print('-' * 5, 'ADIVINHE O NÚMERO', '-' * 5)
print('\n')
opção = input('Quer jogar comigo: ').lower().strip()
print('\n')
if opção == 'sim':
print('Muito bom! Adivinhe o número que estou pensando entre 0 a 5')
print('\n')
n = randint(0,5)
resposta = int(input('Digite o número: '))
if resposta == n:
print('Parabens! Você acertou!')
else:
print('Você errou!')
elif opção == 'não' or opção == 'nao':
print('Ok!')
else:
print('Não entendi.')
print('\n')
print('Fim')
| true | true |
f7372d11ca4f9fc3c017bb0e2c0de8bfc43a8138 | 173 | py | Python | mydjango_env/news/nip_site/polls/models.py | fuandenghuo/100-days-of-python | 50f3263b0984bb6690e565d58604c1882aaf465e | [
"MIT"
] | null | null | null | mydjango_env/news/nip_site/polls/models.py | fuandenghuo/100-days-of-python | 50f3263b0984bb6690e565d58604c1882aaf465e | [
"MIT"
] | null | null | null | mydjango_env/news/nip_site/polls/models.py | fuandenghuo/100-days-of-python | 50f3263b0984bb6690e565d58604c1882aaf465e | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
class Question(models.Model):
    """A poll question with its publication date (Django tutorial model)."""

    # The question text shown to voters, capped at 200 characters.
    question_text = models.CharField(max_length = 200)
    # When the question was published. Bug fix: the source was truncated to
    # the bare attribute ``models.DateTime`` (not a field declaration);
    # completed to a DateTimeField as in the Django polls tutorial.
    pub_date = models.DateTimeField('date published')
class Question(models.Model):
question_text = models.CharField(max_length = 200)
pub_date = models.DateTime | true | true |
f7372e132c093972530f907f1052f0b5f02603b5 | 1,109 | py | Python | test/test_text_to_speech_request.py | Cloudmersive/Cloudmersive.APIClient.Python.VoiceRecognition | ab03179bd3449dc54d33ebbeabe00fcc45960335 | [
"Apache-2.0"
] | null | null | null | test/test_text_to_speech_request.py | Cloudmersive/Cloudmersive.APIClient.Python.VoiceRecognition | ab03179bd3449dc54d33ebbeabe00fcc45960335 | [
"Apache-2.0"
] | null | null | null | test/test_text_to_speech_request.py | Cloudmersive/Cloudmersive.APIClient.Python.VoiceRecognition | ab03179bd3449dc54d33ebbeabe00fcc45960335 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
speechapi
Speech APIs enable you to recognize speech and convert it to text using advanced machine learning, and also to convert text to speech. # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import cloudmersive_voicerecognition_api_client
from cloudmersive_voicerecognition_api_client.models.text_to_speech_request import TextToSpeechRequest # noqa: E501
from cloudmersive_voicerecognition_api_client.rest import ApiException
class TestTextToSpeechRequest(unittest.TestCase):
    """TextToSpeechRequest unit test stubs"""

    def setUp(self):
        # No fixtures are needed for these generated stubs.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testTextToSpeechRequest(self):
        """Test TextToSpeechRequest"""
        # FIXME: construct object with mandatory attributes with example values
        # model = cloudmersive_voicerecognition_api_client.models.text_to_speech_request.TextToSpeechRequest()  # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
| 27.04878 | 152 | 0.751127 |
from __future__ import absolute_import
import unittest
import cloudmersive_voicerecognition_api_client
from cloudmersive_voicerecognition_api_client.models.text_to_speech_request import TextToSpeechRequest
from cloudmersive_voicerecognition_api_client.rest import ApiException
class TestTextToSpeechRequest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testTextToSpeechRequest(self):
s
if __name__ == '__main__':
unittest.main()
| true | true |
f7372fa08a96b90691f0888b248e42aa64165f17 | 26,737 | py | Python | gamestonk_terminal/cryptocurrency/defi/defi_controller.py | JakubPluta/GamestonkTerminal | 3eb200fbea467d808ab401962e1d62022b7a2338 | [
"MIT"
] | null | null | null | gamestonk_terminal/cryptocurrency/defi/defi_controller.py | JakubPluta/GamestonkTerminal | 3eb200fbea467d808ab401962e1d62022b7a2338 | [
"MIT"
] | null | null | null | gamestonk_terminal/cryptocurrency/defi/defi_controller.py | JakubPluta/GamestonkTerminal | 3eb200fbea467d808ab401962e1d62022b7a2338 | [
"MIT"
] | null | null | null | """Defi Controller Module"""
__docformat__ = "numpy"
import argparse
from typing import List
from prompt_toolkit.completion import NestedCompleter
from gamestonk_terminal.rich_config import console
from gamestonk_terminal.cryptocurrency.defi import (
graph_model,
coindix_model,
terraengineer_model,
terraengineer_view,
)
from gamestonk_terminal.parent_classes import BaseController
from gamestonk_terminal import feature_flags as gtff
from gamestonk_terminal.menu import session
from gamestonk_terminal.helper_funcs import (
parse_known_args_and_warn,
check_positive,
EXPORT_ONLY_RAW_DATA_ALLOWED,
EXPORT_BOTH_RAW_DATA_AND_FIGURES,
)
from gamestonk_terminal.cryptocurrency.defi import (
defirate_view,
defipulse_view,
llama_model,
llama_view,
substack_view,
graph_view,
coindix_view,
)
class DefiController(BaseController):
"""Defi Controller class"""
CHOICES_COMMANDS = [
"dpi",
"funding",
"lending",
"borrow",
"ldapps",
"gdapps",
"stvl",
"dtvl",
"newsletter",
"tokens",
"pairs",
"pools",
"swaps",
"stats",
"vaults",
"ayr",
"aterra",
]
PATH = "/crypto/defi/"
    def __init__(self, queue: List[str] = None):
        """Constructor"""
        super().__init__(queue)
        # Build nested tab-completion choices only when an interactive prompt
        # session is available and the feature flag is on.
        if session and gtff.USE_PROMPT_TOOLKIT:
            choices: dict = {c: {} for c in self.controller_choices}
            choices["ldapps"]["-s"] = {c: {} for c in llama_model.LLAMA_FILTERS}
            choices["aterra"]["--asset"] = {c: {} for c in terraengineer_model.ASSETS}
            # NOTE(review): this line replaces the whole "aterra" entry and so
            # discards the "--asset" sub-completions set just above — confirm
            # which behavior is intended.
            choices["aterra"] = {c: {} for c in terraengineer_model.ASSETS}
            choices["tokens"]["-s"] = {c: {} for c in graph_model.TOKENS_FILTERS}
            choices["pairs"]["-s"] = {c: {} for c in graph_model.PAIRS_FILTERS}
            choices["pools"]["-s"] = {c: {} for c in graph_model.POOLS_FILTERS}
            choices["swaps"]["-s"] = {c: {} for c in graph_model.SWAPS_FILTERS}
            choices["vaults"]["-s"] = {c: {} for c in coindix_model.VAULTS_FILTERS}
            choices["vaults"]["-k"] = {c: {} for c in coindix_model.VAULT_KINDS}
            choices["vaults"]["-c"] = {c: {} for c in coindix_model.CHAINS}
            choices["vaults"]["-p"] = {c: {} for c in coindix_model.PROTOCOLS}
            self.completer = NestedCompleter.from_nested_dict(choices)
def print_help(self):
"""Print help"""
help_text = """[cmds]
[info]Overview:[/info]
newsletter Recent DeFi related newsletters [src][Substack][/src]
dpi DeFi protocols listed on DefiPulse [src][Defipulse][/src]
funding Funding rates - current or last 30 days average [src][Defirate][/src]
borrow DeFi borrow rates - current or last 30 days average [src][Defirate][/src]
lending DeFi ending rates - current or last 30 days average [src][Defirate][/src]
vaults Top DeFi Vaults on different blockchains [src][[Coindix][/src]
[src][The Graph][/src] [info]Uniswap[/info]
tokens Tokens trade-able on Uniswap
stats Base statistics about Uniswap
pairs Recently added pairs on Uniswap
pools Pools by volume on Uniswap
swaps Recent swaps done on Uniswap
[src][Defi Llama][/src]
ldapps Lists dApps
gdapps Display top DeFi dApps grouped by chain
stvl Display historical values of the total sum of TVLs from all dApps
dtvl Display historical total value locked (TVL) by dApp
[src][Terra Engineer][/src]
aterra Displays 30-day history of specified asset in terra address
ayr Displays 30-day history of anchor yield reserve[/cmds]
"""
console.print(text=help_text, menu="Cryptocurrency - Decentralized Finance")
    def call_aterra(self, other_args: List[str]):
        """Process aterra command"""
        parser = argparse.ArgumentParser(
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            prog="aterra",
            description="""
                Displays the 30-day history of an asset in a certain terra address.
                [Source: https://terra.engineer/]
            """,
        )
        parser.add_argument(
            "--asset",
            dest="asset",
            type=str,
            help="Terra asset {ust,luna,sdt} Default: ust",
            default=terraengineer_model.ASSETS[0],
            choices=terraengineer_model.ASSETS,
        )
        parser.add_argument(
            "--address",
            dest="address",
            type=str,
            help="Terra address. Valid terra addresses start with 'terra'",
            required=True,
        )
        # Allow the asset to be given positionally as the first token.
        if other_args and not other_args[0][0] == "-":
            other_args.insert(0, "--asset")
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if ns_parser:
            terraengineer_view.display_terra_asset_history(
                export=ns_parser.export,
                address=ns_parser.address,
                asset=ns_parser.asset,
            )
def call_ayr(self, other_args: List[str]):
"""Process ayr command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="ayr",
description="""
Displays the 30-day history of the Anchor Yield Reserve.
An increasing yield reserve indicates that the return on collateral staked by borrowers in Anchor
is greater than the yield paid to depositors. A decreasing yield reserve means yield paid
to depositors is outpacing the staking returns of borrower's collateral.
TLDR: Shows the address that contains UST that is paid on anchor interest earn.
[Source: https://terra.engineer/]
""",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
terraengineer_view.display_anchor_yield_reserve(export=ns_parser.export)
def call_dpi(self, other_args: List[str]):
"""Process dpi command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="dpi",
description="""
Displays DeFi Pulse crypto protocols.
[Source: https://defipulse.com/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=15,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: Rank",
default="Rank",
choices=["Rank", "Name", "Chain", "Category", "TVL", "Change_1D"],
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=True,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
defipulse_view.display_defipulse(
top=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
export=ns_parser.export,
)
def call_gdapps(self, other_args: List[str]):
"""Process gdapps command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="gdapps",
description="""
Display top dApps (in terms of TVL) grouped by chain.
[Source: https://docs.llama.fi/api]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of top dApps to display",
default=40,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
llama_view.display_grouped_defi_protocols(num=ns_parser.limit)
def call_dtvl(self, other_args: List[str]):
"""Process dtvl command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="dtvl",
description="""
Displays historical TVL of different dApps.
[Source: https://docs.llama.fi/api]
""",
)
parser.add_argument(
"-d",
"--dapps",
dest="dapps",
type=str,
required="-h" not in other_args,
help="dApps to search historical TVL. Should be split by , e.g.: anchor,sushiswap,pancakeswap",
)
if other_args and not other_args[0][0] == "-":
other_args.insert(0, "-d")
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
llama_view.display_historical_tvl(dapps=ns_parser.dapps)
def call_ldapps(self, other_args: List[str]):
"""Process ldapps command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="ldapps",
description="""
Display information about listed dApps on DeFi Llama.
[Source: https://docs.llama.fi/api]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: tvl",
default="tvl",
choices=llama_model.LLAMA_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=False,
)
parser.add_argument(
"--desc",
action="store_false",
help="Flag to display description of protocol",
dest="description",
default=False,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
llama_view.display_defi_protocols(
top=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
description=ns_parser.description,
export=ns_parser.export,
)
def call_stvl(self, other_args: List[str]):
"""Process stvl command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="stvl",
description="""
Displays historical values of the total sum of TVLs from all listed dApps.
[Source: https://docs.llama.fi/api]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_BOTH_RAW_DATA_AND_FIGURES
)
if ns_parser:
llama_view.display_defi_tvl(top=ns_parser.limit, export=ns_parser.export)
def call_funding(self, other_args: List[str]):
"""Process funding command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="funding",
description="""
Display Funding rates.
[Source: https://defirate.com/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"--current",
action="store_false",
default=True,
dest="current",
help="Show Current Funding Rates or Last 30 Days Average",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
defirate_view.display_funding_rates(
top=ns_parser.limit, current=ns_parser.current, export=ns_parser.export
)
def call_borrow(self, other_args: List[str]):
"""Process borrow command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="borrow",
description="""
Display DeFi borrow rates.
[Source: https://defirate.com/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"--current",
action="store_false",
default=True,
dest="current",
help="Show Current Borrow Rates or Last 30 Days Average",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
defirate_view.display_borrow_rates(
top=ns_parser.limit, current=ns_parser.current, export=ns_parser.export
)
def call_lending(self, other_args: List[str]):
"""Process lending command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="lending",
description="""
Display DeFi lending rates.
[Source: https://defirate.com/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=15,
)
parser.add_argument(
"--current",
action="store_false",
default=True,
dest="current",
help="Show Current Lending Rates or Last 30 Days Average",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
defirate_view.display_lending_rates(
top=ns_parser.limit, current=ns_parser.current, export=ns_parser.export
)
def call_newsletter(self, other_args: List[str]):
"""Process newsletter command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="newsletter",
description="""
Display DeFi related substack newsletters.
[Source: substack.com]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
substack_view.display_newsletters(
top=ns_parser.limit, export=ns_parser.export
)
def call_tokens(self, other_args: List[str]):
"""Process tokens command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="tokens",
description="""
Display tokens trade-able on Uniswap DEX
[Source: https://thegraph.com/en/]
""",
)
parser.add_argument(
"--skip",
dest="skip",
type=check_positive,
help="Number of records to skip",
default=0,
)
parser.add_argument(
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=20,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: index",
default="index",
choices=graph_model.TOKENS_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=True,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
graph_view.display_uni_tokens(
skip=ns_parser.skip,
limit=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
export=ns_parser.export,
)
def call_stats(self, other_args: List[str]):
"""Process stats command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="stats",
description="""
Display base statistics about Uniswap DEX.
[Source: https://thegraph.com/en/]
""",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
graph_view.display_uni_stats(export=ns_parser.export)
def call_pairs(self, other_args: List[str]):
"""Process pairs command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="pairs",
description="""
Displays Lastly added pairs on Uniswap DEX.
[Source: https://thegraph.com/en/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"-v",
"--vol",
dest="vol",
type=check_positive,
help="Minimum trading volume",
default=100,
)
parser.add_argument(
"-tx",
"--tx",
dest="tx",
type=check_positive,
help="Minimum number of transactions",
default=100,
)
parser.add_argument(
"--days",
dest="days",
type=check_positive,
help="Number of days the pair has been active,",
default=10,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: created",
default="created",
choices=graph_model.PAIRS_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=False,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
graph_view.display_recently_added(
top=ns_parser.limit,
days=ns_parser.days,
min_volume=ns_parser.vol,
min_tx=ns_parser.tx,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
export=ns_parser.export,
)
def call_pools(self, other_args: List[str]):
"""Process pools command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="pairs",
description="""
Display uniswap pools by volume.
[Source: https://thegraph.com/en/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: volumeUSD",
default="volumeUSD",
choices=graph_model.POOLS_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=False,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
graph_view.display_uni_pools(
top=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
export=ns_parser.export,
)
def call_swaps(self, other_args: List[str]):
"""Process swaps command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="pairs",
description="""
Display last swaps done on Uniswap DEX.
[Source: https://thegraph.com/en/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: timestamp",
default="timestamp",
choices=graph_model.SWAPS_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=False,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
graph_view.display_last_uni_swaps(
top=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
export=ns_parser.export,
)
def call_vaults(self, other_args: List[str]):
"""Process swaps command"""
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="vaults",
description="""
Display Top DeFi Vaults.
[Source: https://coindix.com/]
""",
)
parser.add_argument(
"-c",
"--chain",
dest="chain",
type=str,
help="Blockchain name e.g. ethereum, terra",
default=None,
choices=coindix_model.CHAINS,
required=False,
)
parser.add_argument(
"-p",
"--protocol",
dest="protocol",
type=str,
help="DeFi protocol name e.g. aave, uniswap",
default=None,
choices=coindix_model.PROTOCOLS,
required=False,
)
parser.add_argument(
"-k",
"--kind",
dest="kind",
type=str,
help="Kind/type of vault e.g. lp, single, noimploss, stable",
default=None,
choices=coindix_model.VAULT_KINDS,
required=False,
)
parser.add_argument(
"-t",
"--top",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: timestamp",
default="apy",
choices=coindix_model.VAULTS_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=False,
)
parser.add_argument(
"-l",
"--links",
action="store_false",
help="Flag to show vault link",
dest="link",
default=True,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
coindix_view.display_defi_vaults(
chain=ns_parser.chain,
kind=ns_parser.kind,
protocol=ns_parser.protocol,
top=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
link=ns_parser.link,
export=ns_parser.export,
)
| 30.909827 | 113 | 0.533717 | __docformat__ = "numpy"
import argparse
from typing import List
from prompt_toolkit.completion import NestedCompleter
from gamestonk_terminal.rich_config import console
from gamestonk_terminal.cryptocurrency.defi import (
graph_model,
coindix_model,
terraengineer_model,
terraengineer_view,
)
from gamestonk_terminal.parent_classes import BaseController
from gamestonk_terminal import feature_flags as gtff
from gamestonk_terminal.menu import session
from gamestonk_terminal.helper_funcs import (
parse_known_args_and_warn,
check_positive,
EXPORT_ONLY_RAW_DATA_ALLOWED,
EXPORT_BOTH_RAW_DATA_AND_FIGURES,
)
from gamestonk_terminal.cryptocurrency.defi import (
defirate_view,
defipulse_view,
llama_model,
llama_view,
substack_view,
graph_view,
coindix_view,
)
class DefiController(BaseController):
CHOICES_COMMANDS = [
"dpi",
"funding",
"lending",
"borrow",
"ldapps",
"gdapps",
"stvl",
"dtvl",
"newsletter",
"tokens",
"pairs",
"pools",
"swaps",
"stats",
"vaults",
"ayr",
"aterra",
]
PATH = "/crypto/defi/"
def __init__(self, queue: List[str] = None):
super().__init__(queue)
if session and gtff.USE_PROMPT_TOOLKIT:
choices: dict = {c: {} for c in self.controller_choices}
choices["ldapps"]["-s"] = {c: {} for c in llama_model.LLAMA_FILTERS}
choices["aterra"]["--asset"] = {c: {} for c in terraengineer_model.ASSETS}
choices["aterra"] = {c: {} for c in terraengineer_model.ASSETS}
choices["tokens"]["-s"] = {c: {} for c in graph_model.TOKENS_FILTERS}
choices["pairs"]["-s"] = {c: {} for c in graph_model.PAIRS_FILTERS}
choices["pools"]["-s"] = {c: {} for c in graph_model.POOLS_FILTERS}
choices["swaps"]["-s"] = {c: {} for c in graph_model.SWAPS_FILTERS}
choices["vaults"]["-s"] = {c: {} for c in coindix_model.VAULTS_FILTERS}
choices["vaults"]["-k"] = {c: {} for c in coindix_model.VAULT_KINDS}
choices["vaults"]["-c"] = {c: {} for c in coindix_model.CHAINS}
choices["vaults"]["-p"] = {c: {} for c in coindix_model.PROTOCOLS}
self.completer = NestedCompleter.from_nested_dict(choices)
def print_help(self):
help_text = """[cmds]
[info]Overview:[/info]
newsletter Recent DeFi related newsletters [src][Substack][/src]
dpi DeFi protocols listed on DefiPulse [src][Defipulse][/src]
funding Funding rates - current or last 30 days average [src][Defirate][/src]
borrow DeFi borrow rates - current or last 30 days average [src][Defirate][/src]
lending DeFi ending rates - current or last 30 days average [src][Defirate][/src]
vaults Top DeFi Vaults on different blockchains [src][[Coindix][/src]
[src][The Graph][/src] [info]Uniswap[/info]
tokens Tokens trade-able on Uniswap
stats Base statistics about Uniswap
pairs Recently added pairs on Uniswap
pools Pools by volume on Uniswap
swaps Recent swaps done on Uniswap
[src][Defi Llama][/src]
ldapps Lists dApps
gdapps Display top DeFi dApps grouped by chain
stvl Display historical values of the total sum of TVLs from all dApps
dtvl Display historical total value locked (TVL) by dApp
[src][Terra Engineer][/src]
aterra Displays 30-day history of specified asset in terra address
ayr Displays 30-day history of anchor yield reserve[/cmds]
"""
console.print(text=help_text, menu="Cryptocurrency - Decentralized Finance")
def call_aterra(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="aterra",
description="""
Displays the 30-day history of an asset in a certain terra address.
[Source: https://terra.engineer/]
""",
)
parser.add_argument(
"--asset",
dest="asset",
type=str,
help="Terra asset {ust,luna,sdt} Default: ust",
default=terraengineer_model.ASSETS[0],
choices=terraengineer_model.ASSETS,
)
parser.add_argument(
"--address",
dest="address",
type=str,
help="Terra address. Valid terra addresses start with 'terra'",
required=True,
)
if other_args and not other_args[0][0] == "-":
other_args.insert(0, "--asset")
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
terraengineer_view.display_terra_asset_history(
export=ns_parser.export,
address=ns_parser.address,
asset=ns_parser.asset,
)
def call_ayr(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="ayr",
description="""
Displays the 30-day history of the Anchor Yield Reserve.
An increasing yield reserve indicates that the return on collateral staked by borrowers in Anchor
is greater than the yield paid to depositors. A decreasing yield reserve means yield paid
to depositors is outpacing the staking returns of borrower's collateral.
TLDR: Shows the address that contains UST that is paid on anchor interest earn.
[Source: https://terra.engineer/]
""",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
terraengineer_view.display_anchor_yield_reserve(export=ns_parser.export)
def call_dpi(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="dpi",
description="""
Displays DeFi Pulse crypto protocols.
[Source: https://defipulse.com/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=15,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: Rank",
default="Rank",
choices=["Rank", "Name", "Chain", "Category", "TVL", "Change_1D"],
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=True,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
defipulse_view.display_defipulse(
top=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
export=ns_parser.export,
)
def call_gdapps(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="gdapps",
description="""
Display top dApps (in terms of TVL) grouped by chain.
[Source: https://docs.llama.fi/api]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of top dApps to display",
default=40,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
llama_view.display_grouped_defi_protocols(num=ns_parser.limit)
def call_dtvl(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="dtvl",
description="""
Displays historical TVL of different dApps.
[Source: https://docs.llama.fi/api]
""",
)
parser.add_argument(
"-d",
"--dapps",
dest="dapps",
type=str,
required="-h" not in other_args,
help="dApps to search historical TVL. Should be split by , e.g.: anchor,sushiswap,pancakeswap",
)
if other_args and not other_args[0][0] == "-":
other_args.insert(0, "-d")
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
llama_view.display_historical_tvl(dapps=ns_parser.dapps)
def call_ldapps(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="ldapps",
description="""
Display information about listed dApps on DeFi Llama.
[Source: https://docs.llama.fi/api]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: tvl",
default="tvl",
choices=llama_model.LLAMA_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=False,
)
parser.add_argument(
"--desc",
action="store_false",
help="Flag to display description of protocol",
dest="description",
default=False,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
llama_view.display_defi_protocols(
top=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
description=ns_parser.description,
export=ns_parser.export,
)
def call_stvl(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="stvl",
description="""
Displays historical values of the total sum of TVLs from all listed dApps.
[Source: https://docs.llama.fi/api]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_BOTH_RAW_DATA_AND_FIGURES
)
if ns_parser:
llama_view.display_defi_tvl(top=ns_parser.limit, export=ns_parser.export)
def call_funding(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="funding",
description="""
Display Funding rates.
[Source: https://defirate.com/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"--current",
action="store_false",
default=True,
dest="current",
help="Show Current Funding Rates or Last 30 Days Average",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
defirate_view.display_funding_rates(
top=ns_parser.limit, current=ns_parser.current, export=ns_parser.export
)
def call_borrow(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="borrow",
description="""
Display DeFi borrow rates.
[Source: https://defirate.com/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"--current",
action="store_false",
default=True,
dest="current",
help="Show Current Borrow Rates or Last 30 Days Average",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
defirate_view.display_borrow_rates(
top=ns_parser.limit, current=ns_parser.current, export=ns_parser.export
)
def call_lending(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="lending",
description="""
Display DeFi lending rates.
[Source: https://defirate.com/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=15,
)
parser.add_argument(
"--current",
action="store_false",
default=True,
dest="current",
help="Show Current Lending Rates or Last 30 Days Average",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
defirate_view.display_lending_rates(
top=ns_parser.limit, current=ns_parser.current, export=ns_parser.export
)
def call_newsletter(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="newsletter",
description="""
Display DeFi related substack newsletters.
[Source: substack.com]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
substack_view.display_newsletters(
top=ns_parser.limit, export=ns_parser.export
)
def call_tokens(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="tokens",
description="""
Display tokens trade-able on Uniswap DEX
[Source: https://thegraph.com/en/]
""",
)
parser.add_argument(
"--skip",
dest="skip",
type=check_positive,
help="Number of records to skip",
default=0,
)
parser.add_argument(
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=20,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: index",
default="index",
choices=graph_model.TOKENS_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=True,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
graph_view.display_uni_tokens(
skip=ns_parser.skip,
limit=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
export=ns_parser.export,
)
def call_stats(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="stats",
description="""
Display base statistics about Uniswap DEX.
[Source: https://thegraph.com/en/]
""",
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
graph_view.display_uni_stats(export=ns_parser.export)
def call_pairs(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="pairs",
description="""
Displays Lastly added pairs on Uniswap DEX.
[Source: https://thegraph.com/en/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"-v",
"--vol",
dest="vol",
type=check_positive,
help="Minimum trading volume",
default=100,
)
parser.add_argument(
"-tx",
"--tx",
dest="tx",
type=check_positive,
help="Minimum number of transactions",
default=100,
)
parser.add_argument(
"--days",
dest="days",
type=check_positive,
help="Number of days the pair has been active,",
default=10,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: created",
default="created",
choices=graph_model.PAIRS_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=False,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
graph_view.display_recently_added(
top=ns_parser.limit,
days=ns_parser.days,
min_volume=ns_parser.vol,
min_tx=ns_parser.tx,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
export=ns_parser.export,
)
def call_pools(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="pairs",
description="""
Display uniswap pools by volume.
[Source: https://thegraph.com/en/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: volumeUSD",
default="volumeUSD",
choices=graph_model.POOLS_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=False,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
graph_view.display_uni_pools(
top=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
export=ns_parser.export,
)
def call_swaps(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="pairs",
description="""
Display last swaps done on Uniswap DEX.
[Source: https://thegraph.com/en/]
""",
)
parser.add_argument(
"-l",
"--limit",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: timestamp",
default="timestamp",
choices=graph_model.SWAPS_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=False,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
graph_view.display_last_uni_swaps(
top=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
export=ns_parser.export,
)
def call_vaults(self, other_args: List[str]):
parser = argparse.ArgumentParser(
add_help=False,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
prog="vaults",
description="""
Display Top DeFi Vaults.
[Source: https://coindix.com/]
""",
)
parser.add_argument(
"-c",
"--chain",
dest="chain",
type=str,
help="Blockchain name e.g. ethereum, terra",
default=None,
choices=coindix_model.CHAINS,
required=False,
)
parser.add_argument(
"-p",
"--protocol",
dest="protocol",
type=str,
help="DeFi protocol name e.g. aave, uniswap",
default=None,
choices=coindix_model.PROTOCOLS,
required=False,
)
parser.add_argument(
"-k",
"--kind",
dest="kind",
type=str,
help="Kind/type of vault e.g. lp, single, noimploss, stable",
default=None,
choices=coindix_model.VAULT_KINDS,
required=False,
)
parser.add_argument(
"-t",
"--top",
dest="limit",
type=check_positive,
help="Number of records to display",
default=10,
)
parser.add_argument(
"-s",
"--sort",
dest="sortby",
type=str,
help="Sort by given column. Default: timestamp",
default="apy",
choices=coindix_model.VAULTS_FILTERS,
)
parser.add_argument(
"--descend",
action="store_false",
help="Flag to sort in descending order (lowest first)",
dest="descend",
default=False,
)
parser.add_argument(
"-l",
"--links",
action="store_false",
help="Flag to show vault link",
dest="link",
default=True,
)
ns_parser = parse_known_args_and_warn(
parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
)
if ns_parser:
coindix_view.display_defi_vaults(
chain=ns_parser.chain,
kind=ns_parser.kind,
protocol=ns_parser.protocol,
top=ns_parser.limit,
sortby=ns_parser.sortby,
descend=ns_parser.descend,
link=ns_parser.link,
export=ns_parser.export,
)
| true | true |
f73730188d31b22bc56c053057c20a1fbf903e08 | 23,260 | py | Python | sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/aio/operations/_api_operation_policy_operations.py | adewaleo/azure-sdk-for-python | 169457edbea5e3c5557246cfcf8bd635d528bae4 | [
"MIT"
] | 1 | 2020-03-05T18:10:35.000Z | 2020-03-05T18:10:35.000Z | sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/aio/operations/_api_operation_policy_operations.py | adewaleo/azure-sdk-for-python | 169457edbea5e3c5557246cfcf8bd635d528bae4 | [
"MIT"
] | 2 | 2020-03-03T23:11:13.000Z | 2020-03-30T18:50:55.000Z | sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/aio/operations/_api_operation_policy_operations.py | adewaleo/azure-sdk-for-python | 169457edbea5e3c5557246cfcf8bd635d528bae4 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ApiOperationPolicyOperations:
    """ApiOperationPolicyOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.apimanagement.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # Alias so the generated operations below can resolve model classes via the group.
    models = models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def list_by_operation(
        self,
        resource_group_name: str,
        service_name: str,
        api_id: str,
        operation_id: str,
        **kwargs
    ) -> "models.PolicyCollection":
        """Get the list of policy configuration at the API Operation level.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param service_name: The name of the API Management service.
        :type service_name: str
        :param api_id: API revision identifier. Must be unique in the current API Management service
         instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
        :type api_id: str
        :param operation_id: Operation identifier within an API. Must be unique in the current API
         Management service instance.
        :type operation_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PolicyCollection, or the result of cls(response)
        :rtype: ~azure.mgmt.apimanagement.models.PolicyCollection
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.PolicyCollection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        # Caller-supplied error_map entries override the defaults above.
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01-preview"
        accept = "application/json"
        # Construct URL
        url = self.list_by_operation.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
            'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
            'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Construct and send request through the client pipeline.
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            # Map known error status codes to typed exceptions, then raise the ARM error.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('PolicyCollection', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    list_by_operation.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}/policies'}  # type: ignore
    async def get_entity_tag(
        self,
        resource_group_name: str,
        service_name: str,
        api_id: str,
        operation_id: str,
        policy_id: Union[str, "models.PolicyIdName"],
        **kwargs
    ) -> bool:
        """Gets the entity state (Etag) version of the API operation policy specified by its identifier.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param service_name: The name of the API Management service.
        :type service_name: str
        :param api_id: API revision identifier. Must be unique in the current API Management service
         instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
        :type api_id: str
        :param operation_id: Operation identifier within an API. Must be unique in the current API
         Management service instance.
        :type operation_id: str
        :param policy_id: The identifier of the Policy.
        :type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: bool, or the result of cls(response)
        :rtype: bool
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01-preview"
        accept = "application/json"
        # Construct URL
        url = self.get_entity_tag.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
            'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
            'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
            'policyId': self._serialize.url("policy_id", policy_id, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # HEAD request: only headers/status are meaningful, there is no body to deserialize.
        request = self._client.head(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        # Surface the entity ETag so callers can issue conditional (If-Match) updates.
        response_headers = {}
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        if cls:
            return cls(pipeline_response, None, response_headers)
        # Success is reported as a plain boolean derived from the status code.
        return 200 <= response.status_code <= 299
    get_entity_tag.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}/policies/{policyId}'}  # type: ignore
    async def get(
        self,
        resource_group_name: str,
        service_name: str,
        api_id: str,
        operation_id: str,
        policy_id: Union[str, "models.PolicyIdName"],
        format: Optional[Union[str, "models.PolicyExportFormat"]] = "xml",
        **kwargs
    ) -> "models.PolicyContract":
        """Get the policy configuration at the API Operation level.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param service_name: The name of the API Management service.
        :type service_name: str
        :param api_id: API revision identifier. Must be unique in the current API Management service
         instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
        :type api_id: str
        :param operation_id: Operation identifier within an API. Must be unique in the current API
         Management service instance.
        :type operation_id: str
        :param policy_id: The identifier of the Policy.
        :type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
        :param format: Policy Export Format.
        :type format: str or ~azure.mgmt.apimanagement.models.PolicyExportFormat
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PolicyContract, or the result of cls(response)
        :rtype: ~azure.mgmt.apimanagement.models.PolicyContract
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.PolicyContract"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01-preview"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
            'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
            'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
            'policyId': self._serialize.url("policy_id", policy_id, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # 'format' is optional; it is omitted from the query string when None.
        if format is not None:
            query_parameters['format'] = self._serialize.query("format", format, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Construct and send request through the client pipeline.
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        # Surface the entity ETag so callers can issue conditional (If-Match) updates.
        response_headers = {}
        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
        deserialized = self._deserialize('PolicyContract', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, response_headers)
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}/policies/{policyId}'}  # type: ignore
    async def create_or_update(
        self,
        resource_group_name: str,
        service_name: str,
        api_id: str,
        operation_id: str,
        policy_id: Union[str, "models.PolicyIdName"],
        if_match: Optional[str] = None,
        value: Optional[str] = None,
        format: Optional[Union[str, "models.PolicyContentFormat"]] = "xml",
        **kwargs
    ) -> "models.PolicyContract":
        """Creates or updates policy configuration for the API Operation level.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param service_name: The name of the API Management service.
        :type service_name: str
        :param api_id: API revision identifier. Must be unique in the current API Management service
         instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
        :type api_id: str
        :param operation_id: Operation identifier within an API. Must be unique in the current API
         Management service instance.
        :type operation_id: str
        :param policy_id: The identifier of the Policy.
        :type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
        :param if_match: ETag of the Entity. Not required when creating an entity, but required when
         updating an entity.
        :type if_match: str
        :param value: Contents of the Policy as defined by the format.
        :type value: str
        :param format: Format of the policyContent.
        :type format: str or ~azure.mgmt.apimanagement.models.PolicyContentFormat
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: PolicyContract, or the result of cls(response)
        :rtype: ~azure.mgmt.apimanagement.models.PolicyContract
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.PolicyContract"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # Flattened 'value'/'format' arguments are folded back into the request model.
        _parameters = models.PolicyContract(value=value, format=format)
        api_version = "2020-06-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
            'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
            'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
            'policyId': self._serialize.url("policy_id", policy_id, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        # If-Match is optional: per the docstring, it is not required when creating an entity.
        if if_match is not None:
            header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Construct body and send PUT request.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(_parameters, 'PolicyContract')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        # 200 (updated) and 201 (created) both carry an ETag header and a PolicyContract body.
        response_headers = {}
        if response.status_code == 200:
            response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
            deserialized = self._deserialize('PolicyContract', pipeline_response)
        if response.status_code == 201:
            response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
            deserialized = self._deserialize('PolicyContract', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, response_headers)
        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}/policies/{policyId}'}  # type: ignore
    async def delete(
        self,
        resource_group_name: str,
        service_name: str,
        api_id: str,
        operation_id: str,
        policy_id: Union[str, "models.PolicyIdName"],
        if_match: str,
        **kwargs
    ) -> None:
        """Deletes the policy configuration at the Api Operation.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param service_name: The name of the API Management service.
        :type service_name: str
        :param api_id: API revision identifier. Must be unique in the current API Management service
         instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
        :type api_id: str
        :param operation_id: Operation identifier within an API. Must be unique in the current API
         Management service instance.
        :type operation_id: str
        :param policy_id: The identifier of the Policy.
        :type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
        :param if_match: ETag of the Entity. ETag should match the current entity state from the header
         response of the GET request or it should be * for unconditional update.
        :type if_match: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01-preview"
        accept = "application/json"
        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
            'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
            'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
            'policyId': self._serialize.url("policy_id", policy_id, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        # If-Match is mandatory for delete (use '*' for an unconditional delete).
        header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Construct and send request through the client pipeline.
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}/policies/{policyId}'}  # type: ignore
| 52.984055 | 239 | 0.670765 |
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ApiOperationPolicyOperations:
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def list_by_operation(
self,
resource_group_name: str,
service_name: str,
api_id: str,
operation_id: str,
**kwargs
) -> "models.PolicyCollection":
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01-preview"
accept = "application/json"
url = self.list_by_operation.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyCollection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_by_operation.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}/policies'}
async def get_entity_tag(
self,
resource_group_name: str,
service_name: str,
api_id: str,
operation_id: str,
policy_id: Union[str, "models.PolicyIdName"],
**kwargs
) -> bool:
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01-preview"
accept = "application/json"
url = self.get_entity_tag.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
'policyId': self._serialize.url("policy_id", policy_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.head(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
if cls:
return cls(pipeline_response, None, response_headers)
return 200 <= response.status_code <= 299
get_entity_tag.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}/policies/{policyId}'}
async def get(
self,
resource_group_name: str,
service_name: str,
api_id: str,
operation_id: str,
policy_id: Union[str, "models.PolicyIdName"],
format: Optional[Union[str, "models.PolicyExportFormat"]] = "xml",
**kwargs
) -> "models.PolicyContract":
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01-preview"
accept = "application/json"
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
'policyId': self._serialize.url("policy_id", policy_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
if format is not None:
query_parameters['format'] = self._serialize.query("format", format, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('PolicyContract', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}/policies/{policyId}'}
async def create_or_update(
self,
resource_group_name: str,
service_name: str,
api_id: str,
operation_id: str,
policy_id: Union[str, "models.PolicyIdName"],
if_match: Optional[str] = None,
value: Optional[str] = None,
format: Optional[Union[str, "models.PolicyContentFormat"]] = "xml",
**kwargs
) -> "models.PolicyContract":
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
_parameters = models.PolicyContract(value=value, format=format)
api_version = "2020-06-01-preview"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
url = self.create_or_update.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
'policyId': self._serialize.url("policy_id", policy_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {}
body_content = self._serialize.body(_parameters, 'PolicyContract')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
if response.status_code == 200:
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('PolicyContract', pipeline_response)
if response.status_code == 201:
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('PolicyContract', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}/policies/{policyId}'}
async def delete(
    self,
    resource_group_name: str,
    service_name: str,
    api_id: str,
    operation_id: str,
    policy_id: Union[str, "models.PolicyIdName"],
    if_match: str,
    **kwargs
) -> None:
    """Delete the policy configuration at the API operation level.

    :param resource_group_name: name of the resource group.
    :param service_name: name of the API Management service instance
        (1-50 chars, letters/digits/hyphens — see pattern below).
    :param api_id: API identifier within the service instance.
    :param operation_id: operation identifier within the API.
    :param policy_id: identifier of the policy.
    :param if_match: ETag of the entity for optimistic concurrency; sent
        as the ``If-Match`` header.
    :raises HttpResponseError: when the service answers with a status
        other than 200 or 204.
    """
    # optional custom response handler; popped so it is not forwarded to the pipeline
    cls = kwargs.pop('cls', None)
    # map well-known HTTP status codes onto azure-core exception types
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-01-preview"
    accept = "application/json"

    # Construct URL from the metadata template, validating each path segment
    url = self.delete.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
        'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
        'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
        'policyId': self._serialize.url("policy_id", policy_id, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query and header parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
    header_parameters = {}
    header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Send the DELETE request through the async pipeline
    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 204]:
        # raise the mapped azure-core error, else fall through to a generic ARM error
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}/policies/{policyId}'}
| true | true |
f737306d0d56ed2ac43898c329dd2edc390a5c98 | 4,679 | py | Python | src/api/dataflow/batch/settings.py | Chromico/bk-base | be822d9bbee544a958bed4831348185a75604791 | [
"MIT"
] | 84 | 2021-06-30T06:20:23.000Z | 2022-03-22T03:05:49.000Z | src/api/dataflow/batch/settings.py | Chromico/bk-base | be822d9bbee544a958bed4831348185a75604791 | [
"MIT"
] | 7 | 2021-06-30T06:21:16.000Z | 2022-03-29T07:36:13.000Z | src/api/dataflow/batch/settings.py | Chromico/bk-base | be822d9bbee544a958bed4831348185a75604791 | [
"MIT"
] | 40 | 2021-06-30T06:21:26.000Z | 2022-03-29T12:42:26.000Z | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from conf import dataapi_settings
from pizza import settings_default
from dataflow import pizza_settings
# Time zone used for data timestamps (distinct from the Django TIME_ZONE below).
DATA_TIME_ZONE = dataapi_settings.DATA_TIME_ZONE

# Defaults applied when a batch job does not specify its own values.
CODE_VERSION_DEFAULT = "0.1.0"
CLUSTER_GROUP_DEFAULT = "default"
CLUSTER_NAME_DEFAULT = "root.dataflow.batch.default"
DEBUG_QUEUE = "root.dataflow.batch.debug"
DEPLOY_MODE_DEFAULT = "yarn"
DEPLOY_CONFIG_DEFAULT = "{}"
JOBSERVER_CONFIG_DEFAULT = "default"
COMPONENT_TYPE_DEFAULT = "spark"
CODECHECK_BLACKLIST_GROUP = "py_spark_1"

# HDFS directory layout used by the batch API.
HDFS_DATA_HOME = "/kafka/data"
DIR_API_OUT = "/api/outdir"
HDFS_FLOW_HOME = "/api/flow"
DIR_HDFS_LOCATION = "/api/tmp"
QUERY_NAMESPACE = "/api/query"

# Reserved field names (and their types) expected on offline result tables.
offline_reserved_fields = {"dtEventTime": "string", "dtEventTimeStamp": "timestamp"}

# Retry budgets for the various job/shipper types.
databus_max_retry_times = 3
tspider_job_max_retry_times = 5
hdfs_max_retry_times = 3

HDFS_BIN_DIR = dataapi_settings.HDFS_BIN_DIR
HDFS_BACKUP_DIR = dataapi_settings.HDFS_BACKUP_DIR
# retention (in days) for backed-up HDFS data
HDFS_EXPIRE_TIME = 7

# other api
PRODUCT_MODE_URL = {
    "deployment.collector.api": "%s/api/c/compapi/data/databus" % dataapi_settings.BK_PAAS_HOST,
    "trt.api": "%s/api/c/compapi/data/trt" % dataapi_settings.BK_PAAS_HOST,
    "tool.api": "%s/api/c/compapi/data/tool" % dataapi_settings.BK_PAAS_HOST,
}
URL_DICT = PRODUCT_MODE_URL

timezone = dataapi_settings.TIME_ZONE

# PASS (PaaS platform credentials)
PASS_APP_CODE = dataapi_settings.APP_ID
PASS_APP_TOKEN = dataapi_settings.APP_TOKEN
PASS_BK_HOST = dataapi_settings.BK_PAAS_HOST
PASS_APP_NAME = "蓝鲸"
PASS_ACCOUNT = "root"

BKDATA_DIR = settings_default.BASE_DIR

# TDW (Tencent Data Warehouse) job submission settings, re-exported from pizza/dataapi settings.
TDW_PREFIX = pizza_settings.TDW_PREFIX
TDW_SPARK_TYPE = pizza_settings.TDW_SPARK_TYPE
TDW_DESCRIPTION = pizza_settings.TDW_DESCRIPTION
TDW_BASE_FILE_NAME = pizza_settings.TDW_BASE_FILE_NAME
TDW_BASE_FILE_MD5 = pizza_settings.TDW_BASE_FILE_MD5
TDW_JAR_FILE_NAME = pizza_settings.TDW_JAR_FILE_NAME
TDW_CLASS_NAME = pizza_settings.TDW_CLASS_NAME
TDW_JAR_CLASS_NAME = pizza_settings.TDW_JAR_CLASS_NAME
TDW_DRIVER_MEMORY = pizza_settings.TDW_DRIVER_MEMORY
TDW_EXECUTOR_CORES = pizza_settings.TDW_EXECUTOR_CORES
TDW_NUM_EXECUTORS = pizza_settings.TDW_NUM_EXECUTORS
TDW_EXECUTOR_MEMORY = pizza_settings.TDW_EXECUTOR_MEMORY
TDW_UC_JAR_FILE_PATH = dataapi_settings.TDW_UC_JAR_FILE_PATH
TDW_BATCH_LZ_ADMIN_USERS = dataapi_settings.TDW_BATCH_LZ_ADMIN_USERS
TDW_LOG_SERVER = dataapi_settings.TDW_LOG_SERVER
TDW_TASK_NODE_LABEL = dataapi_settings.TDW_TASK_NODE_LABEL

# Resource keys that callers are allowed to set in a deploy config.
DEPLOY_CONFIG_RESOURCE = [
    "executor.memory",
    "executor.cores",
    "executor.memoryOverhead",
]

# Mapping from result-table node type to its processing category.
RESULT_TABLE_TYPE_MAP = {
    "stream": "stream",
    "clean": "stream",
    "batch_model": "stream",
    "stream_model": "stream",
    "transform": "stream",
    "storage": "stream",
    "batch": "batch",
}

# SparkSQL / Livy cluster selection settings.
SPARK_SQL_NODE_LABEL = dataapi_settings.SPARK_SQL_NODE_LABEL
SPARK_SQL_DEFAULT_CLUSTER_GROUP = pizza_settings.SPARK_SQL_DEFAULT_CLUSTER_GROUP
SPARK_SQL_CLUSTER_NAME_PREFIX = pizza_settings.SPARK_SQL_CLUSTER_NAME_PREFIX
SPARK_SQL_NODE_LABEL_DEFAULT_LIST = dataapi_settings.SPARK_SQL_NODE_LABEL_DEFAULT_LIST
SPARK_SQL_UDF_ENGINE_CONF = pizza_settings.SPARK_SQL_UDF_ENGINE_CONF
SPARK_SQL_TEXT_MAX_LENGTH = pizza_settings.SPARK_SQL_TEXT_MAX_LENGTH
LIVY_NODE_LABEL = dataapi_settings.LIVY_NODE_LABEL
LIVY_DEFAULT_CLUSTER_GROUP = dataapi_settings.LIVY_DEFAULT_CLUSTER_GROUP
LIVY_CLUSTER_NAME_PREFIX = dataapi_settings.LIVY_CLUSTER_NAME_PREFIX
| 40.686957 | 111 | 0.808934 |
from conf import dataapi_settings
from pizza import settings_default
from dataflow import pizza_settings
DATA_TIME_ZONE = dataapi_settings.DATA_TIME_ZONE
CODE_VERSION_DEFAULT = "0.1.0"
CLUSTER_GROUP_DEFAULT = "default"
CLUSTER_NAME_DEFAULT = "root.dataflow.batch.default"
DEBUG_QUEUE = "root.dataflow.batch.debug"
DEPLOY_MODE_DEFAULT = "yarn"
DEPLOY_CONFIG_DEFAULT = "{}"
JOBSERVER_CONFIG_DEFAULT = "default"
COMPONENT_TYPE_DEFAULT = "spark"
CODECHECK_BLACKLIST_GROUP = "py_spark_1"
HDFS_DATA_HOME = "/kafka/data"
DIR_API_OUT = "/api/outdir"
HDFS_FLOW_HOME = "/api/flow"
DIR_HDFS_LOCATION = "/api/tmp"
QUERY_NAMESPACE = "/api/query"
offline_reserved_fields = {"dtEventTime": "string", "dtEventTimeStamp": "timestamp"}
databus_max_retry_times = 3
tspider_job_max_retry_times = 5
hdfs_max_retry_times = 3
HDFS_BIN_DIR = dataapi_settings.HDFS_BIN_DIR
HDFS_BACKUP_DIR = dataapi_settings.HDFS_BACKUP_DIR
HDFS_EXPIRE_TIME = 7
PRODUCT_MODE_URL = {
"deployment.collector.api": "%s/api/c/compapi/data/databus" % dataapi_settings.BK_PAAS_HOST,
"trt.api": "%s/api/c/compapi/data/trt" % dataapi_settings.BK_PAAS_HOST,
"tool.api": "%s/api/c/compapi/data/tool" % dataapi_settings.BK_PAAS_HOST,
}
URL_DICT = PRODUCT_MODE_URL
timezone = dataapi_settings.TIME_ZONE
PASS_APP_CODE = dataapi_settings.APP_ID
PASS_APP_TOKEN = dataapi_settings.APP_TOKEN
PASS_BK_HOST = dataapi_settings.BK_PAAS_HOST
PASS_APP_NAME = "蓝鲸"
PASS_ACCOUNT = "root"
BKDATA_DIR = settings_default.BASE_DIR
TDW_PREFIX = pizza_settings.TDW_PREFIX
TDW_SPARK_TYPE = pizza_settings.TDW_SPARK_TYPE
TDW_DESCRIPTION = pizza_settings.TDW_DESCRIPTION
TDW_BASE_FILE_NAME = pizza_settings.TDW_BASE_FILE_NAME
TDW_BASE_FILE_MD5 = pizza_settings.TDW_BASE_FILE_MD5
TDW_JAR_FILE_NAME = pizza_settings.TDW_JAR_FILE_NAME
TDW_CLASS_NAME = pizza_settings.TDW_CLASS_NAME
TDW_JAR_CLASS_NAME = pizza_settings.TDW_JAR_CLASS_NAME
TDW_DRIVER_MEMORY = pizza_settings.TDW_DRIVER_MEMORY
TDW_EXECUTOR_CORES = pizza_settings.TDW_EXECUTOR_CORES
TDW_NUM_EXECUTORS = pizza_settings.TDW_NUM_EXECUTORS
TDW_EXECUTOR_MEMORY = pizza_settings.TDW_EXECUTOR_MEMORY
TDW_UC_JAR_FILE_PATH = dataapi_settings.TDW_UC_JAR_FILE_PATH
TDW_BATCH_LZ_ADMIN_USERS = dataapi_settings.TDW_BATCH_LZ_ADMIN_USERS
TDW_LOG_SERVER = dataapi_settings.TDW_LOG_SERVER
TDW_TASK_NODE_LABEL = dataapi_settings.TDW_TASK_NODE_LABEL
DEPLOY_CONFIG_RESOURCE = [
"executor.memory",
"executor.cores",
"executor.memoryOverhead",
]
RESULT_TABLE_TYPE_MAP = {
"stream": "stream",
"clean": "stream",
"batch_model": "stream",
"stream_model": "stream",
"transform": "stream",
"storage": "stream",
"batch": "batch",
}
SPARK_SQL_NODE_LABEL = dataapi_settings.SPARK_SQL_NODE_LABEL
SPARK_SQL_DEFAULT_CLUSTER_GROUP = pizza_settings.SPARK_SQL_DEFAULT_CLUSTER_GROUP
SPARK_SQL_CLUSTER_NAME_PREFIX = pizza_settings.SPARK_SQL_CLUSTER_NAME_PREFIX
SPARK_SQL_NODE_LABEL_DEFAULT_LIST = dataapi_settings.SPARK_SQL_NODE_LABEL_DEFAULT_LIST
SPARK_SQL_UDF_ENGINE_CONF = pizza_settings.SPARK_SQL_UDF_ENGINE_CONF
SPARK_SQL_TEXT_MAX_LENGTH = pizza_settings.SPARK_SQL_TEXT_MAX_LENGTH
LIVY_NODE_LABEL = dataapi_settings.LIVY_NODE_LABEL
LIVY_DEFAULT_CLUSTER_GROUP = dataapi_settings.LIVY_DEFAULT_CLUSTER_GROUP
LIVY_CLUSTER_NAME_PREFIX = dataapi_settings.LIVY_CLUSTER_NAME_PREFIX
| true | true |
f737318c35ed85e43b12dfe585edb3c6820f83b5 | 4,696 | py | Python | sensor/GY271.py | Smart4L/embedded-server | cf10ebb1ccc3002b45c20315ec521eaf1a9e7f10 | [
"MIT"
] | null | null | null | sensor/GY271.py | Smart4L/embedded-server | cf10ebb1ccc3002b45c20315ec521eaf1a9e7f10 | [
"MIT"
] | null | null | null | sensor/GY271.py | Smart4L/embedded-server | cf10ebb1ccc3002b45c20315ec521eaf1a9e7f10 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Enabe I2C
# sudo apt-get update
# sudo apt-get install -y git python3-smbus i2c-tools
# pip3 install smbus2
# i2cdetect -y 1 # expect 0x0d
import smbus2
from time import sleep
import math
# I2C register map and configuration bit values for the magnetometer on the
# GY-271 board (device answers on address 0x0d -- see i2cdetect note above).
# NOTE(review): register layout matches a QMC5883L-style part -- confirm
# against the datasheet of the actual chip fitted on the board.
C_REG_A = 0x09  # Address of Configuration register A
C_REG_B = 0x0a  # Address of configuration register B
SR_period_REG = 0x0b  # Address of SET/RESET period register

MODE_STBY = 0x00  # standby mode
MODE_CONT = 0x01  # continuous mode
ODR_10Hz = 0x00  # output data rate 10Hz
ODR_50Hz = 0x01  # output data rate 50Hz
ODR_100Hz = 0x10  # output data rate 100Hz
ODR_200Hz = 0x11  # output data rate 200Hz
SENS_2G = 0x00  # magnetic field sensitivity 2G
SENS_8G = 0x01  # magnetic field sensitivity 8G
OSR_512 = 0x00  # oversampling rate 512
OSR_256 = 0x01  # oversampling rate 256
OSR_128 = 0x10  # oversampling rate 128
OSR_64 = 0x11  # oversampling rate 64

X_axis_H = 0x00  # Address of X-axis MSB data register
Z_axis_H = 0x02  # Address of Z-axis MSB data register
Y_axis_H = 0x04  # Address of Y-axis MSB data register
TEMP_REG = 0x07  # Address of Temperature MSB data register

# declination angle (radians) of location where measurement going to be done
CURR_DECL = -0.00669  # determine by yourself
pi = 3.14159265359  # define pi value
class GY271():
    """High-level sensor wrapper: pairs an id string with a Compass driver."""

    def __init__(self, id=None, address=0x0d) -> None:
        self.id_sensor = id
        self.sensor = Compass(address=address)

    def measure(self) -> dict:
        """Take one reading and return temperature, raw axes and bearing."""
        heading = self.sensor.get_bearing()
        temperature = self.sensor.read_temp()
        axes = self.sensor.get_angles()
        return {"value": {"temperature": temperature, "angles": axes, "bearing": heading}}

    def stop(self) -> None:
        """No resources to release for this sensor."""
        pass

    def __str__(self):
        return f'Sensor:{self.id_sensor}'

    def __repr__(self):
        return str(self)
class Compass():
    """Low-level I2C driver for the magnetometer on the GY-271 board."""

    def __init__(self, address=0x0d, mode=MODE_CONT, odr=ODR_10Hz, sens=SENS_2G, osr=OSR_512, d=CURR_DECL):
        self.bus = smbus2.SMBus(1)  # I2C bus 1 -- assumes Raspberry-Pi-style wiring; confirm for other boards
        self.device_address = address  # magnetometer device i2c address
        self._declination = d  # declination correction in radians (added to atan2 result)
        self.magnetometer_init(mode, odr, sens, osr)
        sleep(2)  # let the chip settle after configuration

    def soft_reset(self):
        """Soft-reset the chip by setting the reset bit in config register B."""
        self.bus.write_byte_data(self.device_address, C_REG_B, 0x80)

    def __set_mode(self, mode, odr, sens, osr):
        # pack mode / output-data-rate / sensitivity / oversampling into one config byte
        value = mode | odr | sens | osr
        return value

    def magnetometer_init(self, mode, odr, sens, osr):
        """Reset the chip, then write the requested configuration registers."""
        self.soft_reset()
        self._mode = self.__set_mode(mode, odr, sens, osr)
        # Write to Configuration Register B: normal 0x00, soft_reset: 0x80
        self.bus.write_byte_data(self.device_address, C_REG_B, 0x00)
        # SET/RESET period set to 0x01 (recommendation from datasheet)
        self.bus.write_byte_data(self.device_address, SR_period_REG, 0x01)
        # write to Configuration Register A: mode
        self.bus.write_byte_data(self.device_address, C_REG_A, self._mode)

    def __read_raw_data(self, reg_address):
        """Read a signed 16-bit value (LSB at reg_address, MSB at reg_address+1)."""
        low_byte = self.bus.read_byte_data(self.device_address, reg_address)
        high_byte = self.bus.read_byte_data(self.device_address, reg_address + 1)
        # concatenate high_byte and low_byte into two_byte data
        value = (high_byte << 8) | low_byte
        if value > 32767:
            value = value - 65536  # two's-complement sign correction
        return value

    def get_angles(self):
        """Return the raw X/Y/Z magnetometer readings as a dict."""
        x = self.__read_raw_data(X_axis_H)
        z = self.__read_raw_data(Z_axis_H)
        y = self.__read_raw_data(Y_axis_H)
        return {"X":x, "Y":y, "Z": z}

    def get_bearing(self):
        """Compute the compass bearing in whole degrees (0-359) from X/Y."""
        x = self.__read_raw_data(X_axis_H)
        z = self.__read_raw_data(Z_axis_H)  # NOTE(review): z is read but unused in the bearing formula
        y = self.__read_raw_data(Y_axis_H)
        heading = math.atan2(y, x) + self._declination
        # due to declination check for >360 degree
        if(heading > 2.0 * pi):
            heading = heading - 2.0 * pi
        # check for sign
        if(heading < 0.0):
            heading = heading + 2.0 * pi
        # convert into angle
        heading_angle = int(heading * 180.0 / pi)
        return heading_angle

    def read_temp(self):
        """Read the on-chip temperature sensor and scale it to degrees."""
        low_byte = self.bus.read_byte_data(self.device_address, TEMP_REG)
        high_byte = self.bus.read_byte_data(self.device_address, TEMP_REG + 1)
        # concatenate higher and lower value
        value = (high_byte << 8) | low_byte  # signed int (-32766 : 32767)
        value = value & 0x3fff  # to get only positive numbers (first bit, sign bit)
        value = value / 520.0  # around: 125 (temp range) times 100 LSB/*C ~ 520
        return value

    def set_declination(self, value):
        """Override the magnetic declination correction (radians)."""
        self._declination = value
if __name__ == '__main__':
    # Demo loop: print the compass bearing once per second until interrupted.
    compass_sensor = GY271("GY271")
    try:
        while True:
            reading = compass_sensor.measure()
            print(reading["value"]["bearing"])
            sleep(1)
    except Exception as exc:
        print(exc)
| 29.910828 | 105 | 0.689097 |
s2
from time import sleep
import math
C_REG_A = 0x09
C_REG_B = 0x0a
SR_period_REG = 0x0b
MODE_STBY = 0x00
MODE_CONT = 0x01
ODR_10Hz = 0x00
ODR_50Hz = 0x01
ODR_100Hz = 0x10
ODR_200Hz = 0x11
SENS_2G = 0x00
SENS_8G = 0x01
OSR_512 = 0x00
OSR_256 = 0x01
OSR_128 = 0x10
OSR_64 = 0x11
X_axis_H = 0x00
Z_axis_H = 0x02
Y_axis_H = 0x04
TEMP_REG = 0x07
CURR_DECL = -0.00669
pi = 3.14159265359
class GY271():
def __init__(self, id=None, address=0x0d) -> None:
self.id_sensor = id
self.sensor = Compass(address=address)
def measure(self) -> dict:
bearing = self.sensor.get_bearing()
temp = self.sensor.read_temp()
angles = self.sensor.get_angles()
return { "value": {"temperature": temp, "angles" : angles, "bearing" : bearing} }
def stop(self) -> None:
pass
def __str__(self):
return f'Sensor:{self.id_sensor}'
def __repr__(self):
return str(self)
class Compass():
def __init__(self, address=0x0d, mode=MODE_CONT, odr=ODR_10Hz, sens=SENS_2G, osr=OSR_512, d=CURR_DECL):
self.bus = smbus2.SMBus(1)
self.device_address = address
self._declination = d
self.magnetometer_init(mode, odr, sens, osr)
sleep(2)
def soft_reset(self):
self.bus.write_byte_data(self.device_address, C_REG_B, 0x80)
def __set_mode(self, mode, odr, sens, osr):
value = mode | odr | sens | osr
return value
def magnetometer_init(self, mode, odr, sens, osr):
self.soft_reset()
self._mode = self.__set_mode(mode, odr, sens, osr)
self.bus.write_byte_data(self.device_address, C_REG_B, 0x00)
self.bus.write_byte_data(self.device_address, SR_period_REG, 0x01)
self.bus.write_byte_data(self.device_address, C_REG_A, self._mode)
def __read_raw_data(self, reg_address):
low_byte = self.bus.read_byte_data(self.device_address, reg_address)
high_byte = self.bus.read_byte_data(self.device_address, reg_address + 1)
value = (high_byte << 8) | low_byte
if value > 32767:
value = value - 65536
return value
def get_angles(self):
x = self.__read_raw_data(X_axis_H)
z = self.__read_raw_data(Z_axis_H)
y = self.__read_raw_data(Y_axis_H)
return {"X":x, "Y":y, "Z": z}
def get_bearing(self):
x = self.__read_raw_data(X_axis_H)
z = self.__read_raw_data(Z_axis_H)
y = self.__read_raw_data(Y_axis_H)
heading = math.atan2(y, x) + self._declination
if(heading > 2.0 * pi):
heading = heading - 2.0 * pi
if(heading < 0.0):
heading = heading + 2.0 * pi
heading_angle = int(heading * 180.0 / pi)
return heading_angle
def read_temp(self):
low_byte = self.bus.read_byte_data(self.device_address, TEMP_REG)
high_byte = self.bus.read_byte_data(self.device_address, TEMP_REG + 1)
value = (high_byte << 8) | low_byte
value = value & 0x3fff
value = value / 520.0
return value
def set_declination(self, value):
self._declination = value
if __name__ == '__main__':
sensor = GY271("GY271")
try:
while True:
print(sensor.measure()["value"]["bearing"])
sleep(1)
except Exception as e:
print(e)
| true | true |
f737318c397372cc579cd36ff1c6f24edbe46b51 | 44,261 | py | Python | lib/network.py | BTCPrime/electrum | 62f266921bce1622976fb73f277e5254a7e914e8 | [
"MIT"
] | null | null | null | lib/network.py | BTCPrime/electrum | 62f266921bce1622976fb73f277e5254a7e914e8 | [
"MIT"
] | 2 | 2018-06-06T08:28:58.000Z | 2018-08-17T02:08:43.000Z | lib/network.py | BTCPrime/electrum | 62f266921bce1622976fb73f277e5254a7e914e8 | [
"MIT"
] | 5 | 2018-05-24T21:57:12.000Z | 2018-11-19T10:38:19.000Z | # Electrum - Lightweight Bitcoin Client
# Copyright (c) 2011-2016 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import time
import queue
import os
import stat
import errno
import random
import re
import select
from collections import defaultdict
import threading
import socket
import json
import socks
from . import util
from . import bitcoin
from .bitcoin import *
from . import constants
from .interface import Connection, Interface
from . import blockchain
from .version import ELECTRUM_VERSION, PROTOCOL_VERSION
from .i18n import _
# Seconds to wait before retrying node discovery / the main server connection.
NODES_RETRY_INTERVAL = 60
SERVER_RETRY_INTERVAL = 10
def parse_servers(result):
    """Parse a 'server.peers.subscribe' result list into a hostmap dict.

    Each item looks like [ip, host, [feature, ...]] where features are
    strings such as 's50002' / 't50001' (protocol+port), 'v1.2' (version)
    and 'p100' (pruning level).  Hosts advertising no usable protocol are
    dropped.

    :param result: list of peer entries as returned by the server.
    :return: dict mapping host -> {'s'/'t': port, 'pruning': str, 'version': str}
    """
    # NOTE: the redundant function-level `from .version import PROTOCOL_VERSION`
    # was removed -- it was unused here and already imported at module level.
    servers = {}
    for item in result:
        host = item[1]
        out = {}
        version = None
        pruning_level = '-'
        if len(item) > 2:
            for v in item[2]:
                if re.match("[st]\d*", v):
                    protocol, port = v[0], v[1:]
                    if port == '':
                        # feature advertised without a port: use the default one
                        port = constants.net.DEFAULT_PORTS[protocol]
                    out[protocol] = port
                elif re.match("v(.?)+", v):
                    version = v[1:]
                elif re.match("p\d*", v):
                    pruning_level = v[1:]
                    if pruning_level == '':
                        pruning_level = '0'
        if out:
            out['pruning'] = pruning_level
            out['version'] = version
            servers[host] = out
    return servers
def filter_version(servers):
    """Keep only servers whose advertised version is at least PROTOCOL_VERSION."""
    def is_recent(version):
        try:
            return util.normalize_version(version) >= util.normalize_version(PROTOCOL_VERSION)
        except Exception:
            # unparsable / missing version strings are treated as too old
            return False
    return {host: info for host, info in servers.items() if is_recent(info.get('version'))}
def filter_protocol(hostmap, protocol = 's'):
    '''Filters the hostmap for those implementing protocol.
    The result is a list in serialized form.'''
    return [serialize_server(host, portmap[protocol], protocol)
            for host, portmap in hostmap.items()
            if portmap.get(protocol)]
def pick_random_server(hostmap = None, protocol = 's', exclude_set = set()):
    """Pick one random serialized server speaking protocol, or None if none remain."""
    if hostmap is None:
        hostmap = constants.net.DEFAULT_SERVERS
    candidates = set(filter_protocol(hostmap, protocol)) - exclude_set
    if not candidates:
        return None
    return random.choice(list(candidates))
from .simple_config import SimpleConfig
proxy_modes = ['socks4', 'socks5', 'http']


def serialize_proxy(p):
    """Flatten a proxy dict into 'mode:host:port:user:password' string form."""
    if not isinstance(p, dict):
        return None
    fields = [p.get('mode'), p.get('host'), p.get('port'),
              p.get('user', ''), p.get('password', '')]
    return ':'.join(fields)


def deserialize_proxy(s):
    """Parse 'mode:host:port:user:password' (trailing fields optional) into a dict.

    Returns None for non-strings and for the literal string 'none'.
    Missing mode defaults to socks5; missing port defaults to 8080 for
    http proxies and 1080 otherwise.
    """
    if not isinstance(s, str):
        return None
    if s.lower() == 'none':
        return None
    proxy = { "mode":"socks5", "host":"localhost" }
    parts = s.split(':')
    idx = 0
    if proxy_modes.count(parts[idx]) == 1:
        proxy["mode"] = parts[idx]
        idx += 1
    if idx < len(parts):
        proxy["host"] = parts[idx]
        idx += 1
    if idx < len(parts):
        proxy["port"] = parts[idx]
        idx += 1
    else:
        proxy["port"] = "8080" if proxy["mode"] == "http" else "1080"
    if idx < len(parts):
        proxy["user"] = parts[idx]
        idx += 1
    if idx < len(parts):
        proxy["password"] = parts[idx]
    return proxy
def deserialize_server(server_str):
    """Split 'host:port:protocol' into a (host, port, protocol) tuple.

    Raises ValueError when the protocol is not 's'/'t' or the port is
    not an integer string.
    """
    host, port, protocol = str(server_str).rsplit(':', 2)
    if protocol not in 'st':
        raise ValueError('invalid network protocol: {}'.format(protocol))
    int(port)  # validation only: raises ValueError if not numeric
    return host, port, protocol
def serialize_server(host, port, protocol):
    """Join host, port and protocol into the canonical 'host:port:protocol' string."""
    return ':'.join((host, port, protocol))
class Network(util.DaemonThread):
"""The Network class manages a set of connections to remote electrum
servers, each connected socket is handled by an Interface() object.
Connections are initiated by a Connection() thread which stops once
the connection succeeds or fails.
Our external API:
- Member functions get_header(), get_interfaces(), get_local_height(),
get_parameters(), get_server_height(), get_status_value(),
is_connected(), set_parameters(), stop()
"""
def __init__(self, config=None):
    """Create the network manager and start connecting.

    :param config: a SimpleConfig instance, or a plain dict of config
        overrides (wrapped into a SimpleConfig in that case).
    """
    if config is None:
        config = {}  # Do not use mutables as default values!
    util.DaemonThread.__init__(self)
    self.config = SimpleConfig(config) if isinstance(config, dict) else config
    # oneserver mode disables all backup connections
    self.num_server = 10 if not self.config.get('oneserver') else 0
    self.blockchains = blockchain.read_blockchains(self.config)
    self.print_error("blockchains", self.blockchains.keys())
    self.blockchain_index = config.get('blockchain_index', 0)
    if self.blockchain_index not in self.blockchains.keys():
        self.blockchain_index = 0
    # Server for addresses and transactions
    self.default_server = self.config.get('server', None)
    # Sanitize default server
    if self.default_server:
        try:
            deserialize_server(self.default_server)
        except:
            self.print_error('Warning: failed to parse server-string; falling back to random.')
            self.default_server = None
    if not self.default_server:
        self.default_server = pick_random_server()
    self.lock = threading.Lock()
    self.pending_sends = []
    self.message_id = 0  # monotonically increasing JSON-RPC request id
    self.debug = False
    self.irc_servers = {} # returned by interface (list from irc)
    self.recent_servers = self.read_recent_servers()
    self.banner = ''
    self.donation_address = ''
    self.relay_fee = None
    # callbacks passed with subscriptions
    self.subscriptions = defaultdict(list)
    self.sub_cache = {}
    # callbacks set by the GUI
    self.callbacks = defaultdict(list)
    # per-server certificate store; owner-only permissions
    dir_path = os.path.join( self.config.path, 'certs')
    if not os.path.exists(dir_path):
        os.mkdir(dir_path)
        os.chmod(dir_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
    # subscriptions and requests
    self.subscribed_addresses = set()
    self.h2addr = {}
    # Requests from client we've not seen a response to
    self.unanswered_requests = {}
    # retry times
    self.server_retry_time = time.time()
    self.nodes_retry_time = time.time()
    # kick off the network. interface is the main server we are currently
    # communicating with. interfaces is the set of servers we are connecting
    # to or have an ongoing connection with
    self.interface = None
    self.interfaces = {}
    self.auto_connect = self.config.get('auto_connect', True)
    self.connecting = set()
    self.requested_chunks = set()
    self.socket_queue = queue.Queue()
    self.start_network(deserialize_server(self.default_server)[2],
                       deserialize_proxy(self.config.get('proxy')))
def register_callback(self, callback, events):
    """Subscribe callback to each event name in events."""
    with self.lock:
        for event_name in events:
            self.callbacks[event_name].append(callback)
def unregister_callback(self, callback):
    """Remove callback from every event it was registered for."""
    with self.lock:
        for registered in self.callbacks.values():
            if callback in registered:
                registered.remove(callback)
def trigger_callback(self, event, *args):
    """Invoke every callback registered for event.

    Callbacks run outside the lock, so they may (un)register callbacks
    themselves without deadlocking.
    """
    with self.lock:
        callbacks = self.callbacks[event][:]  # snapshot under the lock
    # was a list comprehension used purely for side effects; a plain loop
    # expresses the intent and avoids building a throwaway list
    for callback in callbacks:
        callback(event, *args)
def read_recent_servers(self):
    """Load the list of recently used servers from the config directory.

    Returns an empty list when there is no config path or the file is
    missing or corrupt.
    """
    if not self.config.path:
        return []
    path = os.path.join(self.config.path, "recent_servers")
    try:
        with open(path, "r", encoding='utf-8') as f:
            data = f.read()
        return json.loads(data)
    except (OSError, ValueError):
        # was a bare `except:`; narrowed to missing/unreadable file and
        # malformed JSON (JSONDecodeError/UnicodeDecodeError are ValueErrors)
        return []
def save_recent_servers(self):
    """Best-effort persist of the recent-servers list; I/O failures are ignored."""
    if not self.config.path:
        return
    path = os.path.join(self.config.path, "recent_servers")
    s = json.dumps(self.recent_servers, indent=4, sort_keys=True)
    try:
        with open(path, "w", encoding='utf-8') as f:
            f.write(s)
    except OSError:
        # was a bare `except: pass`; only filesystem errors are expected
        # here and losing this cache is harmless
        pass
def get_server_height(self):
    """Height of the main interface's chain tip, or 0 when not connected."""
    if not self.interface:
        return 0
    return self.interface.tip
def server_is_lagging(self):
    """True when the main server's tip is more than one block behind ours."""
    server_height = self.get_server_height()
    if not server_height:
        self.print_error('no height for main interface')
        return True
    local_height = self.get_local_height()
    lagging = (local_height - server_height) > 1
    if lagging:
        self.print_error('%s is lagging (%d vs %d)' % (self.default_server, server_height, local_height))
    return lagging
def set_status(self, status):
    """Record the connection status and broadcast a 'status' notification."""
    self.connection_status = status
    self.notify('status')
def is_connected(self):
    """True when a main server interface is currently established."""
    return self.interface is not None
def is_connecting(self):
    """True while a connection attempt to the main server is in flight."""
    return self.connection_status == 'connecting'
def is_up_to_date(self):
    """True when every outstanding client request has been answered."""
    return not self.unanswered_requests
def queue_request(self, method, params, interface=None):
    """Queue a JSON-RPC request on interface (default: main) and return its id.

    If you want to queue a request on any interface it must go through
    this function so message ids are properly tracked.
    """
    if interface is None:
        interface = self.interface
    message_id = self.message_id
    self.message_id += 1
    if self.debug:
        self.print_error(interface.host, "-->", method, params, message_id)
    interface.queue_request(method, params, message_id)
    return message_id
def send_subscriptions(self):
    """Replay our state to a (re)connected main interface.

    Re-sends every unanswered request, refreshes server metadata
    (banner, donation address, peers, fee estimates, relay fee) and
    re-subscribes all watched script hashes.
    """
    self.print_error('sending subscriptions to', self.interface.server, len(self.unanswered_requests), len(self.subscribed_addresses))
    self.sub_cache.clear()
    # Resend unanswered requests
    requests = self.unanswered_requests.values()
    self.unanswered_requests = {}
    if self.interface.ping_required():
        # identify ourselves to the server before replaying anything else
        params = [ELECTRUM_VERSION, PROTOCOL_VERSION]
        self.queue_request('server.version', params, self.interface)
    for request in requests:
        # re-queue under a fresh message id and remember it as unanswered again
        message_id = self.queue_request(request[0], request[1])
        self.unanswered_requests[message_id] = request
    self.queue_request('server.banner', [])
    self.queue_request('server.donation_address', [])
    self.queue_request('server.peers.subscribe', [])
    self.request_fee_estimates()
    self.queue_request('blockchain.relayfee', [])
    for h in list(self.subscribed_addresses):
        self.queue_request('blockchain.scripthash.subscribe', [h])
def request_fee_estimates(self):
    """Ask the server for the mempool fee histogram and per-target fee estimates."""
    from .simple_config import FEE_ETA_TARGETS
    self.config.requested_fee_estimates()
    self.queue_request('mempool.get_fee_histogram', [])
    for i in FEE_ETA_TARGETS:
        self.queue_request('blockchain.estimatefee', [i])
def get_status_value(self, key):
    """Return the current value associated with a notification key.

    :param key: one of 'status', 'banner', 'fee', 'fee_histogram',
        'updated', 'servers', 'interfaces'.
    :raises KeyError: for an unknown key (previously this fell through
        to a confusing UnboundLocalError on `value`).
    """
    if key == 'status':
        value = self.connection_status
    elif key == 'banner':
        value = self.banner
    elif key == 'fee':
        value = self.config.fee_estimates
    elif key == 'fee_histogram':
        value = self.config.mempool_fees
    elif key == 'updated':
        value = (self.get_local_height(), self.get_server_height())
    elif key == 'servers':
        value = self.get_servers()
    elif key == 'interfaces':
        value = self.get_interfaces()
    else:
        raise KeyError('unknown status key: {}'.format(key))
    return value
def notify(self, key):
    """Fire the callback for key, attaching its current value when needed."""
    if key in ('status', 'updated'):
        self.trigger_callback(key)
    else:
        self.trigger_callback(key, self.get_status_value(key))
def get_parameters(self):
    """Return (host, port, protocol, proxy, auto_connect) for the main server."""
    host, port, protocol = deserialize_server(self.default_server)
    return host, port, protocol, self.proxy, self.auto_connect
def get_donation_address(self):
    """Server-announced donation address, or None when disconnected."""
    if self.is_connected():
        return self.donation_address
def get_interfaces(self):
    """Return the server names whose interfaces are in connected state."""
    return list(self.interfaces)
def get_servers(self):
    """Return the best-known hostmap of servers.

    Starts from the hardcoded defaults, preferring the IRC-announced
    list when available, otherwise folding in recently used servers.
    """
    # copy first: the original code updated constants.net.DEFAULT_SERVERS
    # in place, permanently polluting the shared default hostmap
    out = dict(constants.net.DEFAULT_SERVERS)
    if self.irc_servers:
        out.update(filter_version(self.irc_servers.copy()))
    else:
        for s in self.recent_servers:
            try:
                host, port, protocol = deserialize_server(s)
            except ValueError:
                # was a bare `except:`; deserialize_server raises ValueError
                continue
            if host not in out:
                out[host] = { protocol:port }
    return out
def start_interface(self, server):
    """Begin connecting to server unless already connected/connecting to it."""
    if server not in self.interfaces and server not in self.connecting:
        if server == self.default_server:
            self.print_error("connecting to %s as new interface" % server)
            self.set_status('connecting')
        self.connecting.add(server)
        # the Connection thread reports back via self.socket_queue, so the
        # unused local binding `c = ...` was dropped
        Connection(server, self.socket_queue, self.config.path)
def start_random_interface(self):
    """Connect to one random eligible server we aren't already using."""
    excluded = self.disconnected_servers.union(set(self.interfaces))
    candidate = pick_random_server(self.get_servers(), self.protocol, excluded)
    if candidate:
        self.start_interface(candidate)
def start_interfaces(self):
    """Open the main-server connection plus random backup connections."""
    self.start_interface(self.default_server)
    for _ in range(self.num_server - 1):
        self.start_random_interface()
def set_proxy(self, proxy):
    """Install (or remove) a SOCKS/HTTP proxy by monkey-patching the socket module."""
    self.proxy = proxy
    # Store these somewhere so we can un-monkey-patch
    if not hasattr(socket, "_socketobject"):
        socket._socketobject = socket.socket
        socket._getaddrinfo = socket.getaddrinfo
    if proxy:
        self.print_error('setting proxy', proxy)
        # socks proxy modes are 1-based in the socks module
        proxy_mode = proxy_modes.index(proxy["mode"]) + 1
        socks.setdefaultproxy(proxy_mode,
                              proxy["host"],
                              int(proxy["port"]),
                              # socks.py seems to want either None or a non-empty string
                              username=(proxy.get("user", "") or None),
                              password=(proxy.get("password", "") or None))
        socket.socket = socks.socksocket
        # prevent dns leaks, see http://stackoverflow.com/questions/13184205/dns-over-proxy
        socket.getaddrinfo = lambda *args: [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
    else:
        # restore the pristine socket implementations
        socket.socket = socket._socketobject
        socket.getaddrinfo = socket._getaddrinfo
def start_network(self, protocol, proxy):
    """Initialise proxy/protocol state and spin up the interfaces."""
    # must be called only from a clean (stopped) state
    assert not self.interface and not self.interfaces
    assert not self.connecting and self.socket_queue.empty()
    self.print_error('starting network')
    self.disconnected_servers = set()
    self.protocol = protocol
    self.set_proxy(proxy)
    self.start_interfaces()
def stop_network(self):
    """Close every interface and discard pending connection attempts."""
    self.print_error("stopping network")
    for interface in list(self.interfaces.values()):
        self.close_interface(interface)
    if self.interface:
        self.close_interface(self.interface)
    # close_interface is expected to clear these; assert the clean state
    assert self.interface is None
    assert not self.interfaces
    self.connecting = set()
    # Get a new queue - no old pending connections thanks!
    self.socket_queue = queue.Queue()
    def set_parameters(self, host, port, protocol, proxy, auto_connect):
        '''Apply new user-chosen network settings, restarting or switching
        interfaces only as far as the change actually requires.'''
        proxy_str = serialize_proxy(proxy)
        server = serialize_server(host, port, protocol)
        # sanitize parameters
        try:
            deserialize_server(serialize_server(host, port, protocol))
            if proxy:
                proxy_modes.index(proxy["mode"]) + 1
                int(proxy['port'])
        except:
            # Malformed input: silently ignore the request.
            return
        self.config.set_key('auto_connect', auto_connect, False)
        self.config.set_key("proxy", proxy_str, False)
        self.config.set_key("server", server, True)
        # abort if changes were not allowed by config
        if self.config.get('server') != server or self.config.get('proxy') != proxy_str:
            return
        self.auto_connect = auto_connect
        if self.proxy != proxy or self.protocol != protocol:
            # Restart the network defaulting to the given server
            self.stop_network()
            self.default_server = server
            self.start_network(protocol, proxy)
        elif self.default_server != server:
            # Same transport settings, different server: just switch.
            self.switch_to_interface(server)
        else:
            self.switch_lagging_interface()
            self.notify('updated')
def switch_to_random_interface(self):
'''Switch to a random connected server other than the current one'''
servers = self.get_interfaces() # Those in connected state
if self.default_server in servers:
servers.remove(self.default_server)
if servers:
self.switch_to_interface(random.choice(servers))
def switch_lagging_interface(self):
'''If auto_connect and lagging, switch interface'''
if self.server_is_lagging() and self.auto_connect:
# switch to one that has the correct header (not height)
header = self.blockchain().read_header(self.get_local_height())
filtered = list(map(lambda x:x[0], filter(lambda x: x[1].tip_header==header, self.interfaces.items())))
if filtered:
choice = random.choice(filtered)
self.switch_to_interface(choice)
    def switch_to_interface(self, server):
        '''Switch to server as our interface. If no connection exists nor
        being opened, start a thread to connect. The actual switch will
        happen on receipt of the connection notification. Do nothing
        if server already is our interface.'''
        self.default_server = server
        if server not in self.interfaces:
            # Not connected yet: drop the main interface and start connecting;
            # new_interface() will call us again once the socket is up.
            self.interface = None
            self.start_interface(server)
            return
        i = self.interfaces[server]
        if self.interface != i:
            self.print_error("switching to", server)
            # stop any current interface in order to terminate subscriptions
            # fixme: we don't want to close headers sub
            #self.close_interface(self.interface)
            self.interface = i
            self.send_subscriptions()
            self.set_status('connected')
            self.notify('updated')
def close_interface(self, interface):
if interface:
if interface.server in self.interfaces:
self.interfaces.pop(interface.server)
if interface.server == self.default_server:
self.interface = None
interface.close()
def add_recent_server(self, server):
# list is ordered
if server in self.recent_servers:
self.recent_servers.remove(server)
self.recent_servers.insert(0, server)
self.recent_servers = self.recent_servers[0:20]
self.save_recent_servers()
    def process_response(self, interface, response, callbacks):
        '''Dispatch one server *response*: update our own state for methods we
        track, then forward the response to every callback in *callbacks*.'''
        if self.debug:
            self.print_error("<--", response)
        error = response.get('error')
        result = response.get('result')
        method = response.get('method')
        params = response.get('params')
        # We handle some responses; return the rest to the client.
        if method == 'server.version':
            interface.server_version = result
        elif method == 'blockchain.headers.subscribe':
            if error is None:
                self.on_notify_header(interface, result)
        elif method == 'server.peers.subscribe':
            if error is None:
                self.irc_servers = parse_servers(result)
                self.notify('servers')
        elif method == 'server.banner':
            if error is None:
                self.banner = result
                self.notify('banner')
        elif method == 'server.donation_address':
            if error is None:
                self.donation_address = result
        elif method == 'mempool.get_fee_histogram':
            if error is None:
                self.print_error('fee_histogram', result)
                self.config.mempool_fees = result
                self.notify('fee_histogram')
        elif method == 'blockchain.estimatefee':
            # Skip non-positive estimates; convert the coin-denominated
            # estimate to integer base units via COIN.
            if error is None and result > 0:
                i = params[0]
                fee = int(result*COIN)
                self.config.update_fee_estimates(i, fee)
                self.print_error("fee_estimates[%d]" % i, fee)
                self.notify('fee')
        elif method == 'blockchain.relayfee':
            if error is None:
                self.relay_fee = int(result * COIN) if result is not None else None
                self.print_error("relayfee", self.relay_fee)
        elif method == 'blockchain.block.get_chunk':
            self.on_get_chunk(interface, response)
        elif method == 'blockchain.block.get_header':
            self.on_get_header(interface, response)
        for callback in callbacks:
            callback(response)
def get_index(self, method, params):
""" hashable index for subscriptions and cache"""
return str(method) + (':' + str(params[0]) if params else '')
    def process_responses(self, interface):
        '''Drain *interface*'s pending messages, reshape each into canonical
        response form, and hand it to process_response().'''
        responses = interface.get_responses()
        for request, response in responses:
            if request:
                # Response matched to a request we sent.
                method, params, message_id = request
                k = self.get_index(method, params)
                # client requests go through self.send() with a
                # callback, are only sent to the current interface,
                # and are placed in the unanswered_requests dictionary
                client_req = self.unanswered_requests.pop(message_id, None)
                if client_req:
                    assert interface == self.interface
                    callbacks = [client_req[2]]
                else:
                    # fixme: will only work for subscriptions
                    k = self.get_index(method, params)
                    callbacks = self.subscriptions.get(k, [])
                # Copy the request method and params to the response
                response['method'] = method
                response['params'] = params
                # Only once we've received a response to an addr subscription
                # add it to the list; avoids double-sends on reconnection
                if method == 'blockchain.scripthash.subscribe':
                    self.subscribed_addresses.add(params[0])
            else:
                # Server-initiated notification (no matching request).
                if not response:  # Closed remotely / misbehaving
                    self.connection_down(interface.server)
                    break
                # Rewrite response shape to match subscription request response
                method = response.get('method')
                params = response.get('params')
                k = self.get_index(method, params)
                if method == 'blockchain.headers.subscribe':
                    response['result'] = params[0]
                    response['params'] = []
                elif method == 'blockchain.scripthash.subscribe':
                    response['params'] = [params[0]]  # addr
                    response['result'] = params[1]
                callbacks = self.subscriptions.get(k, [])
            # update cache if it's a subscription
            if method.endswith('.subscribe'):
                self.sub_cache[k] = response
            # Response is now in canonical form
            self.process_response(interface, response, callbacks)
def addr_to_scripthash(self, addr):
h = bitcoin.address_to_scripthash(addr)
if h not in self.h2addr:
self.h2addr[h] = addr
return h
def overload_cb(self, callback):
def cb2(x):
x2 = x.copy()
p = x2.pop('params')
addr = self.h2addr[p[0]]
x2['params'] = [addr]
callback(x2)
return cb2
def subscribe_to_addresses(self, addresses, callback):
hashes = [self.addr_to_scripthash(addr) for addr in addresses]
msgs = [('blockchain.scripthash.subscribe', [x]) for x in hashes]
self.send(msgs, self.overload_cb(callback))
def request_address_history(self, address, callback):
h = self.addr_to_scripthash(address)
self.send([('blockchain.scripthash.get_history', [h])], self.overload_cb(callback))
def send(self, messages, callback):
'''Messages is a list of (method, params) tuples'''
messages = list(messages)
with self.lock:
self.pending_sends.append((messages, callback))
    def process_pending_sends(self):
        '''Flush batches queued by send(): register subscription callbacks
        (answering from the cache when possible) and queue everything else
        on the main interface.'''
        # Requests needs connectivity. If we don't have an interface,
        # we cannot process them.
        if not self.interface:
            return
        with self.lock:
            sends = self.pending_sends
            self.pending_sends = []
        for messages, callback in sends:
            for method, params in messages:
                r = None
                if method.endswith('.subscribe'):
                    k = self.get_index(method, params)
                    # add callback to list
                    l = self.subscriptions.get(k, [])
                    if callback not in l:
                        l.append(callback)
                    self.subscriptions[k] = l
                    # check cached response for subscriptions
                    r = self.sub_cache.get(k)
                if r is not None:
                    # Cached subscription state: answer without a round trip.
                    self.print_error("cache hit", k)
                    callback(r)
                else:
                    message_id = self.queue_request(method, params)
                    self.unanswered_requests[message_id] = method, params, callback
def unsubscribe(self, callback):
'''Unsubscribe a callback to free object references to enable GC.'''
# Note: we can't unsubscribe from the server, so if we receive
# subsequent notifications process_response() will emit a harmless
# "received unexpected notification" warning
with self.lock:
for v in self.subscriptions.values():
if callback in v:
v.remove(callback)
    def connection_down(self, server):
        '''A connection to server either went down, or was never made.
        We distinguish by whether it is in self.interfaces.'''
        self.disconnected_servers.add(server)
        if server == self.default_server:
            self.set_status('disconnected')
        if server in self.interfaces:
            self.close_interface(self.interfaces[server])
            self.notify('interfaces')
        for b in self.blockchains.values():
            # Free any chain this server was catching up, so another
            # interface may take over.
            if b.catch_up == server:
                b.catch_up = None
    def new_interface(self, server, socket):
        '''Wrap a freshly connected *socket* in an Interface, subscribe to
        header notifications, and switch to it if it is the default server.'''
        # todo: get tip first, then decide which checkpoint to use.
        self.add_recent_server(server)
        interface = Interface(server, socket)
        # Per-interface header-sync state (see on_get_header()).
        interface.blockchain = None
        interface.tip_header = None
        interface.tip = 0
        interface.mode = 'default'
        interface.request = None
        self.interfaces[server] = interface
        self.queue_request('blockchain.headers.subscribe', [], interface)
        if server == self.default_server:
            self.switch_to_interface(server)
        #self.notify('interfaces')
    def maintain_sockets(self):
        '''Periodic socket upkeep: absorb finished connection attempts,
        ping or expire interfaces, top up the server count and repair the
        main interface.'''
        # Responses to connection attempts?
        while not self.socket_queue.empty():
            server, socket = self.socket_queue.get()
            if server in self.connecting:
                self.connecting.remove(server)
            if socket:
                self.new_interface(server, socket)
            else:
                self.connection_down(server)
        # Send pings and shut down stale interfaces
        # must use copy of values
        for interface in list(self.interfaces.values()):
            if interface.has_timed_out():
                self.connection_down(interface.server)
            elif interface.ping_required():
                params = [ELECTRUM_VERSION, PROTOCOL_VERSION]
                self.queue_request('server.version', params, interface)
        now = time.time()
        # nodes
        if len(self.interfaces) + len(self.connecting) < self.num_server:
            self.start_random_interface()
        if now - self.nodes_retry_time > NODES_RETRY_INTERVAL:
            # Periodically forgive previously failed servers.
            self.print_error('network: retrying connections')
            self.disconnected_servers = set([])
            self.nodes_retry_time = now
        # main interface
        if not self.is_connected():
            if self.auto_connect:
                if not self.is_connecting():
                    self.switch_to_random_interface()
            else:
                if self.default_server in self.disconnected_servers:
                    if now - self.server_retry_time > SERVER_RETRY_INTERVAL:
                        self.disconnected_servers.remove(self.default_server)
                        self.server_retry_time = now
                else:
                    self.switch_to_interface(self.default_server)
        else:
            if self.config.is_fee_estimates_update_required():
                self.request_fee_estimates()
def request_chunk(self, interface, index):
if index in self.requested_chunks:
return
interface.print_error("requesting chunk %d" % index)
self.requested_chunks.add(index)
self.queue_request('blockchain.block.get_chunk', [index], interface)
    def on_get_chunk(self, interface, response):
        '''Handle receiving a chunk of block headers: connect it to the
        interface's chain and keep requesting until caught up to its tip.'''
        error = response.get('error')
        result = response.get('result')
        params = response.get('params')
        blockchain = interface.blockchain
        if result is None or params is None or error is not None:
            interface.print_error(error or 'bad response')
            return
        index = params[0]
        # Ignore unsolicited chunks
        if index not in self.requested_chunks:
            interface.print_error("received chunk %d (unsolicited)" % index)
            return
        else:
            interface.print_error("received chunk %d" % index)
        self.requested_chunks.remove(index)
        connect = blockchain.connect_chunk(index, result)
        if not connect:
            # Chunk did not verify against our chain: drop the server.
            self.connection_down(interface.server)
            return
        # If not finished, get the next chunk
        if index >= len(blockchain.checkpoints) and blockchain.height() < interface.tip:
            self.request_chunk(interface, index+1)
        else:
            interface.mode = 'default'
            interface.print_error('catch up done', blockchain.height())
            blockchain.catch_up = None
            self.notify('updated')
    def request_header(self, interface, height):
        '''Ask *interface* for the single header at *height* and record it as
        the interface's outstanding request.'''
        #interface.print_error("requesting header %d" % height)
        self.queue_request('blockchain.block.get_header', [height], interface)
        interface.request = height
        # NOTE(review): maintain_requests() reads interface.request_time, not
        # req_time -- presumably request_time is maintained by the Interface
        # class itself; confirm, otherwise this timestamp is dead state.
        interface.req_time = time.time()
    def on_get_header(self, interface, response):
        '''Handle receiving a single block header.

        Drives the per-interface header-sync state machine (interface.mode):
          - 'backward': step back from a header that didn't fit until we hit
            one a known chain can place;
          - 'binary':   bisect between a known-good and known-bad height to
            locate the fork point, then join/reorg/fork chains accordingly;
          - 'catch_up': extend the chosen chain one header at a time.
        Finally requests the next header (or a whole chunk when far behind),
        or returns the interface to 'default' mode when done.
        '''
        header = response.get('result')
        if not header:
            interface.print_error(response)
            self.connection_down(interface.server)
            return
        height = header.get('block_height')
        if interface.request != height:
            # Response doesn't match our outstanding request: misbehaving server.
            interface.print_error("unsolicited header",interface.request, height)
            self.connection_down(interface.server)
            return
        chain = blockchain.check_header(header)
        if interface.mode == 'backward':
            can_connect = blockchain.can_connect(header)
            if can_connect and can_connect.catch_up is None:
                # Found a chain that can take this header: start catching up.
                interface.mode = 'catch_up'
                interface.blockchain = can_connect
                interface.blockchain.save_header(header)
                next_height = height + 1
                interface.blockchain.catch_up = interface.server
            elif chain:
                # Header already known: bisect for the exact fork point.
                interface.print_error("binary search")
                interface.mode = 'binary'
                interface.blockchain = chain
                interface.good = height
                next_height = (interface.bad + interface.good) // 2
                assert next_height >= self.max_checkpoint(), (interface.bad, interface.good)
            else:
                if height == 0:
                    self.connection_down(interface.server)
                    next_height = None
                else:
                    # Still unknown: jump back exponentially (doubling delta).
                    interface.bad = height
                    interface.bad_header = header
                    delta = interface.tip - height
                    next_height = max(self.max_checkpoint(), interface.tip - 2 * delta)
        elif interface.mode == 'binary':
            if chain:
                interface.good = height
                interface.blockchain = chain
            else:
                interface.bad = height
                interface.bad_header = header
            if interface.bad != interface.good + 1:
                # Not yet adjacent: keep bisecting.
                next_height = (interface.bad + interface.good) // 2
                assert next_height >= self.max_checkpoint()
            elif not interface.blockchain.can_connect(interface.bad_header, check_height=False):
                self.connection_down(interface.server)
                next_height = None
            else:
                # Fork point located at interface.bad.
                branch = self.blockchains.get(interface.bad)
                if branch is not None:
                    if branch.check_header(interface.bad_header):
                        interface.print_error('joining chain', interface.bad)
                        next_height = None
                    elif branch.parent().check_header(header):
                        interface.print_error('reorg', interface.bad, interface.tip)
                        interface.blockchain = branch.parent()
                        next_height = None
                    else:
                        # Existing fork conflicts: truncate and rebuild it.
                        interface.print_error('checkpoint conflicts with existing fork', branch.path())
                        branch.write('', 0)
                        branch.save_header(interface.bad_header)
                        interface.mode = 'catch_up'
                        interface.blockchain = branch
                        next_height = interface.bad + 1
                        interface.blockchain.catch_up = interface.server
                else:
                    bh = interface.blockchain.height()
                    next_height = None
                    if bh > interface.good:
                        # Our chain is longer: fork off at the bad header.
                        if not interface.blockchain.check_header(interface.bad_header):
                            b = interface.blockchain.fork(interface.bad_header)
                            self.blockchains[interface.bad] = b
                            interface.blockchain = b
                            interface.print_error("new chain", b.checkpoint)
                            interface.mode = 'catch_up'
                            next_height = interface.bad + 1
                            interface.blockchain.catch_up = interface.server
                    else:
                        assert bh == interface.good
                        if interface.blockchain.catch_up is None and bh < interface.tip:
                            interface.print_error("catching up from %d"% (bh + 1))
                            interface.mode = 'catch_up'
                            next_height = bh + 1
                            interface.blockchain.catch_up = interface.server
                self.notify('updated')
        elif interface.mode == 'catch_up':
            can_connect = interface.blockchain.can_connect(header)
            if can_connect:
                interface.blockchain.save_header(header)
                next_height = height + 1 if height < interface.tip else None
            else:
                # go back
                interface.print_error("cannot connect", height)
                interface.mode = 'backward'
                interface.bad = height
                interface.bad_header = header
                next_height = height - 1
            if next_height is None:
                # exit catch_up state
                interface.print_error('catch up done', interface.blockchain.height())
                interface.blockchain.catch_up = None
                self.switch_lagging_interface()
                self.notify('updated')
        else:
            raise Exception(interface.mode)
        # If not finished, get the next header
        if next_height:
            if interface.mode == 'catch_up' and interface.tip > next_height + 50:
                # Far behind: fetch a whole chunk instead of single headers.
                self.request_chunk(interface, next_height // constants.CHUNK_LENGTH)
            else:
                self.request_header(interface, next_height)
        else:
            interface.mode = 'default'
            interface.request = None
            self.notify('updated')
        # refresh network dialog
        self.notify('interfaces')
    def maintain_requests(self):
        '''Drop interfaces whose outstanding header request stalled (>20s).'''
        for interface in list(self.interfaces.values()):
            # NOTE(review): request_header() sets interface.req_time, while
            # this reads interface.request_time -- presumably the Interface
            # class maintains request_time when requests are queued; confirm,
            # otherwise this timeout can never fire.
            if interface.request and time.time() - interface.request_time > 20:
                interface.print_error("blockchain request timed out")
                self.connection_down(interface.server)
                continue
def wait_on_sockets(self):
# Python docs say Windows doesn't like empty selects.
# Sleep to prevent busy looping
if not self.interfaces:
time.sleep(0.1)
return
rin = [i for i in self.interfaces.values()]
win = [i for i in self.interfaces.values() if i.num_requests()]
try:
rout, wout, xout = select.select(rin, win, [], 0.1)
except socket.error as e:
# TODO: py3, get code from e
code = None
if code == errno.EINTR:
return
raise
assert not xout
for interface in wout:
interface.send_requests()
for interface in rout:
self.process_responses(interface)
    def init_headers_file(self):
        '''Ensure the headers file exists and is pre-allocated large enough
        to hold all checkpointed chunks, then refresh the chain's cached size.'''
        b = self.blockchains[0]
        filename = b.path()
        length = constants.HEADER_BYTES * len(constants.net.CHECKPOINTS) * constants.CHUNK_LENGTH
        if not os.path.exists(filename) or os.path.getsize(filename) < length:
            with open(filename, 'wb') as f:
                if length>0:
                    # Extend the file to the target size by writing the
                    # final byte (sparse where the OS supports it).
                    f.seek(length-1)
                    f.write(b'\x00')
        with b.lock:
            b.update_size()
    def run(self):
        '''Network thread main loop: keep sockets healthy, pump I/O, jobs and
        queued sends until stopped, then shut the network down.'''
        self.init_headers_file()
        while self.is_running():
            self.maintain_sockets()
            self.wait_on_sockets()
            self.maintain_requests()
            self.run_jobs()    # Synchronizer and Verifier
            self.process_pending_sends()
        self.stop_network()
        self.on_stop()
    def on_notify_header(self, interface, header):
        '''React to a new tip header announced by *interface*: adopt/extend a
        known chain, or start backward/catch_up sync when it does not fit.'''
        height = header.get('block_height')
        if not height:
            return
        if height < self.max_checkpoint():
            # Server tip below our checkpoints: useless/misbehaving.
            self.connection_down(interface.server)
            return
        interface.tip_header = header
        interface.tip = height
        if interface.mode != 'default':
            # A sync state machine is already running for this interface.
            return
        b = blockchain.check_header(header)
        if b:
            # Header already part of a known chain.
            interface.blockchain = b
            self.switch_lagging_interface()
            self.notify('updated')
            self.notify('interfaces')
            return
        b = blockchain.can_connect(header)
        if b:
            # Header extends a known chain tip.
            interface.blockchain = b
            b.save_header(header)
            self.switch_lagging_interface()
            self.notify('updated')
            self.notify('interfaces')
            return
        tip = max([x.height() for x in self.blockchains.values()])
        if tip >=0:
            # Unknown header: walk backward to find where it forked.
            interface.mode = 'backward'
            interface.bad = height
            interface.bad_header = header
            self.request_header(interface, min(tip +1, height - 1))
        else:
            # No local headers yet: catch up from height 0.
            chain = self.blockchains[0]
            if chain.catch_up is None:
                # NOTE(review): catch_up is set to the interface object here,
                # but elsewhere (connection_down, on_get_header) it holds a
                # server string -- confirm this mixed usage is intentional.
                chain.catch_up = interface
                interface.mode = 'catch_up'
                interface.blockchain = chain
                self.print_error("switching to catchup mode", tip, self.blockchains)
                self.request_header(interface, 0)
            else:
                self.print_error("chain already catching up with", chain.catch_up.server)
def blockchain(self):
if self.interface and self.interface.blockchain is not None:
self.blockchain_index = self.interface.blockchain.checkpoint
return self.blockchains[self.blockchain_index]
def get_blockchains(self):
out = {}
for k, b in self.blockchains.items():
r = list(filter(lambda i: i.blockchain==b, list(self.interfaces.values())))
if r:
out[k] = r
return out
    def follow_chain(self, index):
        '''Make chain *index* the one we follow: persist the choice, switch to
        an interface already on that chain, then re-apply network parameters
        derived from the (possibly new) main server.'''
        blockchain = self.blockchains.get(index)
        if blockchain:
            self.blockchain_index = index
            self.config.set_key('blockchain_index', index)
            for i in self.interfaces.values():
                if i.blockchain == blockchain:
                    self.switch_to_interface(i.server)
                    break
        else:
            raise Exception('blockchain not found', index)
        if self.interface:
            # Keep proxy/auto_connect, but take host/port/protocol from the
            # server we just switched to.
            server = self.interface.server
            host, port, protocol, proxy, auto_connect = self.get_parameters()
            host, port, protocol = server.split(':')
            self.set_parameters(host, port, protocol, proxy, auto_connect)
def get_local_height(self):
return self.blockchain().height()
def synchronous_get(self, request, timeout=30):
q = queue.Queue()
self.send([request], q.put)
try:
r = q.get(True, timeout)
except queue.Empty:
raise util.TimeoutException(_('Server did not answer'))
if r.get('error'):
raise Exception(r.get('error'))
return r.get('result')
def broadcast(self, tx, timeout=30):
tx_hash = tx.txid()
try:
out = self.synchronous_get(('blockchain.transaction.broadcast', [str(tx)]), timeout)
except BaseException as e:
return False, "error: " + str(e)
if out != tx_hash:
return False, "error: " + out
return True, out
def export_checkpoints(self, path):
# run manually from the console to generate checkpoints
cp = self.blockchain().get_checkpoints()
with open(path, 'w', encoding='utf-8') as f:
f.write(json.dumps(cp, indent=4))
def max_checkpoint(self):
return max(0, len(constants.net.CHECKPOINTS) * constants.CHUNK_LENGTH - 1)
| 40.237273 | 138 | 0.588938 |
import time
import queue
import os
import stat
import errno
import random
import re
import select
from collections import defaultdict
import threading
import socket
import json
import socks
from . import util
from . import bitcoin
from .bitcoin import *
from . import constants
from .interface import Connection, Interface
from . import blockchain
from .version import ELECTRUM_VERSION, PROTOCOL_VERSION
from .i18n import _
# Seconds between sweeps that forgive previously failed peer nodes,
# and between retries of the user's preferred server (see maintain_sockets).
NODES_RETRY_INTERVAL = 60
SERVER_RETRY_INTERVAL = 10
def parse_servers(result):
    """Parse a 'server.peers.subscribe' reply into {hostname: portmap}.

    Each item looks like [ip, hostname, ['s50002', 't50001', 'v1.2', 'p100']]:
    's'/'t' flags carry a port (empty means the default for that transport),
    'v' the protocol version, 'p' the pruning level. Hosts advertising no
    usable port are skipped.

    Fixes: regex patterns are now raw strings (avoids invalid-escape
    warnings for ``\\d``), and the unused local import of PROTOCOL_VERSION
    was removed.
    """
    servers = {}
    for item in result:
        host = item[1]
        out = {}
        version = None
        pruning_level = '-'
        if len(item) > 2:
            for v in item[2]:
                if re.match(r"[st]\d*", v):
                    protocol, port = v[0], v[1:]
                    if port == '': port = constants.net.DEFAULT_PORTS[protocol]
                    out[protocol] = port
                elif re.match(r"v(.?)+", v):
                    version = v[1:]
                elif re.match(r"p\d*", v):
                    pruning_level = v[1:]
                    if pruning_level == '': pruning_level = '0'
        if out:
            out['pruning'] = pruning_level
            out['version'] = version
            servers[host] = out
    return servers
def filter_version(servers):
    """Keep only servers whose advertised version is >= our PROTOCOL_VERSION
    (entries with missing/unparseable versions are dropped)."""
    def is_recent(version):
        try:
            return util.normalize_version(version) >= util.normalize_version(PROTOCOL_VERSION)
        except Exception:
            return False
    return {host: info for host, info in servers.items()
            if is_recent(info.get('version'))}
def filter_protocol(hostmap, protocol = 's'):
    """Serialize every host in *hostmap* offering *protocol* into a
    'host:port:protocol' string."""
    return [serialize_server(host, portmap[protocol], protocol)
            for host, portmap in hostmap.items()
            if portmap.get(protocol)]
def pick_random_server(hostmap=None, protocol='s', exclude_set=None):
    """Return a random eligible 'host:port:protocol' string, or None.

    Fix: the old signature used a mutable default (``exclude_set=set()``).
    Defaults are evaluated once at definition time, so a shared set is a
    latent aliasing hazard; use None plus an immutable fallback instead.
    """
    if hostmap is None:
        hostmap = constants.net.DEFAULT_SERVERS
    if exclude_set is None:
        exclude_set = frozenset()
    eligible = list(set(filter_protocol(hostmap, protocol)) - exclude_set)
    return random.choice(eligible) if eligible else None
from .simple_config import SimpleConfig
proxy_modes = ['socks4', 'socks5', 'http']
def serialize_proxy(p):
    """Flatten a proxy dict to 'mode:host:port:user:password'; None when *p*
    is not a dict."""
    if not isinstance(p, dict):
        return None
    fields = [p.get('mode'), p.get('host'), p.get('port'),
              p.get('user', ''), p.get('password', '')]
    return ':'.join(fields)
def deserialize_proxy(s):
    """Parse 'mode:host:port[:user[:password]]' into a proxy dict.

    Returns None for non-strings and for the literal string 'none' (any
    case). Missing components fall back to socks5 / localhost / a
    mode-specific default port.
    """
    if not isinstance(s, str):
        return None
    if s.lower() == 'none':
        return None
    proxy = { "mode":"socks5", "host":"localhost" }
    # n is a cursor into the colon-separated components; a leading
    # recognised mode is optional.
    args = s.split(':')
    n = 0
    if proxy_modes.count(args[n]) == 1:
        proxy["mode"] = args[n]
        n += 1
    if len(args) > n:
        proxy["host"] = args[n]
        n += 1
    if len(args) > n:
        proxy["port"] = args[n]
        n += 1
    else:
        # No port given: use the conventional default for the mode.
        proxy["port"] = "8080" if proxy["mode"] == "http" else "1080"
    if len(args) > n:
        proxy["user"] = args[n]
        n += 1
    if len(args) > n:
        proxy["password"] = args[n]
    return proxy
def deserialize_server(server_str):
    """Split 'host:port:protocol' (host may contain colons) and validate.

    Raises ValueError for a malformed string, unknown protocol or
    non-numeric port.

    Fix: the old check ``protocol not in 'st'`` did substring matching and
    therefore wrongly accepted '' and 'st' as protocols; compare against
    the explicit set of valid values instead.
    """
    host, port, protocol = str(server_str).rsplit(':', 2)
    if protocol not in ('s', 't'):
        raise ValueError('invalid network protocol: {}'.format(protocol))
    int(port)    # raises ValueError if the port is not numeric
    return host, port, protocol
def serialize_server(host, port, protocol):
    """Join host, port and protocol into the canonical 'host:port:protocol'."""
    parts = [host, port, protocol]
    return ':'.join(parts)
class Network(util.DaemonThread):
    def __init__(self, config=None):
        '''Initialise network state from *config* (dict or SimpleConfig) and
        kick off the first connections. The thread itself starts via run().'''
        if config is None:
            config = {}
        util.DaemonThread.__init__(self)
        self.config = SimpleConfig(config) if isinstance(config, dict) else config
        # 'oneserver' restricts us to the default server only.
        self.num_server = 10 if not self.config.get('oneserver') else 0
        self.blockchains = blockchain.read_blockchains(self.config)
        self.print_error("blockchains", self.blockchains.keys())
        self.blockchain_index = config.get('blockchain_index', 0)
        if self.blockchain_index not in self.blockchains.keys():
            self.blockchain_index = 0
        self.default_server = self.config.get('server', None)
        if self.default_server:
            try:
                deserialize_server(self.default_server)
            except:
                self.print_error('Warning: failed to parse server-string; falling back to random.')
                self.default_server = None
        if not self.default_server:
            self.default_server = pick_random_server()
        self.lock = threading.Lock()
        # Batches queued by send(), flushed by process_pending_sends().
        self.pending_sends = []
        self.message_id = 0
        self.debug = False
        self.irc_servers = {}
        self.recent_servers = self.read_recent_servers()
        self.banner = ''
        self.donation_address = ''
        self.relay_fee = None
        # Subscription callbacks keyed by get_index(); cached sub responses.
        self.subscriptions = defaultdict(list)
        self.sub_cache = {}
        self.callbacks = defaultdict(list)
        dir_path = os.path.join( self.config.path, 'certs')
        if not os.path.exists(dir_path):
            os.mkdir(dir_path)
            os.chmod(dir_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        self.subscribed_addresses = set()
        # Message state: scripthash -> address, and id -> (method, params, cb).
        self.h2addr = {}
        self.unanswered_requests = {}
        # retry times
        self.server_retry_time = time.time()
        self.nodes_retry_time = time.time()
        # kick off the network. interface is the main server we are currently
        # communicating with. interfaces is the set of servers we are connecting
        # to or have an ongoing connection with
        self.interface = None
        self.interfaces = {}
        self.auto_connect = self.config.get('auto_connect', True)
        self.connecting = set()
        self.requested_chunks = set()
        self.socket_queue = queue.Queue()
        self.start_network(deserialize_server(self.default_server)[2],
                           deserialize_proxy(self.config.get('proxy')))
def register_callback(self, callback, events):
with self.lock:
for event in events:
self.callbacks[event].append(callback)
def unregister_callback(self, callback):
with self.lock:
for callbacks in self.callbacks.values():
if callback in callbacks:
callbacks.remove(callback)
def trigger_callback(self, event, *args):
with self.lock:
callbacks = self.callbacks[event][:]
[callback(event, *args) for callback in callbacks]
def read_recent_servers(self):
if not self.config.path:
return []
path = os.path.join(self.config.path, "recent_servers")
try:
with open(path, "r", encoding='utf-8') as f:
data = f.read()
return json.loads(data)
except:
return []
def save_recent_servers(self):
if not self.config.path:
return
path = os.path.join(self.config.path, "recent_servers")
s = json.dumps(self.recent_servers, indent=4, sort_keys=True)
try:
with open(path, "w", encoding='utf-8') as f:
f.write(s)
except:
pass
def get_server_height(self):
return self.interface.tip if self.interface else 0
def server_is_lagging(self):
sh = self.get_server_height()
if not sh:
self.print_error('no height for main interface')
return True
lh = self.get_local_height()
result = (lh - sh) > 1
if result:
self.print_error('%s is lagging (%d vs %d)' % (self.default_server, sh, lh))
return result
def set_status(self, status):
self.connection_status = status
self.notify('status')
def is_connected(self):
return self.interface is not None
def is_connecting(self):
return self.connection_status == 'connecting'
def is_up_to_date(self):
return self.unanswered_requests == {}
def queue_request(self, method, params, interface=None):
# If you want to queue a request on any interface it must go
# through this function so message ids are properly tracked
if interface is None:
interface = self.interface
message_id = self.message_id
self.message_id += 1
if self.debug:
self.print_error(interface.host, "-->", method, params, message_id)
interface.queue_request(method, params, message_id)
return message_id
    def send_subscriptions(self):
        '''Replay our state to a newly selected main interface: unanswered
        requests, standing server queries and all address subscriptions.'''
        self.print_error('sending subscriptions to', self.interface.server, len(self.unanswered_requests), len(self.subscribed_addresses))
        # Cached subscription answers came from the old server.
        self.sub_cache.clear()
        # Resend unanswered requests
        requests = self.unanswered_requests.values()
        self.unanswered_requests = {}
        if self.interface.ping_required():
            params = [ELECTRUM_VERSION, PROTOCOL_VERSION]
            self.queue_request('server.version', params, self.interface)
        for request in requests:
            # Re-queue under a fresh message id on the new interface.
            message_id = self.queue_request(request[0], request[1])
            self.unanswered_requests[message_id] = request
        self.queue_request('server.banner', [])
        self.queue_request('server.donation_address', [])
        self.queue_request('server.peers.subscribe', [])
        self.request_fee_estimates()
        self.queue_request('blockchain.relayfee', [])
        for h in list(self.subscribed_addresses):
            self.queue_request('blockchain.scripthash.subscribe', [h])
def request_fee_estimates(self):
from .simple_config import FEE_ETA_TARGETS
self.config.requested_fee_estimates()
self.queue_request('mempool.get_fee_histogram', [])
for i in FEE_ETA_TARGETS:
self.queue_request('blockchain.estimatefee', [i])
    def get_status_value(self, key):
        '''Current value for notification *key* (an unknown key raises
        UnboundLocalError, since no branch assigns `value`).'''
        if key == 'status':
            value = self.connection_status
        elif key == 'banner':
            value = self.banner
        elif key == 'fee':
            value = self.config.fee_estimates
        elif key == 'fee_histogram':
            value = self.config.mempool_fees
        elif key == 'updated':
            value = (self.get_local_height(), self.get_server_height())
        elif key == 'servers':
            value = self.get_servers()
        elif key == 'interfaces':
            value = self.get_interfaces()
        return value
def notify(self, key):
if key in ['status', 'updated']:
self.trigger_callback(key)
else:
self.trigger_callback(key, self.get_status_value(key))
def get_parameters(self):
host, port, protocol = deserialize_server(self.default_server)
return host, port, protocol, self.proxy, self.auto_connect
def get_donation_address(self):
if self.is_connected():
return self.donation_address
def get_interfaces(self):
return list(self.interfaces.keys())
def get_servers(self):
out = constants.net.DEFAULT_SERVERS
if self.irc_servers:
out.update(filter_version(self.irc_servers.copy()))
else:
for s in self.recent_servers:
try:
host, port, protocol = deserialize_server(s)
except:
continue
if host not in out:
out[host] = { protocol:port }
return out
def start_interface(self, server):
if (not server in self.interfaces and not server in self.connecting):
if server == self.default_server:
self.print_error("connecting to %s as new interface" % server)
self.set_status('connecting')
self.connecting.add(server)
c = Connection(server, self.socket_queue, self.config.path)
def start_random_interface(self):
exclude_set = self.disconnected_servers.union(set(self.interfaces))
server = pick_random_server(self.get_servers(), self.protocol, exclude_set)
if server:
self.start_interface(server)
def start_interfaces(self):
self.start_interface(self.default_server)
for i in range(self.num_server - 1):
self.start_random_interface()
    def set_proxy(self, proxy):
        '''Install (or remove) a SOCKS/HTTP proxy by monkey-patching the
        socket module globally; *proxy* is a dict as produced by
        deserialize_proxy, or falsy to restore direct connections.'''
        self.proxy = proxy
        # Store these somewhere so we can un-monkey-patch
        if not hasattr(socket, "_socketobject"):
            socket._socketobject = socket.socket
            socket._getaddrinfo = socket.getaddrinfo
        if proxy:
            self.print_error('setting proxy', proxy)
            # PySocks' PROXY_TYPE_* constants are 1-based, our list is 0-based.
            proxy_mode = proxy_modes.index(proxy["mode"]) + 1
            socks.setdefaultproxy(proxy_mode,
                                  proxy["host"],
                                  int(proxy["port"]),
                                  # socks.py seems to want either None or a non-empty string
                                  username=(proxy.get("user", "") or None),
                                  password=(proxy.get("password", "") or None))
            socket.socket = socks.socksocket
            # prevent dns leaks, see http://stackoverflow.com/questions/13184205/dns-over-proxy
            socket.getaddrinfo = lambda *args: [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
        else:
            # Restore the pristine socket functions saved above.
            socket.socket = socket._socketobject
            socket.getaddrinfo = socket._getaddrinfo
    def start_network(self, protocol, proxy):
        '''Bring the network up from a stopped state: apply *proxy*, remember
        *protocol* ('s' or 't') and open the initial server connections.'''
        # Must only be called when fully stopped (see stop_network()).
        assert not self.interface and not self.interfaces
        assert not self.connecting and self.socket_queue.empty()
        self.print_error('starting network')
        self.disconnected_servers = set([])
        self.protocol = protocol
        self.set_proxy(proxy)
        self.start_interfaces()
def stop_network(self):
self.print_error("stopping network")
for interface in list(self.interfaces.values()):
self.close_interface(interface)
if self.interface:
self.close_interface(self.interface)
assert self.interface is None
assert not self.interfaces
self.connecting = set()
# Get a new queue - no old pending connections thanks!
self.socket_queue = queue.Queue()
def set_parameters(self, host, port, protocol, proxy, auto_connect):
proxy_str = serialize_proxy(proxy)
server = serialize_server(host, port, protocol)
# sanitize parameters
try:
deserialize_server(serialize_server(host, port, protocol))
if proxy:
proxy_modes.index(proxy["mode"]) + 1
int(proxy['port'])
except:
return
self.config.set_key('auto_connect', auto_connect, False)
self.config.set_key("proxy", proxy_str, False)
self.config.set_key("server", server, True)
# abort if changes were not allowed by config
if self.config.get('server') != server or self.config.get('proxy') != proxy_str:
return
self.auto_connect = auto_connect
if self.proxy != proxy or self.protocol != protocol:
# Restart the network defaulting to the given server
self.stop_network()
self.default_server = server
self.start_network(protocol, proxy)
elif self.default_server != server:
self.switch_to_interface(server)
else:
self.switch_lagging_interface()
self.notify('updated')
def switch_to_random_interface(self):
servers = self.get_interfaces() # Those in connected state
if self.default_server in servers:
servers.remove(self.default_server)
if servers:
self.switch_to_interface(random.choice(servers))
def switch_lagging_interface(self):
if self.server_is_lagging() and self.auto_connect:
# switch to one that has the correct header (not height)
header = self.blockchain().read_header(self.get_local_height())
filtered = list(map(lambda x:x[0], filter(lambda x: x[1].tip_header==header, self.interfaces.items())))
if filtered:
choice = random.choice(filtered)
self.switch_to_interface(choice)
    def switch_to_interface(self, server):
        """Make *server* the default server and the active interface.

        If no connection to it exists yet, one is started and the active
        interface is cleared until it comes up.
        """
        self.default_server = server
        if server not in self.interfaces:
            self.interface = None
            self.start_interface(server)
            return
        i = self.interfaces[server]
        if self.interface != i:
            self.print_error("switching to", server)
            # stop any current interface in order to terminate subscriptions
            # fixme: we don't want to close headers sub
            self.interface = i
            # Re-issue subscriptions on the new interface and announce it.
            self.send_subscriptions()
            self.set_status('connected')
            self.notify('updated')
def close_interface(self, interface):
if interface:
if interface.server in self.interfaces:
self.interfaces.pop(interface.server)
if interface.server == self.default_server:
self.interface = None
interface.close()
def add_recent_server(self, server):
if server in self.recent_servers:
self.recent_servers.remove(server)
self.recent_servers.insert(0, server)
self.recent_servers = self.recent_servers[0:20]
self.save_recent_servers()
    def process_response(self, interface, response, callbacks):
        """Dispatch one server response by its RPC method, then fan it out.

        Protocol-level responses (headers, peers, banner, fees, chunks...)
        update network state; every response is additionally delivered to
        each callback in *callbacks*.
        """
        if self.debug:
            self.print_error("<--", response)
        error = response.get('error')
        result = response.get('result')
        method = response.get('method')
        params = response.get('params')
        if method == 'server.version':
            interface.server_version = result
        elif method == 'blockchain.headers.subscribe':
            if error is None:
                self.on_notify_header(interface, result)
        elif method == 'server.peers.subscribe':
            if error is None:
                self.irc_servers = parse_servers(result)
                self.notify('servers')
        elif method == 'server.banner':
            if error is None:
                self.banner = result
                self.notify('banner')
        elif method == 'server.donation_address':
            if error is None:
                self.donation_address = result
        elif method == 'mempool.get_fee_histogram':
            if error is None:
                self.print_error('fee_histogram', result)
                self.config.mempool_fees = result
                self.notify('fee_histogram')
        elif method == 'blockchain.estimatefee':
            # result is a fee rate in coin units per kB; convert to satoshis.
            if error is None and result > 0:
                i = params[0]
                fee = int(result*COIN)
                self.config.update_fee_estimates(i, fee)
                self.print_error("fee_estimates[%d]" % i, fee)
                self.notify('fee')
        elif method == 'blockchain.relayfee':
            if error is None:
                self.relay_fee = int(result * COIN) if result is not None else None
                self.print_error("relayfee", self.relay_fee)
        elif method == 'blockchain.block.get_chunk':
            self.on_get_chunk(interface, response)
        elif method == 'blockchain.block.get_header':
            self.on_get_header(interface, response)
        # Always forward the raw response to the interested callbacks.
        for callback in callbacks:
            callback(response)
def get_index(self, method, params):
return str(method) + (':' + str(params[0]) if params else '')
    def process_responses(self, interface):
        """Drain *interface*'s response queue and route each reply.

        Replies come in two shapes: answers paired with the request we
        sent (request is not None), and spontaneous server notifications
        (request is None).  Notifications are rewritten to look like a
        response to the original subscribe call before being dispatched.
        """
        responses = interface.get_responses()
        for request, response in responses:
            if request:
                method, params, message_id = request
                k = self.get_index(method, params)
                # A one-shot client request?  Pop its callback; otherwise
                # fall back to the registered subscription callbacks.
                client_req = self.unanswered_requests.pop(message_id, None)
                if client_req:
                    # One-shot requests are only ever sent on the active
                    # interface.
                    assert interface == self.interface
                    callbacks = [client_req[2]]
                else:
                    k = self.get_index(method, params)
                    callbacks = self.subscriptions.get(k, [])
                # Copy the request's method/params into the response so
                # downstream consumers see a self-describing message.
                response['method'] = method
                response['params'] = params
                # add it to the list; avoids double-sends on reconnection
                if method == 'blockchain.scripthash.subscribe':
                    self.subscribed_addresses.add(params[0])
            else:
                if not response:  # Closed remotely / misbehaving
                    self.connection_down(interface.server)
                    break
                # Rewrite response shape to match subscription request response
                method = response.get('method')
                params = response.get('params')
                k = self.get_index(method, params)
                if method == 'blockchain.headers.subscribe':
                    response['result'] = params[0]
                    response['params'] = []
                elif method == 'blockchain.scripthash.subscribe':
                    response['params'] = [params[0]]  # addr
                    response['result'] = params[1]
                callbacks = self.subscriptions.get(k, [])
            # update cache if it's a subscription
            if method.endswith('.subscribe'):
                self.sub_cache[k] = response
            self.process_response(interface, response, callbacks)
def addr_to_scripthash(self, addr):
h = bitcoin.address_to_scripthash(addr)
if h not in self.h2addr:
self.h2addr[h] = addr
return h
def overload_cb(self, callback):
def cb2(x):
x2 = x.copy()
p = x2.pop('params')
addr = self.h2addr[p[0]]
x2['params'] = [addr]
callback(x2)
return cb2
def subscribe_to_addresses(self, addresses, callback):
hashes = [self.addr_to_scripthash(addr) for addr in addresses]
msgs = [('blockchain.scripthash.subscribe', [x]) for x in hashes]
self.send(msgs, self.overload_cb(callback))
def request_address_history(self, address, callback):
h = self.addr_to_scripthash(address)
self.send([('blockchain.scripthash.get_history', [h])], self.overload_cb(callback))
    def send(self, messages, callback):
        """Queue (messages, callback) for transmission on the network thread.

        *messages* is an iterable of (method, params) pairs; *callback*
        receives each response.  Actual transmission happens later in
        process_pending_sends(), hence the lock around the shared list.
        """
        messages = list(messages)
        with self.lock:
            self.pending_sends.append((messages, callback))
    def process_pending_sends(self):
        """Flush messages queued by send(): register subscriptions, serve
        cached subscription replies, and queue everything else as requests."""
        # With no active interface we cannot process them; leave queued.
        if not self.interface:
            return
        # Swap the shared list out under the lock, then work on the copy.
        with self.lock:
            sends = self.pending_sends
            self.pending_sends = []
        for messages, callback in sends:
            for method, params in messages:
                r = None
                if method.endswith('.subscribe'):
                    k = self.get_index(method, params)
                    # add callback to list
                    l = self.subscriptions.get(k, [])
                    if callback not in l:
                        l.append(callback)
                    self.subscriptions[k] = l
                    # check cached response for subscriptions
                    r = self.sub_cache.get(k)
                if r is not None:
                    # Serve the cached notification instead of re-asking.
                    self.print_error("cache hit", k)
                    callback(r)
                else:
                    message_id = self.queue_request(method, params)
                    self.unanswered_requests[message_id] = method, params, callback
def unsubscribe(self, callback):
# Note: we can't unsubscribe from the server, so if we receive
with self.lock:
for v in self.subscriptions.values():
if callback in v:
v.remove(callback)
    def connection_down(self, server):
        """Record that *server* failed: close its interface, update status,
        and release any blockchain catch-up it was responsible for."""
        self.disconnected_servers.add(server)
        if server == self.default_server:
            self.set_status('disconnected')
        if server in self.interfaces:
            self.close_interface(self.interfaces[server])
            self.notify('interfaces')
        # Free any chain this server was catching up, so another interface
        # can take over.
        for b in self.blockchains.values():
            if b.catch_up == server:
                b.catch_up = None
    def new_interface(self, server, socket):
        """Register a freshly connected *socket* for *server* as an Interface
        and immediately subscribe to its headers feed."""
        self.add_recent_server(server)
        interface = Interface(server, socket)
        # Sync-state bookkeeping used by on_get_header()/on_notify_header().
        interface.blockchain = None
        interface.tip_header = None
        interface.tip = 0
        interface.mode = 'default'
        interface.request = None
        self.interfaces[server] = interface
        self.queue_request('blockchain.headers.subscribe', [], interface)
        if server == self.default_server:
            self.switch_to_interface(server)
    def maintain_sockets(self):
        """One housekeeping pass of the main loop.

        Promotes completed connection attempts into interfaces, drops timed
        out ones, pings idle servers, tops up the interface pool, and
        periodically retries failed servers / the default server.
        """
        # Drain finished connection attempts (socket is None on failure).
        while not self.socket_queue.empty():
            server, socket = self.socket_queue.get()
            if server in self.connecting:
                self.connecting.remove(server)
            if socket:
                self.new_interface(server, socket)
            else:
                self.connection_down(server)
        # Health-check existing interfaces (copy: connection_down mutates).
        for interface in list(self.interfaces.values()):
            if interface.has_timed_out():
                self.connection_down(interface.server)
            elif interface.ping_required():
                # server.version doubles as a keep-alive ping.
                params = [ELECTRUM_VERSION, PROTOCOL_VERSION]
                self.queue_request('server.version', params, interface)
        now = time.time()
        # Keep num_server connections (open + in-flight) at all times.
        if len(self.interfaces) + len(self.connecting) < self.num_server:
            self.start_random_interface()
        # Periodically forgive all failed servers.
        if now - self.nodes_retry_time > NODES_RETRY_INTERVAL:
            self.print_error('network: retrying connections')
            self.disconnected_servers = set([])
            self.nodes_retry_time = now
        if not self.is_connected():
            if self.auto_connect:
                if not self.is_connecting():
                    self.switch_to_random_interface()
            else:
                # Pinned server: retry it on a timer instead of hopping.
                if self.default_server in self.disconnected_servers:
                    if now - self.server_retry_time > SERVER_RETRY_INTERVAL:
                        self.disconnected_servers.remove(self.default_server)
                        self.server_retry_time = now
                else:
                    self.switch_to_interface(self.default_server)
        else:
            if self.config.is_fee_estimates_update_required():
                self.request_fee_estimates()
def request_chunk(self, interface, index):
if index in self.requested_chunks:
return
interface.print_error("requesting chunk %d" % index)
self.requested_chunks.add(index)
self.queue_request('blockchain.block.get_chunk', [index], interface)
    def on_get_chunk(self, interface, response):
        """Handle a blockchain.block.get_chunk response: verify/connect the
        chunk and either request the next one or finish catching up."""
        error = response.get('error')
        result = response.get('result')
        params = response.get('params')
        blockchain = interface.blockchain
        if result is None or params is None or error is not None:
            interface.print_error(error or 'bad response')
            return
        index = params[0]
        # Ignore chunks we never asked for.
        if index not in self.requested_chunks:
            interface.print_error("received chunk %d (unsolicited)" % index)
            return
        else:
            interface.print_error("received chunk %d" % index)
        self.requested_chunks.remove(index)
        connect = blockchain.connect_chunk(index, result)
        if not connect:
            # Chunk failed validation: treat the server as misbehaving.
            self.connection_down(interface.server)
            return
        if index >= len(blockchain.checkpoints) and blockchain.height() < interface.tip:
            # Still behind the server's tip: keep fetching chunks.
            self.request_chunk(interface, index+1)
        else:
            interface.mode = 'default'
            interface.print_error('catch up done', blockchain.height())
            blockchain.catch_up = None
        self.notify('updated')
    def request_header(self, interface, height):
        """Request a single block header at *height* from *interface*.

        Records the outstanding height and the send timestamp so the reply
        can be validated and a timeout detected.
        """
        self.queue_request('blockchain.block.get_header', [height], interface)
        interface.request = height
        interface.req_time = time.time()
    def on_get_header(self, interface, response):
        """Drive the per-interface header-sync state machine.

        The interface cycles through three modes:
          * 'backward' - walk back from the server's tip looking for a
            header we recognise;
          * 'binary'   - binary-search between a known-good and known-bad
            height to find the exact fork point;
          * 'catch_up' - download headers forward until the server's tip.
        Each step decides the next height to request (``next_height``) or
        None when this interface is done / the server is dropped.
        """
        header = response.get('result')
        if not header:
            interface.print_error(response)
            self.connection_down(interface.server)
            return
        height = header.get('block_height')
        # Only accept the header we actually asked for.
        if interface.request != height:
            interface.print_error("unsolicited header",interface.request, height)
            self.connection_down(interface.server)
            return
        # chain is the local blockchain (if any) that already contains this
        # header.
        chain = blockchain.check_header(header)
        if interface.mode == 'backward':
            can_connect = blockchain.can_connect(header)
            if can_connect and can_connect.catch_up is None:
                # The header extends a local chain nobody else is catching
                # up: claim it and start catching up from here.
                interface.mode = 'catch_up'
                interface.blockchain = can_connect
                interface.blockchain.save_header(header)
                next_height = height + 1
                interface.blockchain.catch_up = interface.server
            elif chain:
                # Found a recognised header: binary-search for the fork
                # point between this (good) height and the earlier bad one.
                interface.print_error("binary search")
                interface.mode = 'binary'
                interface.blockchain = chain
                interface.good = height
                next_height = (interface.bad + interface.good) // 2
                assert next_height >= self.max_checkpoint(), (interface.bad, interface.good)
            else:
                if height == 0:
                    # Walked all the way to genesis without a match.
                    self.connection_down(interface.server)
                    next_height = None
                else:
                    # Still unknown: double the backward stride.
                    interface.bad = height
                    interface.bad_header = header
                    delta = interface.tip - height
                    next_height = max(self.max_checkpoint(), interface.tip - 2 * delta)
        elif interface.mode == 'binary':
            if chain:
                interface.good = height
                interface.blockchain = chain
            else:
                interface.bad = height
                interface.bad_header = header
            if interface.bad != interface.good + 1:
                # Interval not yet closed: keep bisecting.
                next_height = (interface.bad + interface.good) // 2
                assert next_height >= self.max_checkpoint()
            elif not interface.blockchain.can_connect(interface.bad_header, check_height=False):
                self.connection_down(interface.server)
                next_height = None
            else:
                # Fork point found at interface.bad; decide whether it maps
                # onto an existing local fork or needs a new one.
                branch = self.blockchains.get(interface.bad)
                if branch is not None:
                    if branch.check_header(interface.bad_header):
                        interface.print_error('joining chain', interface.bad)
                        next_height = None
                    elif branch.parent().check_header(header):
                        interface.print_error('reorg', interface.bad, interface.tip)
                        interface.blockchain = branch.parent()
                        next_height = None
                    else:
                        # Existing fork conflicts: restart it from the new
                        # bad header and catch up on it.
                        interface.print_error('checkpoint conflicts with existing fork', branch.path())
                        branch.write('', 0)
                        branch.save_header(interface.bad_header)
                        interface.mode = 'catch_up'
                        interface.blockchain = branch
                        next_height = interface.bad + 1
                        interface.blockchain.catch_up = interface.server
                else:
                    bh = interface.blockchain.height()
                    next_height = None
                    if bh > interface.good:
                        # Local chain extends past the fork point: spawn a
                        # new fork for this server's chain if needed.
                        if not interface.blockchain.check_header(interface.bad_header):
                            b = interface.blockchain.fork(interface.bad_header)
                            self.blockchains[interface.bad] = b
                            interface.blockchain = b
                            interface.print_error("new chain", b.checkpoint)
                            interface.mode = 'catch_up'
                            next_height = interface.bad + 1
                            interface.blockchain.catch_up = interface.server
                    else:
                        assert bh == interface.good
                        if interface.blockchain.catch_up is None and bh < interface.tip:
                            interface.print_error("catching up from %d"% (bh + 1))
                            interface.mode = 'catch_up'
                            next_height = bh + 1
                            interface.blockchain.catch_up = interface.server
                    self.notify('updated')
        elif interface.mode == 'catch_up':
            can_connect = interface.blockchain.can_connect(header)
            if can_connect:
                interface.blockchain.save_header(header)
                next_height = height + 1 if height < interface.tip else None
            else:
                # Chain moved under us: fall back to backward search.
                interface.print_error("cannot connect", height)
                interface.mode = 'backward'
                interface.bad = height
                interface.bad_header = header
                next_height = height - 1
            if next_height is None:
                interface.print_error('catch up done', interface.blockchain.height())
                interface.blockchain.catch_up = None
                self.switch_lagging_interface()
                self.notify('updated')
        else:
            raise Exception(interface.mode)
        if next_height:
            # Far behind while catching up: fetch whole chunks instead of
            # individual headers.
            if interface.mode == 'catch_up' and interface.tip > next_height + 50:
                self.request_chunk(interface, next_height // constants.CHUNK_LENGTH)
            else:
                self.request_header(interface, next_height)
        else:
            interface.mode = 'default'
            interface.request = None
            self.notify('updated')
        self.notify('interfaces')
def maintain_requests(self):
for interface in list(self.interfaces.values()):
if interface.request and time.time() - interface.request_time > 20:
interface.print_error("blockchain request timed out")
self.connection_down(interface.server)
continue
def wait_on_sockets(self):
# Sleep to prevent busy looping
if not self.interfaces:
time.sleep(0.1)
return
rin = [i for i in self.interfaces.values()]
win = [i for i in self.interfaces.values() if i.num_requests()]
try:
rout, wout, xout = select.select(rin, win, [], 0.1)
except socket.error as e:
# TODO: py3, get code from e
code = None
if code == errno.EINTR:
return
raise
assert not xout
for interface in wout:
interface.send_requests()
for interface in rout:
self.process_responses(interface)
    def init_headers_file(self):
        """Ensure the main chain's headers file exists and is pre-allocated
        to cover all checkpointed chunks."""
        b = self.blockchains[0]
        filename = b.path()
        length = constants.HEADER_BYTES * len(constants.net.CHECKPOINTS) * constants.CHUNK_LENGTH
        if not os.path.exists(filename) or os.path.getsize(filename) < length:
            with open(filename, 'wb') as f:
                # Sparse pre-allocation: seek to the last byte and write one
                # zero byte so the file has the required size.
                if length>0:
                    f.seek(length-1)
                    f.write(b'\x00')
        with b.lock:
            b.update_size()
    def run(self):
        """Main network-thread loop: maintain connections, pump sockets,
        enforce timeouts, run jobs and flush pending sends until stopped."""
        self.init_headers_file()
        while self.is_running():
            self.maintain_sockets()
            self.wait_on_sockets()
            self.maintain_requests()
            self.run_jobs() # Synchronizer and Verifier
            self.process_pending_sends()
        # Clean shutdown once is_running() turns false.
        self.stop_network()
        self.on_stop()
    def on_notify_header(self, interface, header):
        """Handle a new-tip notification from *interface*.

        Updates the interface's tip, and - when the interface is idle -
        either attaches it to a known chain, connects the new header, or
        kicks off a backward search / initial catch-up.
        """
        height = header.get('block_height')
        if not height:
            return
        # A tip below our hard-coded checkpoints means a bogus server.
        if height < self.max_checkpoint():
            self.connection_down(interface.server)
            return
        interface.tip_header = header
        interface.tip = height
        # Don't interfere with an interface that is mid-sync.
        if interface.mode != 'default':
            return
        b = blockchain.check_header(header)
        if b:
            # Header already known: just (re)attach the interface.
            interface.blockchain = b
            self.switch_lagging_interface()
            self.notify('updated')
            self.notify('interfaces')
            return
        b = blockchain.can_connect(header)
        if b:
            # Header extends a local chain: store it.
            interface.blockchain = b
            b.save_header(header)
            self.switch_lagging_interface()
            self.notify('updated')
            self.notify('interfaces')
            return
        tip = max([x.height() for x in self.blockchains.values()])
        if tip >=0:
            # Unknown header with local history: search backward for the
            # most recent header we share with this server.
            interface.mode = 'backward'
            interface.bad = height
            interface.bad_header = header
            self.request_header(interface, min(tip +1, height - 1))
        else:
            # No local headers at all: catch up from genesis on chain 0.
            chain = self.blockchains[0]
            if chain.catch_up is None:
                # NOTE(review): catch_up is set to the *interface* object
                # here, while elsewhere in this class it is compared against
                # / set to a server string - confirm which is intended.
                chain.catch_up = interface
                interface.mode = 'catch_up'
                interface.blockchain = chain
                self.print_error("switching to catchup mode", tip, self.blockchains)
                self.request_header(interface, 0)
            else:
                self.print_error("chain already catching up with", chain.catch_up.server)
def blockchain(self):
if self.interface and self.interface.blockchain is not None:
self.blockchain_index = self.interface.blockchain.checkpoint
return self.blockchains[self.blockchain_index]
def get_blockchains(self):
out = {}
for k, b in self.blockchains.items():
r = list(filter(lambda i: i.blockchain==b, list(self.interfaces.values())))
if r:
out[k] = r
return out
    def follow_chain(self, index):
        """Switch the client to the local fork identified by *index*.

        Persists the choice, moves the active interface to one that follows
        that chain, and re-applies the parameters so the default server
        matches the new interface.  Raises when *index* is unknown.
        """
        blockchain = self.blockchains.get(index)
        if blockchain:
            self.blockchain_index = index
            self.config.set_key('blockchain_index', index)
            # Use the first interface already following the chosen chain.
            for i in self.interfaces.values():
                if i.blockchain == blockchain:
                    self.switch_to_interface(i.server)
                    break
        else:
            raise Exception('blockchain not found', index)
        if self.interface:
            server = self.interface.server
            # Keep the proxy/auto_connect settings, but point the default
            # server at the interface we just switched to.
            host, port, protocol, proxy, auto_connect = self.get_parameters()
            host, port, protocol = server.split(':')
            self.set_parameters(host, port, protocol, proxy, auto_connect)
def get_local_height(self):
return self.blockchain().height()
def synchronous_get(self, request, timeout=30):
q = queue.Queue()
self.send([request], q.put)
try:
r = q.get(True, timeout)
except queue.Empty:
raise util.TimeoutException(_('Server did not answer'))
if r.get('error'):
raise Exception(r.get('error'))
return r.get('result')
def broadcast(self, tx, timeout=30):
tx_hash = tx.txid()
try:
out = self.synchronous_get(('blockchain.transaction.broadcast', [str(tx)]), timeout)
except BaseException as e:
return False, "error: " + str(e)
if out != tx_hash:
return False, "error: " + out
return True, out
def export_checkpoints(self, path):
# run manually from the console to generate checkpoints
cp = self.blockchain().get_checkpoints()
with open(path, 'w', encoding='utf-8') as f:
f.write(json.dumps(cp, indent=4))
def max_checkpoint(self):
return max(0, len(constants.net.CHECKPOINTS) * constants.CHUNK_LENGTH - 1)
| true | true |
f737321bbe218ac9a83cdad1ebea220e180cae0f | 913 | py | Python | engineered_features.py | carlos-alcan/network_app_classification | faa19842ed17b277259dd64e14c7133ce6a61e56 | [
"MIT"
] | null | null | null | engineered_features.py | carlos-alcan/network_app_classification | faa19842ed17b277259dd64e14c7133ce6a61e56 | [
"MIT"
] | null | null | null | engineered_features.py | carlos-alcan/network_app_classification | faa19842ed17b277259dd64e14c7133ce6a61e56 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 16 12:14:37 2019
@author: carlosalcantara
"""
'''
Expand data with engineered features using the feature_engineering_function.py
Saves new csv file with specified name, overwriting input file if no save file
name is given.
Usage: engineered_features.py csvfile [savefile=csvfile]
'''
import pandas as pd
import sys

import feature_engineering_function

# Check for command line arguments.  argv[0] is always the script name, so a
# missing csvfile argument means len(sys.argv) < 2 -- the original `< 1`
# comparison could never be true, so the usage message was unreachable.
if len(sys.argv) < 2:
    print('Usage: engineered_features.py csvfile [savefile=csvfile]')
    sys.exit(-1)

# Use the input file name as the output csv filename if none is specified
# (this overwrites the input file).
file = sys.argv[1]
savefile = sys.argv[2] if len(sys.argv) > 2 else file

# Read the NetFlow data file.
df = pd.read_csv(file)
# Expand the data with engineered features.
df = feature_engineering_function.BLINC_features(df)
# Write the augmented NetFlow data file.
df.to_csv(savefile, index=False)
import pandas as pd
import sys
import feature_engineering_function
if len(sys.argv) < 1:
print('Usage: engineered_features.py csvfile [savefile=csvfile]')
sys.exit(-1)
file = sys.argv[1]
if len(sys.argv) > 2:
savefile = sys.argv[2]
else:
savefile = file
df = pd.read_csv(file)
df = feature_engineering_function.BLINC_features(df)
df.to_csv(savefile, index=False) | true | true |
f737324f44c17fa1f6e877198d987bbc244bf54b | 3,178 | py | Python | msticpy/datamodel/entities/host_logon_session.py | 2xyo/msticpy | 17f6a25ea82d85632e0c52a60e20626e9621d3b0 | [
"MIT"
] | 1 | 2021-07-29T16:04:08.000Z | 2021-07-29T16:04:08.000Z | msticpy/datamodel/entities/host_logon_session.py | QPC-database/msticpy | 54c6d74e0bb25528dd0347edb40c693dd7b1eac7 | [
"MIT"
] | 3 | 2021-05-15T02:16:39.000Z | 2022-01-19T13:13:25.000Z | msticpy/datamodel/entities/host_logon_session.py | QPC-database/msticpy | 54c6d74e0bb25528dd0347edb40c693dd7b1eac7 | [
"MIT"
] | null | null | null | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""HostLogonSession Entity class."""
from datetime import datetime
from typing import Any, Mapping, Optional
from ..._version import VERSION
from ...common.utility import export
from .entity import Entity
from .account import Account
from .host import Host
__version__ = VERSION
__author__ = "Ian Hellen"
# pylint: disable=invalid-name
@export
class HostLogonSession(Entity):
"""
HostLogonSession Entity class.
Attributes
----------
Account : Account
HostLogonSession Account
StartTimeUtc : datetime
HostLogonSession StartTimeUtc
EndTimeUtc : datetime
HostLogonSession EndTimeUtc
Host : Host
HostLogonSession Host
SessionId : str
HostLogonSession SessionId
"""
ID_PROPERTIES = ["Account", "Host", "SessionId"]
def __init__(
self,
src_entity: Mapping[str, Any] = None,
src_event: Mapping[str, Any] = None,
**kwargs,
):
"""
Create a new instance of the entity type.
Parameters
----------
src_entity : Mapping[str, Any], optional
Create entity from existing entity or
other mapping object that implements entity properties.
(the default is None)
src_event : Mapping[str, Any], optional
Create entity from event properties
(the default is None)
Other Parameters
----------------
kwargs : Dict[str, Any]
Supply the entity properties as a set of
kw arguments.
"""
self.Account: Optional[Account] = None
self.StartTimeUtc: datetime = datetime.min
self.EndTimeUtc: datetime = datetime.min
self.Host: Optional[Host] = None
self.SessionId: str = ""
super().__init__(src_entity=src_entity, **kwargs)
if src_event is not None:
if "TimeCreatedUtc" in src_event:
self.StartTimeUtc = src_event["TimeCreatedUtc"]
elif "TimeGenerated" in src_event:
self.StartTimeUtc = src_event["TimeGenerated"]
self.EndTimeUtc = self.StartTimeUtc
self.SessionId = (
src_event["TargetLogonId"] if "TargetLogonId" in src_event else None
)
@property
def description_str(self) -> str:
"""Return Entity Description."""
if self.Host:
return f"{self.Host.HostName}: session: {self.SessionId}"
return self.__class__.__name__
_entity_schema = {
# Account
"Account": "Account",
# StartTimeUtc (type System.Nullable`1[System.DateTime])
"StartTimeUtc": None,
# EndTimeUtc (type System.Nullable`1[System.DateTime])
"EndTimeUtc": None,
# Host
"Host": "Host",
# SessionId (type System.String)
"SessionId": None,
}
| 29.425926 | 84 | 0.581812 |
from datetime import datetime
from typing import Any, Mapping, Optional
from ..._version import VERSION
from ...common.utility import export
from .entity import Entity
from .account import Account
from .host import Host
__version__ = VERSION
__author__ = "Ian Hellen"
@export
class HostLogonSession(Entity):
ID_PROPERTIES = ["Account", "Host", "SessionId"]
def __init__(
self,
src_entity: Mapping[str, Any] = None,
src_event: Mapping[str, Any] = None,
**kwargs,
):
self.Account: Optional[Account] = None
self.StartTimeUtc: datetime = datetime.min
self.EndTimeUtc: datetime = datetime.min
self.Host: Optional[Host] = None
self.SessionId: str = ""
super().__init__(src_entity=src_entity, **kwargs)
if src_event is not None:
if "TimeCreatedUtc" in src_event:
self.StartTimeUtc = src_event["TimeCreatedUtc"]
elif "TimeGenerated" in src_event:
self.StartTimeUtc = src_event["TimeGenerated"]
self.EndTimeUtc = self.StartTimeUtc
self.SessionId = (
src_event["TargetLogonId"] if "TargetLogonId" in src_event else None
)
@property
def description_str(self) -> str:
if self.Host:
return f"{self.Host.HostName}: session: {self.SessionId}"
return self.__class__.__name__
_entity_schema = {
"Account": "Account",
"StartTimeUtc": None,
"EndTimeUtc": None,
"Host": "Host",
"SessionId": None,
}
| true | true |
f73732e27bc370111d4d7024fed2a8f1e3597cd9 | 2,676 | py | Python | examples/diff/diff_test.py | adamruth/python-fire | 6912ccd56f50e0f4bb30a0725d95858ef29f3bde | [
"Apache-2.0"
] | 1 | 2020-02-05T04:43:03.000Z | 2020-02-05T04:43:03.000Z | examples/diff/diff_test.py | chesnjak/python-fire | 72604f40314008e562ba47936dcc183b51166b72 | [
"Apache-2.0"
] | null | null | null | examples/diff/diff_test.py | chesnjak/python-fire | 72604f40314008e562ba47936dcc183b51166b72 | [
"Apache-2.0"
] | 1 | 2020-02-05T04:44:05.000Z | 2020-02-05T04:44:05.000Z | # Copyright (C) 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the diff and difffull modules."""
import tempfile
from fire import testutils
from examples.diff import diff
from examples.diff import difffull
class DiffTest(testutils.BaseTestCase):
  """The purpose of these tests is to ensure the difflib wrappers works.
  It is not the goal of these tests to exhaustively test difflib functionality.
  """
  def setUp(self):
    """Create two throwaway files: one differing line plus one extra line."""
    self.file1 = file1 = tempfile.NamedTemporaryFile()
    self.file2 = file2 = tempfile.NamedTemporaryFile()
    file1.write(b'test\ntest1\n')
    file2.write(b'test\ntest2\nextraline\n')
    file1.flush()
    file2.flush()
    self.diff = diff.DiffLibWrapper(file1.name, file2.name)
  def testSetUp(self):
    """The wrapper should have read both files into line lists."""
    self.assertEqual(self.diff.fromlines, ['test\n', 'test1\n'])
    self.assertEqual(self.diff.tolines, ['test\n', 'test2\n', 'extraline\n'])
  def testUnifiedDiff(self):
    """unified_diff yields file headers followed by hunk lines."""
    results = list(self.diff.unified_diff())
    # The first two lines name the compared files.
    self.assertTrue(results[0].startswith('--- ' + self.file1.name))
    self.assertTrue(results[1].startswith('+++ ' + self.file2.name))
    self.assertEqual(
        results[2:],
        [
            '@@ -1,2 +1,3 @@\n',
            ' test\n',
            '-test1\n',
            '+test2\n',
            '+extraline\n',
        ]
    )
  def testContextDiff(self):
    """context_diff yields before/after sections marked with ! lines."""
    expected_lines = [
        '***************\n',
        '*** 1,2 ****\n',
        '  test\n',
        '! test1\n',
        '--- 1,3 ----\n',
        '  test\n',
        '! test2\n',
        '! extraline\n']
    results = list(self.diff.context_diff())
    # Skip the two file-header lines, as in testUnifiedDiff.
    self.assertEqual(results[2:], expected_lines)
  def testNDiff(self):
    """ndiff yields per-line deltas with ? guide lines."""
    expected_lines = [
        '  test\n',
        '- test1\n',
        '?     ^\n',
        '+ test2\n',
        '?     ^\n',
        '+ extraline\n']
    results = list(self.diff.ndiff())
    self.assertEqual(results, expected_lines)
  def testMakeDiff(self):
    """make_file produces an HTML document."""
    self.assertTrue(''.join(self.diff.make_file()).startswith('\n<!DOC'))
  def testDiffFull(self):
    """The difffull module should import and expose difflib."""
    self.assertIsNotNone(difffull)
    self.assertIsNotNone(difffull.difflib)
# Allow running this test module directly.
if __name__ == '__main__':
  testutils.main()
| 27.875 | 79 | 0.626682 |
import tempfile
from fire import testutils
from examples.diff import diff
from examples.diff import difffull
class DiffTest(testutils.BaseTestCase):
def setUp(self):
self.file1 = file1 = tempfile.NamedTemporaryFile()
self.file2 = file2 = tempfile.NamedTemporaryFile()
file1.write(b'test\ntest1\n')
file2.write(b'test\ntest2\nextraline\n')
file1.flush()
file2.flush()
self.diff = diff.DiffLibWrapper(file1.name, file2.name)
def testSetUp(self):
self.assertEqual(self.diff.fromlines, ['test\n', 'test1\n'])
self.assertEqual(self.diff.tolines, ['test\n', 'test2\n', 'extraline\n'])
def testUnifiedDiff(self):
results = list(self.diff.unified_diff())
self.assertTrue(results[0].startswith('--- ' + self.file1.name))
self.assertTrue(results[1].startswith('+++ ' + self.file2.name))
self.assertEqual(
results[2:],
[
'@@ -1,2 +1,3 @@\n',
' test\n',
'-test1\n',
'+test2\n',
'+extraline\n',
]
)
def testContextDiff(self):
expected_lines = [
'***************\n',
'*** 1,2 ****\n',
' test\n',
'! test1\n',
'--- 1,3 ----\n',
' test\n',
'! test2\n',
'! extraline\n']
results = list(self.diff.context_diff())
self.assertEqual(results[2:], expected_lines)
def testNDiff(self):
expected_lines = [
' test\n',
'- test1\n',
'? ^\n',
'+ test2\n',
'? ^\n',
'+ extraline\n']
results = list(self.diff.ndiff())
self.assertEqual(results, expected_lines)
def testMakeDiff(self):
self.assertTrue(''.join(self.diff.make_file()).startswith('\n<!DOC'))
def testDiffFull(self):
self.assertIsNotNone(difffull)
self.assertIsNotNone(difffull.difflib)
if __name__ == '__main__':
testutils.main()
| true | true |
f7373392e3a8c483544fcb2bc60bfaa7bc7a3472 | 1,127 | py | Python | utils.py | grevych/graphparser | 67b7036ac3974d69616bc3765483968b797ed031 | [
"MIT"
] | null | null | null | utils.py | grevych/graphparser | 67b7036ac3974d69616bc3765483968b797ed031 | [
"MIT"
] | null | null | null | utils.py | grevych/graphparser | 67b7036ac3974d69616bc3765483968b797ed031 | [
"MIT"
] | null | null | null | # -*- encoding:utf-8 -*-
SETS = {
'D': '01234456789',
'L': 'abcdefghijklmnñopqrstuvwxyzABCDEFGHIJKLMNÑOPQRSTUVWXYZ',
'C': '01234456789abcdefghijklmnñopqrstuvwxyzABCDEFGHIJKLMNÑOPQRSTUVWXYZ!"·#$%&/()=?¿¡ºª*+^`[]´Ç¨{}-_:.;,<>\'\\\t '
}
AUTOMATAS = (
{'name': 'comment',
'states': (
((('#', 1, ), ), False, ),
(((SETS['C'], 1, ), ), True, ), )},
{'name': 'digit',
'states': (
((('+-' + SETS['D'], 1, ), ), False, ),
(((SETS['D'], 1, ),
('.', 2, ), ),
True, ),
(((SETS['D'], 3, ), ), False, ),
(((SETS['D'], 3, ), ), True, ), )},
{'name': 'reserved',
'states': (
(((SETS['L'], 1, ), ), False, ),
(((SETS['L'], 1, ), ), True, ), )},
{'name': 'variable',
'states': (
((('_' + SETS['L'], 1, ), ), False, ),
((('_' + SETS['D'] + SETS['L'], 1, ), ), True, ), )},
)
LEXEMES = {
'title': 'TITLE'
}
LEXEMES_AS_REGEXP = (
(r'^\d+$', 'DIGIT'),
(r'^\w+$', 'VARIABLE'),
(r'^".*"', 'STRING'),
(r'^#.*$', 'COMMENT'),
)
ESCAPE_CHARACTERS = (' ', '\n', '\t', '\r', )
| 24.5 | 118 | 0.377107 |
SETS = {
'D': '01234456789',
'L': 'abcdefghijklmnñopqrstuvwxyzABCDEFGHIJKLMNÑOPQRSTUVWXYZ',
'C': '01234456789abcdefghijklmnñopqrstuvwxyzABCDEFGHIJKLMNÑOPQRSTUVWXYZ!"·#$%&/()=?¿¡ºª*+^`[]´Ç¨{}-_:.;,<>\'\\\t '
}
AUTOMATAS = (
{'name': 'comment',
'states': (
((('#', 1, ), ), False, ),
(((SETS['C'], 1, ), ), True, ), )},
{'name': 'digit',
'states': (
((('+-' + SETS['D'], 1, ), ), False, ),
(((SETS['D'], 1, ),
('.', 2, ), ),
True, ),
(((SETS['D'], 3, ), ), False, ),
(((SETS['D'], 3, ), ), True, ), )},
{'name': 'reserved',
'states': (
(((SETS['L'], 1, ), ), False, ),
(((SETS['L'], 1, ), ), True, ), )},
{'name': 'variable',
'states': (
((('_' + SETS['L'], 1, ), ), False, ),
((('_' + SETS['D'] + SETS['L'], 1, ), ), True, ), )},
)
LEXEMES = {
'title': 'TITLE'
}
LEXEMES_AS_REGEXP = (
(r'^\d+$', 'DIGIT'),
(r'^\w+$', 'VARIABLE'),
(r'^".*"', 'STRING'),
(r'^#.*$', 'COMMENT'),
)
ESCAPE_CHARACTERS = (' ', '\n', '\t', '\r', )
| true | true |
f7373463128f4034161b82321034c6345197ba26 | 492 | py | Python | pyphys/pipette.py | Sepidak/spikeGUI | 25ae60160308c0a34e7180f3e39a1c4dc6aad708 | [
"MIT"
] | null | null | null | pyphys/pipette.py | Sepidak/spikeGUI | 25ae60160308c0a34e7180f3e39a1c4dc6aad708 | [
"MIT"
] | 3 | 2021-08-09T21:51:41.000Z | 2021-08-09T21:51:45.000Z | pyphys/pipette.py | Sepidak/spikeGUI | 25ae60160308c0a34e7180f3e39a1c4dc6aad708 | [
"MIT"
] | 3 | 2021-10-16T14:07:59.000Z | 2021-10-16T17:09:03.000Z | # -*- coding: utf-8 -*-
"""
Created on Fri Jul 18 14:03:19 2014
@author: crousse
"""
class Pipette(object):
    """Bookkeeping record for a single recording pipette.

    Every attribute starts at a neutral default; callers populate them as
    the experiment progresses.
    """

    def __init__(self):
        # Electrical properties.
        self.resistance = 0
        self.seriesResistance = 0  # access resistance; distinct from `resistance`
        self.vm = 0
        # Identification and geometry.
        self.internal = ""  # internal solution description
        self.id = 0  # which pipette number
        self.depth = 0.0
        # Subjective quality scores, each on a 0-to-5 scale.
        self.hitQuality = 0
        self.sealQuality = 0
        self.pullOff = 0
        # Free-form experimenter note.
        self.remark = ''
| 24.6 | 62 | 0.53252 |
class Pipette(object):
def __init__(self):
self.resistance = 0
self.internal = ""
self.id = 0
self.depth = 0.0
self.hitQuality = 0
self.sealQuality = 0
self.seriesResistance = 0
self.vm = 0
self.pullOff = 0
self.remark = ''
| true | true |
f73734f7e14d978b1c19eb89fa4dba6f52a4689d | 1,072 | py | Python | groupdocs/models/UserPasswordInfo.py | groupdocs-legacy-sdk/python | 80e5ef5a9a14ac4a7815c6cf933b5b2997381455 | [
"Apache-2.0"
] | null | null | null | groupdocs/models/UserPasswordInfo.py | groupdocs-legacy-sdk/python | 80e5ef5a9a14ac4a7815c6cf933b5b2997381455 | [
"Apache-2.0"
] | null | null | null | groupdocs/models/UserPasswordInfo.py | groupdocs-legacy-sdk/python | 80e5ef5a9a14ac4a7815c6cf933b5b2997381455 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class UserPasswordInfo:
    """Swagger model describing a password-change payload.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self):
        # Attribute name -> swagger type name, consumed by the
        # (de)serialization machinery.
        self.swaggerTypes = {
            'old_pswd_salt': 'str',
            'new_pswd_salt': 'str',
            'reset_token': 'str'
        }
        # Payload fields start unset; each holds a str once populated.
        self.old_pswd_salt = None
        self.new_pswd_salt = None
        self.reset_token = None
| 28.972973 | 77 | 0.658582 |
class UserPasswordInfo:
def __init__(self):
self.swaggerTypes = {
'old_pswd_salt': 'str',
'new_pswd_salt': 'str',
'reset_token': 'str'
}
self.old_pswd_salt = None
self.new_pswd_salt = None
self.reset_token = None
| true | true |
f737359bfe01ddeb28c94cc3cf8c3c60c38ca264 | 14,647 | py | Python | scripts/run_token_classification.py | CAMeL-Lab/CAMeLBERT_morphosyntactic_tagger | 5bea542c2e731d263281d0ab16ba9c065f602f94 | [
"MIT"
] | null | null | null | scripts/run_token_classification.py | CAMeL-Lab/CAMeLBERT_morphosyntactic_tagger | 5bea542c2e731d263281d0ab16ba9c065f602f94 | [
"MIT"
] | null | null | null | scripts/run_token_classification.py | CAMeL-Lab/CAMeLBERT_morphosyntactic_tagger | 5bea542c2e731d263281d0ab16ba9c065f602f94 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# MIT License
#
# Copyright 2018-2021 New York University Abu Dhabi
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
""" Fine-tuning pre-trained models for token classification tasks.
Heavily adapted from: https://github.com/huggingface/transformers/blob/
v3.0.1/examples/token-classification/run_ner.py"""
import logging
import os
import sys
from dataclasses import dataclass, field
from typing import Dict, List, Optional, Tuple
import numpy as np
from seqeval.metrics import (
accuracy_score as seq_accuracy_score,
f1_score as seq_f1_score,
precision_score as seq_precision_score,
recall_score as seq_recall_score
)
from sklearn.metrics import (
accuracy_score,
f1_score,
precision_score,
recall_score
)
from torch import nn
from transformers import (
AutoConfig,
AutoModelForTokenClassification,
AutoTokenizer,
EvalPrediction,
HfArgumentParser,
Trainer,
TrainingArguments,
set_seed,
)
from utils import TokenClassificationDataSet, Split, get_labels
logger = logging.getLogger(__name__)
@dataclass
class ModelArguments:
    """
    Arguments pertaining to which model/config/tokenizer we are
    going to fine-tune from.
    """
    # Required: local path or huggingface.co hub identifier of the
    # checkpoint to fine-tune.
    model_name_or_path: str = field(
        metadata={"help": "Path to pretrained model or model identifier from "
                          "huggingface.co/models"}
    )
    # Optional override when the config does not live alongside the weights.
    config_name: Optional[str] = field(
        default=None, metadata={"help": "Pretrained config name or path if "
                                        "not the same as model_name"}
    )
    # If you want to tweak more attributes on your tokenizer, you should do it
    # in a distinct script, or just modify its tokenizer_config.json.
    tokenizer_name: Optional[str] = field(
        default=None, metadata={"help": "Pretrained tokenizer name or path if "
                                        "not the same as model_name"}
    )
    # When True, load the fast (Rust-backed) tokenizer implementation.
    use_fast: bool = field(default=False, metadata={"help": "Set this flag to "
                                                            "use fast "
                                                            "tokenization."})
    # Selects the metric family in main()'s compute_metrics: "ner" uses
    # entity-level seqeval metrics, anything else uses token-level sklearn
    # metrics.
    task_type: Optional[str] = field(
        default="ner", metadata={"help": "the name of the task (ner or pos)"}
    )
    # Directory where downloaded pretrained models are cached.
    cache_dir: Optional[str] = field(
        default=None, metadata={"help": "Where do you want to store the "
                                        "pretrained models downloaded from s3"}
    )
@dataclass
class DataTrainingArguments:
    """
    Arguments pertaining to what data we are going to input our model for
    training and eval.
    """
    # Directory holding the split .txt files (train/dev/test) in
    # CoNLL-2003 format.
    data_dir: str = field(
        metadata={"help": "The input data dir. Should contain the .txt files "
                  "for a CoNLL-2003-formatted task."}
    )
    # Optional path to a file listing all labels; forwarded to get_labels().
    labels: Optional[str] = field(
        default=None,
        metadata={"help": "Path to a file containing all labels."},
    )
    # Hard cap on tokenized sequence length (longer truncated, shorter padded).
    max_seq_length: int = field(
        default=128,
        metadata={
            "help": "The maximum total input sequence length after "
                    "tokenization. Sequences longer than this will be truncated, "
                    "sequences shorter will be padded."
        },
    )
    # When True, rebuild cached feature files instead of reusing them.
    overwrite_cache: bool = field(
        default=False, metadata={"help": "Overwrite the cached training and "
                                         "evaluation sets"}
    )
    # When True, --do_predict runs on the blind test split instead of test.
    blind_test: bool = field(
        default=False, metadata={"help": "Use blind test set"}
    )
def main():
    """CLI entry point for token-classification fine-tuning.

    Parses (ModelArguments, DataTrainingArguments, TrainingArguments) from
    the command line or from a single .json file, then runs any combination
    of training, evaluation and prediction selected by the TrainingArguments
    flags.  Returns the evaluation-results dict (empty if --do_eval is off).
    """
    # See all possible arguments in src/transformers/training_args.py
    # or by passing the --help flag to this script.
    # We now keep distinct sets of args, for a cleaner separation of concerns.
    parser = HfArgumentParser((ModelArguments,
                               DataTrainingArguments,
                               TrainingArguments))
    if len(sys.argv) == 2 and sys.argv[1].endswith(".json"):
        # If we pass only one argument to the script and it's the path to a
        # json file, let's parse it to get our arguments.
        model_args, data_args, training_args = parser.parse_json_file(
            json_file=os.path.abspath(
                sys.argv[1]))
    else:
        model_args, data_args, training_args = parser.parse_args_into_dataclasses()
    # Refuse to clobber an existing non-empty output dir unless asked to.
    if (
        os.path.exists(training_args.output_dir)
        and os.listdir(training_args.output_dir)
        and training_args.do_train
        and not training_args.overwrite_output_dir
    ):
        raise ValueError(
            f"Output directory ({training_args.output_dir}) already exists "
            "and is not empty. Use --overwrite_output_dir to overcome."
        )
    # Setup logging: INFO only on the main process of distributed runs.
    logging.basicConfig(
        format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
        datefmt="%m/%d/%Y %H:%M:%S",
        level=(logging.INFO if training_args.local_rank in [-1, 0]
               else logging.WARN),
    )
    logger.warning(
        "Process rank: %s, device: %s, n_gpu: %s, distributed training: %s, "
        "16-bits training: %s",
        training_args.local_rank,
        training_args.device,
        training_args.n_gpu,
        bool(training_args.local_rank != -1),
        training_args.fp16,
    )
    logger.info("Training/evaluation parameters %s", training_args)
    # Set seed
    set_seed(training_args.seed)
    # Prepare task: label list plus the id <-> label maps shared below.
    labels = get_labels(data_args.labels)
    label_map: Dict[int, str] = {i: label for i, label in enumerate(labels)}
    num_labels = len(labels)
    # Load pretrained model and tokenizer
    #
    # Distributed training:
    # The .from_pretrained methods guarantee that only one local process can
    # concurrently download model & vocab.
    config = AutoConfig.from_pretrained(
        (model_args.config_name if model_args.config_name
         else model_args.model_name_or_path),
        num_labels=num_labels,
        id2label=label_map,
        label2id={label: i for i, label in enumerate(labels)},
        cache_dir=model_args.cache_dir,
    )
    tokenizer = AutoTokenizer.from_pretrained(
        (model_args.tokenizer_name if model_args.tokenizer_name
         else model_args.model_name_or_path),
        cache_dir=model_args.cache_dir,
        use_fast=model_args.use_fast,
    )
    model = AutoModelForTokenClassification.from_pretrained(
        model_args.model_name_or_path,
        from_tf=bool(".ckpt" in model_args.model_name_or_path),
        config=config,
        cache_dir=model_args.cache_dir,
    )
    # Get datasets — each split is only built when the matching flag is set.
    train_dataset = (
        TokenClassificationDataSet(
            data_dir=data_args.data_dir,
            tokenizer=tokenizer,
            labels=labels,
            model_type=config.model_type,
            max_seq_length=data_args.max_seq_length,
            overwrite_cache=data_args.overwrite_cache,
            mode=Split.train,
        )
        if training_args.do_train
        else None
    )
    eval_dataset = (
        TokenClassificationDataSet(
            data_dir=data_args.data_dir,
            tokenizer=tokenizer,
            labels=labels,
            model_type=config.model_type,
            max_seq_length=data_args.max_seq_length,
            overwrite_cache=data_args.overwrite_cache,
            mode=Split.dev,
        )
        if training_args.do_eval
        else None
    )
    def align_predictions(predictions: np.ndarray,
                          label_ids: np.ndarray) -> Tuple[List[int], List[int]]:
        """Turn (batch, seq, num_labels) logits into per-example label-string
        lists, skipping positions whose gold id equals the CrossEntropyLoss
        ignore index (padding / sub-word positions)."""
        preds = np.argmax(predictions, axis=2)
        batch_size, seq_len = preds.shape
        out_label_list = [[] for _ in range(batch_size)]
        preds_list = [[] for _ in range(batch_size)]
        for i in range(batch_size):
            for j in range(seq_len):
                if label_ids[i, j] != nn.CrossEntropyLoss().ignore_index:
                    out_label_list[i].append(label_map[label_ids[i][j]])
                    preds_list[i].append(label_map[preds[i][j]])
        return preds_list, out_label_list
    def compute_metrics(p: EvalPrediction) -> Dict:
        """Trainer metrics callback: entity-level seqeval scores for NER,
        token-level sklearn scores for any other task type."""
        preds_list, out_label_list = align_predictions(p.predictions,
                                                       p.label_ids)
        # If task type is NER, use seqeval metrics.
        # Otherwise, use scikit learn
        if model_args.task_type == "ner":
            return {
                "accuracy": seq_accuracy_score(out_label_list, preds_list),
                "precision": seq_precision_score(out_label_list, preds_list),
                "recall": seq_recall_score(out_label_list, preds_list),
                "f1": seq_f1_score(out_label_list, preds_list),
            }
        else:
            # Flatten the preds_list and out_label_list
            preds_list = [p for sublist in preds_list for p in sublist]
            out_label_list = [p for sublist in out_label_list for p in sublist]
            return {
                "accuracy": accuracy_score(out_label_list, preds_list),
                "precision_micro": precision_score(out_label_list, preds_list,
                                                   average="micro"),
                "recall_micro": recall_score(out_label_list, preds_list,
                                             average="micro"),
                "f1_micro": f1_score(out_label_list, preds_list,
                                     average="micro"),
                "precision_macro": precision_score(out_label_list, preds_list,
                                                   average="macro"),
                "recall_macro": recall_score(out_label_list, preds_list,
                                             average="macro"),
                "f1_macro": f1_score(out_label_list, preds_list,
                                     average="macro"),
            }
    # Initialize our Trainer
    trainer = Trainer(
        model=model,
        args=training_args,
        train_dataset=train_dataset,
        eval_dataset=eval_dataset,
        compute_metrics=compute_metrics,
    )
    # Training
    if training_args.do_train:
        trainer.train(
            model_path=(model_args.model_name_or_path
                        if os.path.isdir(model_args.model_name_or_path)
                        else None)
        )
        trainer.save_model()
        # For convenience, we also re-save the tokenizer to the same directory,
        # so that you can share your model easily on huggingface.co/models =)
        if trainer.is_world_master():
            tokenizer.save_pretrained(training_args.output_dir)
    # Evaluation
    results = {}
    if training_args.do_eval:
        logger.info("*** Evaluate ***")
        result = trainer.evaluate()
        output_eval_file = os.path.join(training_args.output_dir,
                                        "eval_results.txt")
        if trainer.is_world_master():
            with open(output_eval_file, "w") as writer:
                logger.info("***** Eval results *****")
                for key, value in result.items():
                    logger.info("  %s = %s", key, value)
                    writer.write("%s = %s\n" % (key, value))
            results.update(result)
    # Predict
    if training_args.do_predict:
        data_split = Split.test
        if data_args.blind_test:
            data_split = Split.blind_test
        test_dataset = TokenClassificationDataSet(
            data_dir=data_args.data_dir,
            tokenizer=tokenizer,
            labels=labels,
            model_type=config.model_type,
            max_seq_length=data_args.max_seq_length,
            overwrite_cache=data_args.overwrite_cache,
            mode=data_split,
        )
        predictions, label_ids, metrics = trainer.predict(test_dataset)
        preds_list, _ = align_predictions(predictions, label_ids)
        output_test_results_file = os.path.join(training_args.output_dir,
                                                f"{data_split.value}_results.txt")
        if trainer.is_world_master():
            with open(output_test_results_file, "w") as writer:
                for key, value in metrics.items():
                    logger.info("  %s = %s", key, value)
                    writer.write("%s = %s\n" % (key, value))
        # Save predictions: echo the input file, appending the predicted
        # label to each token line.
        output_test_predictions_file = os.path.join(training_args.output_dir,
                                                    f"{data_split.value}_predictions.txt")
        if trainer.is_world_master():
            with open(output_test_predictions_file, "w") as writer:
                with open(os.path.join(data_args.data_dir, f"{data_split.value}.txt"), "r") as f:
                    example_id = 0
                    for line in f:
                        if (line.startswith("-DOCSTART-") or line == ""
                                or line == "\n"):
                            writer.write(line)
                            if not preds_list[example_id]:
                                example_id += 1
                        elif preds_list[example_id]:
                            output_line = (line.split()[0] + " " +
                                           preds_list[example_id].pop(0) + "\n")
                            writer.write(output_line)
                        else:
                            logger.warning(
                                "Maximum sequence length exceeded: "
                                "No prediction for '%s'.", line.split()[0])
    return results
if __name__ == "__main__":
main()
| 37.75 | 97 | 0.590428 |
import logging
import os
import sys
from dataclasses import dataclass, field
from typing import Dict, List, Optional, Tuple
import numpy as np
from seqeval.metrics import (
accuracy_score as seq_accuracy_score,
f1_score as seq_f1_score,
precision_score as seq_precision_score,
recall_score as seq_recall_score
)
from sklearn.metrics import (
accuracy_score,
f1_score,
precision_score,
recall_score
)
from torch import nn
from transformers import (
AutoConfig,
AutoModelForTokenClassification,
AutoTokenizer,
EvalPrediction,
HfArgumentParser,
Trainer,
TrainingArguments,
set_seed,
)
from utils import TokenClassificationDataSet, Split, get_labels
logger = logging.getLogger(__name__)
@dataclass
class ModelArguments:
model_name_or_path: str = field(
metadata={"help": "Path to pretrained model or model identifier from "
"huggingface.co/models"}
)
config_name: Optional[str] = field(
default=None, metadata={"help": "Pretrained config name or path if "
"not the same as model_name"}
)
tokenizer_name: Optional[str] = field(
default=None, metadata={"help": "Pretrained tokenizer name or path if "
"not the same as model_name"}
)
use_fast: bool = field(default=False, metadata={"help": "Set this flag to "
"use fast "
"tokenization."})
task_type: Optional[str] = field(
default="ner", metadata={"help": "the name of the task (ner or pos)"}
)
cache_dir: Optional[str] = field(
default=None, metadata={"help": "Where do you want to store the "
"pretrained models downloaded from s3"}
)
@dataclass
class DataTrainingArguments:
data_dir: str = field(
metadata={"help": "The input data dir. Should contain the .txt files "
"for a CoNLL-2003-formatted task."}
)
labels: Optional[str] = field(
default=None,
metadata={"help": "Path to a file containing all labels."},
)
max_seq_length: int = field(
default=128,
metadata={
"help": "The maximum total input sequence length after "
"tokenization. Sequences longer than this will be truncated, "
"sequences shorter will be padded."
},
)
overwrite_cache: bool = field(
default=False, metadata={"help": "Overwrite the cached training and "
"evaluation sets"}
)
blind_test: bool = field(
default=False, metadata={"help": "Use blind test set"}
)
def main():
parser = HfArgumentParser((ModelArguments,
DataTrainingArguments,
TrainingArguments))
if len(sys.argv) == 2 and sys.argv[1].endswith(".json"):
# json file, let's parse it to get our arguments.
model_args, data_args, training_args = parser.parse_json_file(
json_file=os.path.abspath(
sys.argv[1]))
else:
model_args, data_args, training_args = parser.parse_args_into_dataclasses()
if (
os.path.exists(training_args.output_dir)
and os.listdir(training_args.output_dir)
and training_args.do_train
and not training_args.overwrite_output_dir
):
raise ValueError(
f"Output directory ({training_args.output_dir}) already exists "
"and is not empty. Use --overwrite_output_dir to overcome."
)
logging.basicConfig(
format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
datefmt="%m/%d/%Y %H:%M:%S",
level=(logging.INFO if training_args.local_rank in [-1, 0]
else logging.WARN),
)
logger.warning(
"Process rank: %s, device: %s, n_gpu: %s, distributed training: %s, "
"16-bits training: %s",
training_args.local_rank,
training_args.device,
training_args.n_gpu,
bool(training_args.local_rank != -1),
training_args.fp16,
)
logger.info("Training/evaluation parameters %s", training_args)
set_seed(training_args.seed)
labels = get_labels(data_args.labels)
label_map: Dict[int, str] = {i: label for i, label in enumerate(labels)}
num_labels = len(labels)
config = AutoConfig.from_pretrained(
(model_args.config_name if model_args.config_name
else model_args.model_name_or_path),
num_labels=num_labels,
id2label=label_map,
label2id={label: i for i, label in enumerate(labels)},
cache_dir=model_args.cache_dir,
)
tokenizer = AutoTokenizer.from_pretrained(
(model_args.tokenizer_name if model_args.tokenizer_name
else model_args.model_name_or_path),
cache_dir=model_args.cache_dir,
use_fast=model_args.use_fast,
)
model = AutoModelForTokenClassification.from_pretrained(
model_args.model_name_or_path,
from_tf=bool(".ckpt" in model_args.model_name_or_path),
config=config,
cache_dir=model_args.cache_dir,
)
train_dataset = (
TokenClassificationDataSet(
data_dir=data_args.data_dir,
tokenizer=tokenizer,
labels=labels,
model_type=config.model_type,
max_seq_length=data_args.max_seq_length,
overwrite_cache=data_args.overwrite_cache,
mode=Split.train,
)
if training_args.do_train
else None
)
eval_dataset = (
TokenClassificationDataSet(
data_dir=data_args.data_dir,
tokenizer=tokenizer,
labels=labels,
model_type=config.model_type,
max_seq_length=data_args.max_seq_length,
overwrite_cache=data_args.overwrite_cache,
mode=Split.dev,
)
if training_args.do_eval
else None
)
def align_predictions(predictions: np.ndarray,
label_ids: np.ndarray) -> Tuple[List[int], List[int]]:
preds = np.argmax(predictions, axis=2)
batch_size, seq_len = preds.shape
out_label_list = [[] for _ in range(batch_size)]
preds_list = [[] for _ in range(batch_size)]
for i in range(batch_size):
for j in range(seq_len):
if label_ids[i, j] != nn.CrossEntropyLoss().ignore_index:
out_label_list[i].append(label_map[label_ids[i][j]])
preds_list[i].append(label_map[preds[i][j]])
return preds_list, out_label_list
def compute_metrics(p: EvalPrediction) -> Dict:
preds_list, out_label_list = align_predictions(p.predictions,
p.label_ids)
if model_args.task_type == "ner":
return {
"accuracy": seq_accuracy_score(out_label_list, preds_list),
"precision": seq_precision_score(out_label_list, preds_list),
"recall": seq_recall_score(out_label_list, preds_list),
"f1": seq_f1_score(out_label_list, preds_list),
}
else:
preds_list = [p for sublist in preds_list for p in sublist]
out_label_list = [p for sublist in out_label_list for p in sublist]
return {
"accuracy": accuracy_score(out_label_list, preds_list),
"precision_micro": precision_score(out_label_list, preds_list,
average="micro"),
"recall_micro": recall_score(out_label_list, preds_list,
average="micro"),
"f1_micro": f1_score(out_label_list, preds_list,
average="micro"),
"precision_macro": precision_score(out_label_list, preds_list,
average="macro"),
"recall_macro": recall_score(out_label_list, preds_list,
average="macro"),
"f1_macro": f1_score(out_label_list, preds_list,
average="macro"),
}
trainer = Trainer(
model=model,
args=training_args,
train_dataset=train_dataset,
eval_dataset=eval_dataset,
compute_metrics=compute_metrics,
)
if training_args.do_train:
trainer.train(
model_path=(model_args.model_name_or_path
if os.path.isdir(model_args.model_name_or_path)
else None)
)
trainer.save_model()
if trainer.is_world_master():
tokenizer.save_pretrained(training_args.output_dir)
results = {}
if training_args.do_eval:
logger.info("*** Evaluate ***")
result = trainer.evaluate()
output_eval_file = os.path.join(training_args.output_dir,
"eval_results.txt")
if trainer.is_world_master():
with open(output_eval_file, "w") as writer:
logger.info("***** Eval results *****")
for key, value in result.items():
logger.info(" %s = %s", key, value)
writer.write("%s = %s\n" % (key, value))
results.update(result)
if training_args.do_predict:
data_split = Split.test
if data_args.blind_test:
data_split = Split.blind_test
test_dataset = TokenClassificationDataSet(
data_dir=data_args.data_dir,
tokenizer=tokenizer,
labels=labels,
model_type=config.model_type,
max_seq_length=data_args.max_seq_length,
overwrite_cache=data_args.overwrite_cache,
mode=data_split,
)
predictions, label_ids, metrics = trainer.predict(test_dataset)
preds_list, _ = align_predictions(predictions, label_ids)
output_test_results_file = os.path.join(training_args.output_dir,
f"{data_split.value}_results.txt")
if trainer.is_world_master():
with open(output_test_results_file, "w") as writer:
for key, value in metrics.items():
logger.info(" %s = %s", key, value)
writer.write("%s = %s\n" % (key, value))
output_test_predictions_file = os.path.join(training_args.output_dir,
f"{data_split.value}_predictions.txt")
if trainer.is_world_master():
with open(output_test_predictions_file, "w") as writer:
with open(os.path.join(data_args.data_dir, f"{data_split.value}.txt"), "r") as f:
example_id = 0
for line in f:
if (line.startswith("-DOCSTART-") or line == ""
or line == "\n"):
writer.write(line)
if not preds_list[example_id]:
example_id += 1
elif preds_list[example_id]:
output_line = (line.split()[0] + " " +
preds_list[example_id].pop(0) + "\n")
writer.write(output_line)
else:
logger.warning(
"Maximum sequence length exceeded: "
"No prediction for '%s'.", line.split()[0])
return results
if __name__ == "__main__":
main()
| true | true |
f7373644759d17c45cf3ae146fedf1c85f452b87 | 251 | py | Python | manage.py | octaflop/geovinci | 928917e23ee480b12f8b250ee58ba1a3701eee6a | [
"MIT"
] | 4 | 2015-08-06T01:20:17.000Z | 2019-09-10T22:36:25.000Z | manage.py | octaflop/geovinci | 928917e23ee480b12f8b250ee58ba1a3701eee6a | [
"MIT"
] | 4 | 2020-02-11T22:51:53.000Z | 2021-06-10T17:50:46.000Z | manage.py | octaflop/geovinci | 928917e23ee480b12f8b250ee58ba1a3701eee6a | [
"MIT"
] | 3 | 2015-08-06T01:22:04.000Z | 2017-12-11T16:28:11.000Z | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings unless the caller has already
    # set DJANGO_SETTINGS_MODULE in the environment.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "geovinci.settings")
    # Imported lazily so the settings variable is in place first.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| 22.818182 | 72 | 0.772908 |
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "geovinci.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| true | true |
f73737e1efa9ce37f0808baedac2ae44ce769e5c | 1,997 | py | Python | python/phonenumbers/data/region_MU.py | ageron/python-phonenumbers | ebfa84db04e4eefc67f33eed9ff2bb62203c9f11 | [
"Apache-2.0"
] | 2 | 2019-03-30T02:12:54.000Z | 2021-03-08T18:59:40.000Z | python/phonenumbers/data/region_MU.py | ageron/python-phonenumbers | ebfa84db04e4eefc67f33eed9ff2bb62203c9f11 | [
"Apache-2.0"
] | null | null | null | python/phonenumbers/data/region_MU.py | ageron/python-phonenumbers | ebfa84db04e4eefc67f33eed9ff2bb62203c9f11 | [
"Apache-2.0"
] | 1 | 2018-11-10T03:47:34.000Z | 2018-11-10T03:47:34.000Z | """Auto-generated file, do not edit by hand. MU metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Numbering-plan metadata for region "MU" (country calling code +230).
# Auto-generated — regenerate from the upstream data rather than editing
# the patterns by hand.
PHONE_METADATA_MU = PhoneMetadata(id='MU', country_code=230, international_prefix='0(?:0|[2-7]0|33)',
    general_desc=PhoneNumberDesc(national_number_pattern='[2-9]\\d{6,7}', possible_number_pattern='\\d{7,8}'),
    fixed_line=PhoneNumberDesc(national_number_pattern='(?:2(?:[03478]\\d|1[0-7]|6[1-69])|4(?:[013568]\\d|2[4-7])|5(44\\d|471)|6\\d{2}|8(?:14|3[129]))\\d{4}', possible_number_pattern='\\d{7,8}', example_number='2012345'),
    mobile=PhoneNumberDesc(national_number_pattern='5(?:2[59]\\d|4(?:2[1-389]|4\\d|7[1-9]|9\\d)|7\\d{2}|8(?:[26]\\d|7[15-8])|9[0-8]\\d)\\d{4}', possible_number_pattern='\\d{8}', example_number='52512345'),
    toll_free=PhoneNumberDesc(national_number_pattern='80[012]\\d{4}', possible_number_pattern='\\d{7}', example_number='8001234'),
    premium_rate=PhoneNumberDesc(national_number_pattern='30\\d{5}', possible_number_pattern='\\d{7}', example_number='3012345'),
    shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    voip=PhoneNumberDesc(national_number_pattern='3(?:20|9\\d)\\d{4}', possible_number_pattern='\\d{7}', example_number='3201234'),
    pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    preferred_international_prefix='020',
    number_format=[NumberFormat(pattern='([2-46-9]\\d{2})(\\d{4})', format=u'\\1 \\2', leading_digits_pattern=['[2-46-9]']),
        NumberFormat(pattern='(5\\d{3})(\\d{4})', format=u'\\1 \\2', leading_digits_pattern=['5'])])
| 99.85 | 221 | 0.723085 | from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_MU = PhoneMetadata(id='MU', country_code=230, international_prefix='0(?:0|[2-7]0|33)',
general_desc=PhoneNumberDesc(national_number_pattern='[2-9]\\d{6,7}', possible_number_pattern='\\d{7,8}'),
fixed_line=PhoneNumberDesc(national_number_pattern='(?:2(?:[03478]\\d|1[0-7]|6[1-69])|4(?:[013568]\\d|2[4-7])|5(44\\d|471)|6\\d{2}|8(?:14|3[129]))\\d{4}', possible_number_pattern='\\d{7,8}', example_number='2012345'),
mobile=PhoneNumberDesc(national_number_pattern='5(?:2[59]\\d|4(?:2[1-389]|4\\d|7[1-9]|9\\d)|7\\d{2}|8(?:[26]\\d|7[15-8])|9[0-8]\\d)\\d{4}', possible_number_pattern='\\d{8}', example_number='52512345'),
toll_free=PhoneNumberDesc(national_number_pattern='80[012]\\d{4}', possible_number_pattern='\\d{7}', example_number='8001234'),
premium_rate=PhoneNumberDesc(national_number_pattern='30\\d{5}', possible_number_pattern='\\d{7}', example_number='3012345'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voip=PhoneNumberDesc(national_number_pattern='3(?:20|9\\d)\\d{4}', possible_number_pattern='\\d{7}', example_number='3201234'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
preferred_international_prefix='020',
number_format=[NumberFormat(pattern='([2-46-9]\\d{2})(\\d{4})', format=u'\\1 \\2', leading_digits_pattern=['[2-46-9]']),
NumberFormat(pattern='(5\\d{3})(\\d{4})', format=u'\\1 \\2', leading_digits_pattern=['5'])])
| true | true |
f73739152499a56e209e5514a9685cce0e6593cc | 10,419 | py | Python | benchmarks/f3_wrong_hints_permutations/scaling_ltl_infinite_state/18-extending_bound_23.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/f3_wrong_hints_permutations/scaling_ltl_infinite_state/18-extending_bound_23.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | null | null | null | benchmarks/f3_wrong_hints_permutations/scaling_ltl_infinite_state/18-extending_bound_23.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z | from typing import Tuple, FrozenSet
from collections import Iterable
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Return the term `arg0 < arg1`, encoded as the negation of `arg0 >= arg1`."""
    return msat_make_not(menv, msat_make_geq(menv, arg0, arg1))
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Return the term `arg0 >= arg1`, encoded as `arg1 <= arg0`."""
    swapped = msat_make_leq(menv, arg1, arg0)
    return swapped
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Return the term `arg0 > arg1`, encoded as the negation of `arg0 <= arg1`."""
    return msat_make_not(menv, msat_make_leq(menv, arg0, arg1))
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Return the implication `arg0 -> arg1`, encoded as `!arg0 | arg1`."""
    return msat_make_or(menv, msat_make_not(menv, arg0), arg1)
def check_ltl(menv: msat_env, enc: LTLEncoder) -> Tuple[Iterable, msat_term,
                                                        msat_term, msat_term]:
    """Build the benchmark's symbolic transition system and LTL property.

    State variables: real counters `i`, `r`, bound `l`, and boolean `inc_i`.
    Returns (curr2next symbol map, init, trans, ltl) where ltl encodes
    (G F inc_i) -> !(G F r > i).
    """
    assert menv
    assert isinstance(menv, msat_env)
    assert enc
    assert isinstance(enc, LTLEncoder)
    bool_type = msat_get_bool_type(menv)
    real_type = msat_get_rational_type(menv)
    # Declare current-state symbols.
    i = msat_declare_function(menv, "i", real_type)
    i = msat_make_constant(menv, i)
    r = msat_declare_function(menv, "r", real_type)
    r = msat_make_constant(menv, r)
    l = msat_declare_function(menv, "l", real_type)
    l = msat_make_constant(menv, l)
    inc_i = msat_declare_function(menv, "inc_i", bool_type)
    inc_i = msat_make_constant(menv, inc_i)
    # Declare next-state (primed) symbols.
    x_i = msat_declare_function(menv, name_next("i"), real_type)
    x_i = msat_make_constant(menv, x_i)
    x_r = msat_declare_function(menv, name_next("r"), real_type)
    x_r = msat_make_constant(menv, x_r)
    x_l = msat_declare_function(menv, name_next("l"), real_type)
    x_l = msat_make_constant(menv, x_l)
    x_inc_i = msat_declare_function(menv, name_next("inc_i"), bool_type)
    x_inc_i = msat_make_constant(menv, x_inc_i)
    curr2next = {i: x_i, r: x_r, l: x_l, inc_i: x_inc_i}
    zero = msat_make_number(menv, "0")
    one = msat_make_number(menv, "1")
    # init: 0 < r < l & i >= 0 & !inc_i & l > 0
    r_gt_0 = msat_make_gt(menv, r, zero)
    r_lt_l = msat_make_lt(menv, r, l)
    i_geq_0 = msat_make_geq(menv, i, zero)
    init = msat_make_and(menv, r_gt_0, r_lt_l)
    init = msat_make_and(menv, init,
                         msat_make_and(menv, i_geq_0,
                                       msat_make_not(menv, inc_i)))
    init = msat_make_and(menv, init, msat_make_gt(menv, l, zero))
    # r' = r
    trans = msat_make_equal(menv, x_r, r)
    # i < l -> ((inc_i' & i' = i + 1) | (!inc_i' & i' = i)) & l' = l
    i_lt_l = msat_make_lt(menv, i, l)
    x_i_eq_i_p_1 = msat_make_and(menv, x_inc_i,
                                 msat_make_equal(menv, x_i,
                                                 msat_make_plus(menv, i, one)))
    x_i_eq_i = msat_make_and(menv, msat_make_not(menv, x_inc_i),
                             msat_make_equal(menv, x_i, i))
    x_i_eq_i_p_1_or_i = msat_make_or(menv, x_i_eq_i_p_1, x_i_eq_i)
    x_l_eq_l = msat_make_equal(menv, x_l, l)
    x_i_eq_i_p_1_or_i_and_x_l_eq_l = msat_make_and(menv, x_i_eq_i_p_1_or_i,
                                                   x_l_eq_l)
    trans = msat_make_and(menv, trans,
                          msat_make_impl(menv, i_lt_l,
                                         x_i_eq_i_p_1_or_i_and_x_l_eq_l))
    # i >= l -> i' = 0 & l' = l + 1 & !inc_i'
    i_geq_l = msat_make_geq(menv, i, l)
    x_i_eq_0 = msat_make_equal(menv, x_i, zero)
    x_l_eq_l_p_1 = msat_make_equal(menv, x_l, msat_make_plus(menv, l, one))
    x_i_eq_0_and_x_l_eq_l_p_1 = msat_make_and(menv,
                                              msat_make_and(menv, x_i_eq_0,
                                                            x_l_eq_l_p_1),
                                              msat_make_not(menv, x_inc_i))
    trans = msat_make_and(menv, trans,
                          msat_make_impl(menv, i_geq_l,
                                         x_i_eq_0_and_x_l_eq_l_p_1))
    # (G F inc_i) -> ! G F r > i
    G_F_x_i_gt_i = enc.make_G(enc.make_F(inc_i))
    r_gt_i = msat_make_gt(menv, r, i)
    n_G_F_r_gt_i = msat_make_not(menv, enc.make_G(enc.make_F(r_gt_i)))
    ltl = msat_make_impl(menv, G_F_x_i_gt_i, n_G_F_r_gt_i)
    return TermMap(curr2next), init, trans, ltl
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
    """Build the set of Hints over the symbols i, r, l and inc_i.

    Each Hint is a small automaton of Locations restricted to a single
    symbol: a Location holds a region assumption (optionally with a
    stutter transition ``stutterT``) and ``set_progress(dst, rel)``
    records the progress transition towards location *dst*.  Hints are
    named ``h_<symbol><variant>``.
    """
    assert isinstance(env, PysmtEnv)
    mgr = env.formula_manager
    # Current-state symbols ...
    i = mgr.Symbol("i", types.REAL)
    r = mgr.Symbol("r", types.REAL)
    l = mgr.Symbol("l", types.REAL)
    inc_i = mgr.Symbol("inc_i", types.BOOL)
    symbs = frozenset([i, r, l, inc_i])
    # ... and their primed (next-state) counterparts.
    x_i = symb_to_next(mgr, i)
    x_r = symb_to_next(mgr, r)
    x_l = symb_to_next(mgr, l)
    x_inc_i = symb_to_next(mgr, inc_i)
    res = []
    n0 = mgr.Real(0)
    n1 = mgr.Real(1)
    # h_r3: loc0 may stutter with r' = r + i and progresses keeping r
    # constant; loc1 progresses back with r' = r + 1.
    loc0 = Location(env, mgr.GE(r, n0), mgr.GE(i, n0),
                    stutterT=mgr.Equals(x_r, mgr.Plus(r, i)))
    loc0.set_progress(1, mgr.Equals(x_r, r))
    loc1 = Location(env, mgr.GE(r, n0))
    loc1.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
    h_r = Hint("h_r3", env, frozenset([r]), symbs)
    h_r.set_locs([loc0, loc1])
    res.append(h_r)
    # h_inc1: single location keeping inc_i false forever.
    loc = Location(env, mgr.Not(inc_i))
    loc.set_progress(0, mgr.Not(x_inc_i))
    h_inc = Hint("h_inc1", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc])
    res.append(h_inc)
    # h_l2: l alternates between +1 steps and staying constant.
    loc0 = Location(env, mgr.GE(l, n0))
    loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
    loc1 = Location(env, mgr.GE(l, n0))
    loc1.set_progress(0, mgr.Equals(x_l, l))
    h_l = Hint("h_l2", env, frozenset([l]), symbs)
    h_l.set_locs([loc0, loc1])
    res.append(h_l)
    # h_inc3: inc_i toggles false -> true -> false; may stutter at true.
    loc0 = Location(env, mgr.Not(inc_i))
    loc0.set_progress(1, x_inc_i)
    loc1 = Location(env, inc_i, stutterT=x_inc_i)
    loc1.set_progress(0, mgr.Not(x_inc_i))
    h_inc = Hint("h_inc3", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc0, loc1])
    res.append(h_inc)
    # h_r4: three-location loop: r constant, then +1, then constant.
    loc0 = Location(env, mgr.GE(r, n0))
    loc0.set_progress(1, mgr.Equals(x_r, r))
    loc1 = Location(env, mgr.GE(r, n0))
    loc1.set_progress(2, mgr.Equals(x_r, mgr.Plus(r, n1)))
    loc2 = Location(env, mgr.GE(r, n0))
    loc2.set_progress(0, mgr.Equals(x_r, r))
    h_r = Hint("h_r4", env, frozenset([r]), symbs)
    h_r.set_locs([loc0, loc1, loc2])
    res.append(h_r)
    # h_i1: i <= 0 and decreasing by 1; may stutter with i unchanged.
    stutter = mgr.Equals(x_i, i)
    loc = Location(env, mgr.LE(i, n0), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_i, mgr.Minus(i, n1)))
    h_i = Hint("h_i1", env, frozenset([i]), symbs)
    h_i.set_locs([loc])
    res.append(h_i)
    # h_i3: i may stutter growing by l, progresses +1, then stays equal.
    loc0 = Location(env, mgr.GE(i, n0), mgr.GE(l, n0),
                    stutterT=mgr.Equals(x_i, mgr.Plus(i, l)))
    loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
    loc1 = Location(env, mgr.GE(i, n0))
    loc1.set_progress(0, mgr.Equals(x_i, i))
    h_i = Hint("h_i3", env, frozenset([i]), symbs)
    h_i.set_locs([loc0, loc1])
    res.append(h_i)
    # h_i0: i >= 0 and increasing by 1; may stutter with i unchanged.
    stutter = mgr.Equals(x_i, i)
    loc = Location(env, mgr.GE(i, n0), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_i, mgr.Plus(i, n1)))
    h_i = Hint("h_i0", env, frozenset([i]), symbs)
    h_i.set_locs([loc])
    res.append(h_i)
    # h_inc0: single location keeping inc_i true forever.
    loc = Location(env, inc_i)
    loc.set_progress(0, x_inc_i)
    h_inc = Hint("h_inc0", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc])
    res.append(h_inc)
    # h_inc4: false -> true (may stutter at true) -> false -> loop.
    loc0 = Location(env, mgr.Not(inc_i))
    loc0.set_progress(1, x_inc_i)
    loc1 = Location(env, inc_i, stutterT=x_inc_i)
    loc1.set_progress(2, mgr.Not(x_inc_i))
    loc2 = Location(env, mgr.Not(inc_i))
    loc2.set_progress(0, mgr.Not(x_inc_i))
    h_inc = Hint("h_inc4", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc0, loc1, loc2])
    res.append(h_inc)
    # h_l1: l <= 0 and decreasing by 1.
    loc = Location(env, mgr.LE(l, n0))
    loc.set_progress(0, mgr.Equals(x_l, mgr.Minus(l, n1)))
    h_l = Hint("h_l1", env, frozenset([l]), symbs)
    h_l.set_locs([loc])
    res.append(h_l)
    # h_l3: l may stutter growing by r, progresses +1, then stays equal.
    loc0 = Location(env, mgr.GE(l, n0), mgr.GE(r, n0),
                    stutterT=mgr.Equals(x_l, mgr.Plus(l, r)))
    loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
    loc1 = Location(env, mgr.GE(l, n0))
    loc1.set_progress(0, mgr.Equals(x_l, l))
    h_l = Hint("h_l3", env, frozenset([l]), symbs)
    h_l.set_locs([loc0, loc1])
    res.append(h_l)
    # h_l4: three-location loop: +1, then constant, then constant.
    loc0 = Location(env, mgr.GE(l, n0))
    loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
    loc1 = Location(env, mgr.GE(l, n0))
    loc1.set_progress(2, mgr.Equals(x_l, l))
    loc2 = Location(env, mgr.GE(l, n0))
    loc2.set_progress(0, mgr.Equals(x_l, l))
    h_l = Hint("h_l4", env, frozenset([l]), symbs)
    h_l.set_locs([loc0, loc1, loc2])
    res.append(h_l)
    # h_inc2: inc_i strictly alternates false/true (no stutter).
    loc0 = Location(env, mgr.Not(inc_i))
    loc0.set_progress(1, x_inc_i)
    loc1 = Location(env, inc_i)
    loc1.set_progress(0, mgr.Not(x_inc_i))
    h_inc = Hint("h_inc2", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc0, loc1])
    res.append(h_inc)
    # h_i2: i alternates between +1 steps and staying constant.
    loc0 = Location(env, mgr.GE(i, n0))
    loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
    loc1 = Location(env, mgr.GE(i, n0))
    loc1.set_progress(0, mgr.Equals(x_i, i))
    h_i = Hint("h_i2", env, frozenset([i]), symbs)
    h_i.set_locs([loc0, loc1])
    res.append(h_i)
    # h_r0: r >= 0 and always increasing by 1.
    loc = Location(env, mgr.GE(r, n0))
    loc.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
    h_r = Hint("h_r0", env, frozenset([r]), symbs)
    h_r.set_locs([loc])
    res.append(h_r)
    # h_l0: l >= 0 and always increasing by 1.
    loc = Location(env, mgr.GE(l, n0))
    loc.set_progress(0, mgr.Equals(x_l, mgr.Plus(l, n1)))
    h_l = Hint("h_l0", env, frozenset([l]), symbs)
    h_l.set_locs([loc])
    res.append(h_l)
    # h_i4: three-location loop: +1, then constant, then constant.
    loc0 = Location(env, mgr.GE(i, n0))
    loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
    loc1 = Location(env, mgr.GE(i, n0))
    loc1.set_progress(2, mgr.Equals(x_i, i))
    loc2 = Location(env, mgr.GE(i, n0))
    loc2.set_progress(0, mgr.Equals(x_i, i))
    h_i = Hint("h_i4", env, frozenset([i]), symbs)
    h_i.set_locs([loc0, loc1, loc2])
    res.append(h_i)
    return frozenset(res)
| 35.318644 | 89 | 0.624628 | from typing import Tuple, FrozenSet
from collections import Iterable
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Return the mathsat term ``arg0 < arg1``, encoded as not(arg0 >= arg1)."""
    return msat_make_not(menv, msat_make_geq(menv, arg0, arg1))
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Return the mathsat term ``arg0 >= arg1`` via leq with swapped operands."""
    swapped_leq = msat_make_leq(menv, arg1, arg0)
    return swapped_leq
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Return the mathsat term ``arg0 > arg1``, encoded as not(arg0 <= arg1)."""
    return msat_make_not(menv, msat_make_leq(menv, arg0, arg1))
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Return the mathsat term ``arg0 -> arg1``, encoded as (not arg0) | arg1."""
    return msat_make_or(menv, msat_make_not(menv, arg0), arg1)
def check_ltl(menv: msat_env, enc: LTLEncoder) -> Tuple[Iterable, msat_term,
                                                        msat_term, msat_term]:
    """Build the transition system and LTL property to check.

    State variables: reals i, r, l and boolean inc_i, each paired with a
    primed (next-state) copy.  Returns (curr2next map, init, trans, ltl).
    """
    assert menv
    assert isinstance(menv, msat_env)
    assert enc
    assert isinstance(enc, LTLEncoder)
    bool_type = msat_get_bool_type(menv)
    real_type = msat_get_rational_type(menv)
    # Current-state constants.
    i = msat_declare_function(menv, "i", real_type)
    i = msat_make_constant(menv, i)
    r = msat_declare_function(menv, "r", real_type)
    r = msat_make_constant(menv, r)
    l = msat_declare_function(menv, "l", real_type)
    l = msat_make_constant(menv, l)
    inc_i = msat_declare_function(menv, "inc_i", bool_type)
    inc_i = msat_make_constant(menv, inc_i)
    # Next-state ("primed") constants.
    x_i = msat_declare_function(menv, name_next("i"), real_type)
    x_i = msat_make_constant(menv, x_i)
    x_r = msat_declare_function(menv, name_next("r"), real_type)
    x_r = msat_make_constant(menv, x_r)
    x_l = msat_declare_function(menv, name_next("l"), real_type)
    x_l = msat_make_constant(menv, x_l)
    x_inc_i = msat_declare_function(menv, name_next("inc_i"), bool_type)
    x_inc_i = msat_make_constant(menv, x_inc_i)
    curr2next = {i: x_i, r: x_r, l: x_l, inc_i: x_inc_i}
    zero = msat_make_number(menv, "0")
    one = msat_make_number(menv, "1")
    # init: 0 < r < l & i >= 0 & !inc_i & l > 0
    r_gt_0 = msat_make_gt(menv, r, zero)
    r_lt_l = msat_make_lt(menv, r, l)
    i_geq_0 = msat_make_geq(menv, i, zero)
    init = msat_make_and(menv, r_gt_0, r_lt_l)
    init = msat_make_and(menv, init,
                         msat_make_and(menv, i_geq_0,
                                       msat_make_not(menv, inc_i)))
    init = msat_make_and(menv, init, msat_make_gt(menv, l, zero))
    # r' = r (r is frozen by the transition relation)
    trans = msat_make_equal(menv, x_r, r)
    # i < l -> ((inc_i' & i' = i + 1) | (!inc_i' & i' = i)) & l' = l
    i_lt_l = msat_make_lt(menv, i, l)
    x_i_eq_i_p_1 = msat_make_and(menv, x_inc_i,
                                 msat_make_equal(menv, x_i,
                                                 msat_make_plus(menv, i, one)))
    x_i_eq_i = msat_make_and(menv, msat_make_not(menv, x_inc_i),
                             msat_make_equal(menv, x_i, i))
    x_i_eq_i_p_1_or_i = msat_make_or(menv, x_i_eq_i_p_1, x_i_eq_i)
    x_l_eq_l = msat_make_equal(menv, x_l, l)
    x_i_eq_i_p_1_or_i_and_x_l_eq_l = msat_make_and(menv, x_i_eq_i_p_1_or_i,
                                                   x_l_eq_l)
    trans = msat_make_and(menv, trans,
                          msat_make_impl(menv, i_lt_l,
                                         x_i_eq_i_p_1_or_i_and_x_l_eq_l))
    # i >= l -> i' = 0 & l' = l + 1 & !inc_i'
    i_geq_l = msat_make_geq(menv, i, l)
    x_i_eq_0 = msat_make_equal(menv, x_i, zero)
    x_l_eq_l_p_1 = msat_make_equal(menv, x_l, msat_make_plus(menv, l, one))
    x_i_eq_0_and_x_l_eq_l_p_1 = msat_make_and(menv,
                                              msat_make_and(menv, x_i_eq_0,
                                                            x_l_eq_l_p_1),
                                              msat_make_not(menv, x_inc_i))
    trans = msat_make_and(menv, trans,
                          msat_make_impl(menv, i_geq_l,
                                         x_i_eq_0_and_x_l_eq_l_p_1))
    # Property: (G F inc_i) -> ! G F r > i
    G_F_x_i_gt_i = enc.make_G(enc.make_F(inc_i))
    r_gt_i = msat_make_gt(menv, r, i)
    n_G_F_r_gt_i = msat_make_not(menv, enc.make_G(enc.make_F(r_gt_i)))
    ltl = msat_make_impl(menv, G_F_x_i_gt_i, n_G_F_r_gt_i)
    return TermMap(curr2next), init, trans, ltl
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
i = mgr.Symbol("i", types.REAL)
r = mgr.Symbol("r", types.REAL)
l = mgr.Symbol("l", types.REAL)
inc_i = mgr.Symbol("inc_i", types.BOOL)
symbs = frozenset([i, r, l, inc_i])
x_i = symb_to_next(mgr, i)
x_r = symb_to_next(mgr, r)
x_l = symb_to_next(mgr, l)
x_inc_i = symb_to_next(mgr, inc_i)
res = []
n0 = mgr.Real(0)
n1 = mgr.Real(1)
loc0 = Location(env, mgr.GE(r, n0), mgr.GE(i, n0),
stutterT=mgr.Equals(x_r, mgr.Plus(r, i)))
loc0.set_progress(1, mgr.Equals(x_r, r))
loc1 = Location(env, mgr.GE(r, n0))
loc1.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
h_r = Hint("h_r3", env, frozenset([r]), symbs)
h_r.set_locs([loc0, loc1])
res.append(h_r)
loc = Location(env, mgr.Not(inc_i))
loc.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc1", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc])
res.append(h_inc)
loc0 = Location(env, mgr.GE(l, n0))
loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
loc1 = Location(env, mgr.GE(l, n0))
loc1.set_progress(0, mgr.Equals(x_l, l))
h_l = Hint("h_l2", env, frozenset([l]), symbs)
h_l.set_locs([loc0, loc1])
res.append(h_l)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i, stutterT=x_inc_i)
loc1.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc3", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1])
res.append(h_inc)
loc0 = Location(env, mgr.GE(r, n0))
loc0.set_progress(1, mgr.Equals(x_r, r))
loc1 = Location(env, mgr.GE(r, n0))
loc1.set_progress(2, mgr.Equals(x_r, mgr.Plus(r, n1)))
loc2 = Location(env, mgr.GE(r, n0))
loc2.set_progress(0, mgr.Equals(x_r, r))
h_r = Hint("h_r4", env, frozenset([r]), symbs)
h_r.set_locs([loc0, loc1, loc2])
res.append(h_r)
stutter = mgr.Equals(x_i, i)
loc = Location(env, mgr.LE(i, n0), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_i, mgr.Minus(i, n1)))
h_i = Hint("h_i1", env, frozenset([i]), symbs)
h_i.set_locs([loc])
res.append(h_i)
loc0 = Location(env, mgr.GE(i, n0), mgr.GE(l, n0),
stutterT=mgr.Equals(x_i, mgr.Plus(i, l)))
loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
loc1 = Location(env, mgr.GE(i, n0))
loc1.set_progress(0, mgr.Equals(x_i, i))
h_i = Hint("h_i3", env, frozenset([i]), symbs)
h_i.set_locs([loc0, loc1])
res.append(h_i)
stutter = mgr.Equals(x_i, i)
loc = Location(env, mgr.GE(i, n0), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_i, mgr.Plus(i, n1)))
h_i = Hint("h_i0", env, frozenset([i]), symbs)
h_i.set_locs([loc])
res.append(h_i)
loc = Location(env, inc_i)
loc.set_progress(0, x_inc_i)
h_inc = Hint("h_inc0", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc])
res.append(h_inc)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i, stutterT=x_inc_i)
loc1.set_progress(2, mgr.Not(x_inc_i))
loc2 = Location(env, mgr.Not(inc_i))
loc2.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc4", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1, loc2])
res.append(h_inc)
loc = Location(env, mgr.LE(l, n0))
loc.set_progress(0, mgr.Equals(x_l, mgr.Minus(l, n1)))
h_l = Hint("h_l1", env, frozenset([l]), symbs)
h_l.set_locs([loc])
res.append(h_l)
loc0 = Location(env, mgr.GE(l, n0), mgr.GE(r, n0),
stutterT=mgr.Equals(x_l, mgr.Plus(l, r)))
loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
loc1 = Location(env, mgr.GE(l, n0))
loc1.set_progress(0, mgr.Equals(x_l, l))
h_l = Hint("h_l3", env, frozenset([l]), symbs)
h_l.set_locs([loc0, loc1])
res.append(h_l)
loc0 = Location(env, mgr.GE(l, n0))
loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
loc1 = Location(env, mgr.GE(l, n0))
loc1.set_progress(2, mgr.Equals(x_l, l))
loc2 = Location(env, mgr.GE(l, n0))
loc2.set_progress(0, mgr.Equals(x_l, l))
h_l = Hint("h_l4", env, frozenset([l]), symbs)
h_l.set_locs([loc0, loc1, loc2])
res.append(h_l)
loc0 = Location(env, mgr.Not(inc_i))
loc0.set_progress(1, x_inc_i)
loc1 = Location(env, inc_i)
loc1.set_progress(0, mgr.Not(x_inc_i))
h_inc = Hint("h_inc2", env, frozenset([inc_i]), symbs)
h_inc.set_locs([loc0, loc1])
res.append(h_inc)
loc0 = Location(env, mgr.GE(i, n0))
loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
loc1 = Location(env, mgr.GE(i, n0))
loc1.set_progress(0, mgr.Equals(x_i, i))
h_i = Hint("h_i2", env, frozenset([i]), symbs)
h_i.set_locs([loc0, loc1])
res.append(h_i)
loc = Location(env, mgr.GE(r, n0))
loc.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
h_r = Hint("h_r0", env, frozenset([r]), symbs)
h_r.set_locs([loc])
res.append(h_r)
loc = Location(env, mgr.GE(l, n0))
loc.set_progress(0, mgr.Equals(x_l, mgr.Plus(l, n1)))
h_l = Hint("h_l0", env, frozenset([l]), symbs)
h_l.set_locs([loc])
res.append(h_l)
loc0 = Location(env, mgr.GE(i, n0))
loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
loc1 = Location(env, mgr.GE(i, n0))
loc1.set_progress(2, mgr.Equals(x_i, i))
loc2 = Location(env, mgr.GE(i, n0))
loc2.set_progress(0, mgr.Equals(x_i, i))
h_i = Hint("h_i4", env, frozenset([i]), symbs)
h_i.set_locs([loc0, loc1, loc2])
res.append(h_i)
return frozenset(res)
| true | true |
f737396bc186d22fcdc75c3467ac9bfc7555f47b | 300 | py | Python | blogcookiecutter/conftest.py | saaddine/blogcookiecutt | 95fbf4e8d1bdfb74abda344d883a4db3c2c88108 | [
"MIT"
] | null | null | null | blogcookiecutter/conftest.py | saaddine/blogcookiecutt | 95fbf4e8d1bdfb74abda344d883a4db3c2c88108 | [
"MIT"
] | 4 | 2022-02-02T05:26:44.000Z | 2022-03-02T05:35:46.000Z | blogcookiecutter/conftest.py | saaddine/blogcookiecutt | 95fbf4e8d1bdfb74abda344d883a4db3c2c88108 | [
"MIT"
] | null | null | null | import pytest
#from blogcookiecutter.users.models import User
#from blogcookiecutter.users.tests.factories import UserFactory
@pytest.fixture(autouse=True)
def media_storage(settings, tmpdir):
    """Point MEDIA_ROOT at a per-test temporary directory.

    Applied automatically to every test (autouse) so uploaded files never
    land in the real media folder.  ``settings`` is presumably the
    pytest-django settings fixture; ``tmpdir`` is pytest's legacy
    temporary-directory fixture.
    """
    settings.MEDIA_ROOT = tmpdir.strpath
@pytest.fixture
def user() -> "User":
    """Return a freshly created ``User`` built by its factory.

    Fix: the module-level imports of ``User``/``UserFactory`` are
    commented out above, so the original annotation and body both raised
    NameError (the annotation at import time, the body on first use).
    The annotation is now a forward-reference string and the factory is
    imported lazily, so the project packages are only required when the
    fixture is actually requested.
    """
    from blogcookiecutter.users.tests.factories import UserFactory

    return UserFactory()
| 20 | 63 | 0.783333 | import pytest
@pytest.fixture(autouse=True)
def media_storage(settings, tmpdir):
settings.MEDIA_ROOT = tmpdir.strpath
@pytest.fixture
def user() -> User:
return UserFactory()
| true | true |
f7373b3ae985e25e1e77bf7a2341a5af803f701e | 1,947 | py | Python | artificial_intelligence/qsvm_kernel_multiclass.py | chunfuchen/qiskit-acqua-tutorials | 74b0bcaac1678fc6c0de5be13e99d7ecd11b3075 | [
"Apache-2.0"
] | null | null | null | artificial_intelligence/qsvm_kernel_multiclass.py | chunfuchen/qiskit-acqua-tutorials | 74b0bcaac1678fc6c0de5be13e99d7ecd11b3075 | [
"Apache-2.0"
] | null | null | null | artificial_intelligence/qsvm_kernel_multiclass.py | chunfuchen/qiskit-acqua-tutorials | 74b0bcaac1678fc6c0de5be13e99d7ecd11b3075 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2018 IBM.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
from datasets import *
from qiskit_aqua.utils import split_dataset_to_data_and_labels
from qiskit_aqua.input import get_input_instance
from qiskit_aqua import run_algorithm
import numpy as np
# Build a 2-dimensional Wine dataset: 40 training and 10 test samples
# per class, without plotting.
n = 2  # dimension of each data point
sample_Total, training_input, test_input, class_labels = Wine(training_size=40,
                                                              test_size=10, n=n, PLOT_DATA=False)
# Flatten the per-class test dictionary into a single array of datapoints
# to be classified.
temp = [test_input[k] for k in test_input]
total_array = np.concatenate(temp)
# Declarative Aqua configuration: quantum-kernel SVM on the QASM
# simulator, using the all-pairs multiclass strategy and a depth-2
# second-order-expansion feature map entangling qubits 0 and 1.
params = {
    'problem': {'name': 'svm_classification', 'random_seed': 10598},
    'algorithm': {
        'name': 'QSVM.Kernel',
    },
    'backend': {'name': 'qasm_simulator', 'shots': 1024},
    # Alternative multiclass strategies, kept for easy experimentation:
    # 'multiclass_extension': {'name': 'OneAgainstRest'},
    'multiclass_extension': {'name': 'AllPairs'},
    # 'multiclass_extension': {'name': 'ErrorCorrectingCode', 'code_size': 5},
    'feature_map': {'name': 'SecondOrderExpansion', 'depth': 2, 'entangler_map': {0: [1]}}
}
# Package the datasets into the input object expected by run_algorithm.
algo_input = get_input_instance('SVMInput')
algo_input.training_dataset = training_input
algo_input.test_dataset = test_input
algo_input.datapoints = total_array
result = run_algorithm(params, algo_input)
print(result)
| 38.176471 | 104 | 0.648177 |
from datasets import *
from qiskit_aqua.utils import split_dataset_to_data_and_labels
from qiskit_aqua.input import get_input_instance
from qiskit_aqua import run_algorithm
import numpy as np
n = 2
sample_Total, training_input, test_input, class_labels = Wine(training_size=40,
test_size=10, n=n, PLOT_DATA=False)
temp = [test_input[k] for k in test_input]
total_array = np.concatenate(temp)
params = {
'problem': {'name': 'svm_classification', 'random_seed': 10598},
'algorithm': {
'name': 'QSVM.Kernel',
},
'backend': {'name': 'qasm_simulator', 'shots': 1024},
'multiclass_extension': {'name': 'AllPairs'},
'feature_map': {'name': 'SecondOrderExpansion', 'depth': 2, 'entangler_map': {0: [1]}}
}
algo_input = get_input_instance('SVMInput')
algo_input.training_dataset = training_input
algo_input.test_dataset = test_input
algo_input.datapoints = total_array
result = run_algorithm(params, algo_input)
print(result)
| true | true |
f7373bf86a03f8bad6b826d01fd916279dff7f92 | 731 | py | Python | core/etl/loader/administrativedomain.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 84 | 2017-10-22T11:01:39.000Z | 2022-02-27T03:43:48.000Z | core/etl/loader/administrativedomain.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 22 | 2017-12-11T07:21:56.000Z | 2021-09-23T02:53:50.000Z | core/etl/loader/administrativedomain.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 23 | 2017-12-06T06:59:52.000Z | 2022-02-24T00:02:25.000Z | # ----------------------------------------------------------------------
# Administrative Domain loader
# ----------------------------------------------------------------------
# Copyright (C) 2007-2015 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# NOC modules
from .base import BaseLoader
from ..models.administrativedomain import AdministrativeDomain
from noc.sa.models.administrativedomain import AdministrativeDomain as AdministrativeDomainModel
class AdminitstrativeDomainLoader(BaseLoader):
    """
    Administrative Domain loader.

    ETL loader binding the extracted ``AdministrativeDomain`` data model
    to the NOC ``AdministrativeDomain`` model.

    NOTE(review): the class name carries a typo ("Adminitstrative"); it
    is kept as-is because the loader may be referenced by class name
    elsewhere -- confirm before renaming.
    """
    # Registry key used to select this loader.
    name = "administrativedomain"
    # Target NOC model the records are written to.
    model = AdministrativeDomainModel
    # ETL data model describing the extracted record fields.
    data_model = AdministrativeDomain
| 33.227273 | 96 | 0.556772 |
from .base import BaseLoader
from ..models.administrativedomain import AdministrativeDomain
from noc.sa.models.administrativedomain import AdministrativeDomain as AdministrativeDomainModel
class AdminitstrativeDomainLoader(BaseLoader):
name = "administrativedomain"
model = AdministrativeDomainModel
data_model = AdministrativeDomain
| true | true |
f7373c5b1e66a26a8bb8d95c6e73d3dc64396078 | 11,973 | py | Python | scripts/parse_weather.py | danielbee/PracticalIntroDataSci | feecd7d1b18ba44fb3ea59d7709c2ff493c0c79f | [
"Apache-2.0"
] | null | null | null | scripts/parse_weather.py | danielbee/PracticalIntroDataSci | feecd7d1b18ba44fb3ea59d7709c2ff493c0c79f | [
"Apache-2.0"
] | null | null | null | scripts/parse_weather.py | danielbee/PracticalIntroDataSci | feecd7d1b18ba44fb3ea59d7709c2ff493c0c79f | [
"Apache-2.0"
] | null | null | null | # The purpose of this script is to collect all the station data into a single data structure.
# This will require regular expressions to find things like station changes.
#the hope is that we can simply export this single data structure to a single file is whatever format we want.
# Need to figure out how to deal with 'null' values.
import re
import pandas as pd
def main():
    """Collect every station's monthly records into one DataFrame and
    export it as xlsx/csv/txt.

    Reads the station list from ``../data/weather/stations.txt`` and each
    station's raw report from ``../data/weather/stations/<name>.txt``.
    The combined frame is indexed by (station, yyyy, mm).

    Fix: the per-station files were read via ``open(...).read()`` without
    ever being closed; they are now opened with a context manager.
    """
    data_path = '../data/weather/'
    station_dir = data_path + 'stations/'
    with open(data_path + 'stations.txt') as f:
        stations = f.read().splitlines()
    frames = []
    stationDataRaw = {}  # raw report lines per station, kept for inspection
    for station in stations:
        print(station)  # progress indicator
        # Context manager closes the handle (the original leaked one per station).
        with open(station_dir + station + '.txt') as station_file:
            stationDataRaw[station] = station_file.read().splitlines()
        station_frame = getDataFrame(stationDataRaw[station])
        # Extract height above sea level, longitude/latitude and site changes.
        station_frame = getDataExtras(stationDataRaw[station], station_frame)
        # Add a column for the station ...
        station_frame['station'] = station
        # ... and make it the most significant level of the MultiIndex.
        station_frame.set_index(['station', station_frame.index], inplace=True)
        frames.append(station_frame)
    # Combine all the per-station frames and export in the desired formats.
    stationsData = pd.concat(frames)
    stationsData.to_excel(data_path + 'stationData.xlsx')
    stationsData.to_csv(data_path + 'stationData.csv')
    stationsData.to_string(data_path + 'stationData.txt')
# Marker for the start of the data table: a line containing "yyyy"
# preceded by three whitespace characters (bit of an assumption about
# the report layout).
tableStart = re.compile('\s{3}yyyy')
# A run of word characters -- used to pull unit labels from the units row.
reWord = re.compile('\w+')
# A numeric token (digits and dots) -- matches both integers and decimals.
reNum = re.compile('[0-9.]+')
def getDataFrame(raw):
    """Parse one station report (a list of text lines) into a DataFrame.

    The table is located by the first line matching three whitespace
    characters followed by ``yyyy``; the following non-empty line is the
    units row and everything after it holds the monthly records.  Column
    boundaries are recovered from the header-word positions, cell values
    are extracted with a numeric regex and missing cells become None
    (NaN after the float cast).

    Returns a DataFrame indexed by (yyyy, mm) with int index levels and
    float columns tmax/tmin/af/rain/sun.

    Raises ValueError if no table header is found (the original code
    died with an unhelpful NameError in that case).
    """
    # Patterns are compiled locally so the parser is self-contained;
    # equivalent module-level copies exist for historical reasons.
    table_start = re.compile(r'\s{3}yyyy')
    word_re = re.compile(r'\w+')
    num_re = re.compile(r'[0-9.]+')
    table_start_line = None
    for line_no, line in enumerate(raw):
        if table_start.search(line):
            table_start_line = line_no
            break  # stop at the first header line
    if table_start_line is None:
        raise ValueError('no table header ("   yyyy ...") found in station data')
    # Drop empty lines; table[0] is the header, table[1] the units row.
    table = list(filter(None, raw[table_start_line:]))
    headers = table[0].split()
    # A value belongs to the span between the previous header's end
    # column and this header's end column.
    header_cols = [re.search(header, table[0]) for header in headers]
    # Units row -- parsed for completeness but not used further.
    prev_end = 0
    units = {}
    for col_idx, col in enumerate(header_cols):
        units[headers[col_idx]] = word_re.findall(table[1], prev_end, col.end())
        prev_end = col.end()
    records = []
    for row in table[2:]:
        prev_end = 0
        record = {}
        for col_idx, col in enumerate(header_cols):
            found = num_re.findall(row, prev_end, col.end())
            record[headers[col_idx]] = found[0] if found else None
            prev_end = col.end()
        if record['yyyy'] is not None:  # skip rows with no year cell
            records.append(record)
    df = pd.DataFrame.from_dict(records)
    df[['yyyy', 'mm']] = df[['yyyy', 'mm']].astype(int)
    # Measurement columns become floats (None -> NaN).
    df[['tmax', 'tmin', 'af', 'rain', 'sun']] = \
        df[['tmax', 'tmin', 'af', 'rain', 'sun']].astype(float)
    df.set_index(['yyyy', 'mm'], inplace=True)
    return df
import math
def getDataExtras(raw, df):
    """Parse station metadata from the report header and stamp it onto *df*.

    Inspects only the first 20 lines of *raw*, extracting for each OS grid
    reference ("1234E 5678N") found there: height above mean sea level,
    lat/long (scraped online via getLatLong when not printed in the
    header) and, when the station moved site, the years of the change.

    Every row of *df* currently receives the FIRST site's metadata -- the
    site-change years are extracted and printed but not yet applied (see
    NOTE below).  Returns the augmented DataFrame (same object as *df*).

    Fix: ~90 lines of unreachable draft code that sat after the return
    statement (started per-site year-range assignment, never finished)
    have been removed; reachable behavior is unchanged.
    """
    topRaw = '\n'.join(raw[0:20])
    # Two grid references in the header indicate a site change.
    gridRef = re.findall(r'\d+E \d+N', topRaw)  # assumes >= 1 match -- TODO confirm
    asml = []          # height above mean sea level, one entry per site
    latlon = []        # (lat, long) string pairs, one entry per site
    lowerYr = []       # site-change interval bounds (entries may be None)
    upperYrMonth = []
    upperYr = []
    ## Extract features from every header line mentioning a grid ref.
    for line in raw[0:20]:
        if re.search(gridRef[0], line):
            print(line)
            if len(gridRef) > 1:
                # e.g. "... 1959 to 1974" or "... after Sept 1998"
                yearSearch = re.search(
                    r'([1-2][7-9,0][0-9]{2})?\s+(\bfrom\b|\bafter\b|\bto\b|\buntil\b)'
                    r'\s+([a-zA-Z]*)\s*([1-2][7-9,0][0-9]{2})', line)
                if yearSearch:
                    lowerYr.append(yearSearch.group(1))
                    upperYrMonth.append(yearSearch.group(3))
                    upperYr.append(yearSearch.group(4))
                    print('from {} to {} {}'.format(lowerYr[0], upperYrMonth[0], upperYr[0]))
            asml.append(re.search(r'(\d+)\s*m\w*\samsl', line).group(1))
            latlonSearch = re.search(r'lat\s*(-*\d+\.\d+) lon\s*(-*\d+\.\d+)',
                                     str.lower(line))
            if latlonSearch:
                latlon.append((latlonSearch.group(1), latlonSearch.group(2)))
            else:
                # No explicit lat/lon: convert the grid reference online.
                latlon.append(getLatLong(gridRef[0]))
        if len(gridRef) > 1:
            # Same extraction for the second (post-move) site line.
            if re.search(gridRef[1], line):
                print(line)
                yearSearch = re.search(
                    r'([1-2][7-9,0][0-9]{2})?\s+(\bfrom\b|\bafter\b|\bto\b)'
                    r'\s+([a-zA-Z]*)\s*([1-2][7-9,0][0-9]{2})', line)
                if yearSearch:
                    lowerYr.append(yearSearch.group(1))
                    upperYrMonth.append(yearSearch.group(3))
                    upperYr.append(yearSearch.group(4))
                    print('from {} to {} {}'.format(lowerYr[-1], upperYrMonth[-1], upperYr[-1]))
                asml.append(re.search(r'(\d+)\s*m\w*\samsl', line).group(1))
                latlonSearch = re.search(r'lat\s*(-*\d+\.\d+) lon\s*(-*\d+\.\d+)',
                                         str.lower(line))
                if latlonSearch:
                    latlon.append((latlonSearch.group(1), latlonSearch.group(2)))
                else:
                    latlon.append(getLatLong(gridRef[0]))
    ## Apply features to the frame.
    # NOTE(review): this deliberately stamps the FIRST site's metadata on
    # every row -- "yyyy > 0" matches everything.  Applying per-site
    # metadata by year range (important e.g. for frost days when the new
    # site is higher) is still TODO.
    extra_df = setExtrasInDf(df,
                             df_filter=df.index.get_level_values('yyyy') > 0,
                             asml=asml[0], lat=latlon[0][0], long=latlon[0][1],
                             gridRef=gridRef[0])
    with open('dfL.txt', 'a') as f:  # debug dump, appended on every run
        print(extra_df.to_string(), file=f)
    return extra_df
def setExtrasInDf(df, df_filter, asml, lat, long, gridRef):
    """Stamp station metadata onto the rows of *df* selected by *df_filter*.

    Adds/overwrites the columns 'asml' (int), 'lat'/'long' (float) and
    'gridRef' (str) on the selected rows; unselected rows of a newly
    created column are left as NaN.  Mutates *df* in place and returns it.
    """
    for column, value in (
        ('asml', int(asml)),
        ('lat', float(lat)),
        ('long', float(long)),
        ('gridRef', str(gridRef)),
    ):
        df.loc[df_filter, column] = value
    return df
def getLatLong(gridRef):
    """Resolve an OS grid reference to a (lat, long) pair of strings by
    scraping nearby.org.uk's coordinate converter.

    Fixes: the HTTP request now carries a timeout so a dead host cannot
    hang the whole pipeline, and a missing "Decimal: <B>...</B>" marker
    raises a descriptive ValueError instead of an AttributeError on None.
    """
    import requests
    page = requests.get('http://www.nearby.org.uk/coord.cgi?p=' + gridRef + '&f=conv',
                        timeout=30)
    pageSearch = re.search(r'Decimal: <B>(-*\d+\.\d+) (-*\d+\.\d+)</B>', page.text)
    if pageSearch is None:
        raise ValueError('could not find decimal coordinates for grid ref %r'
                         % gridRef)
    return (pageSearch.group(1), pageSearch.group(2))
main() | 48.278226 | 155 | 0.542805 |
import re
import pandas as pd
def main():
dataPath = '../data/weather/'
dataStationPath = dataPath+'stations/'
with open(dataPath+'stations.txt') as f:
stations = f.read().splitlines()
bigFrame = []
stationDataRaw = {}
for station in stations:
print(station)
stationDataRaw[station]= open(dataStationPath+station+'.txt').read().splitlines()
stationFrame = getDataFrame(stationDataRaw[station])
stationFrame = getDataExtras(stationDataRaw[station],stationFrame)
stationFrame['station'] = station
stationFrame.set_index(['station', stationFrame.index],inplace=True)
bigFrame.append(stationFrame)
stationsData = pd.concat(bigFrame)
stationsData.to_excel(dataPath+'stationData.xlsx')
stationsData.to_csv(dataPath+'stationData.csv')
stationsData.to_string(dataPath+'stationData.txt')
tableStart = re.compile('\s{3}yyyy')
reWord = re.compile('\w+')
reNum = re.compile('[0-9.]+')
def getDataFrame(raw):
for ln,line in enumerate(raw):
if re.search(tableStart,line):
tableStartLine = ln
break
table = raw[tableStartLine:]
table = list(filter(None, table))
headers= table[0].split()
prevEnd = 0
units = {}
headerCols = [re.search(header,table[0]) for header in headers]
for colI,col in enumerate(headerCols):
units[headers[colI]] = reWord.findall(table[1],prevEnd,col.end())
prevEnd = col.end()
records = []
for row in table[2:]:
prevEnd = 0
record = {}
for colI,col in enumerate(headerCols):
res= reNum.findall(row,prevEnd,col.end())
record[headers[colI]] = res[0] if res else None
prevEnd = col.end()
if record['yyyy'] != None:
records.append(record)
df = pd.DataFrame.from_dict(records)
df[['yyyy','mm']] = df[['yyyy','mm']].astype(int)
df[['tmax','tmin','af','rain','sun']] = df[['tmax','tmin','af','rain','sun']].astype(float)
df.set_index(['yyyy', 'mm'],inplace=True)
return df
import math
def getDataExtras(raw,df):
topRaw = '\n'.join(raw[0:20])
gridRef = re.findall(r'\d+E \d+N',topRaw)
asml=[]
latlon=[]
lowerYr=[]
upperYrMonth=[]
upperYr=[]
aw[0:20]:
if re.search(gridRef[0],line):
print(line)
if len(gridRef) > 1 :
yearSearch = re.search(r'([1-2][7-9,0][0-9]{2})?\s+(\bfrom\b|\bafter\b|\bto\b|\buntil\b)\s+([a-zA-Z]*)\s*([1-2][7-9,0][0-9]{2})',line)
if yearSearch:
lowerYr.append(yearSearch.group(1))
upperYrMonth.append(yearSearch.group(3))
upperYr.append(yearSearch.group(4))
print('from {} to {} {}'.format(lowerYr[0],upperYrMonth[0],upperYr[0]))
asml.append(re.search(r'(\d+)\s*m\w*\samsl',line).group(1))
latlonSearch = re.search(r'lat\s*(-*\d+\.\d+) lon\s*(-*\d+\.\d+)',str.lower(line))
if latlonSearch:
latlon.append((latlonSearch.group(1),latlonSearch.group(2)))
else:
latlon.append(getLatLong(gridRef[0]))
if len(gridRef) > 1 :
if re.search(gridRef[1],line):
print(line)
yearSearch = re.search(r'([1-2][7-9,0][0-9]{2})?\s+(\bfrom\b|\bafter\b|\bto\b)\s+([a-zA-Z]*)\s*([1-2][7-9,0][0-9]{2})',line)
if yearSearch:
lowerYr.append(yearSearch.group(1))
upperYrMonth.append(yearSearch.group(3))
upperYr.append(yearSearch.group(4))
print('from {} to {} {}'.format(lowerYr[-1],upperYrMonth[-1],upperYr[-1]))
asml.append(re.search(r'(\d+)\s*m\w*\samsl',line).group(1))
latlonSearch = re.search(r'lat\s*(-*\d+\.\d+) lon\s*(-*\d+\.\d+)',str.lower(line))
if latlonSearch:
latlon.append((latlonSearch.group(1),latlonSearch.group(2)))
else:
latlon.append(getLatLong(gridRef[0]))
setExtrasInDf(df,
df_filter= df.index.get_level_values('yyyy') > 0,
asml=asml[0], lat=latlon[0][0],long=latlon[0][1],gridRef=gridRef[0]
)
with open('dfL.txt','a') as f:
print(extra_df.to_string(), file=f)
return extra_df
if len(gridRef) >1:
tempTypeDf = df.reset_index()
if len(lowerYr) >0 and len(upperYr) >0:
print('lower: {} \t upper: {} \t month {}'.format(lowerYr,upperYr,upperYrMonth))
if len(lowerYr) == 1:
tempTypeDf = setExtrasInDf(tempTypeDf,
df_filter= tempTypeDf['yyyy']<int(upperYr[0]),
asml=asml[0], lat=latlon[0][0],long=latlon[0][1],gridRef=gridRef[0]
)
tempTypeDf = setExtrasInDf(tempTypeDf,
df_filter=tempTypeDf['yyyy']>=int(upperYr[0]),
asml=asml[1], lat=latlon[1][0],long=latlon[1][1],gridRef=gridRef[1]
)
else :
print('unable to aquire site change year. Will dump other grid refs of {} and keep only {}.'.format(gridRef[1:],gridRef[0]))
if len(upperYr) >0 :
with open('df.txt','a') as f:
print(tempTypeDf.to_string(), file=f)
def setExtrasInDf(df, df_filter, asml, lat, long, gridRef):
df.loc[df_filter,'asml'] = int(asml)
df.loc[df_filter,'lat'] = float(lat)
df.loc[df_filter,'long'] = float(long)
df.loc[df_filter,'gridRef'] = str(gridRef)
return df
def getLatLong(gridRef):
import requests
page = requests.get('http://www.nearby.org.uk/coord.cgi?p='+gridRef+'&f=conv')
pageSearch = re.search(r'Decimal: <B>(-*\d+\.\d+) (-*\d+\.\d+)</B>',page.text)
return (pageSearch.group(1),pageSearch.group(2))
main() | true | true |
f7373e258f6d142eea5a5fc642e8f1ef4d215a05 | 483 | py | Python | ethgreen/wallet/cc_wallet/cc_info.py | ethgreen/ethgreen-blockchain | 8f1a450897ab7a82326aea7e57e18ac2c03a9e83 | [
"Apache-2.0"
] | 11 | 2021-11-10T19:30:12.000Z | 2022-02-09T04:30:29.000Z | ethgreen/wallet/cc_wallet/cc_info.py | ethgreen/ethgreen-blockchain | 8f1a450897ab7a82326aea7e57e18ac2c03a9e83 | [
"Apache-2.0"
] | 6 | 2021-11-16T17:11:03.000Z | 2021-12-28T17:11:20.000Z | ethgreen/wallet/cc_wallet/cc_info.py | ethgreen/ethgreen-blockchain | 8f1a450897ab7a82326aea7e57e18ac2c03a9e83 | [
"Apache-2.0"
] | 3 | 2021-11-21T02:27:10.000Z | 2022-03-15T08:34:47.000Z | from dataclasses import dataclass
from typing import List, Optional, Tuple
from ethgreen.types.blockchain_format.program import Program
from ethgreen.types.blockchain_format.sized_bytes import bytes32
from ethgreen.util.streamable import Streamable, streamable
@dataclass(frozen=True)
@streamable
class CCInfo(Streamable):
my_genesis_checker: Optional[Program] # this is the program
lineage_proofs: List[Tuple[bytes32, Optional[Program]]] # {coin.name(): lineage_proof}
| 34.5 | 91 | 0.811594 | from dataclasses import dataclass
from typing import List, Optional, Tuple
from ethgreen.types.blockchain_format.program import Program
from ethgreen.types.blockchain_format.sized_bytes import bytes32
from ethgreen.util.streamable import Streamable, streamable
@dataclass(frozen=True)
@streamable
class CCInfo(Streamable):
my_genesis_checker: Optional[Program]
lineage_proofs: List[Tuple[bytes32, Optional[Program]]]
| true | true |
f7373ec8bfcd49ac886a396c63a0eefc6d457cd9 | 3,295 | py | Python | system_hotkey/util.py | timeyyy/system_wide_hotkeys | 37073bb04bdf78f8d3af909a9da3d3c30c7b8854 | [
"BSD-3-Clause"
] | 35 | 2016-01-12T13:58:54.000Z | 2022-03-21T05:39:55.000Z | system_hotkey/util.py | timeyyy/system_wide_hotkeys | 37073bb04bdf78f8d3af909a9da3d3c30c7b8854 | [
"BSD-3-Clause"
] | 23 | 2016-02-26T17:32:15.000Z | 2022-03-09T04:01:44.000Z | system_hotkey/util.py | timeyyy/system_wide_hotkeys | 37073bb04bdf78f8d3af909a9da3d3c30c7b8854 | [
"BSD-3-Clause"
] | 20 | 2015-08-30T12:56:01.000Z | 2022-01-22T05:42:41.000Z | '''
system_hotkey.util
general utilites..
'''
import _thread as thread
import threading
from queue import Queue
import queue
from functools import wraps
import time
def unique_int(values):
'''
returns the first lowest integer
that is not in the sequence passed in
if a list looks like 3,6
of the first call will return 1, and then 2
and then 4 etc
'''
last = 0
for num in values:
if last not in values:
break
else:
last += 1
return last
class ExceptionSerializer():
def __init__(self):
self.queue = queue.Queue()
def catch_and_raise(self, func, timeout=0.5):
'''
wait for a function to finish and raise any errors'''
self.wait_event(func, timeout)
self._check_for_errors(func)
def mark_done(self, function):
'''Wrap functions so that we can monitor when they are done'''
self.init_wrap(function)
@wraps(function)
def decorator(*args, **kwargs):
# Function has started running
self.clear_event(function)
try:
results = function(*args, **kwargs)
except Exception as err:
self.queue.put(err)
else:
return results
finally:
# Function has finished running
self.set_event(function)
return decorator
def put(self, x):
self.queue.put(x)
def init_wrap(self, func):
name = self._make_event_name(func)
event = threading.Event()
setattr(self, name, event)
def _check_for_errors(self, func):
try:
error = self.queue.get(block=False)
except queue.Empty:
pass
else:
raise error
def _make_event_name(self, func):
return '_event_' + func.__name__
def get_event(self, func):
return getattr(self, self._make_event_name(func))
def set_event(self, func):
self.get_event(func).set()
def clear_event(self, func):
self.get_event(func).clear()
def wait_event(self, func, *args):
self.get_event(func).wait(*args)
class CallSerializer():
def __init__(self):
self.queue = Queue()
thread.start_new_thread(self.call_functions, (),)
self.bug_catcher = ExceptionSerializer()
def call_functions(self):
while 1:
func, args, kwargs = self.queue.get(block=True)
func(*args, **kwargs)
def serialize_call(self, timeout=0.5):
'''
a call to a function decorated will not have
overlapping calls, i.e thread safe
'''
def state(function):
@wraps(function)
def decorator(*args, **kwargs):
# Function will let us know when it is done running
# This is done so we can catch exceptions raised
# in functions that are run within threads
mark_func = self.bug_catcher.mark_done(function)
self.queue.put((mark_func, args, kwargs))
# wait for the function to finish and raise errors
self.bug_catcher.catch_and_raise(function, timeout)
return decorator
return state
| 27.008197 | 70 | 0.584825 | import _thread as thread
import threading
from queue import Queue
import queue
from functools import wraps
import time
def unique_int(values):
last = 0
for num in values:
if last not in values:
break
else:
last += 1
return last
class ExceptionSerializer():
def __init__(self):
self.queue = queue.Queue()
def catch_and_raise(self, func, timeout=0.5):
self.wait_event(func, timeout)
self._check_for_errors(func)
def mark_done(self, function):
self.init_wrap(function)
@wraps(function)
def decorator(*args, **kwargs):
self.clear_event(function)
try:
results = function(*args, **kwargs)
except Exception as err:
self.queue.put(err)
else:
return results
finally:
self.set_event(function)
return decorator
def put(self, x):
self.queue.put(x)
def init_wrap(self, func):
name = self._make_event_name(func)
event = threading.Event()
setattr(self, name, event)
def _check_for_errors(self, func):
try:
error = self.queue.get(block=False)
except queue.Empty:
pass
else:
raise error
def _make_event_name(self, func):
return '_event_' + func.__name__
def get_event(self, func):
return getattr(self, self._make_event_name(func))
def set_event(self, func):
self.get_event(func).set()
def clear_event(self, func):
self.get_event(func).clear()
def wait_event(self, func, *args):
self.get_event(func).wait(*args)
class CallSerializer():
def __init__(self):
self.queue = Queue()
thread.start_new_thread(self.call_functions, (),)
self.bug_catcher = ExceptionSerializer()
def call_functions(self):
while 1:
func, args, kwargs = self.queue.get(block=True)
func(*args, **kwargs)
def serialize_call(self, timeout=0.5):
def state(function):
@wraps(function)
def decorator(*args, **kwargs):
mark_func = self.bug_catcher.mark_done(function)
self.queue.put((mark_func, args, kwargs))
self.bug_catcher.catch_and_raise(function, timeout)
return decorator
return state
| true | true |
f7373fcb933e047ac9a8d19a7111d3bc61c11d23 | 7,346 | py | Python | data/dataloader.py | 51N84D/Virtual-Try-On | 3b3d4f6066885446e2a6eadb6c2668237e62e03b | [
"MIT"
] | 6 | 2020-11-19T06:00:47.000Z | 2022-01-07T13:44:11.000Z | data/dataloader.py | 51N84D/Virtual-Try-On | 3b3d4f6066885446e2a6eadb6c2668237e62e03b | [
"MIT"
] | 5 | 2020-09-13T03:22:38.000Z | 2022-02-21T09:01:14.000Z | data/dataloader.py | 51N84D/Virtual-Try-On | 3b3d4f6066885446e2a6eadb6c2668237e62e03b | [
"MIT"
] | 4 | 2020-07-21T09:13:48.000Z | 2020-11-19T06:02:20.000Z | # coding=utf-8
import torch
import torch.utils.data as data
import torchvision.transforms as transforms
from torch.utils.data import DataLoader, Dataset
from PIL import Image
from PIL import ImageDraw
from addict import Dict
import os.path as osp
import numpy as np
import argparse
import matplotlib.pyplot as plt
import sys
import cv2
import json
class CPDataset(data.Dataset):
def __init__(self, opt):
super(CPDataset, self).__init__()
# base setting
self.opt = opt
self.dataroot = opt.data.files.base
if opt.model.is_train:
self.datamode = "train"
self.data_list = opt.data.files.train
else:
self.datamode = "test"
self.data_list = opt.data.files.test
print(self.data_list)
self.fine_height = opt.data.transforms.height
self.fine_width = opt.data.transforms.width
self.radius = opt.data.transforms.radius
self.data_path = osp.join(self.dataroot, self.datamode)
self.transform = transforms.Compose(
[transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,))]
)
# load data list
im_names = []
c_names = []
with open(osp.join(self.dataroot, self.data_list), "r") as f:
print(f)
for line in f.readlines():
im_name, c_name = line.strip().split()
im_names.append(im_name)
c_names.append(c_name)
self.im_names = im_names
self.c_names = c_names
def name(self):
return "CPDataset"
def __getitem__(self, index):
c_name = self.c_names[index]
im_name = self.im_names[index]
# cloth image & cloth mask
c = Image.open(osp.join(self.data_path, "cloth", c_name))
#c.show()
cm = Image.open(osp.join(self.data_path, "cloth-mask", c_name))
c = self.transform(c) # [-1,1]
cm_array = np.array(cm)
cm_array = (cm_array >= 128).astype(np.float32)
cm = torch.from_numpy(cm_array) # [0,1]
cm.unsqueeze_(0)
# person image
im = Image.open(osp.join(self.data_path, "image", im_name))
im = self.transform(im) # [-1,1]
# load parsing image
parse_name = im_name.replace(".jpg", ".png")
im_parse = Image.open(osp.join(self.data_path, "image-parse", parse_name))
parse_array = np.array(im_parse)
# -------Find segmentation class labels manually
#Image1 = Image.open(osp.join(self.data_path, 'image-parse', parse_name))
Image2 = Image.open(osp.join(self.data_path, "image", im_name))
#plt.imshow(Image1)
#plt.imshow(parse_array, alpha=0.5)
#plt.imshow(Image2)
#plt.colorbar()
#plt.show()
# shirt = 126, pants = 59
# hair = 76, face = 29
# ------End
parse_shape = (parse_array > 0).astype(np.float32)
parse_cloth = (parse_array == 126).astype(np.float32)
# get cropped top img
source = Image.open(osp.join(self.data_path, "image", im_name))
mask = Image.fromarray(np.uint8(255 * parse_cloth)).convert("L")
blankImg = Image.new("RGB", (self.fine_height, self.fine_width), (255, 255, 255))
imgCropped = Image.composite(source, blankImg, mask)
#imgCropped.show()
#mask.show()
imgCropped = self.transform(imgCropped) # [-1,1]
# shape downsample
parse_shape = Image.fromarray((parse_shape * 255).astype(np.uint8))
parse_shape = parse_shape.resize(
(self.fine_width // 16, self.fine_height // 16), Image.BILINEAR
)
parse_shape = parse_shape.resize((self.fine_width, self.fine_height), Image.BILINEAR)
shape = self.transform(parse_shape) # [-1,1]
pcm = torch.from_numpy(parse_cloth) # [0,1]
#plt.imshow(pcm)
#plt.show()
# clean up
im_c = im * pcm + (1 - pcm) # [-1,1], fill 1 for other parts
pcm = pcm.unsqueeze_(0)
#-----pose
pose_name = im_name.replace('.jpg', '_keypoints.json')
with open(osp.join(self.data_path, 'pose', pose_name), 'r') as f:
pose_label = json.load(f)
pose_data = pose_label['people'][0]['pose_keypoints']
pose_data = np.array(pose_data)
pose_data = pose_data.reshape((-1,3))
point_num = pose_data.shape[0]
pose_map = torch.zeros(point_num, self.fine_height, self.fine_width)
r = self.radius
im_pose = Image.new('L', (self.fine_width, self.fine_height))
pose_draw = ImageDraw.Draw(im_pose)
for i in range(point_num):
one_map = Image.new('L', (self.fine_width, self.fine_height))
draw = ImageDraw.Draw(one_map)
pointx = pose_data[i,0]
pointy = pose_data[i,1]
if pointx > 1 and pointy > 1:
draw.ellipse((pointx-r, pointy-r, pointx+r, pointy+r), 'white', 'white')
pose_draw.ellipse((pointx-r, pointy-r, pointx+r, pointy+r), 'white', 'white')
#plt.imshow(one_map, cmap='jet', alpha=.9)
#plt.show()
one_map = self.transform(one_map) #[-1,1]
pose_map[i] = one_map[0]
#plt.imshow(im_pose, cmap='jet', alpha=0.5)
#plt.show()
#for i in range(18):
# show_ = np.squeeze(pose_map[i])
# plt.imshow(Image2)
# plt.imshow(show_, cmap="jet", alpha=.5)
# plt.show()
#just for visualization
im_pose = self.transform(im_pose)
result = {
"c_name": c_name, # for visualization
"im_name": im_name, # for visualization or ground truth
"pose_image": im_pose, #visualize pose, can overlay with image for better visualization
"pose": pose_map, #for input
"cloth": c, # for input
"cloth_mask": cm, # for input
"image": imgCropped, # for visualization
"parse_cloth": pcm, # was im_c # for ground truth
"shape": shape, # for visualization
}
return Dict(result)
def __len__(self):
return len(self.im_names)
class CPDataLoader(object):
def __init__(self, opt, dataset):
super(CPDataLoader, self).__init__()
if opt.data.loaders.shuffle:
train_sampler = torch.utils.data.sampler.RandomSampler(dataset)
else:
train_sampler = None
self.data_loader = torch.utils.data.DataLoader(
dataset,
batch_size=opt.data.loaders.batch_size,
shuffle=(train_sampler is None),
num_workers=opt.data.loaders.num_workers,
pin_memory=True,
sampler=train_sampler,
)
self.dataset = dataset
self.data_iter = self.data_loader.__iter__()
def next_batch(self):
try:
batch = self.data_iter.__next__()
except StopIteration:
self.data_iter = self.data_loader.__iter__()
batch = self.data_iter.__next__()
return batch
def get_loader(opts):
return DataLoader(
CPDataset(opts),
batch_size=opts.data.loaders.get("batch_size", 4),
shuffle=True,
num_workers=opts.data.loaders.get("num_workers", 8),
)
| 32.941704 | 99 | 0.58508 |
import torch
import torch.utils.data as data
import torchvision.transforms as transforms
from torch.utils.data import DataLoader, Dataset
from PIL import Image
from PIL import ImageDraw
from addict import Dict
import os.path as osp
import numpy as np
import argparse
import matplotlib.pyplot as plt
import sys
import cv2
import json
class CPDataset(data.Dataset):
def __init__(self, opt):
super(CPDataset, self).__init__()
self.opt = opt
self.dataroot = opt.data.files.base
if opt.model.is_train:
self.datamode = "train"
self.data_list = opt.data.files.train
else:
self.datamode = "test"
self.data_list = opt.data.files.test
print(self.data_list)
self.fine_height = opt.data.transforms.height
self.fine_width = opt.data.transforms.width
self.radius = opt.data.transforms.radius
self.data_path = osp.join(self.dataroot, self.datamode)
self.transform = transforms.Compose(
[transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,))]
)
im_names = []
c_names = []
with open(osp.join(self.dataroot, self.data_list), "r") as f:
print(f)
for line in f.readlines():
im_name, c_name = line.strip().split()
im_names.append(im_name)
c_names.append(c_name)
self.im_names = im_names
self.c_names = c_names
def name(self):
return "CPDataset"
def __getitem__(self, index):
c_name = self.c_names[index]
im_name = self.im_names[index]
c = Image.open(osp.join(self.data_path, "cloth", c_name))
cm = Image.open(osp.join(self.data_path, "cloth-mask", c_name))
c = self.transform(c)
cm_array = np.array(cm)
cm_array = (cm_array >= 128).astype(np.float32)
cm = torch.from_numpy(cm_array)
cm.unsqueeze_(0)
im = Image.open(osp.join(self.data_path, "image", im_name))
im = self.transform(im)
parse_name = im_name.replace(".jpg", ".png")
im_parse = Image.open(osp.join(self.data_path, "image-parse", parse_name))
parse_array = np.array(im_parse)
Image2 = Image.open(osp.join(self.data_path, "image", im_name))
parse_shape = (parse_array > 0).astype(np.float32)
parse_cloth = (parse_array == 126).astype(np.float32)
source = Image.open(osp.join(self.data_path, "image", im_name))
mask = Image.fromarray(np.uint8(255 * parse_cloth)).convert("L")
blankImg = Image.new("RGB", (self.fine_height, self.fine_width), (255, 255, 255))
imgCropped = Image.composite(source, blankImg, mask)
imgCropped = self.transform(imgCropped)
parse_shape = Image.fromarray((parse_shape * 255).astype(np.uint8))
parse_shape = parse_shape.resize(
(self.fine_width // 16, self.fine_height // 16), Image.BILINEAR
)
parse_shape = parse_shape.resize((self.fine_width, self.fine_height), Image.BILINEAR)
shape = self.transform(parse_shape)
pcm = torch.from_numpy(parse_cloth)
im_c = im * pcm + (1 - pcm)
pcm = pcm.unsqueeze_(0)
pose_name = im_name.replace('.jpg', '_keypoints.json')
with open(osp.join(self.data_path, 'pose', pose_name), 'r') as f:
pose_label = json.load(f)
pose_data = pose_label['people'][0]['pose_keypoints']
pose_data = np.array(pose_data)
pose_data = pose_data.reshape((-1,3))
point_num = pose_data.shape[0]
pose_map = torch.zeros(point_num, self.fine_height, self.fine_width)
r = self.radius
im_pose = Image.new('L', (self.fine_width, self.fine_height))
pose_draw = ImageDraw.Draw(im_pose)
for i in range(point_num):
one_map = Image.new('L', (self.fine_width, self.fine_height))
draw = ImageDraw.Draw(one_map)
pointx = pose_data[i,0]
pointy = pose_data[i,1]
if pointx > 1 and pointy > 1:
draw.ellipse((pointx-r, pointy-r, pointx+r, pointy+r), 'white', 'white')
pose_draw.ellipse((pointx-r, pointy-r, pointx+r, pointy+r), 'white', 'white')
one_map = self.transform(one_map)
pose_map[i] = one_map[0]
im_pose = self.transform(im_pose)
result = {
"c_name": c_name,
"im_name": im_name,
"pose_image": im_pose,
"pose": pose_map,
"cloth": c,
"cloth_mask": cm,
"image": imgCropped,
"parse_cloth": pcm, e": shape,
}
return Dict(result)
def __len__(self):
return len(self.im_names)
class CPDataLoader(object):
def __init__(self, opt, dataset):
super(CPDataLoader, self).__init__()
if opt.data.loaders.shuffle:
train_sampler = torch.utils.data.sampler.RandomSampler(dataset)
else:
train_sampler = None
self.data_loader = torch.utils.data.DataLoader(
dataset,
batch_size=opt.data.loaders.batch_size,
shuffle=(train_sampler is None),
num_workers=opt.data.loaders.num_workers,
pin_memory=True,
sampler=train_sampler,
)
self.dataset = dataset
self.data_iter = self.data_loader.__iter__()
def next_batch(self):
try:
batch = self.data_iter.__next__()
except StopIteration:
self.data_iter = self.data_loader.__iter__()
batch = self.data_iter.__next__()
return batch
def get_loader(opts):
return DataLoader(
CPDataset(opts),
batch_size=opts.data.loaders.get("batch_size", 4),
shuffle=True,
num_workers=opts.data.loaders.get("num_workers", 8),
)
| true | true |
f7373ffef1fa0c2e51d262561fbc288db2280ed0 | 145 | py | Python | tests/test_api.py | gargii/python-skytools | 8b75647e03809f3ceb98cd614e868393f3262e6a | [
"ISC"
] | 7 | 2019-11-17T01:59:41.000Z | 2021-04-23T15:01:03.000Z | tests/test_api.py | gargii/python-skytools | 8b75647e03809f3ceb98cd614e868393f3262e6a | [
"ISC"
] | 2 | 2019-09-15T16:38:02.000Z | 2022-02-07T07:35:28.000Z | tests/test_api.py | gargii/python-skytools | 8b75647e03809f3ceb98cd614e868393f3262e6a | [
"ISC"
] | 4 | 2017-11-18T16:49:26.000Z | 2021-11-15T15:19:33.000Z |
import skytools
def test_version():
a = skytools.natsort_key(skytools.__version__)
b = skytools.natsort_key('3.3')
assert a >= b
| 14.5 | 50 | 0.675862 |
import skytools
def test_version():
a = skytools.natsort_key(skytools.__version__)
b = skytools.natsort_key('3.3')
assert a >= b
| true | true |
f7374068d77ef31e22dfe3771bbaa614973ffe29 | 14,203 | py | Python | tests/unit/test_expression_tree/test_operations/test_jac.py | pmohtat/PyBaMM | 8f0a6d82e26c19f5735ed81b55671574af29eb16 | [
"BSD-3-Clause"
] | null | null | null | tests/unit/test_expression_tree/test_operations/test_jac.py | pmohtat/PyBaMM | 8f0a6d82e26c19f5735ed81b55671574af29eb16 | [
"BSD-3-Clause"
] | null | null | null | tests/unit/test_expression_tree/test_operations/test_jac.py | pmohtat/PyBaMM | 8f0a6d82e26c19f5735ed81b55671574af29eb16 | [
"BSD-3-Clause"
] | null | null | null | #
# Tests for the jacobian methods
#
import pybamm
import numpy as np
import unittest
from scipy.sparse import eye
from tests import get_mesh_for_testing
def test_multi_var_function(arg1, arg2):
return arg1 + arg2
class TestJacobian(unittest.TestCase):
def test_variable_is_statevector(self):
a = pybamm.Symbol("a")
with self.assertRaisesRegex(
TypeError, "Jacobian can only be taken with respect to a 'StateVector'"
):
a.jac(a)
def test_linear(self):
y = pybamm.StateVector(slice(0, 4))
u = pybamm.StateVector(slice(0, 2))
v = pybamm.StateVector(slice(2, 4))
y0 = np.ones(4)
func = u
jacobian = np.array([[1, 0, 0, 0], [0, 1, 0, 0]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = -v
jacobian = np.array([[0, 0, -1, 0], [0, 0, 0, -1]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = 3 * u + 4 * v
jacobian = np.array([[3, 0, 4, 0], [0, 3, 0, 4]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = 7 * u - v * 9
jacobian = np.array([[7, 0, -9, 0], [0, 7, 0, -9]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
A = pybamm.Matrix(2 * eye(2))
func = A @ u
jacobian = np.array([[2, 0, 0, 0], [0, 2, 0, 0]])
dfunc_dy = func.jac(y).simplify().evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = u @ pybamm.StateVector(slice(0, 1))
with self.assertRaises(NotImplementedError):
func.jac(y)
# when differentiating by independent part of the state vector
jacobian = np.array([[0, 0], [0, 0]])
du_dv = u.jac(v).evaluate().toarray()
np.testing.assert_array_equal(du_dv, jacobian)
def test_nonlinear(self):
y = pybamm.StateVector(slice(0, 4))
u = pybamm.StateVector(slice(0, 2))
v = pybamm.StateVector(slice(2, 4))
y0 = np.array([1, 2, 3, 4])
func = v ** 2
jacobian = np.array([[0, 0, 6, 0], [0, 0, 0, 8]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = 2 ** v
jacobian = np.array(
[[0, 0, 2 ** 3 * np.log(2), 0], [0, 0, 0, 2 ** 4 * np.log(2)]]
)
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = v ** v
jacobian = [[0, 0, 27 * (1 + np.log(3)), 0], [0, 0, 0, 256 * (1 + np.log(4))]]
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_almost_equal(jacobian, dfunc_dy.toarray())
func = u * v
jacobian = np.array([[3, 0, 1, 0], [0, 4, 0, 2]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = u * (u + v)
jacobian = np.array([[5, 0, 1, 0], [0, 8, 0, 2]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = 1 / u + v / 3
jacobian = np.array([[-1, 0, 1 / 3, 0], [0, -1 / 4, 0, 1 / 3]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = u / v
jacobian = np.array([[1 / 3, 0, -1 / 9, 0], [0, 1 / 4, 0, -1 / 8]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = v / (1 + v)
jacobian = np.array([[0, 0, 1 / 16, 0], [0, 0, 0, 1 / 25]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
def test_multislice_raises(self):
y1 = pybamm.StateVector(slice(0, 4), slice(7, 8))
y_dot1 = pybamm.StateVectorDot(slice(0, 4), slice(7, 8))
y2 = pybamm.StateVector(slice(4, 7))
with self.assertRaises(NotImplementedError):
y1.jac(y1)
with self.assertRaises(NotImplementedError):
y2.jac(y1)
with self.assertRaises(NotImplementedError):
y_dot1.jac(y1)
def test_linear_ydot(self):
y = pybamm.StateVector(slice(0, 4))
y_dot = pybamm.StateVectorDot(slice(0, 4))
u = pybamm.StateVector(slice(0, 2))
v = pybamm.StateVector(slice(2, 4))
u_dot = pybamm.StateVectorDot(slice(0, 2))
v_dot = pybamm.StateVectorDot(slice(2, 4))
y0 = np.ones(4)
y_dot0 = np.ones(4)
func = u_dot
jacobian = np.array([[1, 0, 0, 0], [0, 1, 0, 0]])
dfunc_dy = func.jac(y_dot).evaluate(y=y0, y_dot=y_dot0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = -v_dot
jacobian = np.array([[0, 0, -1, 0], [0, 0, 0, -1]])
dfunc_dy = func.jac(y_dot).evaluate(y=y0, y_dot=y_dot0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = u_dot
jacobian = np.array([[0, 0, 0, 0], [0, 0, 0, 0]])
dfunc_dy = func.jac(y).evaluate(y=y0, y_dot=y_dot0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = -v_dot
jacobian = np.array([[0, 0, 0, 0], [0, 0, 0, 0]])
dfunc_dy = func.jac(y).evaluate(y=y0, y_dot=y_dot0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = u
jacobian = np.array([[0, 0, 0, 0], [0, 0, 0, 0]])
dfunc_dy = func.jac(y_dot).evaluate(y=y0, y_dot=y_dot0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = -v
jacobian = np.array([[0, 0, 0, 0], [0, 0, 0, 0]])
dfunc_dy = func.jac(y_dot).evaluate(y=y0, y_dot=y_dot0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
def test_functions(self):
y = pybamm.StateVector(slice(0, 4))
u = pybamm.StateVector(slice(0, 2))
v = pybamm.StateVector(slice(2, 4))
const = pybamm.Scalar(1)
y0 = np.array([1.0, 2.0, 3.0, 4.0])
func = pybamm.sin(u)
jacobian = np.array([[np.cos(1), 0, 0, 0], [0, np.cos(2), 0, 0]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = pybamm.cos(v)
jacobian = np.array([[0, 0, -np.sin(3), 0], [0, 0, 0, -np.sin(4)]])
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = pybamm.sin(3 * u * v)
jacobian = np.array(
[
[9 * np.cos(9), 0, 3 * np.cos(9), 0],
[0, 12 * np.cos(24), 0, 6 * np.cos(24)],
]
)
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
func = pybamm.cos(5 * pybamm.exp(u + v))
jacobian = np.array(
[
[
-5 * np.exp(4) * np.sin(5 * np.exp(4)),
0,
-5 * np.exp(4) * np.sin(5 * np.exp(4)),
0,
],
[
0,
-5 * np.exp(6) * np.sin(5 * np.exp(6)),
0,
-5 * np.exp(6) * np.sin(5 * np.exp(6)),
],
]
)
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
# when child evaluates to number
func = pybamm.sin(const)
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(0, dfunc_dy)
# several children
func = pybamm.Function(test_multi_var_function, 2 * y, 3 * y)
jacobian = np.diag(5 * np.ones(4))
dfunc_dy = func.jac(y).evaluate(y=y0)
np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
def test_index(self):
vec = pybamm.StateVector(slice(0, 5))
ind = pybamm.Index(vec, 3)
jac = ind.jac(vec).evaluate(y=np.linspace(0, 2, 5)).toarray()
np.testing.assert_array_equal(jac, np.array([[0, 0, 0, 1, 0]]))
# jac of ind of something that isn't a StateVector should return zeros
const_vec = pybamm.Vector(np.ones(3))
ind = pybamm.Index(const_vec, 2)
jac = ind.jac(vec).evaluate(y=np.linspace(0, 2, 5)).toarray()
np.testing.assert_array_equal(jac, np.array([[0, 0, 0, 0, 0]]))
def test_jac_of_number(self):
"Jacobian of a number should be zero"
a = pybamm.Scalar(1)
b = pybamm.Scalar(2)
y = pybamm.StateVector(slice(0, 1))
self.assertEqual(a.jac(y).evaluate(), 0)
add = a + b
self.assertEqual(add.jac(y).evaluate(), 0)
subtract = a - b
self.assertEqual(subtract.jac(y).evaluate(), 0)
multiply = a * b
self.assertEqual(multiply.jac(y).evaluate(), 0)
divide = a / b
self.assertEqual(divide.jac(y).evaluate(), 0)
power = a ** b
self.assertEqual(power.jac(y).evaluate(), 0)
def test_jac_of_symbol(self):
a = pybamm.Symbol("a")
y = pybamm.StateVector(slice(0, 1))
with self.assertRaises(NotImplementedError):
a.jac(y)
def test_spatial_operator(self):
a = pybamm.Variable("a")
b = pybamm.SpatialOperator("Operator", a)
y = pybamm.StateVector(slice(0, 1))
with self.assertRaises(NotImplementedError):
b.jac(y)
def test_jac_of_unary_operator(self):
a = pybamm.Scalar(1)
b = pybamm.UnaryOperator("Operator", a)
y = pybamm.StateVector(slice(0, 1))
with self.assertRaises(NotImplementedError):
b.jac(y)
def test_jac_of_independent_variable(self):
a = pybamm.IndependentVariable("Variable")
y = pybamm.StateVector(slice(0, 1))
self.assertEqual(a.jac(y).evaluate(), 0)
def test_jac_of_inner(self):
a = pybamm.Scalar(1)
b = pybamm.Scalar(2)
y = pybamm.StateVector(slice(0, 1))
self.assertEqual(pybamm.inner(a, b).jac(y).evaluate(), 0)
self.assertEqual(pybamm.inner(a, y).jac(y).evaluate(), 1)
self.assertEqual(pybamm.inner(y, b).jac(y).evaluate(), 2)
vec = pybamm.StateVector(slice(0, 2))
jac = pybamm.inner(a * vec, b * vec).jac(vec).evaluate(y=np.ones(2)).toarray()
np.testing.assert_array_equal(jac, 4 * np.eye(2))
def test_jac_of_heaviside(self):
a = pybamm.Scalar(1)
y = pybamm.StateVector(slice(0, 5))
np.testing.assert_array_equal(
((a < y) * y ** 2).jac(y).evaluate(y=5 * np.ones(5)), 10 * np.eye(5)
)
np.testing.assert_array_equal(
((a < y) * y ** 2).jac(y).evaluate(y=-5 * np.ones(5)), 0
)
def test_jac_of_minimum_maximum(self):
y = pybamm.StateVector(slice(0, 10))
y_test = np.linspace(0, 2, 10)
np.testing.assert_array_equal(
np.diag(pybamm.minimum(1, y ** 2).jac(y).evaluate(y=y_test)),
2 * y_test * (y_test < 1),
)
np.testing.assert_array_equal(
np.diag(pybamm.maximum(1, y ** 2).jac(y).evaluate(y=y_test)),
2 * y_test * (y_test > 1),
)
def test_jac_of_abs(self):
y = pybamm.StateVector(slice(0, 10))
absy = abs(y)
jac = absy.jac(y)
y_test = np.linspace(-2, 2, 10)
np.testing.assert_array_equal(
np.diag(jac.evaluate(y=y_test).toarray()), np.sign(y_test)
)
def test_jac_of_sign(self):
y = pybamm.StateVector(slice(0, 10))
func = pybamm.sign(y) * y
jac = func.jac(y)
y_test = np.linspace(-2, 2, 10)
np.testing.assert_array_equal(np.diag(jac.evaluate(y=y_test)), np.sign(y_test))
    def test_jac_of_domain_concatenation(self):
        """Jacobian of DomainConcatenation: constants give zeros, StateVectors give
        a block-diagonal jacobian; multi-domain children are not implemented."""
        # create mesh
        mesh = get_mesh_for_testing()
        y = pybamm.StateVector(slice(0, 100))
        # Jacobian of a DomainConcatenation of constants is a zero matrix of the
        # appropriate size
        a_dom = ["negative electrode"]
        b_dom = ["separator"]
        c_dom = ["positive electrode"]
        # number of mesh points in each domain (a_npts + b_npts + c_npts == 100)
        a_npts = mesh[a_dom[0]][0].npts
        b_npts = mesh[b_dom[0]][0].npts
        c_npts = mesh[c_dom[0]][0].npts
        a = 2 * pybamm.Vector(np.ones(a_npts), domain=a_dom)
        b = pybamm.Vector(np.ones(b_npts), domain=b_dom)
        c = 3 * pybamm.Vector(np.ones(c_npts), domain=c_dom)
        conc = pybamm.DomainConcatenation([a, b, c], mesh)
        jac = conc.jac(y).evaluate().toarray()
        np.testing.assert_array_equal(jac, np.zeros((100, 100)))
        # Jacobian of a DomainConcatenation of StateVectors
        a = 2 * pybamm.StateVector(slice(0, a_npts), domain=a_dom)
        b = pybamm.StateVector(slice(a_npts, a_npts + b_npts), domain=b_dom)
        c = 3 * pybamm.StateVector(
            slice(a_npts + b_npts, a_npts + b_npts + c_npts), domain=c_dom
        )
        conc = pybamm.DomainConcatenation([a, b, c], mesh)
        y0 = np.ones(100)
        jac = conc.jac(y).evaluate(y=y0).toarray()
        # diagonal carries each child's scale factor over its own domain slice
        np.testing.assert_array_equal(
            jac,
            np.diag(
                np.concatenate(
                    [2 * np.ones(a_npts), np.ones(b_npts), 3 * np.ones(c_npts)]
                )
            ),
        )
        # multi-domain case not implemented
        a = 2 * pybamm.StateVector(slice(0, a_npts), domain=a_dom)
        b = pybamm.StateVector(
            slice(a_npts, a_npts + b_npts + c_npts), domain=b_dom + c_dom
        )
        conc = pybamm.DomainConcatenation([a, b], mesh)
        with self.assertRaisesRegex(
            NotImplementedError, "jacobian only implemented for when each child has"
        ):
            conc.jac(y)
# Script entry point: run the test suite; pass -v to enable pybamm debug mode.
if __name__ == "__main__":
    print("Add -v for more debug output")
    import sys
    if "-v" in sys.argv:
        debug = True
        pybamm.settings.debug_mode = True
    unittest.main()
| 35.866162 | 87 | 0.558403 |
import pybamm
import numpy as np
import unittest
from scipy.sparse import eye
from tests import get_mesh_for_testing
def test_multi_var_function(arg1, arg2):
return arg1 + arg2
class TestJacobian(unittest.TestCase):
    """Unit tests for symbolic jacobians of pybamm expression trees.

    Each test builds an expression from StateVectors (and related symbols),
    takes its jacobian with ``.jac(...)``, evaluates it at a concrete ``y``,
    and compares against the hand-computed matrix.
    """
    def test_variable_is_statevector(self):
        """jac may only be taken with respect to a StateVector."""
        a = pybamm.Symbol("a")
        with self.assertRaisesRegex(
            TypeError, "Jacobian can only be taken with respect to a 'StateVector'"
        ):
            a.jac(a)
    def test_linear(self):
        """Jacobians of linear expressions in sub-slices u, v of y."""
        y = pybamm.StateVector(slice(0, 4))
        u = pybamm.StateVector(slice(0, 2))  # first half of y
        v = pybamm.StateVector(slice(2, 4))  # second half of y
        y0 = np.ones(4)
        func = u
        jacobian = np.array([[1, 0, 0, 0], [0, 1, 0, 0]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        func = -v
        jacobian = np.array([[0, 0, -1, 0], [0, 0, 0, -1]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        func = 3 * u + 4 * v
        jacobian = np.array([[3, 0, 4, 0], [0, 3, 0, 4]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        func = 7 * u - v * 9
        jacobian = np.array([[7, 0, -9, 0], [0, 7, 0, -9]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        # constant matrix times state vector
        A = pybamm.Matrix(2 * eye(2))
        func = A @ u
        jacobian = np.array([[2, 0, 0, 0], [0, 2, 0, 0]])
        dfunc_dy = func.jac(y).simplify().evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        # matmul of two state-dependent operands is not supported
        func = u @ pybamm.StateVector(slice(0, 1))
        with self.assertRaises(NotImplementedError):
            func.jac(y)
        # jacobian of u with respect to a disjoint slice v is zero
        jacobian = np.array([[0, 0], [0, 0]])
        du_dv = u.jac(v).evaluate().toarray()
        np.testing.assert_array_equal(du_dv, jacobian)
    def test_nonlinear(self):
        """Jacobians of products, quotients and powers of state sub-slices."""
        y = pybamm.StateVector(slice(0, 4))
        u = pybamm.StateVector(slice(0, 2))
        v = pybamm.StateVector(slice(2, 4))
        y0 = np.array([1, 2, 3, 4])  # so u = [1, 2], v = [3, 4]
        func = v ** 2
        jacobian = np.array([[0, 0, 6, 0], [0, 0, 0, 8]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        # d(2**v)/dv = 2**v * ln 2
        func = 2 ** v
        jacobian = np.array(
            [[0, 0, 2 ** 3 * np.log(2), 0], [0, 0, 0, 2 ** 4 * np.log(2)]]
        )
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        # d(v**v)/dv = v**v * (1 + ln v)
        func = v ** v
        jacobian = [[0, 0, 27 * (1 + np.log(3)), 0], [0, 0, 0, 256 * (1 + np.log(4))]]
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_almost_equal(jacobian, dfunc_dy.toarray())
        func = u * v
        jacobian = np.array([[3, 0, 1, 0], [0, 4, 0, 2]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        func = u * (u + v)
        jacobian = np.array([[5, 0, 1, 0], [0, 8, 0, 2]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        func = 1 / u + v / 3
        jacobian = np.array([[-1, 0, 1 / 3, 0], [0, -1 / 4, 0, 1 / 3]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        func = u / v
        jacobian = np.array([[1 / 3, 0, -1 / 9, 0], [0, 1 / 4, 0, -1 / 8]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        func = v / (1 + v)
        jacobian = np.array([[0, 0, 1 / 16, 0], [0, 0, 0, 1 / 25]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
    def test_multislice_raises(self):
        """jac is not implemented for StateVectors built from multiple slices."""
        y1 = pybamm.StateVector(slice(0, 4), slice(7, 8))
        y_dot1 = pybamm.StateVectorDot(slice(0, 4), slice(7, 8))
        y2 = pybamm.StateVector(slice(4, 7))
        with self.assertRaises(NotImplementedError):
            y1.jac(y1)
        with self.assertRaises(NotImplementedError):
            y2.jac(y1)
        with self.assertRaises(NotImplementedError):
            y_dot1.jac(y1)
    def test_linear_ydot(self):
        """Jacobians with respect to y_dot: identity on matching slices, zero
        when differentiating y-terms w.r.t. y_dot or vice versa."""
        y = pybamm.StateVector(slice(0, 4))
        y_dot = pybamm.StateVectorDot(slice(0, 4))
        u = pybamm.StateVector(slice(0, 2))
        v = pybamm.StateVector(slice(2, 4))
        u_dot = pybamm.StateVectorDot(slice(0, 2))
        v_dot = pybamm.StateVectorDot(slice(2, 4))
        y0 = np.ones(4)
        y_dot0 = np.ones(4)
        # d(u_dot)/d(y_dot) picks out the matching slice
        func = u_dot
        jacobian = np.array([[1, 0, 0, 0], [0, 1, 0, 0]])
        dfunc_dy = func.jac(y_dot).evaluate(y=y0, y_dot=y_dot0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        func = -v_dot
        jacobian = np.array([[0, 0, -1, 0], [0, 0, 0, -1]])
        dfunc_dy = func.jac(y_dot).evaluate(y=y0, y_dot=y_dot0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        # d(u_dot)/dy is zero: y and y_dot are independent
        func = u_dot
        jacobian = np.array([[0, 0, 0, 0], [0, 0, 0, 0]])
        dfunc_dy = func.jac(y).evaluate(y=y0, y_dot=y_dot0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        func = -v_dot
        jacobian = np.array([[0, 0, 0, 0], [0, 0, 0, 0]])
        dfunc_dy = func.jac(y).evaluate(y=y0, y_dot=y_dot0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        # likewise d(u)/d(y_dot) is zero
        func = u
        jacobian = np.array([[0, 0, 0, 0], [0, 0, 0, 0]])
        dfunc_dy = func.jac(y_dot).evaluate(y=y0, y_dot=y_dot0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        func = -v
        jacobian = np.array([[0, 0, 0, 0], [0, 0, 0, 0]])
        dfunc_dy = func.jac(y_dot).evaluate(y=y0, y_dot=y_dot0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
    def test_functions(self):
        """Chain rule through pybamm functions (sin, cos, exp) and through a
        user-supplied multi-argument Function."""
        y = pybamm.StateVector(slice(0, 4))
        u = pybamm.StateVector(slice(0, 2))
        v = pybamm.StateVector(slice(2, 4))
        const = pybamm.Scalar(1)
        y0 = np.array([1.0, 2.0, 3.0, 4.0])
        func = pybamm.sin(u)
        jacobian = np.array([[np.cos(1), 0, 0, 0], [0, np.cos(2), 0, 0]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        func = pybamm.cos(v)
        jacobian = np.array([[0, 0, -np.sin(3), 0], [0, 0, 0, -np.sin(4)]])
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        # d(sin(3uv))/du = 3v*cos(3uv), d/dv = 3u*cos(3uv)
        func = pybamm.sin(3 * u * v)
        jacobian = np.array(
            [
                [9 * np.cos(9), 0, 3 * np.cos(9), 0],
                [0, 12 * np.cos(24), 0, 6 * np.cos(24)],
            ]
        )
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        # nested chain rule through exp inside cos
        func = pybamm.cos(5 * pybamm.exp(u + v))
        jacobian = np.array(
            [
                [
                    -5 * np.exp(4) * np.sin(5 * np.exp(4)),
                    0,
                    -5 * np.exp(4) * np.sin(5 * np.exp(4)),
                    0,
                ],
                [
                    0,
                    -5 * np.exp(6) * np.sin(5 * np.exp(6)),
                    0,
                    -5 * np.exp(6) * np.sin(5 * np.exp(6)),
                ],
            ]
        )
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
        # when child evaluates to a constant, jacobian is zero
        func = pybamm.sin(const)
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(0, dfunc_dy)
        # d(2y + 3y)/dy = 5 via the summed partials of test_multi_var_function
        func = pybamm.Function(test_multi_var_function, 2 * y, 3 * y)
        jacobian = np.diag(5 * np.ones(4))
        dfunc_dy = func.jac(y).evaluate(y=y0)
        np.testing.assert_array_equal(jacobian, dfunc_dy.toarray())
    def test_index(self):
        """Jacobian of Index is a single row selecting the indexed entry."""
        vec = pybamm.StateVector(slice(0, 5))
        ind = pybamm.Index(vec, 3)
        jac = ind.jac(vec).evaluate(y=np.linspace(0, 2, 5)).toarray()
        np.testing.assert_array_equal(jac, np.array([[0, 0, 0, 1, 0]]))
        # jac of index of a constant vector is a zero row
        const_vec = pybamm.Vector(np.ones(3))
        ind = pybamm.Index(const_vec, 2)
        jac = ind.jac(vec).evaluate(y=np.linspace(0, 2, 5)).toarray()
        np.testing.assert_array_equal(jac, np.array([[0, 0, 0, 0, 0]]))
    def test_jac_of_number(self):
        """Jacobian of any arithmetic combination of scalars is zero."""
        a = pybamm.Scalar(1)
        b = pybamm.Scalar(2)
        y = pybamm.StateVector(slice(0, 1))
        self.assertEqual(a.jac(y).evaluate(), 0)
        add = a + b
        self.assertEqual(add.jac(y).evaluate(), 0)
        subtract = a - b
        self.assertEqual(subtract.jac(y).evaluate(), 0)
        multiply = a * b
        self.assertEqual(multiply.jac(y).evaluate(), 0)
        divide = a / b
        self.assertEqual(divide.jac(y).evaluate(), 0)
        power = a ** b
        self.assertEqual(power.jac(y).evaluate(), 0)
    def test_jac_of_symbol(self):
        """Jacobian of a bare Symbol is not implemented."""
        a = pybamm.Symbol("a")
        y = pybamm.StateVector(slice(0, 1))
        with self.assertRaises(NotImplementedError):
            a.jac(y)
    def test_spatial_operator(self):
        """Jacobian of the generic SpatialOperator base class is not implemented."""
        a = pybamm.Variable("a")
        b = pybamm.SpatialOperator("Operator", a)
        y = pybamm.StateVector(slice(0, 1))
        with self.assertRaises(NotImplementedError):
            b.jac(y)
    def test_jac_of_unary_operator(self):
        """Jacobian of the generic UnaryOperator base class is not implemented."""
        a = pybamm.Scalar(1)
        b = pybamm.UnaryOperator("Operator", a)
        y = pybamm.StateVector(slice(0, 1))
        with self.assertRaises(NotImplementedError):
            b.jac(y)
    def test_jac_of_independent_variable(self):
        """An independent variable does not depend on y, so its jacobian is 0."""
        a = pybamm.IndependentVariable("Variable")
        y = pybamm.StateVector(slice(0, 1))
        self.assertEqual(a.jac(y).evaluate(), 0)
    def test_jac_of_inner(self):
        """Jacobian of pybamm.inner for scalar and vector operands."""
        a = pybamm.Scalar(1)
        b = pybamm.Scalar(2)
        y = pybamm.StateVector(slice(0, 1))
        self.assertEqual(pybamm.inner(a, b).jac(y).evaluate(), 0)
        self.assertEqual(pybamm.inner(a, y).jac(y).evaluate(), 1)
        self.assertEqual(pybamm.inner(y, b).jac(y).evaluate(), 2)
        # d/dv inner(a*v, b*v) = 2*a*b*v = 4 at v = ones -> 4*I
        vec = pybamm.StateVector(slice(0, 2))
        jac = pybamm.inner(a * vec, b * vec).jac(vec).evaluate(y=np.ones(2)).toarray()
        np.testing.assert_array_equal(jac, 4 * np.eye(2))
    def test_jac_of_heaviside(self):
        """Heaviside factors gate the jacobian: active side differentiates, other is 0."""
        a = pybamm.Scalar(1)
        y = pybamm.StateVector(slice(0, 5))
        np.testing.assert_array_equal(
            ((a < y) * y ** 2).jac(y).evaluate(y=5 * np.ones(5)), 10 * np.eye(5)
        )
        np.testing.assert_array_equal(
            ((a < y) * y ** 2).jac(y).evaluate(y=-5 * np.ones(5)), 0
        )
    def test_jac_of_minimum_maximum(self):
        """min/max pick a branch pointwise; the jacobian follows the chosen branch."""
        y = pybamm.StateVector(slice(0, 10))
        y_test = np.linspace(0, 2, 10)
        np.testing.assert_array_equal(
            np.diag(pybamm.minimum(1, y ** 2).jac(y).evaluate(y=y_test)),
            2 * y_test * (y_test < 1),
        )
        np.testing.assert_array_equal(
            np.diag(pybamm.maximum(1, y ** 2).jac(y).evaluate(y=y_test)),
            2 * y_test * (y_test > 1),
        )
    def test_jac_of_abs(self):
        """d|y|/dy = sign(y), evaluated elementwise on the diagonal."""
        y = pybamm.StateVector(slice(0, 10))
        absy = abs(y)
        jac = absy.jac(y)
        y_test = np.linspace(-2, 2, 10)
        np.testing.assert_array_equal(
            np.diag(jac.evaluate(y=y_test).toarray()), np.sign(y_test)
        )
    def test_jac_of_sign(self):
        """sign(y) is treated as constant, so d(sign(y)*y)/dy = sign(y)."""
        y = pybamm.StateVector(slice(0, 10))
        func = pybamm.sign(y) * y
        jac = func.jac(y)
        y_test = np.linspace(-2, 2, 10)
        np.testing.assert_array_equal(np.diag(jac.evaluate(y=y_test)), np.sign(y_test))
    def test_jac_of_domain_concatenation(self):
        """Jacobian of DomainConcatenation: constants give zeros, StateVectors give
        a block-diagonal jacobian; multi-domain children are not implemented."""
        # create mesh
        mesh = get_mesh_for_testing()
        y = pybamm.StateVector(slice(0, 100))
        # Jacobian of a DomainConcatenation of constants is a zero matrix of the
        # appropriate size
        a_dom = ["negative electrode"]
        b_dom = ["separator"]
        c_dom = ["positive electrode"]
        # number of mesh points in each domain (a_npts + b_npts + c_npts == 100)
        a_npts = mesh[a_dom[0]][0].npts
        b_npts = mesh[b_dom[0]][0].npts
        c_npts = mesh[c_dom[0]][0].npts
        a = 2 * pybamm.Vector(np.ones(a_npts), domain=a_dom)
        b = pybamm.Vector(np.ones(b_npts), domain=b_dom)
        c = 3 * pybamm.Vector(np.ones(c_npts), domain=c_dom)
        conc = pybamm.DomainConcatenation([a, b, c], mesh)
        jac = conc.jac(y).evaluate().toarray()
        np.testing.assert_array_equal(jac, np.zeros((100, 100)))
        # Jacobian of a DomainConcatenation of StateVectors
        a = 2 * pybamm.StateVector(slice(0, a_npts), domain=a_dom)
        b = pybamm.StateVector(slice(a_npts, a_npts + b_npts), domain=b_dom)
        c = 3 * pybamm.StateVector(
            slice(a_npts + b_npts, a_npts + b_npts + c_npts), domain=c_dom
        )
        conc = pybamm.DomainConcatenation([a, b, c], mesh)
        y0 = np.ones(100)
        jac = conc.jac(y).evaluate(y=y0).toarray()
        # diagonal carries each child's scale factor over its own domain slice
        np.testing.assert_array_equal(
            jac,
            np.diag(
                np.concatenate(
                    [2 * np.ones(a_npts), np.ones(b_npts), 3 * np.ones(c_npts)]
                )
            ),
        )
        # multi-domain case not implemented
        a = 2 * pybamm.StateVector(slice(0, a_npts), domain=a_dom)
        b = pybamm.StateVector(
            slice(a_npts, a_npts + b_npts + c_npts), domain=b_dom + c_dom
        )
        conc = pybamm.DomainConcatenation([a, b], mesh)
        with self.assertRaisesRegex(
            NotImplementedError, "jacobian only implemented for when each child has"
        ):
            conc.jac(y)
# Script entry point: run the test suite; pass -v to enable pybamm debug mode.
if __name__ == "__main__":
    print("Add -v for more debug output")
    import sys
    if "-v" in sys.argv:
        debug = True
        pybamm.settings.debug_mode = True
    unittest.main()
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.