hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 958k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f72bc4838f0722457e558e34b578faff5078826d | 27,139 | py | Python | swift/container/sync.py | ctvera/swift-deb | cf889b6fa8b8059ac5ce2680064d60db7602d166 | [
"Apache-2.0"
] | null | null | null | swift/container/sync.py | ctvera/swift-deb | cf889b6fa8b8059ac5ce2680064d60db7602d166 | [
"Apache-2.0"
] | null | null | null | swift/container/sync.py | ctvera/swift-deb | cf889b6fa8b8059ac5ce2680064d60db7602d166 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import os
import uuid
from swift import gettext_ as _
from time import ctime, time
from random import choice, random
from struct import unpack_from
from eventlet import sleep, Timeout
import swift.common.db
from swift.common.db import DatabaseConnectionError
from swift.container.backend import ContainerBroker
from swift.container.sync_store import ContainerSyncStore
from swift.common.container_sync_realms import ContainerSyncRealms
from swift.common.internal_client import (
delete_object, put_object, head_object,
InternalClient, UnexpectedResponse)
from swift.common.exceptions import ClientException
from swift.common.ring import Ring
from swift.common.ring.utils import is_local_device
from swift.common.utils import (
clean_content_type, config_true_value,
FileLikeIter, get_logger, hash_path, quote, urlparse, validate_sync_to,
whataremyips, Timestamp, decode_timestamps)
from swift.common.daemon import Daemon
from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND
from swift.common.wsgi import ConfigString
# Default config body for the InternalClient used to fetch local object data.
# It exists to support upgrades without requiring deployment of the new
# /etc/swift/internal-client.conf; an operator-supplied
# internal_client_conf_path in the [container-sync] section overrides it.
# NOTE: the '#' lines below are part of the config text itself, not Python
# comments — do not edit them casually.
ic_conf_body = """
[DEFAULT]
# swift_dir = /etc/swift
# user = swift
# You can specify default log routing here if you want:
# log_name = swift
# log_facility = LOG_LOCAL0
# log_level = INFO
# log_address = /dev/log
#
# comma separated list of functions to call to setup custom log handlers.
# functions get passed: conf, name, log_to_console, log_route, fmt, logger,
# adapted_logger
# log_custom_handlers =
#
# If set, log_udp_host will override log_address
# log_udp_host =
# log_udp_port = 514
#
# You can enable StatsD logging here:
# log_statsd_host =
# log_statsd_port = 8125
# log_statsd_default_sample_rate = 1.0
# log_statsd_sample_rate_factor = 1.0
# log_statsd_metric_prefix =
[pipeline:main]
pipeline = catch_errors proxy-logging cache proxy-server
[app:proxy-server]
use = egg:swift#proxy
# See proxy-server.conf-sample for options
[filter:cache]
use = egg:swift#memcache
# See proxy-server.conf-sample for options
[filter:proxy-logging]
use = egg:swift#proxy_logging
[filter:catch_errors]
use = egg:swift#catch_errors
# See proxy-server.conf-sample for options
""".lstrip()
class ContainerSync(Daemon):
    """
    Daemon to sync syncable containers.

    This is done by scanning the local devices for container databases and
    checking for x-container-sync-to and x-container-sync-key metadata values.
    If they exist, newer rows since the last sync will trigger PUTs or DELETEs
    to the other container.

    The actual syncing is slightly more complicated to make use of the three
    (or number-of-replicas) main nodes for a container without each trying to
    do the exact same work but also without missing work if one node happens to
    be down.

    Two sync points are kept per container database. All rows between the two
    sync points trigger updates. Any rows newer than both sync points cause
    updates depending on the node's position for the container (primary nodes
    do one third, etc. depending on the replica count of course). After a sync
    run, the first sync point is set to the newest ROWID known and the second
    sync point is set to newest ROWID for which all updates have been sent.

    An example may help. Assume replica count is 3 and perfectly matching
    ROWIDs starting at 1.

    First sync run, database has 6 rows:

        * SyncPoint1 starts as -1.
        * SyncPoint2 starts as -1.
        * No rows between points, so no "all updates" rows.
        * Six rows newer than SyncPoint1, so a third of the rows are sent
          by node 1, another third by node 2, remaining third by node 3.
        * SyncPoint1 is set as 6 (the newest ROWID known).
        * SyncPoint2 is left as -1 since no "all updates" rows were synced.

    Next sync run, database has 12 rows:

        * SyncPoint1 starts as 6.
        * SyncPoint2 starts as -1.
        * The rows between -1 and 6 all trigger updates (most of which
          should short-circuit on the remote end as having already been
          done).
        * Six more rows newer than SyncPoint1, so a third of the rows are
          sent by node 1, another third by node 2, remaining third by node
          3.
        * SyncPoint1 is set as 12 (the newest ROWID known).
        * SyncPoint2 is set as 6 (the newest "all updates" ROWID).

    In this way, under normal circumstances each node sends its share of
    updates each run and just sends a batch of older updates to ensure nothing
    was missed.

    :param conf: The dict of configuration values from the [container-sync]
                 section of the container-server.conf
    :param container_ring: If None, the <swift_dir>/container.ring.gz will be
                           loaded. This is overridden by unit tests.
    """

    def __init__(self, conf, container_ring=None, logger=None):
        #: The dict of configuration values from the [container-sync] section
        #: of the container-server.conf.
        self.conf = conf
        #: Logger to use for container-sync log lines.
        self.logger = logger or get_logger(conf, log_route='container-sync')
        #: Path to the local device mount points.
        self.devices = conf.get('devices', '/srv/node')
        #: Indicates whether mount points should be verified as actual mount
        #: points (normally true, false for tests and SAIO).
        self.mount_check = config_true_value(conf.get('mount_check', 'true'))
        #: Minimum time between full scans. This is to keep the daemon from
        #: running wild on near empty systems.
        self.interval = int(conf.get('interval', 300))
        #: Maximum amount of time to spend syncing a container before moving on
        #: to the next one. If a container sync hasn't finished in this time,
        #: it'll just be resumed next scan.
        self.container_time = int(conf.get('container_time', 60))
        #: ContainerSyncCluster instance for validating sync-to values.
        self.realms_conf = ContainerSyncRealms(
            os.path.join(
                conf.get('swift_dir', '/etc/swift'),
                'container-sync-realms.conf'),
            self.logger)
        #: The list of hosts we're allowed to send syncs to. This can be
        #: overridden by data in self.realms_conf
        self.allowed_sync_hosts = [
            h.strip()
            for h in conf.get('allowed_sync_hosts', '127.0.0.1').split(',')
            if h.strip()]
        #: Optional HTTP proxies to send sync requests through; one is chosen
        #: at random per request (see select_http_proxy).
        self.http_proxies = [
            a.strip()
            for a in conf.get('sync_proxy', '').split(',')
            if a.strip()]
        #: ContainerSyncStore instance for iterating over synced containers
        self.sync_store = ContainerSyncStore(self.devices,
                                             self.logger,
                                             self.mount_check)
        #: Number of containers with sync turned on that were successfully
        #: synced.
        self.container_syncs = 0
        #: Number of successful DELETEs triggered.
        self.container_deletes = 0
        #: Number of successful PUTs triggered.
        self.container_puts = 0
        #: Number of containers whose sync has been turned off, but
        #: are not yet cleared from the sync store.
        self.container_skips = 0
        #: Number of containers that had a failure of some type.
        self.container_failures = 0
        #: Time of last stats report.
        self.reported = time()
        self.swift_dir = conf.get('swift_dir', '/etc/swift')
        #: swift.common.ring.Ring for locating containers.
        self.container_ring = container_ring or Ring(self.swift_dir,
                                                     ring_name='container')
        bind_ip = conf.get('bind_ip', '0.0.0.0')
        self._myips = whataremyips(bind_ip)
        self._myport = int(conf.get('bind_port', 6001))
        swift.common.db.DB_PREALLOCATION = \
            config_true_value(conf.get('db_preallocation', 'f'))
        self.conn_timeout = float(conf.get('conn_timeout', 5))
        request_tries = int(conf.get('request_tries') or 3)
        internal_client_conf_path = conf.get('internal_client_conf_path')
        if not internal_client_conf_path:
            self.logger.warning(
                _('Configuration option internal_client_conf_path not '
                  'defined. Using default configuration, See '
                  'internal-client.conf-sample for options'))
            internal_client_conf = ConfigString(ic_conf_body)
        else:
            internal_client_conf = internal_client_conf_path
        try:
            self.swift = InternalClient(
                internal_client_conf, 'Swift Container Sync', request_tries)
        except IOError as err:
            # ENOENT means the operator pointed at a missing config file;
            # anything else is unexpected and should propagate as-is.
            if err.errno != errno.ENOENT:
                raise
            raise SystemExit(
                _('Unable to load internal client from config: %r (%s)') %
                (internal_client_conf_path, err))

    def run_forever(self, *args, **kwargs):
        """
        Runs container sync scans until stopped.
        """
        # Random initial delay spreads the start times of the daemons across
        # the cluster so they don't all scan in lockstep.
        sleep(random() * self.interval)
        while True:
            begin = time()
            for path in self.sync_store.synced_containers_generator():
                self.container_sync(path)
                if time() - self.reported >= 3600:  # once an hour
                    self.report()
            elapsed = time() - begin
            if elapsed < self.interval:
                sleep(self.interval - elapsed)

    def run_once(self, *args, **kwargs):
        """
        Runs a single container sync scan.
        """
        self.logger.info(_('Begin container sync "once" mode'))
        begin = time()
        for path in self.sync_store.synced_containers_generator():
            self.container_sync(path)
            if time() - self.reported >= 3600:  # once an hour
                self.report()
        self.report()
        elapsed = time() - begin
        self.logger.info(
            _('Container sync "once" mode completed: %.02fs'), elapsed)

    def report(self):
        """
        Writes a report of the stats to the logger and resets the stats for the
        next report.
        """
        self.logger.info(
            _('Since %(time)s: %(sync)s synced [%(delete)s deletes, %(put)s '
              'puts], %(skip)s skipped, %(fail)s failed'),
            {'time': ctime(self.reported),
             'sync': self.container_syncs,
             'delete': self.container_deletes,
             'put': self.container_puts,
             'skip': self.container_skips,
             'fail': self.container_failures})
        self.reported = time()
        self.container_syncs = 0
        self.container_deletes = 0
        self.container_puts = 0
        self.container_skips = 0
        self.container_failures = 0

    def container_sync(self, path):
        """
        Checks the given path for a container database, determines if syncing
        is turned on for that database and, if so, sends any updates to the
        other container.

        :param path: the path to a container db
        """
        broker = None
        try:
            broker = ContainerBroker(path)
            # The path we pass to the ContainerBroker is a real path of
            # a container DB. If we get here, however, it means that this
            # path is linked from the sync_containers dir. In rare cases
            # of race or processes failures the link can be stale and
            # the get_info below will raise a DB doesn't exist exception
            # In this case we remove the stale link and raise an error
            # since in most cases the db should be there.
            try:
                info = broker.get_info()
            except DatabaseConnectionError as db_err:
                if str(db_err).endswith("DB doesn't exist"):
                    self.sync_store.remove_synced_container(broker)
                raise
            x, nodes = self.container_ring.get_nodes(info['account'],
                                                     info['container'])
            for ordinal, node in enumerate(nodes):
                if is_local_device(self._myips, self._myport,
                                   node['ip'], node['port']):
                    break
            else:
                # None of the container's primary nodes is this device, so
                # this daemon has no share of the work for it.
                return
            if not broker.is_deleted():
                sync_to = None
                user_key = None
                sync_point1 = info['x_container_sync_point1']
                sync_point2 = info['x_container_sync_point2']
                for key, (value, timestamp) in broker.metadata.items():
                    if key.lower() == 'x-container-sync-to':
                        sync_to = value
                    elif key.lower() == 'x-container-sync-key':
                        user_key = value
                if not sync_to or not user_key:
                    self.container_skips += 1
                    self.logger.increment('skips')
                    return
                err, sync_to, realm, realm_key = validate_sync_to(
                    sync_to, self.allowed_sync_hosts, self.realms_conf)
                if err:
                    self.logger.info(
                        _('ERROR %(db_file)s: %(validate_sync_to_err)s'),
                        {'db_file': str(broker),
                         'validate_sync_to_err': err})
                    self.container_failures += 1
                    self.logger.increment('failures')
                    return
                stop_at = time() + self.container_time
                next_sync_point = None
                while time() < stop_at and sync_point2 < sync_point1:
                    rows = broker.get_items_since(sync_point2, 1)
                    if not rows:
                        break
                    row = rows[0]
                    if row['ROWID'] > sync_point1:
                        break
                    # This node will only initially sync out one third of the
                    # objects (if 3 replicas, 1/4 if 4, etc.) and will skip
                    # problematic rows as needed in case of faults.
                    # This section will attempt to sync previously skipped
                    # rows in case the previous attempts by any of the nodes
                    # didn't succeed.
                    if not self.container_sync_row(
                            row, sync_to, user_key, broker, info, realm,
                            realm_key):
                        if not next_sync_point:
                            next_sync_point = sync_point2
                    sync_point2 = row['ROWID']
                    broker.set_x_container_sync_points(None, sync_point2)
                if next_sync_point:
                    broker.set_x_container_sync_points(None, next_sync_point)
                while time() < stop_at:
                    rows = broker.get_items_since(sync_point1, 1)
                    if not rows:
                        break
                    row = rows[0]
                    key = hash_path(info['account'], info['container'],
                                    row['name'], raw_digest=True)
                    # This node will only initially sync out one third of the
                    # objects (if 3 replicas, 1/4 if 4, etc.). It'll come back
                    # around to the section above and attempt to sync
                    # previously skipped rows in case the other nodes didn't
                    # succeed or in case it failed to do so the first time.
                    if unpack_from('>I', key)[0] % \
                            len(nodes) == ordinal:
                        self.container_sync_row(
                            row, sync_to, user_key, broker, info, realm,
                            realm_key)
                    sync_point1 = row['ROWID']
                    broker.set_x_container_sync_points(sync_point1, None)
                self.container_syncs += 1
                self.logger.increment('syncs')
        except (Exception, Timeout):
            self.container_failures += 1
            self.logger.increment('failures')
            self.logger.exception(_('ERROR Syncing %s'),
                                  broker if broker else path)

    def _update_sync_to_headers(self, name, sync_to, user_key,
                                realm, realm_key, method, headers):
        """
        Updates container sync headers

        :param name: The name of the object
        :param sync_to: The URL to the remote container.
        :param user_key: The X-Container-Sync-Key to use when sending requests
                         to the other container.
        :param realm: The realm from self.realms_conf, if there is one.
                      If None, fallback to using the older allowed_sync_hosts
                      way of syncing.
        :param realm_key: The realm key from self.realms_conf, if there
                          is one. If None, fallback to using the older
                          allowed_sync_hosts way of syncing.
        :param method: HTTP method to create sig with
        :param headers: headers to update with container sync headers
        """
        if realm and realm_key:
            nonce = uuid.uuid4().hex
            path = urlparse(sync_to).path + '/' + quote(name)
            sig = self.realms_conf.get_sig(method, path,
                                           headers.get('x-timestamp', 0),
                                           nonce, realm_key,
                                           user_key)
            headers['x-container-sync-auth'] = '%s %s %s' % (realm,
                                                             nonce,
                                                             sig)
        else:
            headers['x-container-sync-key'] = user_key

    def _object_in_remote_container(self, name, sync_to, user_key,
                                    realm, realm_key, timestamp):
        """
        Performs head object on remote to eliminate extra remote put and
        local get object calls

        :param name: The name of the object in the updated row in the local
                     database triggering the sync update.
        :param sync_to: The URL to the remote container.
        :param user_key: The X-Container-Sync-Key to use when sending requests
                         to the other container.
        :param realm: The realm from self.realms_conf, if there is one.
                      If None, fallback to using the older allowed_sync_hosts
                      way of syncing.
        :param realm_key: The realm key from self.realms_conf, if there
                          is one. If None, fallback to using the older
                          allowed_sync_hosts way of syncing.
        :param timestamp: last modified date of local object
        :returns: True if object already exists in remote
        """
        headers = {'x-timestamp': timestamp.internal}
        self._update_sync_to_headers(name, sync_to, user_key, realm,
                                     realm_key, 'HEAD', headers)
        try:
            # Renamed the throwaway from '_' to '_junk' so it doesn't shadow
            # the module-level gettext alias within this method.
            metadata, _junk = head_object(sync_to, name=name,
                                          headers=headers,
                                          proxy=self.select_http_proxy(),
                                          logger=self.logger,
                                          retries=0)
            remote_ts = Timestamp(metadata.get('x-timestamp', 0))
            # Lazy %-args so the string is only built if debug is enabled.
            self.logger.debug("remote obj timestamp %s local obj %s",
                              timestamp.internal, remote_ts.internal)
            if timestamp <= remote_ts:
                return True
            # Object in remote should be updated
            return False
        except ClientException as http_err:
            # Object not in remote
            if http_err.http_status == 404:
                return False
            raise http_err

    def container_sync_row(self, row, sync_to, user_key, broker, info,
                           realm, realm_key):
        """
        Sends the update the row indicates to the sync_to container.
        Update can be either delete or put.

        :param row: The updated row in the local database triggering the sync
                    update.
        :param sync_to: The URL to the remote container.
        :param user_key: The X-Container-Sync-Key to use when sending requests
                         to the other container.
        :param broker: The local container database broker.
        :param info: The get_info result from the local container database
                     broker.
        :param realm: The realm from self.realms_conf, if there is one.
                      If None, fallback to using the older allowed_sync_hosts
                      way of syncing.
        :param realm_key: The realm key from self.realms_conf, if there
                          is one. If None, fallback to using the older
                          allowed_sync_hosts way of syncing.
        :returns: True on success
        """
        try:
            start_time = time()
            # extract last modified time from the created_at value
            ts_data, ts_ctype, ts_meta = decode_timestamps(
                row['created_at'])
            if row['deleted']:
                # when sync'ing a deleted object, use ts_data - this is the
                # timestamp of the source tombstone
                try:
                    headers = {'x-timestamp': ts_data.internal}
                    self._update_sync_to_headers(row['name'], sync_to,
                                                 user_key, realm, realm_key,
                                                 'DELETE', headers)
                    delete_object(sync_to, name=row['name'], headers=headers,
                                  proxy=self.select_http_proxy(),
                                  logger=self.logger,
                                  timeout=self.conn_timeout)
                except ClientException as err:
                    if err.http_status != HTTP_NOT_FOUND:
                        raise
                self.container_deletes += 1
                self.logger.increment('deletes')
                self.logger.timing_since('deletes.timing', start_time)
            else:
                # when sync'ing a live object, use ts_meta - this is the time
                # at which the source object was last modified by a PUT or POST
                if self._object_in_remote_container(row['name'],
                                                    sync_to, user_key, realm,
                                                    realm_key, ts_meta):
                    return True
                exc = None
                # look up for the newest one
                headers_out = {'X-Newest': True,
                               'X-Backend-Storage-Policy-Index':
                               str(info['storage_policy_index'])}
                try:
                    source_obj_status, headers, body = \
                        self.swift.get_object(info['account'],
                                              info['container'], row['name'],
                                              headers=headers_out,
                                              acceptable_statuses=(2, 4))
                except (Exception, UnexpectedResponse, Timeout) as err:
                    headers = {}
                    body = None
                    exc = err
                timestamp = Timestamp(headers.get('x-timestamp', 0))
                if timestamp < ts_meta:
                    if exc:
                        raise exc
                    # BUG FIX: the interpolation dict was previously passed as
                    # a second positional argument to Exception(), so the
                    # %(...)r placeholders were never filled in; apply the %
                    # formatting so the raised message is actually readable.
                    raise Exception(
                        _('Unknown exception trying to GET: '
                          '%(account)r %(container)r %(object)r') %
                        {'account': info['account'],
                         'container': info['container'],
                         'object': row['name']})
                for key in ('date', 'last-modified'):
                    if key in headers:
                        del headers[key]
                if 'etag' in headers:
                    headers['etag'] = headers['etag'].strip('"')
                if 'content-type' in headers:
                    headers['content-type'] = clean_content_type(
                        headers['content-type'])
                self._update_sync_to_headers(row['name'], sync_to, user_key,
                                             realm, realm_key, 'PUT', headers)
                put_object(sync_to, name=row['name'], headers=headers,
                           contents=FileLikeIter(body),
                           proxy=self.select_http_proxy(), logger=self.logger,
                           timeout=self.conn_timeout)
                self.container_puts += 1
                self.logger.increment('puts')
                self.logger.timing_since('puts.timing', start_time)
        except ClientException as err:
            if err.http_status == HTTP_UNAUTHORIZED:
                self.logger.info(
                    _('Unauth %(sync_from)r => %(sync_to)r'),
                    {'sync_from': '%s/%s' %
                        (quote(info['account']), quote(info['container'])),
                     'sync_to': sync_to})
            elif err.http_status == HTTP_NOT_FOUND:
                self.logger.info(
                    _('Not found %(sync_from)r => %(sync_to)r \
                      - object %(obj_name)r'),
                    {'sync_from': '%s/%s' %
                        (quote(info['account']), quote(info['container'])),
                     'sync_to': sync_to, 'obj_name': row['name']})
            else:
                self.logger.exception(
                    _('ERROR Syncing %(db_file)s %(row)s'),
                    {'db_file': str(broker), 'row': row})
            self.container_failures += 1
            self.logger.increment('failures')
            return False
        except (Exception, Timeout) as err:
            self.logger.exception(
                _('ERROR Syncing %(db_file)s %(row)s'),
                {'db_file': str(broker), 'row': row})
            self.container_failures += 1
            self.logger.increment('failures')
            return False
        return True

    def select_http_proxy(self):
        """Randomly pick one of the configured sync proxies, or None."""
        return choice(self.http_proxies) if self.http_proxies else None
| 45.765599 | 79 | 0.562659 |
import errno
import os
import uuid
from swift import gettext_ as _
from time import ctime, time
from random import choice, random
from struct import unpack_from
from eventlet import sleep, Timeout
import swift.common.db
from swift.common.db import DatabaseConnectionError
from swift.container.backend import ContainerBroker
from swift.container.sync_store import ContainerSyncStore
from swift.common.container_sync_realms import ContainerSyncRealms
from swift.common.internal_client import (
delete_object, put_object, head_object,
InternalClient, UnexpectedResponse)
from swift.common.exceptions import ClientException
from swift.common.ring import Ring
from swift.common.ring.utils import is_local_device
from swift.common.utils import (
clean_content_type, config_true_value,
FileLikeIter, get_logger, hash_path, quote, urlparse, validate_sync_to,
whataremyips, Timestamp, decode_timestamps)
from swift.common.daemon import Daemon
from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND
from swift.common.wsgi import ConfigString
ic_conf_body = """
[DEFAULT]
# swift_dir = /etc/swift
# user = swift
# You can specify default log routing here if you want:
# log_name = swift
# log_facility = LOG_LOCAL0
# log_level = INFO
# log_address = /dev/log
#
# comma separated list of functions to call to setup custom log handlers.
# functions get passed: conf, name, log_to_console, log_route, fmt, logger,
# adapted_logger
# log_custom_handlers =
#
# If set, log_udp_host will override log_address
# log_udp_host =
# log_udp_port = 514
#
# You can enable StatsD logging here:
# log_statsd_host =
# log_statsd_port = 8125
# log_statsd_default_sample_rate = 1.0
# log_statsd_sample_rate_factor = 1.0
# log_statsd_metric_prefix =
[pipeline:main]
pipeline = catch_errors proxy-logging cache proxy-server
[app:proxy-server]
use = egg:swift#proxy
# See proxy-server.conf-sample for options
[filter:cache]
use = egg:swift#memcache
# See proxy-server.conf-sample for options
[filter:proxy-logging]
use = egg:swift#proxy_logging
[filter:catch_errors]
use = egg:swift#catch_errors
# See proxy-server.conf-sample for options
""".lstrip()
class ContainerSync(Daemon):
def __init__(self, conf, container_ring=None, logger=None):
self.conf = conf
self.logger = logger or get_logger(conf, log_route='container-sync')
self.devices = conf.get('devices', '/srv/node')
self.mount_check = config_true_value(conf.get('mount_check', 'true'))
self.interval = int(conf.get('interval', 300))
#: it'll just be resumed next scan.
self.container_time = int(conf.get('container_time', 60))
self.realms_conf = ContainerSyncRealms(
os.path.join(
conf.get('swift_dir', '/etc/swift'),
'container-sync-realms.conf'),
self.logger)
#: overridden by data in self.realms_conf
self.allowed_sync_hosts = [
h.strip()
for h in conf.get('allowed_sync_hosts', '127.0.0.1').split(',')
if h.strip()]
self.http_proxies = [
a.strip()
for a in conf.get('sync_proxy', '').split(',')
if a.strip()]
#: ContainerSyncStore instance for iterating over synced containers
self.sync_store = ContainerSyncStore(self.devices,
self.logger,
self.mount_check)
#: Number of containers with sync turned on that were successfully
#: synced.
self.container_syncs = 0
#: Number of successful DELETEs triggered.
self.container_deletes = 0
#: Number of successful PUTs triggered.
self.container_puts = 0
#: Number of containers whose sync has been turned off, but
#: are not yet cleared from the sync store.
self.container_skips = 0
#: Number of containers that had a failure of some type.
self.container_failures = 0
#: Time of last stats report.
self.reported = time()
self.swift_dir = conf.get('swift_dir', '/etc/swift')
#: swift.common.ring.Ring for locating containers.
self.container_ring = container_ring or Ring(self.swift_dir,
ring_name='container')
bind_ip = conf.get('bind_ip', '0.0.0.0')
self._myips = whataremyips(bind_ip)
self._myport = int(conf.get('bind_port', 6001))
swift.common.db.DB_PREALLOCATION = \
config_true_value(conf.get('db_preallocation', 'f'))
self.conn_timeout = float(conf.get('conn_timeout', 5))
request_tries = int(conf.get('request_tries') or 3)
internal_client_conf_path = conf.get('internal_client_conf_path')
if not internal_client_conf_path:
self.logger.warning(
_('Configuration option internal_client_conf_path not '
'defined. Using default configuration, See '
'internal-client.conf-sample for options'))
internal_client_conf = ConfigString(ic_conf_body)
else:
internal_client_conf = internal_client_conf_path
try:
self.swift = InternalClient(
internal_client_conf, 'Swift Container Sync', request_tries)
except IOError as err:
if err.errno != errno.ENOENT:
raise
raise SystemExit(
_('Unable to load internal client from config: %r (%s)') %
(internal_client_conf_path, err))
def run_forever(self, *args, **kwargs):
sleep(random() * self.interval)
while True:
begin = time()
for path in self.sync_store.synced_containers_generator():
self.container_sync(path)
if time() - self.reported >= 3600: # once an hour
self.report()
elapsed = time() - begin
if elapsed < self.interval:
sleep(self.interval - elapsed)
def run_once(self, *args, **kwargs):
self.logger.info(_('Begin container sync "once" mode'))
begin = time()
for path in self.sync_store.synced_containers_generator():
self.container_sync(path)
if time() - self.reported >= 3600: # once an hour
self.report()
self.report()
elapsed = time() - begin
self.logger.info(
_('Container sync "once" mode completed: %.02fs'), elapsed)
def report(self):
self.logger.info(
_('Since %(time)s: %(sync)s synced [%(delete)s deletes, %(put)s '
'puts], %(skip)s skipped, %(fail)s failed'),
{'time': ctime(self.reported),
'sync': self.container_syncs,
'delete': self.container_deletes,
'put': self.container_puts,
'skip': self.container_skips,
'fail': self.container_failures})
self.reported = time()
self.container_syncs = 0
self.container_deletes = 0
self.container_puts = 0
self.container_skips = 0
self.container_failures = 0
def container_sync(self, path):
broker = None
try:
broker = ContainerBroker(path)
# The path we pass to the ContainerBroker is a real path of
# a container DB. If we get here, however, it means that this
# path is linked from the sync_containers dir. In rare cases
# of race or processes failures the link can be stale and
# the get_info below will raise a DB doesn't exist exception
try:
info = broker.get_info()
except DatabaseConnectionError as db_err:
if str(db_err).endswith("DB doesn't exist"):
self.sync_store.remove_synced_container(broker)
raise
x, nodes = self.container_ring.get_nodes(info['account'],
info['container'])
for ordinal, node in enumerate(nodes):
if is_local_device(self._myips, self._myport,
node['ip'], node['port']):
break
else:
return
if not broker.is_deleted():
sync_to = None
user_key = None
sync_point1 = info['x_container_sync_point1']
sync_point2 = info['x_container_sync_point2']
for key, (value, timestamp) in broker.metadata.items():
if key.lower() == 'x-container-sync-to':
sync_to = value
elif key.lower() == 'x-container-sync-key':
user_key = value
if not sync_to or not user_key:
self.container_skips += 1
self.logger.increment('skips')
return
err, sync_to, realm, realm_key = validate_sync_to(
sync_to, self.allowed_sync_hosts, self.realms_conf)
if err:
self.logger.info(
_('ERROR %(db_file)s: %(validate_sync_to_err)s'),
{'db_file': str(broker),
'validate_sync_to_err': err})
self.container_failures += 1
self.logger.increment('failures')
return
stop_at = time() + self.container_time
next_sync_point = None
while time() < stop_at and sync_point2 < sync_point1:
rows = broker.get_items_since(sync_point2, 1)
if not rows:
break
row = rows[0]
if row['ROWID'] > sync_point1:
break
# This node will only initially sync out one third of the
# objects (if 3 replicas, 1/4 if 4, etc.) and will skip
# problematic rows as needed in case of faults.
# This section will attempt to sync previously skipped
# rows in case the previous attempts by any of the nodes
# didn't succeed.
if not self.container_sync_row(
row, sync_to, user_key, broker, info, realm,
realm_key):
if not next_sync_point:
next_sync_point = sync_point2
sync_point2 = row['ROWID']
broker.set_x_container_sync_points(None, sync_point2)
if next_sync_point:
broker.set_x_container_sync_points(None, next_sync_point)
while time() < stop_at:
rows = broker.get_items_since(sync_point1, 1)
if not rows:
break
row = rows[0]
key = hash_path(info['account'], info['container'],
row['name'], raw_digest=True)
# around to the section above and attempt to sync
# previously skipped rows in case the other nodes didn't
if unpack_from('>I', key)[0] % \
len(nodes) == ordinal:
self.container_sync_row(
row, sync_to, user_key, broker, info, realm,
realm_key)
sync_point1 = row['ROWID']
broker.set_x_container_sync_points(sync_point1, None)
self.container_syncs += 1
self.logger.increment('syncs')
except (Exception, Timeout):
self.container_failures += 1
self.logger.increment('failures')
self.logger.exception(_('ERROR Syncing %s'),
broker if broker else path)
def _update_sync_to_headers(self, name, sync_to, user_key,
realm, realm_key, method, headers):
if realm and realm_key:
nonce = uuid.uuid4().hex
path = urlparse(sync_to).path + '/' + quote(name)
sig = self.realms_conf.get_sig(method, path,
headers.get('x-timestamp', 0),
nonce, realm_key,
user_key)
headers['x-container-sync-auth'] = '%s %s %s' % (realm,
nonce,
sig)
else:
headers['x-container-sync-key'] = user_key
def _object_in_remote_container(self, name, sync_to, user_key,
realm, realm_key, timestamp):
headers = {'x-timestamp': timestamp.internal}
self._update_sync_to_headers(name, sync_to, user_key, realm,
realm_key, 'HEAD', headers)
try:
metadata, _ = head_object(sync_to, name=name,
headers=headers,
proxy=self.select_http_proxy(),
logger=self.logger,
retries=0)
remote_ts = Timestamp(metadata.get('x-timestamp', 0))
self.logger.debug("remote obj timestamp %s local obj %s" %
(timestamp.internal, remote_ts.internal))
if timestamp <= remote_ts:
return True
return False
except ClientException as http_err:
if http_err.http_status == 404:
return False
raise http_err
    def container_sync_row(self, row, sync_to, user_key, broker, info,
                           realm, realm_key):
        """Sync one object row to the remote container.

        :param row: a container-DB object row; this code reads its
                    'created_at', 'deleted' and 'name' fields
        :param sync_to: URL of the remote container to sync to
        :param user_key: the shared X-Container-Sync-Key secret
        :param broker: the container DB broker (used here only for logging)
        :param info: dict with at least 'account', 'container' and
                     'storage_policy_index' entries
        :param realm: realm name when cluster-to-cluster realm auth is
                      configured, else a false value
        :param realm_key: key for that realm, else a false value
        :returns: True on success (including "remote already up to date"),
                  False on failure; failure stats and logs are updated as a
                  side effect
        """
        try:
            start_time = time()
            # a row's created_at can encode up to three timestamps:
            # data, content-type and metadata
            ts_data, ts_ctype, ts_meta = decode_timestamps(
                row['created_at'])
            if row['deleted']:
                # timestamp of the source tombstone
                try:
                    headers = {'x-timestamp': ts_data.internal}
                    self._update_sync_to_headers(row['name'], sync_to,
                                                 user_key, realm, realm_key,
                                                 'DELETE', headers)
                    delete_object(sync_to, name=row['name'], headers=headers,
                                  proxy=self.select_http_proxy(),
                                  logger=self.logger,
                                  timeout=self.conn_timeout)
                except ClientException as err:
                    # already gone remotely counts as a successful delete
                    if err.http_status != HTTP_NOT_FOUND:
                        raise
                self.container_deletes += 1
                self.logger.increment('deletes')
                self.logger.timing_since('deletes.timing', start_time)
            else:
                # when sync'ing a live object, use ts_meta - this is the time
                if self._object_in_remote_container(row['name'],
                                                    sync_to, user_key, realm,
                                                    realm_key, ts_meta):
                    return True
                exc = None
                headers_out = {'X-Newest': True,
                               'X-Backend-Storage-Policy-Index':
                               str(info['storage_policy_index'])}
                try:
                    source_obj_status, headers, body = \
                        self.swift.get_object(info['account'],
                                              info['container'], row['name'],
                                              headers=headers_out,
                                              acceptable_statuses=(2, 4))
                except (Exception, UnexpectedResponse, Timeout) as err:
                    headers = {}
                    body = None
                    exc = err
                # the fetched local copy must be at least as new as the row
                # claims; otherwise surface the GET failure rather than
                # pushing stale data
                timestamp = Timestamp(headers.get('x-timestamp', 0))
                if timestamp < ts_meta:
                    if exc:
                        raise exc
                    raise Exception(
                        _('Unknown exception trying to GET: '
                          '%(account)r %(container)r %(object)r'),
                        {'account': info['account'],
                         'container': info['container'],
                         'object': row['name']})
                # strip per-response headers before replaying the PUT remotely
                for key in ('date', 'last-modified'):
                    if key in headers:
                        del headers[key]
                if 'etag' in headers:
                    headers['etag'] = headers['etag'].strip('"')
                if 'content-type' in headers:
                    headers['content-type'] = clean_content_type(
                        headers['content-type'])
                self._update_sync_to_headers(row['name'], sync_to, user_key,
                                             realm, realm_key, 'PUT', headers)
                put_object(sync_to, name=row['name'], headers=headers,
                           contents=FileLikeIter(body),
                           proxy=self.select_http_proxy(), logger=self.logger,
                           timeout=self.conn_timeout)
                self.container_puts += 1
                self.logger.increment('puts')
                self.logger.timing_since('puts.timing', start_time)
        except ClientException as err:
            if err.http_status == HTTP_UNAUTHORIZED:
                self.logger.info(
                    _('Unauth %(sync_from)r => %(sync_to)r'),
                    {'sync_from': '%s/%s' %
                     (quote(info['account']), quote(info['container'])),
                     'sync_to': sync_to})
            elif err.http_status == HTTP_NOT_FOUND:
                self.logger.info(
                    _('Not found %(sync_from)r => %(sync_to)r \
                      - object %(obj_name)r'),
                    {'sync_from': '%s/%s' %
                     (quote(info['account']), quote(info['container'])),
                     'sync_to': sync_to, 'obj_name': row['name']})
            else:
                self.logger.exception(
                    _('ERROR Syncing %(db_file)s %(row)s'),
                    {'db_file': str(broker), 'row': row})
            self.container_failures += 1
            self.logger.increment('failures')
            return False
        except (Exception, Timeout) as err:
            self.logger.exception(
                _('ERROR Syncing %(db_file)s %(row)s'),
                {'db_file': str(broker), 'row': row})
            self.container_failures += 1
            self.logger.increment('failures')
            return False
        return True
def select_http_proxy(self):
return choice(self.http_proxies) if self.http_proxies else None
| true | true |
f72bc64706a2419c1f41c94124de0ffce474b339 | 307 | py | Python | imgs.py | Entropy03/linyi | 1e5f924c217095d6757e29cac128e5ac5085ec11 | [
"MIT"
] | null | null | null | imgs.py | Entropy03/linyi | 1e5f924c217095d6757e29cac128e5ac5085ec11 | [
"MIT"
] | null | null | null | imgs.py | Entropy03/linyi | 1e5f924c217095d6757e29cac128e5ac5085ec11 | [
"MIT"
] | null | null | null | from PIL import Image
from PIL import ImageFilter
import urllib
import urllib2
import requests
import re
import json
import ssl
http://jlnetc.blog.51cto.com/10920149/1907446
http://python.jobbole.com/81359/
http://jlnetc.blog.51cto.com/10920149/1907446
https://gist.github.com/loveNight/214f82b43926528342f2 | 25.583333 | 54 | 0.827362 | from PIL import Image
from PIL import ImageFilter
import urllib
import urllib2
import requests
import re
import json
import ssl
http://jlnetc.blog.51cto.com/10920149/1907446
http://python.jobbole.com/81359/
http://jlnetc.blog.51cto.com/10920149/1907446
https://gist.github.com/loveNight/214f82b43926528342f2 | false | true |
f72bc6e7a5a7d63e1045e7006d2cabcd2f8a9690 | 1,305 | py | Python | oauth2_provider/generators.py | dqfort/django-oauth-toolkit | 492a867499b50f348c28db4ef3e429e8f46dc412 | [
"BSD-2-Clause-FreeBSD"
] | 1,406 | 2018-04-09T18:46:01.000Z | 2022-03-30T00:42:23.000Z | oauth2_provider/generators.py | dqfort/django-oauth-toolkit | 492a867499b50f348c28db4ef3e429e8f46dc412 | [
"BSD-2-Clause-FreeBSD"
] | 625 | 2018-04-08T06:06:29.000Z | 2022-03-28T20:48:19.000Z | oauth2_provider/generators.py | dqfort/django-oauth-toolkit | 492a867499b50f348c28db4ef3e429e8f46dc412 | [
"BSD-2-Clause-FreeBSD"
] | 378 | 2018-04-11T20:08:11.000Z | 2022-03-30T17:53:21.000Z | from oauthlib.common import UNICODE_ASCII_CHARACTER_SET
from oauthlib.common import generate_client_id as oauthlib_generate_client_id
from .settings import oauth2_settings
class BaseHashGenerator:
    """
    Abstract base: all generators should extend this class and override the
    `.hash()` method to return the generated value.
    """

    def hash(self):
        """Subclasses must implement this; the base class refuses to hash."""
        raise NotImplementedError()
class ClientIdGenerator(BaseHashGenerator):
    """Generates random OAuth2 client identifiers."""

    def hash(self):
        """
        Generate a client_id usable with the Basic Authentication scheme:
        the ASCII alphabet used contains no colon char, as required by
        http://tools.ietf.org/html/rfc2617#section-2
        """
        alphabet = UNICODE_ASCII_CHARACTER_SET
        return oauthlib_generate_client_id(length=40, chars=alphabet)
class ClientSecretGenerator(BaseHashGenerator):
    """Generates random OAuth2 client secrets of configurable length."""

    def hash(self):
        """Return a random secret whose length comes from the settings."""
        return oauthlib_generate_client_id(
            length=oauth2_settings.CLIENT_SECRET_GENERATOR_LENGTH,
            chars=UNICODE_ASCII_CHARACTER_SET)
def generate_client_id():
    """
    Generate a suitable client id using the configured generator class.
    """
    return oauth2_settings.CLIENT_ID_GENERATOR_CLASS().hash()
def generate_client_secret():
    """
    Generate a suitable client secret using the configured generator class.
    """
    return oauth2_settings.CLIENT_SECRET_GENERATOR_CLASS().hash()
| 28.369565 | 88 | 0.743295 | from oauthlib.common import UNICODE_ASCII_CHARACTER_SET
from oauthlib.common import generate_client_id as oauthlib_generate_client_id
from .settings import oauth2_settings
class BaseHashGenerator:
def hash(self):
raise NotImplementedError()
class ClientIdGenerator(BaseHashGenerator):
def hash(self):
return oauthlib_generate_client_id(length=40, chars=UNICODE_ASCII_CHARACTER_SET)
class ClientSecretGenerator(BaseHashGenerator):
def hash(self):
length = oauth2_settings.CLIENT_SECRET_GENERATOR_LENGTH
chars = UNICODE_ASCII_CHARACTER_SET
return oauthlib_generate_client_id(length=length, chars=chars)
def generate_client_id():
client_id_generator = oauth2_settings.CLIENT_ID_GENERATOR_CLASS()
return client_id_generator.hash()
def generate_client_secret():
client_secret_generator = oauth2_settings.CLIENT_SECRET_GENERATOR_CLASS()
return client_secret_generator.hash()
| true | true |
f72bc75458786940b2244977b48352162a62d379 | 767 | py | Python | src/data_consumer.py | perrycao/docker_project | e07c4a388eb4936a5f88f63e291ecb8a115bcbe2 | [
"MIT"
] | null | null | null | src/data_consumer.py | perrycao/docker_project | e07c4a388eb4936a5f88f63e291ecb8a115bcbe2 | [
"MIT"
] | null | null | null | src/data_consumer.py | perrycao/docker_project | e07c4a388eb4936a5f88f63e291ecb8a115bcbe2 | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
# version 0.1 2019-05-03 First update
import argparse
from kafka import KafkaConsumer
def consume(_topic_name, _kafka_broker):
    """Consume every message of the given Kafka topic and print it.

    Blocks indefinitely: iterating a KafkaConsumer waits for new records.
    """
    for record in KafkaConsumer(_topic_name, bootstrap_servers=_kafka_broker):
        print(record)
if __name__ == '__main__':
    # Command line: data_consumer.py <topic_name> <kafka_broker>
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("topic_name", help="the kafka topic to pull from.")
    arg_parser.add_argument("kafka_broker", help="the location of kafka broker.")
    parsed = arg_parser.parse_args()
    # Consume the requested topic (blocks forever)
    consume(parsed.topic_name, parsed.kafka_broker)
| 26.448276 | 77 | 0.711864 |
import argparse
from kafka import KafkaConsumer
def consume(_topic_name, _kafka_broker):
consumer = KafkaConsumer(_topic_name, bootstrap_servers=_kafka_broker)
for message in consumer:
print(message)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("topic_name", help="the kafka topic to pull from.")
parser.add_argument("kafka_broker", help="the location of kafka broker.")
args = parser.parse_args()
topic_name = args.topic_name
kafka_broker = args.kafka_broker
consume(topic_name, kafka_broker)
| true | true |
f72bc80b55e85e3fe6182a7b4565c13a421a0651 | 1,734 | py | Python | bioseq/management/commands/import_genome.py | ezequieljsosa/sndg-web | 7763c8fbc83dc92abb9c53326e2fe227bcabf607 | [
"MIT"
] | null | null | null | bioseq/management/commands/import_genome.py | ezequieljsosa/sndg-web | 7763c8fbc83dc92abb9c53326e2fe227bcabf607 | [
"MIT"
] | 2 | 2021-03-10T17:08:59.000Z | 2021-09-29T17:40:48.000Z | bioseq/management/commands/import_genome.py | sndg-arg/covid19-web | fe809c87563a00bbec53c3646338d571fdde6a69 | [
"MIT"
] | null | null | null | import os
import warnings
from tqdm import tqdm
import subprocess as sp
import gzip
from io import StringIO
import Bio.SeqIO as bpio
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from bioseq.io.BioIO import BioIO
from Bio import BiopythonWarning, BiopythonParserWarning, BiopythonDeprecationWarning, BiopythonExperimentalWarning
warnings.simplefilter('ignore', RuntimeWarning)
warnings.simplefilter('ignore', BiopythonWarning)
warnings.simplefilter('ignore', BiopythonParserWarning)
warnings.simplefilter('ignore', BiopythonDeprecationWarning)
warnings.simplefilter('ignore', BiopythonExperimentalWarning)
class Command(BaseCommand):
    """Management command importing a (possibly gzipped) GenBank genome
    file into the database through BioIO."""

    help = 'Loads a genome in the database'

    def add_arguments(self, parser):
        """Register the command-line options."""
        parser.add_argument('--input', '-i', required=True)
        parser.add_argument('--accession', '-a', required=True)
        parser.add_argument('--name', '-n', required=True)
        parser.add_argument('--taxon', '-t', type=int, required=True)

    @staticmethod
    def _count_records(path):
        """Count GenBank records in *path* by grepping for the
        'FEATURES ...Location/Qualifiers' header line (zgrep for .gz files).

        The command is passed as an argument list with shell=False, so a
        path containing shell metacharacters cannot inject commands — the
        previous '%s'-interpolated shell=True call was injectable.
        """
        grep = 'zgrep' if path.endswith('.gz') else 'grep'
        output = sp.check_output(
            [grep, '-c', 'FEATURES *Location/Qualifiers', path])
        return int(output)

    def handle(self, *args, **options):
        """Entry point: validate the input path, count the records (used as
        the importer's progress total) and stream-parse the file into the
        database."""
        input_path = options['input']
        accession = options['accession']
        name = options['name']  # NOTE(review): parsed but never used here
        taxon = options['taxon']
        if not os.path.exists(input_path):
            # CommandError instead of `assert`: asserts disappear under -O
            # and CommandError is reported cleanly by the management runner.
            raise CommandError("'%s' does not exists" % input_path)
        io = BioIO(accession, taxon)
        # Count on the raw path first: zgrep reads the .gz file directly.
        total = self._count_records(input_path)
        if input_path.endswith(".gz"):
            stream = gzip.open(input_path, "rt")
        else:
            stream = input_path
        io.create_db()
        io.process_record_list(bpio.parse(stream, "gb"), total)
        self.stderr.write("genome imported!")
| 36.125 | 115 | 0.703576 | import os
import warnings
from tqdm import tqdm
import subprocess as sp
import gzip
from io import StringIO
import Bio.SeqIO as bpio
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from bioseq.io.BioIO import BioIO
from Bio import BiopythonWarning, BiopythonParserWarning, BiopythonDeprecationWarning, BiopythonExperimentalWarning
warnings.simplefilter('ignore', RuntimeWarning)
warnings.simplefilter('ignore', BiopythonWarning)
warnings.simplefilter('ignore', BiopythonParserWarning)
warnings.simplefilter('ignore', BiopythonDeprecationWarning)
warnings.simplefilter('ignore', BiopythonExperimentalWarning)
class Command(BaseCommand):
help = 'Loads a genome in the database'
def add_arguments(self, parser):
parser.add_argument('--input', '-i', required=True)
parser.add_argument('--accession', '-a', required=True)
parser.add_argument('--name', '-n', required=True)
parser.add_argument('--taxon', '-t', type=int, required=True)
def handle(self, *args, **options):
input_file = options['input']
accession = options['accession']
name = options['name']
taxon = options['taxon']
assert os.path.exists(input_file),"'%s' does not exists" % input_file
io = BioIO(accession, taxon)
grep_cmd = 'grep -c "FEATURES *Location/Qualifiers" "%s"' % input_file
if input_file.endswith(".gz"):
grep_cmd = 'z' + grep_cmd
input_file = gzip.open(input_file, "rt")
total = int(sp.check_output(grep_cmd, shell=True))
io.create_db()
io.process_record_list(bpio.parse(input_file, "gb"), total)
self.stderr.write("genome imported!")
| true | true |
f72bc81ec96d1edc6e8d005ef6645e2c6fb9e483 | 14,906 | py | Python | models/SHL_2018/transforms.py | HuguesMoreau/Sensors_similariy | 4b8592049c83b03a11f5c57fab247290ee29b8f5 | [
"Apache-2.0"
] | null | null | null | models/SHL_2018/transforms.py | HuguesMoreau/Sensors_similariy | 4b8592049c83b03a11f5c57fab247290ee29b8f5 | [
"Apache-2.0"
] | null | null | null | models/SHL_2018/transforms.py | HuguesMoreau/Sensors_similariy | 4b8592049c83b03a11f5c57fab247290ee29b8f5 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This file contains diverse preprocessing functions (mostly norms and spectrograms),
and basic tests and visualizations.
If you work with any IPython console (ex: with Jupyter or Spyder), it is advised
to launch '%matplotlib qt' first, to get a clean window
"""
if __name__ == '__main__': # this is used to launch the file from anywhere
import sys
sys.path.append("../..")
import numpy as np
import torch
import scipy.signal, scipy.interpolate, scipy.ndimage
from param import classes_names, fs, duration_window, duration_overlap, spectro_batch_size
from models.SHL_2018 import Datasets
if __name__ == "__main__":
import matplotlib.pyplot as plt
n_classes = len(classes_names)
# We will need this for the tests
DS = Datasets.SignalsDataSet(mode='train', transform=None)
#%% transform functions
"""In all following functions, the input parameter (data) is, by default,
a dict of numpy arrays, containing signal names (eg. "Gyr_z") as keys, and 1-dimensional
arrays as values
Most of this part contains basic visualizations to make sure the preprocessing is correct"""
class TemporalTransform():
    """ create the base transform to use to each element of the data

    Parameters
    ----------
    signal_name: a string (ex: 'Gyr_y', 'Ori_x')
        If the string ends by "_norm" (ex: "Mag_norm"), the output will
        be the euclidean norm over the three (or four, for 'Ori') axes
        of the signal.

    Instances are callable, with input: a dict of (_, 6000) arrays
    (key example: 'Gyr_y') and output: an array with the same shape.
    """
    def __init__(self, signal_name):
        super(TemporalTransform, self).__init__()
        self.signal_name = signal_name

    def __call__(self, data):
        """
        Parameters
        ----------
        data: a dict of (B, 6000) arrays (key example: 'Gyr_y')

        Returns
        -------
        an array with shape (B, 6000), where B depends on the input shape.

        Raises
        ------
        ValueError when the signal name has an unrecognized suffix.
        """
        if self.signal_name[-2:] in ['_x', '_y', '_z', '_w'] or self.signal_name == "Pressure":
            processed_signal = data[self.signal_name]
        elif self.signal_name[-5:] == '_norm':
            suffix_location = self.signal_name.index("_")  # 4 if signal_name == "LAcc", 3 otherwise
            sensor = self.signal_name[:suffix_location]    # ex: 'Acc', 'LAcc'
            if sensor == "Ori":
                # quaternion: the sum of the four squared axes should be 1.0
                processed_signal = np.sqrt(data[sensor+"_x"]**2 + data[sensor+"_y"]**2 + data[sensor+"_z"]**2
                                           + data[sensor+"_w"]**2)
            else:
                processed_signal = np.sqrt(data[sensor+"_x"]**2 + data[sensor+"_y"]**2 + data[sensor+"_z"]**2)
        else:
            # BUG FIX: the original formatted the undefined local name
            # 'signal_name' here, which raised NameError instead of the
            # intended ValueError.
            raise ValueError("unknown signal name: '{}'. Signal names should end with either '_x', '_y', '_z', '_w', or '_norm'".format(self.signal_name))
        return processed_signal

    def __str__(self):
        """purely for visual purposes, so that we can print() the function"""
        str_to_return = "Temporal_transform"
        str_to_return += f"\n\t Signal: {self.signal_name}"
        return str_to_return
if __name__ == "__main__":
# plot one figure per sensor
# on each figure, one subplot per class,
# to find one instance per each class, we start looking at index = index0
index0 = 0
for tested_signal_name in ["Acc_norm", "Ori_norm", "Mag_norm", "LAcc_x"]:
# plot 1 segment from each class.
plt.figure()
if tested_signal_name != 'Pressure':
suffix_location = tested_signal_name.index("_")
tested_sensor = tested_signal_name[:suffix_location] # ex: 'Acc', 'LAcc'
else:
tested_sensor = 'Pressure'
sensor_axis = [tested_sensor + axis for axis in ["_x", "_y", "_z"]] if tested_sensor != 'Pressure' else ['Pressure']
if tested_sensor == "Ori" : sensor_axis.append(tested_sensor+"_w")
temporal_transform = TemporalTransform(tested_signal_name)
remaining_classes = classes_names.copy()
index = index0
while len(remaining_classes)>0:
data_tensor, class_tensor = DS[index] # data is a dict of 2D tensors (1,nb)
data_cpu = {signal:data_tensor[signal].to(torch.device('cpu')).detach().numpy() for signal in data_tensor.keys()}
class_index = int(class_tensor)
class_name = classes_names[class_index-1]
if class_name in remaining_classes:
remaining_classes.remove(class_name)
plt.subplot(2, 4, n_classes - len(remaining_classes))
for k,signal in enumerate(sensor_axis):
if k==0: # compute the temporal axis once
nb = data_cpu[signal].shape[1]
x_t = np.linspace(0, nb/fs, nb)
plt.plot(x_t, data_cpu[signal][0,:])
selected_signal = temporal_transform(data_cpu)
error_message_dtype = "One of the signals does not have the correct type: {}, {} \n dtype should be float32, is actually {}".format(tested_signal_name, str(temporal_transform), selected_signal.dtype)
assert (selected_signal.dtype == 'float32'), error_message_dtype
plt.plot(x_t, selected_signal[0,:], '--')
plt.xlabel("t (s)")
legend = sensor_axis + [tested_signal_name+' (selected)']
plt.legend(legend)
plt.title("{} ({}, index={})".format(tested_sensor, classes_names[class_index-1], index))
index +=1
plt.show()
#%%
# ---------------- Spectrogram transforms ---------------------
# Interpolation functions
def interpol_log(f, t, spectrogram, out_size):
    """interpolates the spectrogram in input using a linear axis for the
    timestamps and a LOG axis for the frequencies

    Parameters
    ----------
    f : numpy array, shape: (F_in,), frequencies of the spectrogram
    t : numpy array, shape: (T_in,), timestamps of the spectrogram
    spectrogram : (B, F_in, T_in), B is batch size; 3D numpy array
    out_size : couple of ints (F_out, T_out)

    Returns
    -------
    f_interpolated : numpy array, shape: (F_out,), frequencies of the spectrogram AFTER interpolation
    t_interpolated : numpy array, shape: (T_out,), timestamps of the spectrogram AFTER interpolation
    a (B, F_out, T_out) spectrogram, where the f axis (second dimension) has
        been re-interpolated using a log axis
    """
    B = spectrogram.shape[0]
    out_f, out_t = out_size
    # map the source axes onto [0, out_f] x [0, out_t]; the frequency axis is
    # warped logarithmically (log between 0.2 Hz and 50.2 Hz), time linearly
    log_f = np.log(f + f[1])
    log_f_normalized = (log_f - log_f[0]) / (log_f[-1] - log_f[0])  # in [0, 1]
    t_normalized = (t - t[0]) / (t[-1] - t[0])                      # in [0, 1]
    rescaled_f = out_f * log_f_normalized
    rescaled_t = out_t * t_normalized
    # the output is sampled on the integer grid 0..out_f-1 x 0..out_t-1,
    # which lies strictly inside the rescaled domain: no extrapolation
    index_f, index_t = np.arange(out_f), np.arange(out_t)
    grid_f, grid_t = np.meshgrid(index_f, index_t, indexing='ij')
    query_points = np.column_stack((grid_f.ravel(), grid_t.ravel()))
    spectrogram_interpolated = np.zeros((B, out_f, out_t), dtype='float32')
    for i in range(B):
        # RegularGridInterpolator performs the same (bi)linear grid
        # interpolation as scipy.interpolate.interp2d, which was deprecated
        # in scipy 1.10 and removed in scipy 1.14
        interp_fn = scipy.interpolate.RegularGridInterpolator(
            (rescaled_f, rescaled_t), spectrogram[i, :, :], method='linear')
        spectrogram_interpolated[i, :, :] = \
            interp_fn(query_points).reshape(out_f, out_t).astype('float32')
    # 1-D linear re-interpolation of the axes themselves (replaces interp1d)
    f_interpolated = np.interp(index_f, rescaled_f, f)
    t_interpolated = np.interp(index_t, rescaled_t, t)
    return f_interpolated, t_interpolated, spectrogram_interpolated
#%%
# ---------------- The spectrogram class --------------
class SpectrogramTransform():
    """ create the transform to work with spectrograms. This class behaves
    essentially the same as TemporalTransform, except the created transform
    returns log-power spectrograms instead of raw signals

    Parameters
    ----------
    signal_name: a string signal (ex: 'Gyr_y', 'Ori_x')
        If the string ends by "_norm" (ex: "Mag_norm"), the output will
        be the norm of the three (or four) axis of the signal.

    Returns
    -------
    a function with input: data : a dict of (_, 6000) arrays (key example: 'Gyr_y')
        and output: a (B, F, T) array of log-spectrograms (see __call__).
    """
    def __init__(self, signal_name):
        super(SpectrogramTransform, self).__init__()
        self.temporal_transform = TemporalTransform(signal_name)
        self.fs = fs
        self.duration_window = duration_window
        self.duration_overlap = duration_overlap
        self.spectro_batch_size = spectro_batch_size # these values were loaded from the param file
        self.signal_name = signal_name
        self.out_size = (48, 48)
    def __call__(self, data):
        """
        Parameters
        ----------
        data : a dict of (B, 6000) arrays (key example: 'Gyr_y')

        Returns
        -------
        An array with shape (B, F, T), where B (dataset size) depends on the
        input shape, and F and T are equal to 48 here.

        Side effects: the interpolated frequency/time axes of the last call
        are stored on self.f_interpolated and self.t_interpolated.
        """
        temporal_signal = self.temporal_transform(data)
        del data # free some memory
        fs = self.fs
        nperseg = int(self.duration_window * fs)
        noverlap = int(self.duration_overlap * fs)
        spectro_batch_size = self.spectro_batch_size
        # turning 13,000 temporal signals into (550, 500) array
        # spectrograms at once is too much: a single (13000, 550, 500) array,
        # with simple precision requires 7.15 Go !
        # This is why we work with batches of 1000 instead. For each batch,
        # we compute the complete spectrogram (1000 x 550 x 500), then
        # interpolate it to smaller sizes, before working with the following batch.
        current_spectro_batch_size = temporal_signal.shape[0]
        if current_spectro_batch_size < spectro_batch_size :
            # small enough: a single pass over the whole batch
            f, t, spectrogram = scipy.signal.spectrogram(temporal_signal, fs=fs, nperseg=nperseg, noverlap=noverlap)
            f_interpolated, t_interpolated, interpolated_spectrogram = interpol_log(f, t, spectrogram, self.out_size)
            # f, t, and possibly out_size will be ignored when the function does not need them
        else :
            n_batches = (current_spectro_batch_size-1)//spectro_batch_size +1
            nb_interp_f, nb_interp_t = self.out_size
            interpolated_spectrogram = np.zeros((current_spectro_batch_size, nb_interp_f, nb_interp_t), dtype='float32')
            for i in range(n_batches):
                i_min = i * spectro_batch_size
                i_max = (i+1) * spectro_batch_size # does not matter if it goes beyond current_spectro_batch_size
                this_temporal_signal = temporal_signal[i_min:i_max,:]
                f, t, spectrogram = scipy.signal.spectrogram(this_temporal_signal, fs=fs, nperseg=nperseg, noverlap=noverlap)
                f_interpolated, t_interpolated, interpolated_spectrogram[i_min:i_max,:,:] = interpol_log(f, t, spectrogram, self.out_size)
        del temporal_signal
        # log-power, computed in place to avoid allocating a second array
        np.log(interpolated_spectrogram + 1e-10, dtype='float32', out=interpolated_spectrogram) # in-place operation
        self.f_interpolated = f_interpolated
        self.t_interpolated = t_interpolated
        return interpolated_spectrogram
    def __str__(self):
        """purely for visual purposes, so that we can print() the function"""
        str_to_return = "Spectrogram transform"
        str_to_return += f"\n\t Signals: {self.signal_name}"
        str_to_return += f"\n\t Output size: {self.out_size}"
        str_to_return += f"\n\t Interpolation: log-interpolation"
        str_to_return += "\n\t Log-power"
        return str_to_return
# end of class SpectrogramTransform():
#%%
if __name__ == "__main__":
fontdict = {'fontsize':10}
n_ticks = 10
# we plot the raw spectrogram and two interpolated spectrograms for the following classes
selected_classes = ["Run", "Walk"]
remaining_classes = selected_classes.copy()
nsel = len(selected_classes)
index = 3204 # where to tart the search
plt.figure(figsize=(12,8))
signal_name = "Acc_norm"
temporal_transform = TemporalTransform(signal_name) # we will plot the result
spectrogram_transform = SpectrogramTransform(signal_name)
while len(remaining_classes)>0:
data_tensor, class_tensor = DS[index]
data_cpu = {signal:data_tensor[signal].cpu().detach().numpy() for signal in data_tensor.keys()}
class_index = int(class_tensor)
class_name = classes_names[class_index-1]
if class_name in remaining_classes:
remaining_classes.remove(class_name)
i_class = nsel - len(remaining_classes) # between 1 and n
temporal_signal = temporal_transform(data_cpu)
nb = temporal_signal.shape[1]
x_t = np.linspace(0, nb/fs, nb)
plt.subplot(2,nsel,i_class)
plt.plot(x_t, temporal_signal[0,:])
plt.title(f'{class_name} (index={index})', fontdict)
plt.xlabel("t (sec)")
plt.ylabel(signal_name)
data_tensor, _ = DS[index] # we need to recreate data because the variable is deleted
data_cpu = {signal:data_tensor[signal].to(torch.device('cpu')).detach().numpy() for signal in data_tensor.keys()}
spectrogram_interpolated = spectrogram_transform(data_cpu)
f_interpolated = spectrogram_transform.f_interpolated
t_interpolated = spectrogram_transform.t_interpolated
plt.subplot(2,nsel,i_class + nsel)
t_interpolated = spectrogram_transform.t_interpolated
f_interpolated = spectrogram_transform.f_interpolated
matrix_shape = spectrogram_interpolated.shape
time_list = [f'{t_interpolated[i]:.0f}' for i in np.round(np.linspace(0, matrix_shape[2]-1,n_ticks)).astype(int)]
freq_list = [f'{f_interpolated[i]:.1f}' for i in np.round(np.linspace(0, matrix_shape[1]-1,n_ticks)).astype(int)]
plt.xticks(np.linspace(0, matrix_shape[2]-1, n_ticks), time_list)
plt.yticks(np.linspace(0, matrix_shape[1]-1, n_ticks), freq_list)
plt.imshow(spectrogram_interpolated[0,:,:])
plt.ylabel("f (Hz)")
plt.xlabel("t (s)")
plt.colorbar()
index += 1
plt.show()
#%%
| 40.395664 | 216 | 0.619616 |
if __name__ == '__main__':
import sys
sys.path.append("../..")
import numpy as np
import torch
import scipy.signal, scipy.interpolate, scipy.ndimage
from param import classes_names, fs, duration_window, duration_overlap, spectro_batch_size
from models.SHL_2018 import Datasets
if __name__ == "__main__":
import matplotlib.pyplot as plt
n_classes = len(classes_names)
DS = Datasets.SignalsDataSet(mode='train', transform=None)
class TemporalTransform():
def __init__(self, signal_name):
super(TemporalTransform, self).__init__()
self.signal_name = signal_name
def __call__(self, data):
if self.signal_name[-2:] in ['_x', '_y', '_z', '_w'] or self.signal_name == "Pressure":
processed_signal = data[self.signal_name]
elif self.signal_name[-5:] == '_norm':
suffix_location = self.signal_name.index("_")
sensor = self.signal_name[:suffix_location]
if sensor == "Ori":
processed_signal = np.sqrt(data[sensor+"_x"]**2 + data[sensor+"_y"]**2 + data[sensor+"_z"]**2 \
+ data[sensor+"_w"]**2)
else :
processed_signal = np.sqrt(data[sensor+"_x"]**2 + data[sensor+"_y"]**2 + data[sensor+"_z"]**2)
else :
raise ValueError("unknown signal name: '{}'. Signal names should end with either '_x', '_y', '_z', '_w', or '_norm'".format(signal_name))
return processed_signal
def __str__(self):
str_to_return = "Temporal_transform"
str_to_return += f"\n\t Signal: {self.signal_name}"
return str_to_return
if __name__ == "__main__":
index0 = 0
for tested_signal_name in ["Acc_norm", "Ori_norm", "Mag_norm", "LAcc_x"]:
plt.figure()
if tested_signal_name != 'Pressure':
suffix_location = tested_signal_name.index("_")
tested_sensor = tested_signal_name[:suffix_location]
else:
tested_sensor = 'Pressure'
sensor_axis = [tested_sensor + axis for axis in ["_x", "_y", "_z"]] if tested_sensor != 'Pressure' else ['Pressure']
if tested_sensor == "Ori" : sensor_axis.append(tested_sensor+"_w")
temporal_transform = TemporalTransform(tested_signal_name)
remaining_classes = classes_names.copy()
index = index0
while len(remaining_classes)>0:
data_tensor, class_tensor = DS[index]
data_cpu = {signal:data_tensor[signal].to(torch.device('cpu')).detach().numpy() for signal in data_tensor.keys()}
class_index = int(class_tensor)
class_name = classes_names[class_index-1]
if class_name in remaining_classes:
remaining_classes.remove(class_name)
plt.subplot(2, 4, n_classes - len(remaining_classes))
for k,signal in enumerate(sensor_axis):
if k==0:
nb = data_cpu[signal].shape[1]
x_t = np.linspace(0, nb/fs, nb)
plt.plot(x_t, data_cpu[signal][0,:])
selected_signal = temporal_transform(data_cpu)
error_message_dtype = "One of the signals does not have the correct type: {}, {} \n dtype should be float32, is actually {}".format(tested_signal_name, str(temporal_transform), selected_signal.dtype)
assert (selected_signal.dtype == 'float32'), error_message_dtype
plt.plot(x_t, selected_signal[0,:], '--')
plt.xlabel("t (s)")
legend = sensor_axis + [tested_signal_name+' (selected)']
plt.legend(legend)
plt.title("{} ({}, index={})".format(tested_sensor, classes_names[class_index-1], index))
index +=1
plt.show()
def interpol_log(f, t, spectrogram, out_size):
B = spectrogram.shape[0]
out_f, out_t = out_size
log_f = np.log(f+f[1])
log_f_normalized = (log_f-log_f[0])/(log_f[-1]-log_f[0])
t_normalized = (t-t[0])/(t[-1]-t[0])
rescaled_f = out_f*log_f_normalized
rescaled_t = out_t*t_normalized
spectrogram_interpolated = np.zeros( (B, out_f, out_t), dtype='float32')
index_f, index_t = np.arange(out_f), np.arange(out_t)
for i in range(B):
spectrogram_fn = scipy.interpolate.interp2d(rescaled_t, rescaled_f, spectrogram[i,:,:], copy=False)
spectrogram_interpolated[i,:,:] = spectrogram_fn(index_t, index_f)
f_fn = scipy.interpolate.interp1d(rescaled_f, f, copy=False)
f_interpolated = f_fn(index_f)
t_fn = scipy.interpolate.interp1d(rescaled_t, t, copy=False)
t_interpolated = t_fn(index_t)
return f_interpolated, t_interpolated, spectrogram_interpolated
class SpectrogramTransform():
    """Turn raw sensor windows into fixed-size log-power spectrogram images.

    Pipeline: TemporalTransform(signal_name) -> scipy.signal.spectrogram ->
    interpol_log (resamples each spectrogram to ``out_size``) -> log power.

    NOTE(review): ``fs``, ``duration_window``, ``duration_overlap`` and
    ``spectro_batch_size`` are read from module-level globals in ``__init__``
    — confirm they are defined before this class is instantiated.
    """
    def __init__(self, signal_name):
        super(SpectrogramTransform, self).__init__()
        # Temporal extraction of the named signal happens first in __call__.
        self.temporal_transform = TemporalTransform(signal_name)
        self.fs = fs
        self.duration_window = duration_window
        self.duration_overlap = duration_overlap
        self.spectro_batch_size = spectro_batch_size
        self.signal_name = signal_name
        # Fixed (n_freq, n_time) size of every output spectrogram image.
        self.out_size = (48, 48)
    def __call__(self, data):
        """Return a float32 array of shape (n_samples,) + self.out_size.

        Also stores ``self.f_interpolated`` / ``self.t_interpolated`` (the
        resampled frequency/time axes) for later plotting, as used by the
        ``__main__`` demo below.
        """
        temporal_signal = self.temporal_transform(data)
        del data
        fs = self.fs
        # Window/overlap lengths in samples for the STFT.
        nperseg = int(self.duration_window * fs)
        noverlap = int(self.duration_overlap * fs)
        spectro_batch_size = self.spectro_batch_size
        current_spectro_batch_size = temporal_signal.shape[0]
        if current_spectro_batch_size < spectro_batch_size :
            # Small enough to process in one go.
            f, t, spectrogram = scipy.signal.spectrogram(temporal_signal, fs=fs, nperseg=nperseg, noverlap=noverlap)
            f_interpolated, t_interpolated, interpolated_spectrogram = interpol_log(f, t, spectrogram, self.out_size)
        else :
            # Process in batches of spectro_batch_size rows, presumably to
            # bound the peak memory of the full-resolution spectrograms.
            n_batches = (current_spectro_batch_size-1)//spectro_batch_size +1
            nb_interp_f, nb_interp_t = self.out_size
            interpolated_spectrogram = np.zeros((current_spectro_batch_size, nb_interp_f, nb_interp_t), dtype='float32')
            for i in range(n_batches):
                i_min = i * spectro_batch_size
                i_max = (i+1) * spectro_batch_size
                this_temporal_signal = temporal_signal[i_min:i_max,:]
                f, t, spectrogram = scipy.signal.spectrogram(this_temporal_signal, fs=fs, nperseg=nperseg, noverlap=noverlap)
                # The interpolated axes (f/t) are the same for every batch;
                # only the last assignment survives, which is equivalent.
                f_interpolated, t_interpolated, interpolated_spectrogram[i_min:i_max,:,:] = interpol_log(f, t, spectrogram, self.out_size)
        del temporal_signal
        # In-place log-power; the epsilon avoids log(0) on silent bins.
        np.log(interpolated_spectrogram + 1e-10, dtype='float32', out=interpolated_spectrogram)
        # Keep the axes so callers can label plots of the returned images.
        self.f_interpolated = f_interpolated
        self.t_interpolated = t_interpolated
        return interpolated_spectrogram
    def __str__(self):
        """Human-readable summary of the transform configuration."""
        str_to_return = "Spectrogram transform"
        str_to_return += f"\n\t Signals: {self.signal_name}"
        str_to_return += f"\n\t Output size: {self.out_size}"
        str_to_return += f"\n\t Interpolation: log-interpolation"
        str_to_return += "\n\t Log-power"
        return str_to_return
if __name__ == "__main__":
    # Demo: for one example of each selected class, plot the raw temporal
    # signal (top row) and its interpolated log-spectrogram (bottom row).
    # NOTE(review): relies on module-level globals `DS` (dataset),
    # `classes_names`, `fs`, `plt`, `torch`, `np` — confirm they exist.
    fontdict = {'fontsize':10}
    n_ticks = 10
    selected_classes = ["Run", "Walk"]
    remaining_classes = selected_classes.copy()
    nsel = len(selected_classes)
    index = 3204  # starting dataset index to scan from
    plt.figure(figsize=(12,8))
    signal_name = "Acc_norm"
    temporal_transform = TemporalTransform(signal_name)
    spectrogram_transform = SpectrogramTransform(signal_name)
    # Scan forward through the dataset until one sample of every selected
    # class has been plotted.
    while len(remaining_classes)>0:
        data_tensor, class_tensor = DS[index]
        data_cpu = {signal:data_tensor[signal].cpu().detach().numpy() for signal in data_tensor.keys()}
        class_index = int(class_tensor)
        class_name = classes_names[class_index-1]  # labels are 1-based
        if class_name in remaining_classes:
            remaining_classes.remove(class_name)
            i_class = nsel - len(remaining_classes)  # 1-based subplot column
            # Top row: raw temporal signal.
            temporal_signal = temporal_transform(data_cpu)
            nb = temporal_signal.shape[1]
            x_t = np.linspace(0, nb/fs, nb)
            plt.subplot(2,nsel,i_class)
            plt.plot(x_t, temporal_signal[0,:])
            plt.title(f'{class_name} (index={index})', fontdict)
            plt.xlabel("t (sec)")
            plt.ylabel(signal_name)
            # Bottom row: spectrogram (re-fetch the sample untouched).
            data_tensor, _ = DS[index]
            data_cpu = {signal:data_tensor[signal].to(torch.device('cpu')).detach().numpy() for signal in data_tensor.keys()}
            spectrogram_interpolated = spectrogram_transform(data_cpu)
            # Axes stored on the transform by the call above.
            f_interpolated = spectrogram_transform.f_interpolated
            t_interpolated = spectrogram_transform.t_interpolated
            plt.subplot(2,nsel,i_class + nsel)
            t_interpolated = spectrogram_transform.t_interpolated
            f_interpolated = spectrogram_transform.f_interpolated
            matrix_shape = spectrogram_interpolated.shape
            # Tick labels: n_ticks evenly spaced positions along each axis.
            time_list = [f'{t_interpolated[i]:.0f}' for i in np.round(np.linspace(0, matrix_shape[2]-1,n_ticks)).astype(int)]
            freq_list = [f'{f_interpolated[i]:.1f}' for i in np.round(np.linspace(0, matrix_shape[1]-1,n_ticks)).astype(int)]
            plt.xticks(np.linspace(0, matrix_shape[2]-1, n_ticks), time_list)
            plt.yticks(np.linspace(0, matrix_shape[1]-1, n_ticks), freq_list)
            plt.imshow(spectrogram_interpolated[0,:,:])
            plt.ylabel("f (Hz)")
            plt.xlabel("t (s)")
            plt.colorbar()
        index += 1
    plt.show()
| true | true |
f72bc8fafdff910faf1d81e8260be95f5fd4d2f1 | 6,823 | py | Python | lib/rpn_layer/proposal_target_layer.py | aditya2592/PoseCNN | a763120ce0ceb55cf3432980287ef463728f8052 | [
"MIT"
] | 655 | 2018-03-21T19:55:45.000Z | 2022-03-25T20:41:21.000Z | lib/rpn_layer/proposal_target_layer.py | SergioRAgostinho/PoseCNN | da9eaae850eed7521a2a48a4d27474d655caab42 | [
"MIT"
] | 122 | 2018-04-04T13:57:49.000Z | 2022-03-18T09:28:44.000Z | lib/rpn_layer/proposal_target_layer.py | SergioRAgostinho/PoseCNN | da9eaae850eed7521a2a48a4d27474d655caab42 | [
"MIT"
] | 226 | 2018-03-22T01:40:04.000Z | 2022-03-17T11:56:14.000Z | # --------------------------------------------------------
# Faster R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick, Sean Bell and Xinlei Chen
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import numpy.random as npr
from fcn.config import cfg
from utils.bbox_transform import bbox_transform
from utils.cython_bbox import bbox_overlaps
def proposal_target_layer(rpn_rois, rpn_scores, gt_boxes, poses, _num_classes):
    """Assign RPN proposals to ground-truth targets.

    Produces sampled RoIs with classification labels, bounding-box
    regression targets/weights and pose (quaternion) targets/weights.
    """
    candidate_rois = rpn_rois
    candidate_scores = rpn_scores
    if cfg.TRAIN.USE_GT:
        # Append the ground-truth boxes themselves as candidate RoIs,
        # prefixed with a zero batch-index column.
        pad = np.zeros((gt_boxes.shape[0], 1), dtype=gt_boxes.dtype)
        candidate_rois = np.vstack(
            (candidate_rois, np.hstack((pad, gt_boxes[:, :-1]))))
        # Not a meaningful score, but keeps the arrays aligned.
        candidate_scores = np.vstack((candidate_scores, pad))
    num_images = 1  # one image per minibatch
    rois_per_image = cfg.TRAIN.BATCH_SIZE / num_images
    fg_rois_per_image = np.round(cfg.TRAIN.FG_FRACTION * rois_per_image)
    # Sample RoIs with labels, box-regression and pose-regression targets.
    (labels, rois, roi_scores, bbox_targets, bbox_inside_weights,
     poses_target, poses_weight) = _sample_rois(
        candidate_rois, candidate_scores, gt_boxes, poses,
        fg_rois_per_image, rois_per_image, _num_classes)
    rois = rois.reshape(-1, 5)
    roi_scores = roi_scores.reshape(-1)
    labels = labels.reshape(-1, 1)
    bbox_targets = bbox_targets.reshape(-1, _num_classes * 4)
    bbox_inside_weights = bbox_inside_weights.reshape(-1, _num_classes * 4)
    # Outside weights are simply 1 wherever an inside weight is set.
    bbox_outside_weights = np.array(bbox_inside_weights > 0).astype(np.float32)
    return (rois, roi_scores, labels, bbox_targets, bbox_inside_weights,
            bbox_outside_weights, poses_target, poses_weight)
def _get_bbox_regression_labels(bbox_target_data, num_classes):
    """Expand compact N x (class, tx, ty, tw, th) regression targets into
    the N x 4K form the network consumes (non-zero only for the RoI's class).

    Returns:
        bbox_targets (ndarray): N x 4K blob of regression targets
        bbox_inside_weights (ndarray): N x 4K blob of loss weights
    """
    class_col = bbox_target_data[:, 0]
    n_rois = class_col.size
    bbox_targets = np.zeros((n_rois, 4 * num_classes), dtype=np.float32)
    bbox_inside_weights = np.zeros_like(bbox_targets)
    # Only foreground RoIs (class > 0) get targets/weights.
    for roi_idx in np.where(class_col > 0)[0]:
        col = int(4 * class_col[roi_idx])
        bbox_targets[roi_idx, col:col + 4] = bbox_target_data[roi_idx, 1:]
        bbox_inside_weights[roi_idx, col:col + 4] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS
    return bbox_targets, bbox_inside_weights
def _compute_targets(ex_rois, gt_rois, labels):
    """Compute bounding-box regression deltas for one image.

    Returns an N x 5 float32 array of (label, tx, ty, tw, th).
    """
    assert ex_rois.shape[0] == gt_rois.shape[0]
    assert ex_rois.shape[1] == 4
    assert gt_rois.shape[1] == 4
    deltas = bbox_transform(ex_rois, gt_rois)
    if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED:
        # Optionally normalize targets by precomputed dataset statistics.
        mean = np.array(cfg.TRAIN.BBOX_NORMALIZE_MEANS)
        std = np.array(cfg.TRAIN.BBOX_NORMALIZE_STDS)
        deltas = (deltas - mean) / std
    stacked = np.hstack((labels[:, np.newaxis], deltas))
    return stacked.astype(np.float32, copy=False)
def _compute_pose_targets(quaternions, labels, num_classes):
"""Compute pose regression targets for an image."""
num = quaternions.shape[0]
poses_target = np.zeros((num, 4 * num_classes), dtype=np.float32)
poses_weight = np.zeros((num, 4 * num_classes), dtype=np.float32)
for i in xrange(num):
cls = labels[i]
if cls > 0:
start = int(4 * cls)
end = start + 4
poses_target[i, start:end] = quaternions[i, :]
poses_weight[i, start:end] = 1.0
return poses_target, poses_weight
def _sample_rois(all_rois, all_scores, gt_boxes, poses, fg_rois_per_image, rois_per_image, num_classes):
    """Generate a random sample of RoIs comprising foreground and background
    examples, plus their box- and pose-regression targets.

    Raises:
        ValueError: if no RoI qualifies as foreground or background.
    """
    # overlaps: (rois x gt_boxes)
    overlaps = bbox_overlaps(
        np.ascontiguousarray(all_rois[:, 1:5], dtype=np.float),
        np.ascontiguousarray(gt_boxes[:, :4], dtype=np.float))
    gt_assignment = overlaps.argmax(axis=1)
    max_overlaps = overlaps.max(axis=1)
    labels = gt_boxes[gt_assignment, 4]
    quaternions = poses[gt_assignment, 6:10]
    # Foreground RoIs: overlap with some gt box >= FG_THRESH.
    fg_inds = np.where(max_overlaps >= cfg.TRAIN.FG_THRESH)[0]
    # Background RoIs: overlap within [BG_THRESH_LO, BG_THRESH_HI).
    bg_inds = np.where((max_overlaps < cfg.TRAIN.BG_THRESH_HI) &
                       (max_overlaps >= cfg.TRAIN.BG_THRESH_LO))[0]
    # Ensure a fixed number of sampled regions, replacing when short.
    if fg_inds.size > 0 and bg_inds.size > 0:
        fg_rois_per_image = min(fg_rois_per_image, fg_inds.size)
        fg_inds = npr.choice(fg_inds, size=int(fg_rois_per_image), replace=False)
        bg_rois_per_image = rois_per_image - fg_rois_per_image
        to_replace = bg_inds.size < bg_rois_per_image
        bg_inds = npr.choice(bg_inds, size=int(bg_rois_per_image), replace=to_replace)
    elif fg_inds.size > 0:
        to_replace = fg_inds.size < rois_per_image
        fg_inds = npr.choice(fg_inds, size=int(rois_per_image), replace=to_replace)
        fg_rois_per_image = rois_per_image
    elif bg_inds.size > 0:
        to_replace = bg_inds.size < rois_per_image
        bg_inds = npr.choice(bg_inds, size=int(rois_per_image), replace=to_replace)
        fg_rois_per_image = 0
    else:
        # Was `import pdb; pdb.set_trace()` — a leftover debugger breakpoint
        # that would hang an unattended training run. Fail loudly instead.
        raise ValueError(
            "_sample_rois: no foreground or background RoIs to sample")
    # The indices that we're selecting (both fg and bg)
    keep_inds = np.append(fg_inds, bg_inds)
    # Select sampled values from various arrays:
    labels = labels[keep_inds]
    # Clamp labels for the background RoIs to 0
    labels[int(fg_rois_per_image):] = 0
    rois = all_rois[keep_inds]
    roi_scores = all_scores[keep_inds]
    # pose regression targets and weights
    poses_target, poses_weight = _compute_pose_targets(quaternions[keep_inds], labels, num_classes)
    bbox_target_data = _compute_targets(
        rois[:, 1:5], gt_boxes[gt_assignment[keep_inds], :4], labels)
    bbox_targets, bbox_inside_weights = \
        _get_bbox_regression_labels(bbox_target_data, num_classes)
    return labels, rois, roi_scores, bbox_targets, bbox_inside_weights, poses_target, poses_weight
| 39.212644 | 118 | 0.725194 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import numpy.random as npr
from fcn.config import cfg
from utils.bbox_transform import bbox_transform
from utils.cython_bbox import bbox_overlaps
def proposal_target_layer(rpn_rois, rpn_scores, gt_boxes, poses, _num_classes):
    """Assign RPN proposals to ground-truth targets.

    Produces sampled RoIs with classification labels, bounding-box
    regression targets/weights and pose (quaternion) targets/weights.
    """
    all_rois = rpn_rois
    all_scores = rpn_scores
    if cfg.TRAIN.USE_GT:
        # Append ground-truth boxes as candidate RoIs, with a zero
        # batch-index column; scores get matching zero padding.
        zeros = np.zeros((gt_boxes.shape[0], 1), dtype=gt_boxes.dtype)
        all_rois = np.vstack(
            (all_rois, np.hstack((zeros, gt_boxes[:, :-1])))
        )
        all_scores = np.vstack((all_scores, zeros))
    num_images = 1  # one image per minibatch
    rois_per_image = cfg.TRAIN.BATCH_SIZE / num_images
    fg_rois_per_image = np.round(cfg.TRAIN.FG_FRACTION * rois_per_image)
    # Sample RoIs with labels, box- and pose-regression targets.
    labels, rois, roi_scores, bbox_targets, bbox_inside_weights, poses_target, poses_weight = _sample_rois(
        all_rois, all_scores, gt_boxes, poses, fg_rois_per_image,
        rois_per_image, _num_classes)
    rois = rois.reshape(-1, 5)
    roi_scores = roi_scores.reshape(-1)
    labels = labels.reshape(-1, 1)
    bbox_targets = bbox_targets.reshape(-1, _num_classes * 4)
    bbox_inside_weights = bbox_inside_weights.reshape(-1, _num_classes * 4)
    # Outside weights are 1 exactly where an inside weight is set.
    bbox_outside_weights = np.array(bbox_inside_weights > 0).astype(np.float32)
    return rois, roi_scores, labels, bbox_targets, bbox_inside_weights, bbox_outside_weights, poses_target, poses_weight
def _get_bbox_regression_labels(bbox_target_data, num_classes):
    """Expand compact N x (class, tx, ty, tw, th) targets into the N x 4K
    layout used by the network (non-zero only in the RoI's class slot).

    Returns:
        bbox_targets (ndarray): N x 4K regression targets.
        bbox_inside_weights (ndarray): N x 4K loss weights.
    """
    clss = bbox_target_data[:, 0]
    bbox_targets = np.zeros((clss.size, 4 * num_classes), dtype=np.float32)
    bbox_inside_weights = np.zeros(bbox_targets.shape, dtype=np.float32)
    # Only foreground RoIs (class > 0) receive targets/weights.
    inds = np.where(clss > 0)[0]
    for ind in inds:
        cls = clss[ind]
        start = int(4 * cls)
        end = start + 4
        bbox_targets[ind, start:end] = bbox_target_data[ind, 1:]
        bbox_inside_weights[ind, start:end] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS
    return bbox_targets, bbox_inside_weights
def _compute_targets(ex_rois, gt_rois, labels):
    """Compute bounding-box regression deltas for an image.

    Returns an N x 5 float32 array of (label, tx, ty, tw, th).
    """
    assert ex_rois.shape[0] == gt_rois.shape[0]
    assert ex_rois.shape[1] == 4
    assert gt_rois.shape[1] == 4
    targets = bbox_transform(ex_rois, gt_rois)
    if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED:
        # Optionally normalize targets by precomputed mean and stdev.
        targets = ((targets - np.array(cfg.TRAIN.BBOX_NORMALIZE_MEANS))
                   / np.array(cfg.TRAIN.BBOX_NORMALIZE_STDS))
    return np.hstack(
        (labels[:, np.newaxis], targets)).astype(np.float32, copy=False)
def _compute_pose_targets(quaternions, labels, num_classes):
num = quaternions.shape[0]
poses_target = np.zeros((num, 4 * num_classes), dtype=np.float32)
poses_weight = np.zeros((num, 4 * num_classes), dtype=np.float32)
for i in xrange(num):
cls = labels[i]
if cls > 0:
start = int(4 * cls)
end = start + 4
poses_target[i, start:end] = quaternions[i, :]
poses_weight[i, start:end] = 1.0
return poses_target, poses_weight
def _sample_rois(all_rois, all_scores, gt_boxes, poses, fg_rois_per_image, rois_per_image, num_classes):
    """Generate a random sample of RoIs comprising foreground and background
    examples, plus their box- and pose-regression targets.

    Raises:
        ValueError: if no RoI qualifies as foreground or background.
    """
    # overlaps: (rois x gt_boxes)
    overlaps = bbox_overlaps(
        np.ascontiguousarray(all_rois[:, 1:5], dtype=np.float),
        np.ascontiguousarray(gt_boxes[:, :4], dtype=np.float))
    gt_assignment = overlaps.argmax(axis=1)
    max_overlaps = overlaps.max(axis=1)
    labels = gt_boxes[gt_assignment, 4]
    quaternions = poses[gt_assignment, 6:10]
    # Foreground RoIs: overlap with some gt box >= FG_THRESH.
    fg_inds = np.where(max_overlaps >= cfg.TRAIN.FG_THRESH)[0]
    # Background RoIs: overlap within [BG_THRESH_LO, BG_THRESH_HI).
    bg_inds = np.where((max_overlaps < cfg.TRAIN.BG_THRESH_HI) &
                       (max_overlaps >= cfg.TRAIN.BG_THRESH_LO))[0]
    # Ensure a fixed number of sampled regions, replacing when short.
    if fg_inds.size > 0 and bg_inds.size > 0:
        fg_rois_per_image = min(fg_rois_per_image, fg_inds.size)
        fg_inds = npr.choice(fg_inds, size=int(fg_rois_per_image), replace=False)
        bg_rois_per_image = rois_per_image - fg_rois_per_image
        to_replace = bg_inds.size < bg_rois_per_image
        bg_inds = npr.choice(bg_inds, size=int(bg_rois_per_image), replace=to_replace)
    elif fg_inds.size > 0:
        to_replace = fg_inds.size < rois_per_image
        fg_inds = npr.choice(fg_inds, size=int(rois_per_image), replace=to_replace)
        fg_rois_per_image = rois_per_image
    elif bg_inds.size > 0:
        to_replace = bg_inds.size < rois_per_image
        bg_inds = npr.choice(bg_inds, size=int(rois_per_image), replace=to_replace)
        fg_rois_per_image = 0
    else:
        # Was `import pdb; pdb.set_trace()` — a leftover debugger breakpoint
        # that would hang an unattended training run. Fail loudly instead.
        raise ValueError(
            "_sample_rois: no foreground or background RoIs to sample")
    # The indices that we're selecting (both fg and bg).
    keep_inds = np.append(fg_inds, bg_inds)
    # Select sampled values from various arrays:
    labels = labels[keep_inds]
    # Clamp labels for the background RoIs to 0
    labels[int(fg_rois_per_image):] = 0
    rois = all_rois[keep_inds]
    roi_scores = all_scores[keep_inds]
    # pose regression targets and weights
    poses_target, poses_weight = _compute_pose_targets(quaternions[keep_inds], labels, num_classes)
    bbox_target_data = _compute_targets(
        rois[:, 1:5], gt_boxes[gt_assignment[keep_inds], :4], labels)
    bbox_targets, bbox_inside_weights = \
        _get_bbox_regression_labels(bbox_target_data, num_classes)
    return labels, rois, roi_scores, bbox_targets, bbox_inside_weights, poses_target, poses_weight
| true | true |
f72bca34371f9e866f547515c6eb963685418158 | 3,878 | py | Python | alveo/apps/whole_app_acceleration/classification/test_classify_pp.py | dendisuhubdy/Vitis-AI | 524f65224c52314155dafc011d488ed30e458fcb | [
"Apache-2.0"
] | 3 | 2020-10-29T15:00:30.000Z | 2021-10-21T08:09:34.000Z | alveo/apps/whole_app_acceleration/classification/test_classify_pp.py | dendisuhubdy/Vitis-AI | 524f65224c52314155dafc011d488ed30e458fcb | [
"Apache-2.0"
] | 20 | 2020-10-31T03:19:03.000Z | 2020-11-02T18:59:49.000Z | alveo/apps/whole_app_acceleration/classification/test_classify_pp.py | dendisuhubdy/Vitis-AI | 524f65224c52314155dafc011d488ed30e458fcb | [
"Apache-2.0"
] | 9 | 2020-10-14T02:04:10.000Z | 2020-12-01T08:23:02.000Z | # Copyright 2019 Xilinx Inc.
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from six import itervalues, iteritems
from ctypes import *
import numpy as np
import os, sys
from vai.dpuv1.rt import xdnn, xdnn_io
from vai.dpuv1.rt.vitis.python.dpu.runner import Runner
import waa_rt
import multiprocessing as mp
import ctypes
def pre_process(q, args):
    """Producer process: run the FPGA pre-processing kernel on every input
    image and push each pre-processed array onto queue ``q`` in order.
    """
    xclbin_path = str(args['xclbin'] + "/xdnn_v3_96x16_2pe_8b_9mb_bank03.xclbin")
    kernel_name = "pp_pipeline_accel"
    device_idx = args['deviceid']
    handle = waa_rt.PreProcess(xclbin_path, kernel_name, device_idx, 0)
    batch_sz = args['batch_sz']
    image_paths = xdnn_io.getFilePaths(args['images'])
    print("Pre-processing handle created. Populating Queue")
    # Walk the image list in batch-sized strides; within each stride, push
    # every pre-processed image (the returned height is unused here).
    for start in range(0, len(image_paths), batch_sz):
        for path in image_paths[start:start + batch_sz]:
            processed, _height = handle.preprocess_input(path)
            q.put(processed)
    print("Queue populated")
def process_xdnn(q, args):
    """Consumer process: pull pre-processed images from queue ``q``, run
    batched FPGA inference via the Vitis Runner, finish with CPU FC +
    softmax, and either check accuracy against a golden map or print the
    classifications.
    """
    runner = Runner(args['vitis_rundir'])
    inTensors = runner.get_input_tensors()
    outTensors = runner.get_output_tensors()
    batch_sz = args['batch_sz']
    if batch_sz == -1:
        # use Runner's suggested batch size
        batch_sz = inTensors[0].dims[0]
    if args['golden']:
        goldenMap = xdnn_io.getGoldenMap(args['golden'])
        top5Count = 0
        top1Count = 0
    # Pre-allocate input/output blobs: fpgaBlobs[0] = inputs,
    # fpgaBlobs[1] = outputs, each shaped (batch, *tensor_dims[1:]).
    fpgaBlobs = []
    for io in [inTensors, outTensors]:
        blobs = []
        for t in io:
            shape = (batch_sz,) + tuple([t.dims[i] for i in range(t.ndims)][1:])
            blobs.append(np.empty((shape), dtype=np.float32, order='C'))
        fpgaBlobs.append(blobs)
    img_paths = xdnn_io.getFilePaths(args['images'])
    labels = xdnn_io.get_labels(args['labels'])
    xdnnCPUOp = xdnn.XDNNCPUOp("%s/weights.h5" % args['vitis_rundir'])
    fcOutput = np.empty((batch_sz, args['outsz'],), dtype=np.float32, order='C')
    fpgaInput = fpgaBlobs[0][0]
    for i in range(0, len(img_paths), batch_sz):
        pl = []
        # fill tensor input data from image file
        # NOTE(review): assumes the producer pushes images in the same order
        # as img_paths — confirm against pre_process().
        for j, p in enumerate(img_paths[i:i + batch_sz]):
            img, _ = q.get(), None
            pl.append(p)
            np.copyto(fpgaInput[j], img)
        # Run the FPGA graph, then the CPU tail (FC + softmax).
        jid = runner.execute_async(fpgaBlobs[0], fpgaBlobs[1])
        runner.wait(jid)
        xdnnCPUOp.computeFC(fpgaBlobs[1][0], fcOutput)
        softmaxOut = xdnnCPUOp.computeSoftmax(fcOutput)
        if args['golden']:
            # Accumulate top-1/top-5 hits against the golden labels.
            for j,p in enumerate(img_paths[i:i + batch_sz]):
                top1Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 1)
                top5Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 5)
        else:
            xdnn_io.printClassification(softmaxOut, pl, labels)
    if args['golden']:
        print ( ("\nAverage accuracy (n=%d) Top-1: %.1f%%, Top-5: %.1f%%\n") % (len(img_paths), float(top1Count)/float(len(img_paths))*100., float(top5Count)/float(len(img_paths))*100.) )
if __name__ == '__main__':
    # Producer/consumer pipeline: one process pre-processes images on the
    # FPGA while another runs XDNN inference, linked by a multiprocessing
    # queue.
    print("\n\n\n\n\n\n\n\n" + '\33[32m' + "Running Inference with HW Pre-processing" + '\33[0m')
    args = xdnn_io.processCommandLine()
    # Create a queue for passing the pre-processed data
    q = mp.Queue()
    # Creating a process to run HW pre-processing kernel
    p_preprocess = mp.Process(target=pre_process,args=(q,args))
    # Process to run XDNN
    p_xdnn = mp.Process(target=process_xdnn,args=(q,args))
    # Start both concurrently, then wait for both to finish.
    p_preprocess.start()
    p_xdnn.start()
    p_preprocess.join()
    p_xdnn.join()
| 34.318584 | 183 | 0.687726 |
from __future__ import print_function
from six import itervalues, iteritems
from ctypes import *
import numpy as np
import os, sys
from vai.dpuv1.rt import xdnn, xdnn_io
from vai.dpuv1.rt.vitis.python.dpu.runner import Runner
import waa_rt
import multiprocessing as mp
import ctypes
def pre_process(q,args):
    """Producer process: run the FPGA pre-processing kernel on every input
    image and push each pre-processed array onto queue ``q`` in order.
    """
    xclbin_p=str(args['xclbin']+"/xdnn_v3_96x16_2pe_8b_9mb_bank03.xclbin")
    kernelName_p="pp_pipeline_accel"
    deviceIdx_p=args['deviceid']
    fpga_pp = waa_rt.PreProcess(xclbin_p,kernelName_p,deviceIdx_p, 0)
    batch_sz = args['batch_sz']
    img_paths = xdnn_io.getFilePaths(args['images'])
    print("Pre-processing handle created. Populating Queue")
    # Walk the image list in batch-sized strides; the returned height (ht)
    # is unused here.
    for i in range(0, len(img_paths), batch_sz):
        for j, p in enumerate(img_paths[i:i + batch_sz]):
            arr, ht = fpga_pp.preprocess_input(p)
            q.put(arr)
    print("Queue populated")
def process_xdnn(q,args):
    """Consumer process: pull pre-processed images from queue ``q``, run
    batched FPGA inference via the Vitis Runner, finish with CPU FC +
    softmax, and either check accuracy against a golden map or print the
    classifications.
    """
    runner = Runner(args['vitis_rundir'])
    inTensors = runner.get_input_tensors()
    outTensors = runner.get_output_tensors()
    batch_sz = args['batch_sz']
    if batch_sz == -1:
        # Use the Runner's suggested batch size.
        batch_sz = inTensors[0].dims[0]
    if args['golden']:
        goldenMap = xdnn_io.getGoldenMap(args['golden'])
        top5Count = 0
        top1Count = 0
    # Pre-allocate input/output blobs: fpgaBlobs[0] = inputs,
    # fpgaBlobs[1] = outputs, each shaped (batch, *tensor_dims[1:]).
    fpgaBlobs = []
    for io in [inTensors, outTensors]:
        blobs = []
        for t in io:
            shape = (batch_sz,) + tuple([t.dims[i] for i in range(t.ndims)][1:])
            blobs.append(np.empty((shape), dtype=np.float32, order='C'))
        fpgaBlobs.append(blobs)
    img_paths = xdnn_io.getFilePaths(args['images'])
    labels = xdnn_io.get_labels(args['labels'])
    xdnnCPUOp = xdnn.XDNNCPUOp("%s/weights.h5" % args['vitis_rundir'])
    fcOutput = np.empty((batch_sz, args['outsz'],), dtype=np.float32, order='C')
    fpgaInput = fpgaBlobs[0][0]
    for i in range(0, len(img_paths), batch_sz):
        pl = []
        # fill tensor input data from image file
        # NOTE(review): assumes the producer pushes images in img_paths
        # order — confirm against pre_process().
        for j, p in enumerate(img_paths[i:i + batch_sz]):
            img, _ = q.get(), None
            pl.append(p)
            np.copyto(fpgaInput[j], img)
        # Run the FPGA graph, then the CPU tail (FC + softmax).
        jid = runner.execute_async(fpgaBlobs[0], fpgaBlobs[1])
        runner.wait(jid)
        xdnnCPUOp.computeFC(fpgaBlobs[1][0], fcOutput)
        softmaxOut = xdnnCPUOp.computeSoftmax(fcOutput)
        if args['golden']:
            # Accumulate top-1/top-5 hits against the golden labels.
            for j,p in enumerate(img_paths[i:i + batch_sz]):
                top1Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 1)
                top5Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 5)
        else:
            xdnn_io.printClassification(softmaxOut, pl, labels)
    if args['golden']:
        print ( ("\nAverage accuracy (n=%d) Top-1: %.1f%%, Top-5: %.1f%%\n") % (len(img_paths), float(top1Count)/float(len(img_paths))*100., float(top5Count)/float(len(img_paths))*100.) )
if __name__ == '__main__':
    # Producer/consumer pipeline: one process pre-processes images on the
    # FPGA while another runs XDNN inference, linked by a queue.
    print("\n\n\n\n\n\n\n\n" + '\33[32m' + "Running Inference with HW Pre-processing" + '\33[0m')
    args = xdnn_io.processCommandLine()
    #Create a queue for passing the pre-processed data
    q = mp.Queue()
    #Creating a process to run HW pre-processing kernel
    p_preprocess = mp.Process(target=pre_process,args=(q,args))
    #Process to run XDNN
    p_xdnn = mp.Process(target=process_xdnn,args=(q,args))
    # Start both concurrently, then wait for both to finish.
    p_preprocess.start()
    p_xdnn.start()
    p_preprocess.join()
    p_xdnn.join()
| true | true |
f72bca80139a374bf9bcf9170ec60403226e508e | 5,931 | py | Python | code/networks/Unet.py | loveredcarrot/ssl_multi_seg | 5315dbcc2c44e8effab28699c1491dd67b7ce00b | [
"Apache-2.0"
] | 3 | 2021-04-28T09:36:32.000Z | 2021-11-17T02:52:07.000Z | code/networks/Unet.py | loveredcarrot/ssl_multi_seg | 5315dbcc2c44e8effab28699c1491dd67b7ce00b | [
"Apache-2.0"
] | null | null | null | code/networks/Unet.py | loveredcarrot/ssl_multi_seg | 5315dbcc2c44e8effab28699c1491dd67b7ce00b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# @Time : 2021/4/8 15:52
# @Author : aurorazeng
# @File : Unet.py
# @license: (C) Copyright 2021-2026, aurorazeng; No reprobaiction without permission.
"""
The implementation is borrowed from: https://github.com/HiLab-git/PyMIC
"""
from __future__ import division, print_function
import numpy as np
import torch
import torch.nn as nn
from torch.distributions.uniform import Uniform
class ConvBlock(nn.Module):
    """Two 3x3 conv -> batch-norm -> ReLU stages with dropout in between.

    Spatial size is preserved (padding=1); channels go
    in_channels -> out_channels -> out_channels.
    """

    def __init__(self, in_channels, out_channels, dropout_p):
        super(ConvBlock, self).__init__()
        stages = [
            nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
            nn.Dropout(dropout_p),
            nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
        ]
        # Attribute name kept as `conv_conv` so state_dict keys match.
        self.conv_conv = nn.Sequential(*stages)

    def forward(self, x):
        return self.conv_conv(x)
class DownBlock(nn.Module):
    """Halve spatial resolution with 2x2 max-pooling, then apply a ConvBlock."""

    def __init__(self, in_channels, out_channels, dropout_p):
        super(DownBlock, self).__init__()
        pool = nn.MaxPool2d(2)
        conv = ConvBlock(in_channels, out_channels, dropout_p)
        # Attribute name kept as `maxpool_conv` so state_dict keys match.
        self.maxpool_conv = nn.Sequential(pool, conv)

    def forward(self, x):
        return self.maxpool_conv(x)
class UpBlock(nn.Module):
    """UNet expanding-path stage: upsample the deep features, concatenate
    the matching skip connection, then refine with a ConvBlock.

    With ``bilinear=True`` a 1x1 conv first maps ``in_channels1`` down to
    ``in_channels2`` and the 2x upsampling is non-learned bilinear;
    otherwise a transposed convolution does both at once.
    """

    def __init__(self, in_channels1, in_channels2, out_channels, dropout_p,
                 bilinear=True):
        super(UpBlock, self).__init__()
        self.bilinear = bilinear
        if bilinear:
            self.conv1x1 = nn.Conv2d(in_channels1, in_channels2, kernel_size=1)
            self.up = nn.Upsample(scale_factor=2, mode='bilinear',
                                  align_corners=True)
        else:
            self.up = nn.ConvTranspose2d(in_channels1, in_channels2,
                                         kernel_size=2, stride=2)
        self.conv = ConvBlock(in_channels2 * 2, out_channels, dropout_p)

    def forward(self, x1, x2):
        # x1: deep features to upsample; x2: skip connection at target size.
        if self.bilinear:
            x1 = self.conv1x1(x1)
        upsampled = self.up(x1)
        merged = torch.cat([x2, upsampled], dim=1)
        return self.conv(merged)
class Encoder(nn.Module):
    """UNet contracting path: an input ConvBlock followed by four DownBlocks.

    ``params`` must provide 'in_chns', a 5-element 'feature_chns' list,
    'class_num', 'bilinear' and a 5-element 'dropout' list.
    """

    def __init__(self, params):
        super(Encoder, self).__init__()
        self.params = params
        self.in_chns = params['in_chns']
        self.ft_chns = params['feature_chns']
        self.n_class = params['class_num']
        self.bilinear = params['bilinear']
        self.dropout = params['dropout']
        assert (len(self.ft_chns) == 5)
        chns, drop = self.ft_chns, self.dropout
        self.in_conv = ConvBlock(self.in_chns, chns[0], drop[0])
        self.down1 = DownBlock(chns[0], chns[1], drop[1])
        self.down2 = DownBlock(chns[1], chns[2], drop[2])
        self.down3 = DownBlock(chns[2], chns[3], drop[3])
        self.down4 = DownBlock(chns[3], chns[4], drop[4])

    def forward(self, x):
        """Return the five feature maps, shallowest first."""
        feat0 = self.in_conv(x)
        feat1 = self.down1(feat0)
        feat2 = self.down2(feat1)
        feat3 = self.down3(feat2)
        feat4 = self.down4(feat3)
        return [feat0, feat1, feat2, feat3, feat4]
class Decoder(nn.Module):
    """UNet expanding path: four UpBlocks consuming the encoder features,
    then a 1x1 convolution producing per-class logits.

    NOTE(review): ``self.bilinear`` is stored but the UpBlocks are built
    with their default ``bilinear=True`` — confirm whether params['bilinear']
    was meant to be forwarded (changing it would alter checkpoints).
    """

    def __init__(self, params):
        super(Decoder, self).__init__()
        self.params = params
        self.in_chns = params['in_chns']
        self.ft_chns = params['feature_chns']
        self.n_class = params['class_num']
        self.bilinear = params['bilinear']
        assert (len(self.ft_chns) == 5)
        chns = self.ft_chns
        self.up1 = UpBlock(chns[4], chns[3], chns[3], dropout_p=0.0)
        self.up2 = UpBlock(chns[3], chns[2], chns[2], dropout_p=0.0)
        self.up3 = UpBlock(chns[2], chns[1], chns[1], dropout_p=0.0)
        self.up4 = UpBlock(chns[1], chns[0], chns[0], dropout_p=0.0)
        self.out_conv = nn.Conv2d(chns[0], self.n_class,
                                  kernel_size=1, padding=0)

    def forward(self, feature):
        """``feature`` is the 5-element list produced by Encoder.forward."""
        x = self.up1(feature[4], feature[3])
        x = self.up2(x, feature[2])
        x = self.up3(x, feature[1])
        x = self.up4(x, feature[0])
        return self.out_conv(x)
class UNet(nn.Module):
    """Standard 2-D UNet (32..512 feature channels, no dropout)."""

    def __init__(self, in_chns, class_num):
        super(UNet, self).__init__()
        config = {'in_chns': in_chns,
                  'feature_chns': [32, 64, 128, 256, 512],
                  'dropout': [0, 0, 0, 0, 0],
                  'class_num': class_num,
                  'bilinear': False,
                  'acti_func': 'relu'}
        self.encoder = Encoder(config)
        self.decoder = Decoder(config)

    def forward(self, x):
        """Return per-class logits with the same spatial size as ``x``."""
        return self.decoder(self.encoder(x))
class UNetWithDrop(nn.Module):
    """UNet variant with depth-increasing encoder dropout (0.05 .. 0.5)."""

    def __init__(self, in_chns, class_num):
        super(UNetWithDrop, self).__init__()
        config = {'in_chns': in_chns,
                  'feature_chns': [32, 64, 128, 256, 512],
                  'dropout': [0.05, 0.1, 0.2, 0.3, 0.5],
                  'class_num': class_num,
                  'bilinear': False,
                  'acti_func': 'relu'}
        self.encoder = Encoder(config)
        self.decoder = Decoder(config)

    def forward(self, x):
        """Return per-class logits with the same spatial size as ``x``."""
        return self.decoder(self.encoder(x))
| 31.887097 | 85 | 0.572079 |
from __future__ import division, print_function
import numpy as np
import torch
import torch.nn as nn
from torch.distributions.uniform import Uniform
class ConvBlock(nn.Module):
    """Two 3x3 conv -> batch-norm -> ReLU stages with dropout in between.

    Spatial size is preserved (padding=1).
    """
    def __init__(self, in_channels, out_channels, dropout_p):
        super(ConvBlock, self).__init__()
        self.conv_conv = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
            nn.Dropout(dropout_p),
            nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU()
        )
    def forward(self, x):
        return self.conv_conv(x)
class DownBlock(nn.Module):
    """Halve spatial resolution with 2x2 max-pooling, then apply a ConvBlock."""
    def __init__(self, in_channels, out_channels, dropout_p):
        super(DownBlock, self).__init__()
        self.maxpool_conv = nn.Sequential(
            nn.MaxPool2d(2),
            ConvBlock(in_channels, out_channels, dropout_p)
        )
    def forward(self, x):
        return self.maxpool_conv(x)
class UpBlock(nn.Module):
    """Upsample deep features, concatenate the skip connection, refine
    with a ConvBlock. ``bilinear=True`` uses 1x1-conv + bilinear upsample;
    otherwise a transposed convolution.
    """
    def __init__(self, in_channels1, in_channels2, out_channels, dropout_p,
                 bilinear=True):
        super(UpBlock, self).__init__()
        self.bilinear = bilinear
        if bilinear:
            self.conv1x1 = nn.Conv2d(in_channels1, in_channels2, kernel_size=1)
            self.up = nn.Upsample(
                scale_factor=2, mode='bilinear', align_corners=True)
        else:
            self.up = nn.ConvTranspose2d(
                in_channels1, in_channels2, kernel_size=2, stride=2)
        self.conv = ConvBlock(in_channels2 * 2, out_channels, dropout_p)
    def forward(self, x1, x2):
        # x1: deep features to upsample; x2: skip connection at target size.
        if self.bilinear:
            x1 = self.conv1x1(x1)
        x1 = self.up(x1)
        x = torch.cat([x2, x1], dim=1)
        return self.conv(x)
class Encoder(nn.Module):
    """UNet contracting path: input ConvBlock plus four DownBlocks."""
    def __init__(self, params):
        super(Encoder, self).__init__()
        self.params = params
        self.in_chns = self.params['in_chns']
        self.ft_chns = self.params['feature_chns']
        self.n_class = self.params['class_num']
        self.bilinear = self.params['bilinear']
        self.dropout = self.params['dropout']
        assert (len(self.ft_chns) == 5)
        self.in_conv = ConvBlock(
            self.in_chns, self.ft_chns[0], self.dropout[0])
        self.down1 = DownBlock(
            self.ft_chns[0], self.ft_chns[1], self.dropout[1])
        self.down2 = DownBlock(
            self.ft_chns[1], self.ft_chns[2], self.dropout[2])
        self.down3 = DownBlock(
            self.ft_chns[2], self.ft_chns[3], self.dropout[3])
        self.down4 = DownBlock(
            self.ft_chns[3], self.ft_chns[4], self.dropout[4])
    def forward(self, x):
        """Return the five feature maps, shallowest first."""
        x0 = self.in_conv(x)
        x1 = self.down1(x0)
        x2 = self.down2(x1)
        x3 = self.down3(x2)
        x4 = self.down4(x3)
        return [x0, x1, x2, x3, x4]
class Decoder(nn.Module):
    """UNet expanding path: four UpBlocks then a 1x1 conv to class logits.

    NOTE(review): ``self.bilinear`` is stored but the UpBlocks use their
    default ``bilinear=True`` — confirm this is intentional.
    """
    def __init__(self, params):
        super(Decoder, self).__init__()
        self.params = params
        self.in_chns = self.params['in_chns']
        self.ft_chns = self.params['feature_chns']
        self.n_class = self.params['class_num']
        self.bilinear = self.params['bilinear']
        assert (len(self.ft_chns) == 5)
        self.up1 = UpBlock(
            self.ft_chns[4], self.ft_chns[3], self.ft_chns[3], dropout_p=0.0)
        self.up2 = UpBlock(
            self.ft_chns[3], self.ft_chns[2], self.ft_chns[2], dropout_p=0.0)
        self.up3 = UpBlock(
            self.ft_chns[2], self.ft_chns[1], self.ft_chns[1], dropout_p=0.0)
        self.up4 = UpBlock(
            self.ft_chns[1], self.ft_chns[0], self.ft_chns[0], dropout_p=0.0)
        self.out_conv = nn.Conv2d(self.ft_chns[0], self.n_class,
                                  kernel_size=1, padding=0)
    def forward(self, feature):
        """``feature`` is the 5-element list from Encoder.forward."""
        x0 = feature[0]
        x1 = feature[1]
        x2 = feature[2]
        x3 = feature[3]
        x4 = feature[4]
        x = self.up1(x4, x3)
        x = self.up2(x, x2)
        x = self.up3(x, x1)
        x = self.up4(x, x0)
        output = self.out_conv(x)
        return output
class UNet(nn.Module):
    """Standard 2-D UNet (32..512 feature channels, no dropout)."""
    def __init__(self, in_chns, class_num):
        super(UNet, self).__init__()
        params = {'in_chns': in_chns,
                  'feature_chns': [32, 64, 128, 256, 512],
                  'dropout': [0, 0, 0, 0, 0],
                  'class_num': class_num,
                  'bilinear': False,
                  'acti_func': 'relu'}
        self.encoder = Encoder(params)
        self.decoder = Decoder(params)
    def forward(self, x):
        """Return per-class logits with the same spatial size as ``x``."""
        feature = self.encoder(x)
        output = self.decoder(feature)
        return output
class UNetWithDrop(nn.Module):
    """UNet variant with depth-increasing encoder dropout (0.05 .. 0.5)."""
    def __init__(self, in_chns, class_num):
        super(UNetWithDrop, self).__init__()
        params = {'in_chns': in_chns,
                  'feature_chns': [32, 64, 128, 256, 512],
                  'dropout': [0.05, 0.1, 0.2, 0.3, 0.5],
                  'class_num': class_num,
                  'bilinear': False,
                  'acti_func': 'relu'}
        self.encoder = Encoder(params)
        self.decoder = Decoder(params)
    def forward(self, x):
        """Return per-class logits with the same spatial size as ``x``."""
        feature = self.encoder(x)
        output = self.decoder(feature)
        return output
| true | true |
f72bca982790961f6bbe8bc31f1a60032438e1a2 | 154 | py | Python | chat_assistant/chat_assistant_app/urls.py | mrhegemon/Rasa_zero_rpc_XR_bot | a468cc1f2b1a4e935ce18e97dcb7a11070bbea0b | [
"MIT"
] | 1 | 2021-06-21T10:44:51.000Z | 2021-06-21T10:44:51.000Z | chat_assistant/chat_assistant_app/urls.py | mrhegemon/Rasa_zero_rpc_XR_bot | a468cc1f2b1a4e935ce18e97dcb7a11070bbea0b | [
"MIT"
] | null | null | null | chat_assistant/chat_assistant_app/urls.py | mrhegemon/Rasa_zero_rpc_XR_bot | a468cc1f2b1a4e935ce18e97dcb7a11070bbea0b | [
"MIT"
] | 1 | 2021-06-07T23:09:30.000Z | 2021-06-07T23:09:30.000Z | from django.conf.urls import url
from . import views
urlpatterns = [
url('', views.chat, name='chat'),
url('^chat/', views.chat, name='chat'),
]
| 19.25 | 43 | 0.62987 | from django.conf.urls import url
from . import views
urlpatterns = [
url('', views.chat, name='chat'),
url('^chat/', views.chat, name='chat'),
]
| true | true |
f72bcb05e1a89751517adfac4596a0dd60fe06f8 | 2,516 | py | Python | actions/macro/building_positioning.py | drakonnan1st/JackBot | 345df784098cb9eb055b3901fe7455807c58a4e1 | [
"MIT"
] | null | null | null | actions/macro/building_positioning.py | drakonnan1st/JackBot | 345df784098cb9eb055b3901fe7455807c58a4e1 | [
"MIT"
] | null | null | null | actions/macro/building_positioning.py | drakonnan1st/JackBot | 345df784098cb9eb055b3901fe7455807c58a4e1 | [
"MIT"
] | null | null | null | """Everything related to building positioning goes here"""
from sc2.constants import EVOLUTIONCHAMBER, ENGINEERINGBAY
from sc2.data import ACTION_RESULT
from sc2.position import Point2
class BuildingPositioning:
    """Finds and caches building positions tucked behind the mineral line."""
    async def prepare_building_positions(self, center):
        """Collect viable building spots behind the mineral line of `center`.

        Intended to run when a hatchery is built; accepted points are
        appended to self.building_positions.
        """
        mineral_field = self.state.mineral_field
        if mineral_field:
            close_points = range(-11, 12)
            center_position = center.position
            add_positions = self.building_positions.append
            # Candidate points on a ring 9-11 units from the hatchery
            # (81 <= x^2 + y^2 <= 121) that lie ~3 units (+/- 0.5) from the
            # nearest mineral patch within 10 of the hatchery, i.e. just
            # behind the mineral line.
            viable_points = [
                point
                for point in (
                    Point2((x + center_position.x, y + center_position.y))
                    for x in close_points
                    for y in close_points
                    if 121 >= x * x + y * y >= 81
                )
                if abs(point.distance_to(mineral_field.closer_than(10, center).closest_to(point)) - 3) < 0.5
            ]
            # Probe each candidate with both an engineering bay and an
            # evolution chamber placement query; keep only points where both
            # placements would succeed.
            e_bay_ability = self.game_data.units[ENGINEERINGBAY.value].creation_ability
            e_bay_mask = await self.client.query_building_placement(e_bay_ability, viable_points)
            evo_ability = self.game_data.units[EVOLUTIONCHAMBER.value].creation_ability
            evo_mask = await self.client.query_building_placement(evo_ability, viable_points)
            viable_points = [
                point
                for i, point in enumerate(viable_points)
                if e_bay_mask[i] == ACTION_RESULT.Success and evo_mask[i] == ACTION_RESULT.Success
            ]
            # Accept a point only if it is at least 3 units away on some axis
            # from every position already stored, so footprints don't overlap.
            for point in viable_points:
                if self.building_positions:
                    if all(
                        abs(already_found.x - point.x) >= 3 or abs(already_found.y - point.y) >= 3
                        for already_found in self.building_positions
                    ):
                        add_positions(point)
                else:
                    add_positions(point)
    async def get_production_position(self):
        """Return the first cached position still placeable (probed with an
        evolution chamber), or None when no cached position is free."""
        if self.building_positions:
            for building_position in self.building_positions:
                if await self.can_place(EVOLUTIONCHAMBER, building_position):
                    return building_position
        return None
| 45.745455 | 114 | 0.604134 | from sc2.constants import EVOLUTIONCHAMBER, ENGINEERINGBAY
from sc2.data import ACTION_RESULT
from sc2.position import Point2
class BuildingPositioning:
async def prepare_building_positions(self, center):
mineral_field = self.state.mineral_field
if mineral_field:
close_points = range(-11, 12)
center_position = center.position
add_positions = self.building_positions.append
viable_points = [
point
for point in (
Point2((x + center_position.x, y + center_position.y))
for x in close_points
for y in close_points
if 121 >= x * x + y * y >= 81
)
if abs(point.distance_to(mineral_field.closer_than(10, center).closest_to(point)) - 3) < 0.5
]
e_bay_ability = self.game_data.units[ENGINEERINGBAY.value].creation_ability
e_bay_mask = await self.client.query_building_placement(e_bay_ability, viable_points)
evo_ability = self.game_data.units[EVOLUTIONCHAMBER.value].creation_ability
evo_mask = await self.client.query_building_placement(evo_ability, viable_points)
viable_points = [
point
for i, point in enumerate(viable_points)
if e_bay_mask[i] == ACTION_RESULT.Success and evo_mask[i] == ACTION_RESULT.Success
]
for point in viable_points:
if self.building_positions:
if all(
abs(already_found.x - point.x) >= 3 or abs(already_found.y - point.y) >= 3
for already_found in self.building_positions
):
add_positions(point)
else:
add_positions(point)
async def get_production_position(self):
if self.building_positions:
for building_position in self.building_positions:
if await self.can_place(EVOLUTIONCHAMBER, building_position):
return building_position
return None
| true | true |
f72bcbbdf799538cf2dba7ece50cc212bc7de632 | 1,341 | py | Python | 2020/day03/toboggan_trajectory.py | rycmak/advent-of-code | 2a3289516f4c1d0bc1d24a38d495a93edcb19e29 | [
"MIT"
] | 1 | 2021-03-03T01:40:09.000Z | 2021-03-03T01:40:09.000Z | 2020/day03/toboggan_trajectory.py | rycmak/advent-of-code | 2a3289516f4c1d0bc1d24a38d495a93edcb19e29 | [
"MIT"
] | null | null | null | 2020/day03/toboggan_trajectory.py | rycmak/advent-of-code | 2a3289516f4c1d0bc1d24a38d495a93edcb19e29 | [
"MIT"
] | null | null | null | # Pseudo code:
# assume original map is narrow (has more rows than columns)
# transform map to array
# no. of steps downwards = no. of rows
# no. of map copies = ceil((no. of steps downwards - 1) * 3 / no. of columns)
# start at (i, j) = (0, 0)
# move across to (i + 3, j + 1)
# if element == '#', increment num_trees
# Let's try to do this without using numpy ;-p
# NB: If using numpy, could make use of concatenate, hstack, etc.
# to stack (repeat) copies of original map to the right.
# But without numpy, we'll try to use zip instead...
file = open("input.txt", "r")
map_original = [] # will be a 2D array containing original map
num_rows = 0
for line in file:
num_rows += 1
map_original.append(list(line.strip()))
map_full = map_original # map_full will be a 2D array containing full (repeated) map
# number of map copies needed horizontally
num_copies = int((num_rows - 1) * 3 / len(map_original[0])) + 1 # if using numpy, use np.ceil instead of +1
for i in range(num_copies):
# append map_full with copy of map_original
map_full = [(map_full + map_original) for map_full, map_original in zip(map_full, map_original)]
# start at position (0, 0)
column = 0
row = 0
num_trees = 0
while row < (num_rows - 1):
column += 3
row += 1
if map_full[row][column] == "#":
num_trees += 1
print("num_trees: ", num_trees)
| 31.928571 | 108 | 0.681581 |
# NB: If using numpy, could make use of concatenate, hstack, etc.
# to stack (repeat) copies of original map to the right.
# But without numpy, we'll try to use zip instead...
file = open("input.txt", "r")
map_original = []
num_rows = 0
for line in file:
num_rows += 1
map_original.append(list(line.strip()))
map_full = map_original
num_copies = int((num_rows - 1) * 3 / len(map_original[0])) + 1
for i in range(num_copies):
map_full = [(map_full + map_original) for map_full, map_original in zip(map_full, map_original)]
column = 0
row = 0
num_trees = 0
while row < (num_rows - 1):
column += 3
row += 1
if map_full[row][column] == "#":
num_trees += 1
print("num_trees: ", num_trees)
| true | true |
f72bcbc28f844d28924574ebaa52197a6519cc46 | 380 | py | Python | api.py | BabakShah/DS-SafetyPrediction | bc4c99512d04fa73994616584e3f4ab78c3e979e | [
"MIT"
] | null | null | null | api.py | BabakShah/DS-SafetyPrediction | bc4c99512d04fa73994616584e3f4ab78c3e979e | [
"MIT"
] | null | null | null | api.py | BabakShah/DS-SafetyPrediction | bc4c99512d04fa73994616584e3f4ab78c3e979e | [
"MIT"
] | 2 | 2021-06-29T17:28:03.000Z | 2022-02-26T08:41:35.000Z | import pickle
import numpy as np
# Model artifacts are deserialized once at import time, so importing this
# module fails fast if './xgboost.pkl' or './scaler.pkl' is missing.
# SECURITY: pickle.load executes arbitrary code embedded in the file --
# only load these artifacts from a trusted source.
# NOTE(review): the file objects passed to pickle.load are never closed.
xgboost = pickle.load(open('./xgboost.pkl', 'rb'))
scaler = pickle.load(open('./scaler.pkl', 'rb'))
def transform_input(input):
    """Scale one raw feature vector into model space (returns a 2-D array)."""
    return scaler.transform([input])
def make_hard_prediction(input):
    """Return the model's class prediction for a single feature vector."""
    return xgboost.predict(transform_input(input))
def make_soft_prediction(input):
    """Return the predicted probability of class 1 for a single feature vector."""
    return xgboost.predict_proba(transform_input(input))[0,1]
| 23.75 | 61 | 0.747368 | import pickle
import numpy as np
xgboost = pickle.load(open('./xgboost.pkl', 'rb'))
scaler = pickle.load(open('./scaler.pkl', 'rb'))
def transform_input(input):
return scaler.transform([input])
def make_hard_prediction(input):
return xgboost.predict(transform_input(input))
def make_soft_prediction(input):
return xgboost.predict_proba(transform_input(input))[0,1]
| true | true |
f72bcd0625e2740abc5c1c5b36b3afdb5cde844c | 6,402 | py | Python | train_dalle.py | Atica57/DALLE-pytorch | 4fa108271aeb1972fcb118390ec15b656f2c328a | [
"MIT"
] | 1 | 2021-03-08T12:26:49.000Z | 2021-03-08T12:26:49.000Z | train_dalle.py | Atica57/DALLE-pytorch | 4fa108271aeb1972fcb118390ec15b656f2c328a | [
"MIT"
] | null | null | null | train_dalle.py | Atica57/DALLE-pytorch | 4fa108271aeb1972fcb118390ec15b656f2c328a | [
"MIT"
] | null | null | null | import argparse
from random import choice
from pathlib import Path
# torch
import torch
from torch.optim import Adam
from torch.nn.utils import clip_grad_norm_
# vision imports
from PIL import Image
from torchvision import transforms as T
from torch.utils.data import DataLoader, Dataset
from torchvision.datasets import ImageFolder
from torchvision.utils import make_grid, save_image
# dalle related classes and utils
from dalle_pytorch import OpenAIDiscreteVAE, DiscreteVAE, DALLE
from dalle_pytorch.simple_tokenizer import tokenize, tokenizer, VOCAB_SIZE
# argument parsing
# Command-line interface: exactly one of --vae_path / --dalle_path may be
# given (mutually exclusive); --image_text_folder is always required.
parser = argparse.ArgumentParser()

group = parser.add_mutually_exclusive_group(required=False)
group.add_argument('--vae_path', type=str,
                   help='path to your trained discrete VAE')
group.add_argument('--dalle_path', type=str,
                   help='path to your partially trained DALL-E')

parser.add_argument('--image_text_folder', type=str, required=True,
                    help='path to your folder of images and text for learning the DALL-E')

args = parser.parse_args()
# helpers
def exists(val):
    """Return True when val is not None, i.e. the value was supplied."""
    return not (val is None)
# constants
VAE_PATH = args.vae_path
DALLE_PATH = args.dalle_path
RESUME = exists(DALLE_PATH)
# training hyperparameters
EPOCHS = 20
BATCH_SIZE = 4
LEARNING_RATE = 3e-4
GRAD_CLIP_NORM = 0.5
# transformer architecture hyperparameters
MODEL_DIM = 512
TEXT_SEQ_LEN = 256
DEPTH = 2
HEADS = 4
DIM_HEAD = 64
# reconstitute vae
if RESUME:
    # Resuming a partially trained DALL-E: both the VAE and the DALL-E
    # hyperparameters come out of the checkpoint itself.
    dalle_path = Path(DALLE_PATH)
    assert dalle_path.exists(), 'DALL-E model file does not exist'
    loaded_obj = torch.load(str(dalle_path))
    dalle_params, vae_params, weights = loaded_obj['hparams'], loaded_obj['vae_params'], loaded_obj['weights']
    vae = DiscreteVAE(**vae_params)
    dalle_params = dict(
        vae = vae,
        **dalle_params
    )
    IMAGE_SIZE = vae_params['image_size']
else:
    if exists(VAE_PATH):
        # Fresh DALL-E on top of a user-trained discrete VAE checkpoint.
        vae_path = Path(VAE_PATH)
        assert vae_path.exists(), 'VAE model file does not exist'
        loaded_obj = torch.load(str(vae_path))
        vae_params, weights = loaded_obj['hparams'], loaded_obj['weights']
        vae = DiscreteVAE(**vae_params)
        vae.load_state_dict(weights)
    else:
        # No VAE given: fall back to OpenAI's pretrained discrete VAE.
        print('using OpenAIs pretrained VAE for encoding images to tokens')
        # no custom VAE hyperparameters to carry into checkpoints
        vae_params = None
        vae = OpenAIDiscreteVAE()
    IMAGE_SIZE = vae.image_size
    dalle_params = dict(
        vae = vae,
        num_text_tokens = VOCAB_SIZE,
        text_seq_len = TEXT_SEQ_LEN,
        dim = MODEL_DIM,
        depth = DEPTH,
        heads = HEADS,
        dim_head = DIM_HEAD
    )
# helpers
def save_model(path):
    """Serialize the DALL-E checkpoint (hyperparams, VAE params, weights) to path."""
    checkpoint = {
        'hparams': dalle_params,
        'vae_params': vae_params,
        'weights': dalle.state_dict()
    }
    torch.save(checkpoint, path)
# dataset loading
class TextImageDataset(Dataset):
    """Pairs of (caption, image) built from a folder of .txt + image files.

    Files are matched by stem: only stems that have both a text file and an
    image (.png/.jpg/.jpeg) are kept. Each __getitem__ returns
    (tokenized caption, image tensor, padding mask).
    """
    def __init__(self, folder, text_len = 256, image_size = 128):
        super().__init__()
        path = Path(folder)
        text_files = [*path.glob('**/*.txt')]
        image_files = [
            *path.glob('**/*.png'),
            *path.glob('**/*.jpg'),
            *path.glob('**/*.jpeg')
        ]
        text_files = {t.stem: t for t in text_files}
        image_files = {i.stem: i for i in image_files}
        # keep only stems present in both maps
        keys = (image_files.keys() & text_files.keys())
        self.keys = list(keys)
        self.text_files = {k: v for k, v in text_files.items() if k in keys}
        self.image_files = {k: v for k, v in image_files.items() if k in keys}
        # NOTE(review): attribute name has a typo (tranform) but is kept,
        # since renaming a public attribute could break external users.
        self.image_tranform = T.Compose([
            # force 3-channel RGB before cropping
            T.Lambda(lambda img: img.convert('RGB') if img.mode != 'RGB' else img),
            T.CenterCrop(image_size),
            T.Resize(image_size),
            T.ToTensor(),
            T.Lambda(lambda t: t.expand(3, -1, -1)),
            T.Normalize((0.5,) * 3, (0.5,) * 3)
        ])
    def __len__(self):
        """Number of matched text/image pairs."""
        return len(self.keys)
    def __getitem__(self, ind):
        """Return (tokenized_text, image_tensor, mask) for pair `ind`.

        One non-empty line of the text file is chosen at random as the
        caption; mask marks non-padding token positions.
        """
        key = self.keys[ind]
        text_file = self.text_files[key]
        image_file = self.image_files[key]
        image = Image.open(image_file)
        descriptions = text_file.read_text().split('\n')
        descriptions = list(filter(lambda t: len(t) > 0, descriptions))
        # NOTE(review): raises IndexError if the text file has only empty
        # lines -- confirm inputs are guaranteed non-empty.
        description = choice(descriptions)
        tokenized_text = tokenize(description).squeeze(0)
        # True at real-token positions, False at padding (id 0)
        mask = tokenized_text != 0
        image_tensor = self.image_tranform(image)
        return tokenized_text, image_tensor, mask
# create dataset and dataloader
ds = TextImageDataset(
    args.image_text_folder,
    text_len = TEXT_SEQ_LEN,
    image_size = IMAGE_SIZE
)
assert len(ds) > 0, 'dataset is empty'
print(f'{len(ds)} image-text pairs found for training')
# drop_last keeps every batch at the full BATCH_SIZE
dl = DataLoader(ds, batch_size = BATCH_SIZE, shuffle = True, drop_last = True)
# initialize DALL-E
dalle = DALLE(**dalle_params).cuda()
if RESUME:
    dalle.load_state_dict(weights)
# optimizer
opt = Adam(dalle.parameters(), lr = LEARNING_RATE)
# experiment tracker
import wandb
# BUG FIX: the original assigned wandb.config.depth/heads/dim_head *before*
# calling wandb.init(); writes to the module-level config proxy prior to
# init are not reliably attached to the run. Pass them to init() instead.
wandb.init(
    project = 'dalle_train_transformer',
    resume = RESUME,
    config = dict(depth = DEPTH, heads = HEADS, dim_head = DIM_HEAD),
)
# training: standard per-batch SGD loop with gradient clipping; logs to
# wandb every 10 steps and samples/checkpoints every 100 steps.
for epoch in range(EPOCHS):
    for i, (text, images, mask) in enumerate(dl):
        text, images, mask = map(lambda t: t.cuda(), (text, images, mask))
        loss = dalle(text, images, mask = mask, return_loss = True)
        loss.backward()
        # clip gradient norm to stabilize training
        clip_grad_norm_(dalle.parameters(), GRAD_CLIP_NORM)
        opt.step()
        opt.zero_grad()
        log = {}
        if i % 10 == 0:
            print(epoch, i, f'loss - {loss.item()}')
            log = {
                **log,
                'epoch': epoch,
                'iter': i,
                'loss': loss.item()
            }
        if i % 100 == 0:
            # decode the first caption of the batch (dropping padding id 0)
            # and generate a sample image from it
            sample_text = text[:1]
            token_list = sample_text.masked_select(sample_text != 0).tolist()
            decoded_text = tokenizer.decode(token_list)
            image = dalle.generate_images(
                text[:1],
                mask = mask[:1],
                filter_thres = 0.9 # topk sampling at 0.9
            )
            # checkpoint alongside the sampled image
            save_model(f'./dalle.pt')
            wandb.save(f'./dalle.pt')
            log = {
                **log,
                'image': wandb.Image(image, caption = decoded_text)
            }
        wandb.log(log)
save_model(f'./dalle-final.pt')
wandb.save('./dalle-final.pt')
wandb.finish()
| 24.813953 | 110 | 0.617619 | import argparse
from random import choice
from pathlib import Path
import torch
from torch.optim import Adam
from torch.nn.utils import clip_grad_norm_
from PIL import Image
from torchvision import transforms as T
from torch.utils.data import DataLoader, Dataset
from torchvision.datasets import ImageFolder
from torchvision.utils import make_grid, save_image
from dalle_pytorch import OpenAIDiscreteVAE, DiscreteVAE, DALLE
from dalle_pytorch.simple_tokenizer import tokenize, tokenizer, VOCAB_SIZE
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required = False)
group.add_argument('--vae_path', type = str,
help='path to your trained discrete VAE')
group.add_argument('--dalle_path', type = str,
help='path to your partially trained DALL-E')
parser.add_argument('--image_text_folder', type = str, required = True,
help='path to your folder of images and text for learning the DALL-E')
args = parser.parse_args()
def exists(val):
return val is not None
VAE_PATH = args.vae_path
DALLE_PATH = args.dalle_path
RESUME = exists(DALLE_PATH)
EPOCHS = 20
BATCH_SIZE = 4
LEARNING_RATE = 3e-4
GRAD_CLIP_NORM = 0.5
MODEL_DIM = 512
TEXT_SEQ_LEN = 256
DEPTH = 2
HEADS = 4
DIM_HEAD = 64
if RESUME:
dalle_path = Path(DALLE_PATH)
assert dalle_path.exists(), 'DALL-E model file does not exist'
loaded_obj = torch.load(str(dalle_path))
dalle_params, vae_params, weights = loaded_obj['hparams'], loaded_obj['vae_params'], loaded_obj['weights']
vae = DiscreteVAE(**vae_params)
dalle_params = dict(
vae = vae,
**dalle_params
)
IMAGE_SIZE = vae_params['image_size']
else:
if exists(VAE_PATH):
vae_path = Path(VAE_PATH)
assert vae_path.exists(), 'VAE model file does not exist'
loaded_obj = torch.load(str(vae_path))
vae_params, weights = loaded_obj['hparams'], loaded_obj['weights']
vae = DiscreteVAE(**vae_params)
vae.load_state_dict(weights)
else:
print('using OpenAIs pretrained VAE for encoding images to tokens')
vae_params = None
vae = OpenAIDiscreteVAE()
IMAGE_SIZE = vae.image_size
dalle_params = dict(
vae = vae,
num_text_tokens = VOCAB_SIZE,
text_seq_len = TEXT_SEQ_LEN,
dim = MODEL_DIM,
depth = DEPTH,
heads = HEADS,
dim_head = DIM_HEAD
)
def save_model(path):
save_obj = {
'hparams': dalle_params,
'vae_params': vae_params,
'weights': dalle.state_dict()
}
torch.save(save_obj, path)
class TextImageDataset(Dataset):
def __init__(self, folder, text_len = 256, image_size = 128):
super().__init__()
path = Path(folder)
text_files = [*path.glob('**/*.txt')]
image_files = [
*path.glob('**/*.png'),
*path.glob('**/*.jpg'),
*path.glob('**/*.jpeg')
]
text_files = {t.stem: t for t in text_files}
image_files = {i.stem: i for i in image_files}
keys = (image_files.keys() & text_files.keys())
self.keys = list(keys)
self.text_files = {k: v for k, v in text_files.items() if k in keys}
self.image_files = {k: v for k, v in image_files.items() if k in keys}
self.image_tranform = T.Compose([
T.Lambda(lambda img: img.convert('RGB') if img.mode != 'RGB' else img),
T.CenterCrop(image_size),
T.Resize(image_size),
T.ToTensor(),
T.Lambda(lambda t: t.expand(3, -1, -1)),
T.Normalize((0.5,) * 3, (0.5,) * 3)
])
def __len__(self):
return len(self.keys)
def __getitem__(self, ind):
key = self.keys[ind]
text_file = self.text_files[key]
image_file = self.image_files[key]
image = Image.open(image_file)
descriptions = text_file.read_text().split('\n')
descriptions = list(filter(lambda t: len(t) > 0, descriptions))
description = choice(descriptions)
tokenized_text = tokenize(description).squeeze(0)
mask = tokenized_text != 0
image_tensor = self.image_tranform(image)
return tokenized_text, image_tensor, mask
ds = TextImageDataset(
args.image_text_folder,
text_len = TEXT_SEQ_LEN,
image_size = IMAGE_SIZE
)
assert len(ds) > 0, 'dataset is empty'
print(f'{len(ds)} image-text pairs found for training')
dl = DataLoader(ds, batch_size = BATCH_SIZE, shuffle = True, drop_last = True)
dalle = DALLE(**dalle_params).cuda()
if RESUME:
dalle.load_state_dict(weights)
opt = Adam(dalle.parameters(), lr = LEARNING_RATE)
import wandb
wandb.config.depth = DEPTH
wandb.config.heads = HEADS
wandb.config.dim_head = DIM_HEAD
wandb.init(project = 'dalle_train_transformer', resume = RESUME)
for epoch in range(EPOCHS):
for i, (text, images, mask) in enumerate(dl):
text, images, mask = map(lambda t: t.cuda(), (text, images, mask))
loss = dalle(text, images, mask = mask, return_loss = True)
loss.backward()
clip_grad_norm_(dalle.parameters(), GRAD_CLIP_NORM)
opt.step()
opt.zero_grad()
log = {}
if i % 10 == 0:
print(epoch, i, f'loss - {loss.item()}')
log = {
**log,
'epoch': epoch,
'iter': i,
'loss': loss.item()
}
if i % 100 == 0:
sample_text = text[:1]
token_list = sample_text.masked_select(sample_text != 0).tolist()
decoded_text = tokenizer.decode(token_list)
image = dalle.generate_images(
text[:1],
mask = mask[:1],
filter_thres = 0.9
)
save_model(f'./dalle.pt')
wandb.save(f'./dalle.pt')
log = {
**log,
'image': wandb.Image(image, caption = decoded_text)
}
wandb.log(log)
save_model(f'./dalle-final.pt')
wandb.save('./dalle-final.pt')
wandb.finish()
| true | true |
f72bcdcacf211151e2476c6876771adc77e8a368 | 930 | py | Python | CartPole/_CartPole_mathematical_helpers.py | jhuebotter/CartpoleSNNdemo | d18a85cbc45bff48295c46c9cd8c9fc00192318c | [
"MIT"
] | null | null | null | CartPole/_CartPole_mathematical_helpers.py | jhuebotter/CartpoleSNNdemo | d18a85cbc45bff48295c46c9cd8c9fc00192318c | [
"MIT"
] | null | null | null | CartPole/_CartPole_mathematical_helpers.py | jhuebotter/CartpoleSNNdemo | d18a85cbc45bff48295c46c9cd8c9fc00192318c | [
"MIT"
] | null | null | null | """
Small general mathematical functions.
This file was necessary to make CartPole module self-contained.
"""
from math import fmod
import numpy as np
# Wraps the angle into range [-π, π]
def wrap_angle_rad(angle: float) -> float:
    """Wrap a scalar angle (radians) into the range [-pi, pi]."""
    # fmod keeps the sign of `angle`, so the remainder lies in (-2pi, 2pi)
    wrapped = fmod(angle, 2 * np.pi)
    if wrapped > np.pi:
        wrapped -= 2 * np.pi
    elif wrapped < -np.pi:
        wrapped += 2 * np.pi
    return wrapped
def wrap_angle_rad_inplace(angle: np.ndarray) -> None:
    """Wrap every element of `angle` (radians) into [-pi, pi], in place."""
    wrapped = np.fmod(angle, 2 * np.pi)  # sign-preserving remainder
    wrapped[wrapped < -np.pi] += 2 * np.pi
    wrapped[wrapped > np.pi] -= 2 * np.pi
    angle[:] = wrapped
def conditional_decorator(dec, cond):
    """Return a decorator that applies `dec` only when `cond` is truthy.

    When `cond` is falsy the decorated function is returned unchanged.
    """
    def decorator(func):
        if cond:
            return dec(func)
        return func
    return decorator
| 25.833333 | 66 | 0.649462 |
from math import fmod
import numpy as np
def wrap_angle_rad(angle: float) -> float:
Modulo = fmod(angle, 2 * np.pi)
if Modulo < -np.pi:
angle = Modulo + 2 * np.pi
elif Modulo > np.pi:
angle = Modulo - 2 * np.pi
else:
angle = Modulo
return angle
def wrap_angle_rad_inplace(angle: np.ndarray) -> None:
Modulo = np.fmod(angle, 2 * np.pi)
neg_wrap, pos_wrap = Modulo < -np.pi, Modulo > np.pi
angle[neg_wrap] = Modulo[neg_wrap] + 2 * np.pi
angle[pos_wrap] = Modulo[pos_wrap] - 2 * np.pi
angle[~(neg_wrap | pos_wrap)] = Modulo[~(neg_wrap | pos_wrap)]
def conditional_decorator(dec, cond):
def decorator(func):
return dec(func) if cond else func
return decorator
| true | true |
f72bce4067eef5d32a5ccf8b7a440fc4ed4d0c42 | 1,114 | py | Python | dadmatools/models/flair/parser/utils/vocab.py | njzr/DadmaTools | 64ff407d5d818d5a9216340cccf0d1cc909d3b1b | [
"Apache-2.0"
] | 25 | 2021-12-01T15:19:36.000Z | 2022-03-12T12:50:28.000Z | dadmatools/models/flair/parser/utils/vocab.py | ebad84/DadmaTools | b26ad8aa834f642d49bd120bd7cf1fdf40741be1 | [
"Apache-2.0"
] | 3 | 2021-12-14T06:34:52.000Z | 2022-02-17T08:23:20.000Z | dadmatools/models/flair/parser/utils/vocab.py | ebad84/DadmaTools | b26ad8aa834f642d49bd120bd7cf1fdf40741be1 | [
"Apache-2.0"
] | 6 | 2021-10-12T13:44:17.000Z | 2022-03-07T13:54:17.000Z | # -*- coding: utf-8 -*-
from collections.abc import Iterable
from dadmatools.models.flair.parser.utils.common import unk
class Vocab(object):
    """Bidirectional token <-> integer-id vocabulary.

    Built from a token-frequency counter; tokens occurring fewer than
    `min_freq` times are dropped. Optional special tokens occupy the
    lowest ids. `itos` maps id -> token, `stoi` maps token -> id.
    """
    def __init__(self, counter, min_freq=1, specials=None):
        """Build the vocabulary.

        counter: mapping of token -> frequency.
        min_freq: minimum frequency for a token to be kept.
        specials: optional sequence of special tokens placed first.
        """
        # BUG FIX: the original used a mutable default (specials=[]) and
        # aliased it directly as self.itos; extend() then mutated that shared
        # default list, so tokens leaked between separate Vocab instances.
        # Copy into a fresh list instead.
        self.itos = list(specials) if specials is not None else []
        self.stoi = {token: i for i, token in enumerate(self.itos)}
        self.extend([token for token, freq in counter.items()
                     if freq >= min_freq])
        # id used for out-of-vocabulary tokens; falls back to 0 when the
        # unk token itself is absent
        self.unk_index = self.stoi.get(unk, 0)
        self.n_init = len(self)
    def __len__(self):
        """Number of tokens in the vocabulary."""
        return len(self.itos)
    def __getitem__(self, key):
        """Return the integer id of `key` (raises KeyError if absent)."""
        return self.stoi[key]
    def __contains__(self, token):
        return token in self.stoi
    def token2id(self, sequence):
        """Map a sequence of tokens to ids, using unk_index for unknowns."""
        return [self.stoi.get(token, self.unk_index) for token in sequence]
    def id2token(self, ids):
        """Map one id, or an iterable of ids, back to token(s)."""
        if isinstance(ids, Iterable):
            return [self.itos[i] for i in ids]
        else:
            return self.itos[ids]
    def extend(self, tokens):
        """Append new tokens (deduplicated, sorted) and rebuild stoi."""
        self.itos.extend(sorted(set(tokens).difference(self.stoi)))
        self.stoi = {token: i for i, token in enumerate(self.itos)}
| 28.564103 | 75 | 0.61939 |
from collections.abc import Iterable
from dadmatools.models.flair.parser.utils.common import unk
class Vocab(object):
def __init__(self, counter, min_freq=1, specials=[]):
self.itos = specials
self.stoi = {token: i for i, token in enumerate(self.itos)}
self.extend([token for token, freq in counter.items()
if freq >= min_freq])
self.unk_index = self.stoi.get(unk, 0)
self.n_init = len(self)
def __len__(self):
return len(self.itos)
def __getitem__(self, key):
return self.stoi[key]
def __contains__(self, token):
return token in self.stoi
def token2id(self, sequence):
return [self.stoi.get(token, self.unk_index) for token in sequence]
def id2token(self, ids):
if isinstance(ids, Iterable):
return [self.itos[i] for i in ids]
else:
return self.itos[ids]
def extend(self, tokens):
self.itos.extend(sorted(set(tokens).difference(self.stoi)))
self.stoi = {token: i for i, token in enumerate(self.itos)}
| true | true |
f72bd002b56ea30cddba78b9525df205cb9088df | 7,561 | py | Python | tests/lint/check_file_type.py | PhilippvK/tvm | e7748aac40bd4c263882323393ea8896837614a9 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | null | null | null | tests/lint/check_file_type.py | PhilippvK/tvm | e7748aac40bd4c263882323393ea8896837614a9 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | null | null | null | tests/lint/check_file_type.py | PhilippvK/tvm | e7748aac40bd4c263882323393ea8896837614a9 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Helper tool to check file types that are allowed to checkin."""
import os
import sys
import subprocess
# List of file types we allow (matched case-sensitively against the text
# after the final '.'; extensionless names fall through to the lists below)
ALLOW_EXTENSION = {
    # source code
    "cc",
    "c",
    "h",
    "s",
    "rs",
    "m",
    "mm",
    "g4",
    "gradle",
    "js",
    "tcl",
    "scala",
    "java",
    "go",
    "ts",
    "sh",
    "py",
    "pyi",
    "pxi",
    "pyd",
    "pyx",
    "cu",
    "bat",
    # relay text format
    "rly",
    # configurations
    "mk",
    "in",
    "cmake",
    "xml",
    "toml",
    "yml",
    "yaml",
    "json",
    # docs
    "txt",
    "md",
    "rst",
    # sgx
    "edl",
    "lds",
    # ios
    "pbxproj",
    "plist",
    "xcworkspacedata",
    "storyboard",
    # hw/chisel
    "sbt",
    "properties",
    "v",
    "sdc",
    # generated parser
    "interp",
    "tokens",
    # interface definition
    "idl",
    # opencl file
    "cl",
    # zephyr config file
    "conf",
    # linker scripts
    "ld",
}
# List of file names allowed (matched against the basename only)
ALLOW_FILE_NAME = {
    ".gitignore",
    ".eslintignore",
    ".gitattributes",
    "README",
    "Makefile",
    "Doxyfile",
    "pylintrc",
    "rat-excludes",
    "log4j.properties",
    ".clang-format",
    ".gitmodules",
    "CODEOWNERS",
    ".scalafmt.conf",
    "Cargo.lock",
    "with_the_same_user",
}
# List of specific files allowed in relpath to <proj_root>
# (exact full-path matches, checked last)
ALLOW_SPECIFIC_FILE = {
    "LICENSE",
    "NOTICE",
    "KEYS",
    "DISCLAIMER",
    "Jenkinsfile",
    "mypy.ini",
    # cargo config
    "rust/runtime/tests/test_wasm32/.cargo/config",
    "rust/tvm-graph-rt/tests/test_wasm32/.cargo/config",
    "apps/sgx/.cargo/config",
    "apps/wasm-standalone/wasm-graph/.cargo/config",
    # html for demo purposes
    "web/apps/browser/rpc_server.html",
    # images are normally not allowed
    # discuss with committers before add more images
    "apps/android_rpc/app/src/main/res/mipmap-hdpi/ic_launcher.png",
    "apps/android_rpc/app/src/main/res/mipmap-mdpi/ic_launcher.png",
    # documentation related files
    "docs/_static/css/tvm_theme.css",
    "docs/_static/img/tvm-logo-small.png",
    "docs/_static/img/tvm-logo-square.png",
    # pytest config
    "pytest.ini",
    # microTVM tests
    "tests/micro/zephyr/testdata/digit-2.jpg",
    "tests/micro/zephyr/testdata/digit-9.jpg",
    "tests/micro/zephyr/testdata/mnist-8.onnx",
    "tests/micro/zephyr/testdata/ic_sample_fp32_8.npy",
    # microTVM Zephyr runtime
    "apps/microtvm/zephyr/template_project/CMakeLists.txt.template",
    "apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-arm",
    "apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-xilinx-aarch64",
    "apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-i386",
    "apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-riscv32",
    "apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-riscv64",
    # microTVM Virtual Machines
    "apps/microtvm/reference-vm/zephyr/Vagrantfile",
    "apps/microtvm/reference-vm/zephyr/base-box/Vagrantfile.packer-template",
}
def filename_allowed(name):
    """Return True when the repo path `name` passes the check-in policy.

    Parameters
    ----------
    name : str
        Repo-relative path of the file.

    Returns
    -------
    allowed : bool
        True when the extension, basename, a 'Dockerfile' prefix, a
        3rdparty/ location, or an explicit whitelist entry permits it.
    """
    base = os.path.basename(name)
    extension = name.rsplit(".", 1)[-1]
    return (
        extension in ALLOW_EXTENSION
        or base in ALLOW_FILE_NAME
        or base.startswith("Dockerfile")
        or name.startswith("3rdparty")
        or name in ALLOW_SPECIFIC_FILE
    )
def copyright_line(line):
    """Return True if `line` looks like a copyright notice.

    The literal patterns are assembled at runtime so that this very file
    does not trigger its own detector.
    """
    if ("Copyright " + "(c)") in line:
        return True
    keyword = "Copy" + "right"
    return keyword in line and "by" in line
def check_asf_copyright(fname):
    """Return True when `fname` passes the header check.

    A file fails (returns False) only when it contains BOTH the ASF
    license header and a copyright line -- files with an ASF header must
    not carry extra copyright notices. PNG files, missing paths, and
    undecodable files pass trivially.
    """
    if fname.endswith(".png"):
        return True
    if not os.path.isfile(fname):
        return True
    has_asf_header = False
    has_copyright = False
    try:
        # BUG FIX: use a context manager so the file handle is closed
        # (the original left an open file object behind per call).
        with open(fname) as src:
            for line in src:
                if line.find("Licensed to the Apache Software Foundation") != -1:
                    has_asf_header = True
                if copyright_line(line):
                    has_copyright = True
                if has_asf_header and has_copyright:
                    return False
    except UnicodeDecodeError:
        # binary-ish file that slipped past the extension check: ignore
        pass
    return True
def main():
    """Check every git-tracked file's type and license header.

    Exits with status -1 and a report on stderr when any file has a
    disallowed type, or carries both an ASF header and a copyright line.
    """
    cmd = ["git", "ls-files"]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    (out, _) = proc.communicate()
    assert proc.returncode == 0, f'{" ".join(cmd)} errored: {out}'
    res = out.decode("utf-8")
    flist = res.split()
    # pass 1: reject files whose type is not whitelisted
    error_list = []
    for fname in flist:
        if not filename_allowed(fname):
            error_list.append(fname)
    if error_list:
        report = "------File type check report----\n"
        report += "\n".join(error_list)
        report += "\nFound %d files that are now allowed\n" % len(error_list)
        report += (
            "We do not check in binary files into the repo.\n"
            "If necessary, please discuss with committers and"
            "modify tests/lint/check_file_type.py to enable the file you need.\n"
        )
        sys.stderr.write(report)
        sys.stderr.flush()
        sys.exit(-1)
    # pass 2: reject files that mix an ASF header with a copyright line
    asf_copyright_list = []
    for fname in res.split():
        if not check_asf_copyright(fname):
            asf_copyright_list.append(fname)
    if asf_copyright_list:
        report = "------File type check report----\n"
        report += "\n".join(asf_copyright_list) + "\n"
        report += "------Found %d files that has ASF header with copyright message----\n" % len(
            asf_copyright_list
        )
        report += "--- Files with ASF header do not need Copyright lines.\n"
        report += "--- Contributors retain copyright to their contribution by default.\n"
        report += "--- If a file comes with a different license, consider put it under the 3rdparty folder instead.\n"
        report += "---\n"
        report += "--- You can use the following steps to remove the copyright lines\n"
        report += "--- Create file_list.txt in your text editor\n"
        report += "--- Copy paste the above content in file-list into file_list.txt\n"
        report += "--- python3 tests/lint/add_asf_header.py file_list.txt\n"
        sys.stderr.write(report)
        sys.stderr.flush()
        sys.exit(-1)
    print("check_file_type.py: all checks passed..")
if __name__ == "__main__":
    main()
| 28.003704 | 118 | 0.624653 |
import os
import sys
import subprocess
ALLOW_EXTENSION = {
"cc",
"c",
"h",
"s",
"rs",
"m",
"mm",
"g4",
"gradle",
"js",
"tcl",
"scala",
"java",
"go",
"ts",
"sh",
"py",
"pyi",
"pxi",
"pyd",
"pyx",
"cu",
"bat",
"rly",
"mk",
"in",
"cmake",
"xml",
"toml",
"yml",
"yaml",
"json",
"txt",
"md",
"rst",
"edl",
"lds",
"pbxproj",
"plist",
"xcworkspacedata",
"storyboard",
"sbt",
"properties",
"v",
"sdc",
"interp",
"tokens",
"idl",
"cl",
"conf",
"ld",
}
ALLOW_FILE_NAME = {
".gitignore",
".eslintignore",
".gitattributes",
"README",
"Makefile",
"Doxyfile",
"pylintrc",
"rat-excludes",
"log4j.properties",
".clang-format",
".gitmodules",
"CODEOWNERS",
".scalafmt.conf",
"Cargo.lock",
"with_the_same_user",
}
ALLOW_SPECIFIC_FILE = {
"LICENSE",
"NOTICE",
"KEYS",
"DISCLAIMER",
"Jenkinsfile",
"mypy.ini",
"rust/runtime/tests/test_wasm32/.cargo/config",
"rust/tvm-graph-rt/tests/test_wasm32/.cargo/config",
"apps/sgx/.cargo/config",
"apps/wasm-standalone/wasm-graph/.cargo/config",
"web/apps/browser/rpc_server.html",
"apps/android_rpc/app/src/main/res/mipmap-hdpi/ic_launcher.png",
"apps/android_rpc/app/src/main/res/mipmap-mdpi/ic_launcher.png",
"docs/_static/css/tvm_theme.css",
"docs/_static/img/tvm-logo-small.png",
"docs/_static/img/tvm-logo-square.png",
"pytest.ini",
"tests/micro/zephyr/testdata/digit-2.jpg",
"tests/micro/zephyr/testdata/digit-9.jpg",
"tests/micro/zephyr/testdata/mnist-8.onnx",
"tests/micro/zephyr/testdata/ic_sample_fp32_8.npy",
"apps/microtvm/zephyr/template_project/CMakeLists.txt.template",
"apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-arm",
"apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-xilinx-aarch64",
"apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-i386",
"apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-riscv32",
"apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-riscv64",
"apps/microtvm/reference-vm/zephyr/Vagrantfile",
"apps/microtvm/reference-vm/zephyr/base-box/Vagrantfile.packer-template",
}
def filename_allowed(name):
arr = name.rsplit(".", 1)
if arr[-1] in ALLOW_EXTENSION:
return True
if os.path.basename(name) in ALLOW_FILE_NAME:
return True
if os.path.basename(name).startswith("Dockerfile"):
return True
if name.startswith("3rdparty"):
return True
if name in ALLOW_SPECIFIC_FILE:
return True
return False
def copyright_line(line):
    """Return True if *line* looks like a copyright statement."""
    # The pattern is assembled from fragments so this checker does not
    # flag its own source when the lint runs over this file.
    if ("Copyright " + "(c)") in line:
        return True
    marker = "Copyright"
    return marker in line and "by" in line
def check_asf_copyright(fname):
    """Return False when *fname* has BOTH an ASF header and a copyright line.

    Files carrying the ASF header must not also contain copyright notices
    (contributors retain copyright by default). Binary ``.png`` files and
    paths that are not regular files are treated as passing (True).
    """
    if fname.endswith(".png"):
        return True
    if not os.path.isfile(fname):
        return True
    has_asf_header = False
    has_copyright = False
    try:
        # Use a context manager so the file handle is closed even on the
        # early return below (the original leaked the object from open()).
        with open(fname) as fin:
            for line in fin:
                if line.find("Licensed to the Apache Software Foundation") != -1:
                    has_asf_header = True
                if copyright_line(line):
                    has_copyright = True
                if has_asf_header and has_copyright:
                    return False
    except UnicodeDecodeError:
        # Binary content that slipped past the name checks: not our concern.
        pass
    return True
def main():
    """Check every git-tracked file for allowed type and clean ASF header.

    Writes a report to stderr and exits non-zero when either check fails;
    prints a success message otherwise.
    """
    cmd = ["git", "ls-files"]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    (out, _) = proc.communicate()
    assert proc.returncode == 0, f'{" ".join(cmd)} errored: {out}'
    res = out.decode("utf-8")
    flist = res.split()
    error_list = []
    for fname in flist:
        if not filename_allowed(fname):
            error_list.append(fname)
    if error_list:
        report = "------File type check report----\n"
        report += "\n".join(error_list)
        # Fixed typo: "now allowed" -> "not allowed".
        report += "\nFound %d files that are not allowed\n" % len(error_list)
        report += (
            "We do not check in binary files into the repo.\n"
            # Fixed missing space: adjacent literals used to concatenate to
            # "committers andmodify".
            "If necessary, please discuss with committers and "
            "modify tests/lint/check_file_type.py to enable the file you need.\n"
        )
        sys.stderr.write(report)
        sys.stderr.flush()
        sys.exit(-1)
    asf_copyright_list = []
    # Reuse the already-split file list instead of re-splitting the output.
    for fname in flist:
        if not check_asf_copyright(fname):
            asf_copyright_list.append(fname)
    if asf_copyright_list:
        report = "------File type check report----\n"
        report += "\n".join(asf_copyright_list) + "\n"
        report += "------Found %d files that has ASF header with copyright message----\n" % len(
            asf_copyright_list
        )
        report += "--- Files with ASF header do not need Copyright lines.\n"
        report += "--- Contributors retain copyright to their contribution by default.\n"
        report += "--- If a file comes with a different license, consider put it under the 3rdparty folder instead.\n"
        report += "---\n"
        report += "--- You can use the following steps to remove the copyright lines\n"
        report += "--- Create file_list.txt in your text editor\n"
        report += "--- Copy paste the above content in file-list into file_list.txt\n"
        report += "--- python3 tests/lint/add_asf_header.py file_list.txt\n"
        sys.stderr.write(report)
        sys.stderr.flush()
        sys.exit(-1)
    print("check_file_type.py: all checks passed..")
# Script entry point: run the lint when invoked directly.
if __name__ == "__main__":
    main()
| true | true |
f72bd0034cfdcd02a88b42c82d265f8271a74c19 | 30,361 | py | Python | covid/wallet/cc_wallet/cc_wallet.py | grayfallstown/covid-blockchain | 194d5351c70d3ee5d928f767e21c7894cfbb59a7 | [
"Apache-2.0"
] | null | null | null | covid/wallet/cc_wallet/cc_wallet.py | grayfallstown/covid-blockchain | 194d5351c70d3ee5d928f767e21c7894cfbb59a7 | [
"Apache-2.0"
] | null | null | null | covid/wallet/cc_wallet/cc_wallet.py | grayfallstown/covid-blockchain | 194d5351c70d3ee5d928f767e21c7894cfbb59a7 | [
"Apache-2.0"
] | null | null | null | from __future__ import annotations
import logging
import time
from dataclasses import replace
from secrets import token_bytes
from typing import Any, Dict, List, Optional, Set
from blspy import AugSchemeMPL, G2Element
from covid.consensus.cost_calculator import calculate_cost_of_program, NPCResult
from covid.full_node.bundle_tools import simple_solution_generator
from covid.full_node.mempool_check_conditions import get_name_puzzle_conditions
from covid.protocols.wallet_protocol import PuzzleSolutionResponse
from covid.types.blockchain_format.coin import Coin
from covid.types.blockchain_format.program import Program
from covid.types.blockchain_format.sized_bytes import bytes32
from covid.types.coin_spend import CoinSpend
from covid.types.generator_types import BlockGenerator
from covid.types.spend_bundle import SpendBundle
from covid.util.byte_types import hexstr_to_bytes
from covid.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from covid.util.ints import uint8, uint32, uint64, uint128
from covid.util.json_util import dict_to_json_str
from covid.wallet.block_record import HeaderBlockRecord
from covid.wallet.cc_wallet.cc_info import CCInfo
from covid.wallet.cc_wallet.cc_utils import (
CC_MOD,
SpendableCC,
cc_puzzle_for_inner_puzzle,
cc_puzzle_hash_for_inner_puzzle_hash,
get_lineage_proof_from_coin_and_puz,
spend_bundle_for_spendable_ccs,
uncurry_cc,
)
from covid.wallet.derivation_record import DerivationRecord
from covid.wallet.puzzles.genesis_by_coin_id_with_0 import (
create_genesis_or_zero_coin_checker,
genesis_coin_id_for_genesis_coin_checker,
lineage_proof_for_genesis,
)
from covid.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE_HASH,
calculate_synthetic_secret_key,
)
from covid.wallet.transaction_record import TransactionRecord
from covid.wallet.util.transaction_type import TransactionType
from covid.wallet.util.wallet_types import WalletType
from covid.wallet.wallet import Wallet
from covid.wallet.wallet_coin_record import WalletCoinRecord
from covid.wallet.wallet_info import WalletInfo
class CCWallet:
    """Wallet for a single colour of coloured coins (CCs).

    Wraps a standard wallet to mint, track and spend coins of one colour.
    The colour is identified by its genesis coin checker program, and every
    spendable coin needs a lineage proof kept in ``self.cc_info``.
    """

    wallet_state_manager: Any
    log: logging.Logger
    wallet_info: WalletInfo
    cc_coin_record: WalletCoinRecord
    cc_info: CCInfo  # genesis checker + accumulated lineage proofs
    standard_wallet: Wallet
    base_puzzle_program: Optional[bytes]
    base_inner_puzzle_hash: Optional[bytes32]
    cost_of_single_tx: Optional[int]  # lazily computed in get_max_send_amount

    @staticmethod
    async def create_new_cc(
        wallet_state_manager: Any,
        wallet: Wallet,
        amount: uint64,
    ):
        """Mint a brand-new colour worth *amount* and create its wallet.

        Persists the wallet record first, then broadcasts the genesis spend
        as a pair of transaction records (the standard-wallet side and the
        coloured-coin side). Rolls back the wallet record on failure.
        """
        self = CCWallet()
        self.cost_of_single_tx = None
        self.base_puzzle_program = None
        self.base_inner_puzzle_hash = None
        self.standard_wallet = wallet
        self.log = logging.getLogger(__name__)
        self.wallet_state_manager = wallet_state_manager
        self.cc_info = CCInfo(None, [])
        info_as_string = bytes(self.cc_info).hex()
        self.wallet_info = await wallet_state_manager.user_store.create_wallet(
            "CC Wallet", WalletType.COLOURED_COIN, info_as_string
        )
        if self.wallet_info is None:
            raise ValueError("Internal Error")
        try:
            spend_bundle = await self.generate_new_coloured_coin(amount)
        except Exception:
            # Minting failed: remove the wallet record we just created.
            await wallet_state_manager.user_store.delete_wallet(self.id())
            raise
        await self.wallet_state_manager.add_new_wallet(self, self.id())

        # The bundle produces both change (standard wallet) and the actual
        # coloured coin; pick out the one that belongs to this wallet.
        non_ephemeral_spends: List[Coin] = spend_bundle.not_ephemeral_additions()
        cc_coin = None
        puzzle_store = self.wallet_state_manager.puzzle_store

        for c in non_ephemeral_spends:
            info = await puzzle_store.wallet_info_for_puzzle_hash(c.puzzle_hash)
            if info is None:
                raise ValueError("Internal Error")
            id, wallet_type = info
            if id == self.id():
                cc_coin = c

        if cc_coin is None:
            raise ValueError("Internal Error, unable to generate new coloured coin")

        regular_record = TransactionRecord(
            confirmed_at_height=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=cc_coin.puzzle_hash,
            amount=uint64(cc_coin.amount),
            fee_amount=uint64(0),
            confirmed=False,
            sent=uint32(0),
            spend_bundle=spend_bundle,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=self.wallet_state_manager.main_wallet.id(),
            sent_to=[],
            trade_id=None,
            type=uint32(TransactionType.OUTGOING_TX.value),
            name=token_bytes(),
        )
        cc_record = TransactionRecord(
            confirmed_at_height=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=cc_coin.puzzle_hash,
            amount=uint64(cc_coin.amount),
            fee_amount=uint64(0),
            confirmed=False,
            sent=uint32(10),
            spend_bundle=None,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=self.id(),
            sent_to=[],
            trade_id=None,
            type=uint32(TransactionType.INCOMING_TX.value),
            name=token_bytes(),
        )
        await self.standard_wallet.push_transaction(regular_record)
        await self.standard_wallet.push_transaction(cc_record)
        return self

    @staticmethod
    async def create_wallet_for_cc(
        wallet_state_manager: Any,
        wallet: Wallet,
        genesis_checker_hex: str,
    ) -> CCWallet:
        """Create a wallet for an existing colour given its genesis checker hex."""
        self = CCWallet()
        self.cost_of_single_tx = None
        self.base_puzzle_program = None
        self.base_inner_puzzle_hash = None
        self.standard_wallet = wallet
        self.log = logging.getLogger(__name__)

        self.wallet_state_manager = wallet_state_manager

        self.cc_info = CCInfo(Program.from_bytes(bytes.fromhex(genesis_checker_hex)), [])
        info_as_string = bytes(self.cc_info).hex()
        self.wallet_info = await wallet_state_manager.user_store.create_wallet(
            "CC Wallet", WalletType.COLOURED_COIN, info_as_string
        )
        if self.wallet_info is None:
            raise Exception("wallet_info is None")

        await self.wallet_state_manager.add_new_wallet(self, self.id())
        return self

    @staticmethod
    async def create(
        wallet_state_manager: Any,
        wallet: Wallet,
        wallet_info: WalletInfo,
    ) -> CCWallet:
        """Re-instantiate a wallet from a persisted ``WalletInfo`` record."""
        self = CCWallet()

        self.log = logging.getLogger(__name__)

        self.cost_of_single_tx = None
        self.wallet_state_manager = wallet_state_manager
        self.wallet_info = wallet_info
        self.standard_wallet = wallet
        self.cc_info = CCInfo.from_bytes(hexstr_to_bytes(self.wallet_info.data))
        self.base_puzzle_program = None
        self.base_inner_puzzle_hash = None
        return self

    @classmethod
    def type(cls) -> uint8:
        """Return the wallet-type discriminator for coloured-coin wallets."""
        return uint8(WalletType.COLOURED_COIN)

    def id(self) -> uint32:
        """Return this wallet's persistent id."""
        return self.wallet_info.id

    async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint64:
        """Sum confirmed unspent coins that have a known lineage proof."""
        if record_list is None:
            record_list = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(self.id())

        amount: uint64 = uint64(0)
        for record in record_list:
            # Coins without a lineage proof are not yet spendable as CCs.
            lineage = await self.get_lineage_proof_for_coin(record.coin)
            if lineage is not None:
                amount = uint64(amount + record.coin.amount)

        self.log.info(f"Confirmed balance for cc wallet {self.id()} is {amount}")
        return uint64(amount)

    async def get_unconfirmed_balance(self, unspent_records=None) -> uint128:
        """Confirmed balance adjusted for pending incoming/outgoing txs."""
        confirmed = await self.get_confirmed_balance(unspent_records)
        unconfirmed_tx: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
            self.id()
        )
        addition_amount = 0
        removal_amount = 0

        for record in unconfirmed_tx:
            if TransactionType(record.type) is TransactionType.INCOMING_TX:
                addition_amount += record.amount
            else:
                removal_amount += record.amount

        result = confirmed - removal_amount + addition_amount

        self.log.info(f"Unconfirmed balance for cc wallet {self.id()} is {result}")
        return uint128(result)

    async def get_max_send_amount(self, records=None):
        """Estimate the largest amount sendable in one transaction.

        Uses the cached CLVM cost of a single spend (computed once from a
        trial transaction) and sums coins largest-first until adding another
        spend would exceed half the block cost limit.
        """
        spendable: List[WalletCoinRecord] = list(
            await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id(), records)
        )
        if len(spendable) == 0:
            return 0
        spendable.sort(reverse=True, key=lambda record: record.coin.amount)
        if self.cost_of_single_tx is None:
            coin = spendable[0].coin
            tx = await self.generate_signed_transaction(
                [coin.amount], [coin.puzzle_hash], coins={coin}, ignore_max_send_amount=True
            )
            program: BlockGenerator = simple_solution_generator(tx.spend_bundle)
            # npc contains names of the coins removed, puzzle_hashes and their spend conditions
            result: NPCResult = get_name_puzzle_conditions(
                program,
                self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
                cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE,
                safe_mode=True,
            )
            cost_result: uint64 = calculate_cost_of_program(
                program.program, result, self.wallet_state_manager.constants.COST_PER_BYTE
            )
            self.cost_of_single_tx = cost_result
            self.log.info(f"Cost of a single tx for standard wallet: {self.cost_of_single_tx}")

        max_cost = self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM / 2  # avoid full block TXs
        current_cost = 0
        total_amount = 0
        total_coin_count = 0

        for record in spendable:
            current_cost += self.cost_of_single_tx
            total_amount += record.coin.amount
            total_coin_count += 1
            if current_cost + self.cost_of_single_tx > max_cost:
                break

        return total_amount

    async def get_name(self):
        """Return the wallet's display name."""
        return self.wallet_info.name

    async def set_name(self, new_name: str):
        """Rename the wallet and persist the change."""
        new_info = replace(self.wallet_info, name=new_name)
        self.wallet_info = new_info
        await self.wallet_state_manager.user_store.update_wallet(self.wallet_info, False)

    def get_colour(self) -> str:
        """Return the colour identifier (hex of the genesis checker program)."""
        assert self.cc_info.my_genesis_checker is not None
        return bytes(self.cc_info.my_genesis_checker).hex()

    async def coin_added(self, coin: Coin, height: uint32):
        """Handle a notification that *coin* belonging to this wallet was added.

        Records the coin's lineage proof and, if the parent's proof is
        unknown, queues a request for the parent's puzzle and solution.
        """
        self.log.info(f"CC wallet has been notified that {coin} was added")

        search_for_parent: bool = True

        inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
        lineage_proof = Program.to((1, [coin.parent_coin_info, inner_puzzle.get_tree_hash(), coin.amount]))
        await self.add_lineage(coin.name(), lineage_proof, True)

        for name, lineage_proofs in self.cc_info.lineage_proofs:
            if coin.parent_coin_info == name:
                # Parent proof already known; no need to query the full node.
                search_for_parent = False
                break

        if search_for_parent:
            data: Dict[str, Any] = {
                "data": {
                    "action_data": {
                        "api_name": "request_puzzle_solution",
                        "height": height,
                        "coin_name": coin.parent_coin_info,
                        "received_coin": coin.name(),
                    }
                }
            }

            data_str = dict_to_json_str(data)
            await self.wallet_state_manager.create_action(
                name="request_puzzle_solution",
                wallet_id=self.id(),
                wallet_type=self.type(),
                callback="puzzle_solution_received",
                done=False,
                data=data_str,
                in_transaction=True,
            )

    async def puzzle_solution_received(self, response: PuzzleSolutionResponse, action_id: int):
        """Callback for a parent puzzle/solution response: store its lineage proof."""
        coin_name = response.coin_name
        height = response.height
        puzzle: Program = response.puzzle
        r = uncurry_cc(puzzle)
        header_hash = self.wallet_state_manager.blockchain.height_to_hash(height)
        block: Optional[
            HeaderBlockRecord
        ] = await self.wallet_state_manager.blockchain.block_store.get_header_block_record(header_hash)
        if block is None:
            return None
        removals = block.removals

        if r is not None:
            mod_hash, genesis_coin_checker, inner_puzzle = r
            self.log.info(f"parent: {coin_name} inner_puzzle for parent is {inner_puzzle}")
            parent_coin = None
            for coin in removals:
                if coin.name() == coin_name:
                    parent_coin = coin
            if parent_coin is None:
                raise ValueError("Error in finding parent")
            lineage_proof = get_lineage_proof_from_coin_and_puz(parent_coin, puzzle)
            await self.add_lineage(coin_name, lineage_proof)
        await self.wallet_state_manager.action_store.action_done(action_id)

    async def get_new_inner_hash(self) -> bytes32:
        """Return a fresh inner puzzle hash from the standard wallet."""
        return await self.standard_wallet.get_new_puzzlehash()

    async def get_new_inner_puzzle(self) -> Program:
        """Return a fresh inner puzzle from the standard wallet."""
        return await self.standard_wallet.get_new_puzzle()

    async def get_puzzle_hash(self, new: bool):
        """Return a puzzle hash, optionally forcing a new derivation."""
        return await self.standard_wallet.get_puzzle_hash(new)

    async def get_new_puzzlehash(self) -> bytes32:
        """Return a fresh puzzle hash from the standard wallet."""
        return await self.standard_wallet.get_new_puzzlehash()

    def puzzle_for_pk(self, pubkey) -> Program:
        """Wrap the standard puzzle for *pubkey* in this colour's CC puzzle."""
        inner_puzzle = self.standard_wallet.puzzle_for_pk(bytes(pubkey))
        cc_puzzle: Program = cc_puzzle_for_inner_puzzle(CC_MOD, self.cc_info.my_genesis_checker, inner_puzzle)
        self.base_puzzle_program = bytes(cc_puzzle)
        self.base_inner_puzzle_hash = inner_puzzle.get_tree_hash()
        return cc_puzzle

    async def get_new_cc_puzzle_hash(self):
        """Derive and return an unused CC puzzle hash for this wallet."""
        return (await self.wallet_state_manager.get_unused_derivation_record(self.id())).puzzle_hash

    # Create a new coin of value 0 with a given colour
    async def generate_zero_val_coin(self, send=True, exclude: List[Coin] = None) -> SpendBundle:
        """Create (and optionally broadcast) a zero-value coin of this colour."""
        if self.cc_info.my_genesis_checker is None:
            raise ValueError("My genesis checker is None")
        if exclude is None:
            exclude = []
        coins = await self.standard_wallet.select_coins(0, exclude)

        assert coins != set()

        origin = coins.copy().pop()
        origin_id = origin.name()

        cc_inner = await self.get_new_inner_hash()
        cc_puzzle_hash: Program = cc_puzzle_hash_for_inner_puzzle_hash(
            CC_MOD, self.cc_info.my_genesis_checker, cc_inner
        )

        tx: TransactionRecord = await self.standard_wallet.generate_signed_transaction(
            uint64(0), cc_puzzle_hash, uint64(0), origin_id, coins
        )
        assert tx.spend_bundle is not None
        full_spend: SpendBundle = tx.spend_bundle
        self.log.info(f"Generate zero val coin: cc_puzzle_hash is {cc_puzzle_hash}")

        # generate eve coin so we can add future lineage_proofs even if we don't eve spend
        eve_coin = Coin(origin_id, cc_puzzle_hash, uint64(0))

        await self.add_lineage(
            eve_coin.name(),
            Program.to(
                (
                    1,
                    [eve_coin.parent_coin_info, cc_inner, eve_coin.amount],
                )
            ),
        )
        await self.add_lineage(eve_coin.parent_coin_info, Program.to((0, [origin.as_list(), 1])))

        if send:
            regular_record = TransactionRecord(
                confirmed_at_height=uint32(0),
                created_at_time=uint64(int(time.time())),
                to_puzzle_hash=cc_puzzle_hash,
                amount=uint64(0),
                fee_amount=uint64(0),
                confirmed=False,
                sent=uint32(10),
                spend_bundle=full_spend,
                additions=full_spend.additions(),
                removals=full_spend.removals(),
                wallet_id=uint32(1),
                sent_to=[],
                trade_id=None,
                type=uint32(TransactionType.INCOMING_TX.value),
                name=token_bytes(),
            )
            cc_record = TransactionRecord(
                confirmed_at_height=uint32(0),
                created_at_time=uint64(int(time.time())),
                to_puzzle_hash=cc_puzzle_hash,
                amount=uint64(0),
                fee_amount=uint64(0),
                confirmed=False,
                sent=uint32(0),
                spend_bundle=full_spend,
                additions=full_spend.additions(),
                removals=full_spend.removals(),
                wallet_id=self.id(),
                sent_to=[],
                trade_id=None,
                type=uint32(TransactionType.INCOMING_TX.value),
                name=full_spend.name(),
            )
            await self.wallet_state_manager.add_transaction(regular_record)
            await self.wallet_state_manager.add_pending_transaction(cc_record)

        return full_spend

    async def get_spendable_balance(self, records=None) -> uint64:
        """Sum the amounts of currently spendable CC coins."""
        coins = await self.get_cc_spendable_coins(records)
        amount = 0
        for record in coins:
            amount += record.coin.amount

        return uint64(amount)

    async def get_pending_change_balance(self) -> uint64:
        """Sum change amounts expected back from our own in-mempool spends."""
        unconfirmed_tx = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(self.id())
        addition_amount = 0
        for record in unconfirmed_tx:
            if not record.is_in_mempool():
                continue
            our_spend = False
            for coin in record.removals:
                # Don't count eve spend as change
                if coin.parent_coin_info.hex() == self.get_colour():
                    continue
                if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
                    our_spend = True
                    break

            if our_spend is not True:
                continue

            for coin in record.additions:
                if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
                    addition_amount += coin.amount

        return uint64(addition_amount)

    async def get_cc_spendable_coins(self, records=None) -> List[WalletCoinRecord]:
        """Return spendable coin records that have a known lineage proof."""
        result: List[WalletCoinRecord] = []

        record_list: Set[WalletCoinRecord] = await self.wallet_state_manager.get_spendable_coins_for_wallet(
            self.id(), records
        )

        for record in record_list:
            lineage = await self.get_lineage_proof_for_coin(record.coin)
            if lineage is not None:
                result.append(record)

        return result

    async def select_coins(self, amount: uint64) -> Set[Coin]:
        """
        Returns a set of coins that can be used for generating a new transaction.
        Note: Must be called under wallet state manager lock
        """

        spendable_am = await self.get_confirmed_balance()

        if amount > spendable_am:
            error_msg = f"Can't select amount higher than our spendable balance {amount}, spendable {spendable_am}"
            self.log.warning(error_msg)
            raise ValueError(error_msg)

        self.log.info(f"About to select coins for amount {amount}")
        spendable: List[WalletCoinRecord] = await self.get_cc_spendable_coins()

        sum = 0
        used_coins: Set = set()

        # Use older coins first
        spendable.sort(key=lambda r: r.confirmed_block_height)

        # Try to use coins from the store, if there isn't enough of "unused"
        # coins use change coins that are not confirmed yet
        unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet(
            self.id()
        )
        for coinrecord in spendable:
            if sum >= amount and len(used_coins) > 0:
                break
            if coinrecord.coin.name() in unconfirmed_removals:
                continue
            sum += coinrecord.coin.amount
            used_coins.add(coinrecord.coin)
            self.log.info(f"Selected coin: {coinrecord.coin.name()} at height {coinrecord.confirmed_block_height}!")

        # This happens when we couldn't use one of the coins because it's already used
        # but unconfirmed, and we are waiting for the change. (unconfirmed_additions)
        if sum < amount:
            raise ValueError(
                "Can't make this transaction at the moment. Waiting for the change from the previous transaction."
            )

        self.log.info(f"Successfully selected coins: {used_coins}")
        return used_coins

    async def get_sigs(self, innerpuz: Program, innersol: Program, coin_name: bytes32) -> List[G2Element]:
        """Produce AGG_SIG_ME signatures for the inner puzzle's conditions."""
        puzzle_hash = innerpuz.get_tree_hash()
        pubkey, private = await self.wallet_state_manager.get_keys(puzzle_hash)
        synthetic_secret_key = calculate_synthetic_secret_key(private, DEFAULT_HIDDEN_PUZZLE_HASH)
        sigs: List[G2Element] = []
        error, conditions, cost = conditions_dict_for_solution(
            innerpuz, innersol, self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM
        )
        if conditions is not None:
            for _, msg in pkm_pairs_for_conditions_dict(
                conditions, coin_name, self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA
            ):
                signature = AugSchemeMPL.sign(synthetic_secret_key, msg)
                sigs.append(signature)
        return sigs

    async def inner_puzzle_for_cc_puzhash(self, cc_hash: bytes32) -> Program:
        """Return the inner (standard) puzzle behind a CC puzzle hash."""
        record: DerivationRecord = await self.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash(
            cc_hash.hex()
        )
        inner_puzzle: Program = self.standard_wallet.puzzle_for_pk(bytes(record.pubkey))
        return inner_puzzle

    async def get_lineage_proof_for_coin(self, coin) -> Optional[Program]:
        """Look up the stored lineage proof for *coin*'s parent, if any."""
        for name, proof in self.cc_info.lineage_proofs:
            if name == coin.parent_coin_info:
                return proof
        return None

    async def generate_signed_transaction(
        self,
        amounts: List[uint64],
        puzzle_hashes: List[bytes32],
        fee: uint64 = uint64(0),
        origin_id: bytes32 = None,
        coins: Set[Coin] = None,
        ignore_max_send_amount: bool = False,
    ) -> TransactionRecord:
        """Build and sign a CC spend sending *amounts* to *puzzle_hashes*.

        The first selected coin carries all payment outputs (and the fee);
        the remaining coins contribute value with empty inner solutions.
        """
        # Get coins and calculate amount of change required
        outgoing_amount = uint64(sum(amounts))
        total_outgoing = outgoing_amount + fee

        if not ignore_max_send_amount:
            max_send = await self.get_max_send_amount()
            if total_outgoing > max_send:
                raise ValueError(f"Can't send more than {max_send} in a single transaction")

        if coins is None:
            selected_coins: Set[Coin] = await self.select_coins(uint64(total_outgoing))
        else:
            selected_coins = coins

        total_amount = sum([x.amount for x in selected_coins])
        change = total_amount - total_outgoing
        primaries = []
        for amount, puzzle_hash in zip(amounts, puzzle_hashes):
            primaries.append({"puzzlehash": puzzle_hash, "amount": amount})

        if change > 0:
            changepuzzlehash = await self.get_new_inner_hash()
            primaries.append({"puzzlehash": changepuzzlehash, "amount": change})

        coin = list(selected_coins)[0]
        inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)

        if self.cc_info.my_genesis_checker is None:
            raise ValueError("My genesis checker is None")

        genesis_id = genesis_coin_id_for_genesis_coin_checker(self.cc_info.my_genesis_checker)

        spendable_cc_list = []
        innersol_list = []
        sigs: List[G2Element] = []
        first = True
        for coin in selected_coins:
            coin_inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
            if first:
                first = False
                if fee > 0:
                    innersol = self.standard_wallet.make_solution(primaries=primaries, fee=fee)
                else:
                    innersol = self.standard_wallet.make_solution(primaries=primaries)
            else:
                innersol = self.standard_wallet.make_solution()
            innersol_list.append(innersol)
            lineage_proof = await self.get_lineage_proof_for_coin(coin)
            assert lineage_proof is not None
            spendable_cc_list.append(SpendableCC(coin, genesis_id, inner_puzzle, lineage_proof))
            sigs = sigs + await self.get_sigs(coin_inner_puzzle, innersol, coin.name())

        spend_bundle = spend_bundle_for_spendable_ccs(
            CC_MOD,
            self.cc_info.my_genesis_checker,
            spendable_cc_list,
            innersol_list,
            sigs,
        )
        # TODO add support for array in stored records
        return TransactionRecord(
            confirmed_at_height=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=puzzle_hashes[0],
            amount=uint64(outgoing_amount),
            fee_amount=uint64(0),
            confirmed=False,
            sent=uint32(0),
            spend_bundle=spend_bundle,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=self.id(),
            sent_to=[],
            trade_id=None,
            type=uint32(TransactionType.OUTGOING_TX.value),
            name=spend_bundle.name(),
        )

    async def add_lineage(self, name: bytes32, lineage: Optional[Program], in_transaction=False):
        """Append a (coin name, lineage proof) pair and persist cc_info."""
        self.log.info(f"Adding parent {name}: {lineage}")
        current_list = self.cc_info.lineage_proofs.copy()
        current_list.append((name, lineage))
        cc_info: CCInfo = CCInfo(self.cc_info.my_genesis_checker, current_list)
        await self.save_info(cc_info, in_transaction)

    async def save_info(self, cc_info: CCInfo, in_transaction):
        """Replace self.cc_info and persist it in the wallet record."""
        self.cc_info = cc_info
        current_info = self.wallet_info
        data_str = bytes(cc_info).hex()
        wallet_info = WalletInfo(current_info.id, current_info.name, current_info.type, data_str)
        self.wallet_info = wallet_info
        await self.wallet_state_manager.user_store.update_wallet(wallet_info, in_transaction)

    async def generate_new_coloured_coin(self, amount: uint64) -> SpendBundle:
        """Create the genesis spend that mints *amount* of a new colour."""
        coins = await self.standard_wallet.select_coins(amount)

        origin = coins.copy().pop()
        origin_id = origin.name()

        cc_inner_hash = await self.get_new_inner_hash()
        await self.add_lineage(origin_id, Program.to((0, [origin.as_list(), 0])))
        genesis_coin_checker = create_genesis_or_zero_coin_checker(origin_id)

        minted_cc_puzzle_hash = cc_puzzle_hash_for_inner_puzzle_hash(CC_MOD, genesis_coin_checker, cc_inner_hash)

        tx_record: TransactionRecord = await self.standard_wallet.generate_signed_transaction(
            amount, minted_cc_puzzle_hash, uint64(0), origin_id, coins
        )
        assert tx_record.spend_bundle is not None

        lineage_proof: Optional[Program] = lineage_proof_for_genesis(origin)
        lineage_proofs = [(origin_id, lineage_proof)]
        cc_info: CCInfo = CCInfo(genesis_coin_checker, lineage_proofs)
        await self.save_info(cc_info, False)
        return tx_record.spend_bundle

    async def create_spend_bundle_relative_amount(self, cc_amount, zero_coin: Coin = None) -> Optional[SpendBundle]:
        """Build a spend bundle that changes this wallet's CC value by *cc_amount*.

        Negative amounts select coins to give value away; positive amounts
        require a zero-value coin of this colour to receive value into.
        """
        # If we're losing value then get coloured coins with at least that much value
        # If we're gaining value then our amount doesn't matter
        if cc_amount < 0:
            cc_spends = await self.select_coins(abs(cc_amount))
        else:
            if zero_coin is None:
                return None
            cc_spends = set()
            cc_spends.add(zero_coin)

        if cc_spends is None:
            return None

        # Calculate output amount given relative difference and sum of actual values
        spend_value = sum([coin.amount for coin in cc_spends])
        cc_amount = spend_value + cc_amount

        # Loop through coins and create solution for innerpuzzle
        list_of_solutions = []
        output_created = None
        sigs: List[G2Element] = []
        for coin in cc_spends:
            if output_created is None:
                newinnerpuzhash = await self.get_new_inner_hash()
                innersol = self.standard_wallet.make_solution(
                    primaries=[{"puzzlehash": newinnerpuzhash, "amount": cc_amount}]
                )
                output_created = coin
            else:
                innersol = self.standard_wallet.make_solution(consumed=[output_created.name()])
            innerpuz: Program = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
            sigs = sigs + await self.get_sigs(innerpuz, innersol, coin.name())
            lineage_proof = await self.get_lineage_proof_for_coin(coin)
            puzzle_reveal = cc_puzzle_for_inner_puzzle(CC_MOD, self.cc_info.my_genesis_checker, innerpuz)
            # Use coin info to create solution and add coin and solution to list of CoinSpends
            solution = [
                innersol,
                coin.as_list(),
                lineage_proof,
                None,
                None,
                None,
                None,
                None,
            ]
            list_of_solutions.append(CoinSpend(coin, puzzle_reveal, Program.to(solution)))

        aggsig = AugSchemeMPL.aggregate(sigs)
        return SpendBundle(list_of_solutions, aggsig)
| 40.589572 | 118 | 0.647969 | from __future__ import annotations
import logging
import time
from dataclasses import replace
from secrets import token_bytes
from typing import Any, Dict, List, Optional, Set
from blspy import AugSchemeMPL, G2Element
from covid.consensus.cost_calculator import calculate_cost_of_program, NPCResult
from covid.full_node.bundle_tools import simple_solution_generator
from covid.full_node.mempool_check_conditions import get_name_puzzle_conditions
from covid.protocols.wallet_protocol import PuzzleSolutionResponse
from covid.types.blockchain_format.coin import Coin
from covid.types.blockchain_format.program import Program
from covid.types.blockchain_format.sized_bytes import bytes32
from covid.types.coin_spend import CoinSpend
from covid.types.generator_types import BlockGenerator
from covid.types.spend_bundle import SpendBundle
from covid.util.byte_types import hexstr_to_bytes
from covid.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from covid.util.ints import uint8, uint32, uint64, uint128
from covid.util.json_util import dict_to_json_str
from covid.wallet.block_record import HeaderBlockRecord
from covid.wallet.cc_wallet.cc_info import CCInfo
from covid.wallet.cc_wallet.cc_utils import (
CC_MOD,
SpendableCC,
cc_puzzle_for_inner_puzzle,
cc_puzzle_hash_for_inner_puzzle_hash,
get_lineage_proof_from_coin_and_puz,
spend_bundle_for_spendable_ccs,
uncurry_cc,
)
from covid.wallet.derivation_record import DerivationRecord
from covid.wallet.puzzles.genesis_by_coin_id_with_0 import (
create_genesis_or_zero_coin_checker,
genesis_coin_id_for_genesis_coin_checker,
lineage_proof_for_genesis,
)
from covid.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE_HASH,
calculate_synthetic_secret_key,
)
from covid.wallet.transaction_record import TransactionRecord
from covid.wallet.util.transaction_type import TransactionType
from covid.wallet.util.wallet_types import WalletType
from covid.wallet.wallet import Wallet
from covid.wallet.wallet_coin_record import WalletCoinRecord
from covid.wallet.wallet_info import WalletInfo
class CCWallet:
wallet_state_manager: Any
log: logging.Logger
wallet_info: WalletInfo
cc_coin_record: WalletCoinRecord
cc_info: CCInfo
standard_wallet: Wallet
base_puzzle_program: Optional[bytes]
base_inner_puzzle_hash: Optional[bytes32]
cost_of_single_tx: Optional[int]
@staticmethod
async def create_new_cc(
wallet_state_manager: Any,
wallet: Wallet,
amount: uint64,
):
self = CCWallet()
self.cost_of_single_tx = None
self.base_puzzle_program = None
self.base_inner_puzzle_hash = None
self.standard_wallet = wallet
self.log = logging.getLogger(__name__)
self.wallet_state_manager = wallet_state_manager
self.cc_info = CCInfo(None, [])
info_as_string = bytes(self.cc_info).hex()
self.wallet_info = await wallet_state_manager.user_store.create_wallet(
"CC Wallet", WalletType.COLOURED_COIN, info_as_string
)
if self.wallet_info is None:
raise ValueError("Internal Error")
try:
spend_bundle = await self.generate_new_coloured_coin(amount)
except Exception:
await wallet_state_manager.user_store.delete_wallet(self.id())
raise
await self.wallet_state_manager.add_new_wallet(self, self.id())
non_ephemeral_spends: List[Coin] = spend_bundle.not_ephemeral_additions()
cc_coin = None
puzzle_store = self.wallet_state_manager.puzzle_store
for c in non_ephemeral_spends:
info = await puzzle_store.wallet_info_for_puzzle_hash(c.puzzle_hash)
if info is None:
raise ValueError("Internal Error")
id, wallet_type = info
if id == self.id():
cc_coin = c
if cc_coin is None:
raise ValueError("Internal Error, unable to generate new coloured coin")
regular_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=cc_coin.puzzle_hash,
amount=uint64(cc_coin.amount),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(0),
spend_bundle=spend_bundle,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
wallet_id=self.wallet_state_manager.main_wallet.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.OUTGOING_TX.value),
name=token_bytes(),
)
cc_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=cc_coin.puzzle_hash,
amount=uint64(cc_coin.amount),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(10),
spend_bundle=None,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
wallet_id=self.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.INCOMING_TX.value),
name=token_bytes(),
)
await self.standard_wallet.push_transaction(regular_record)
await self.standard_wallet.push_transaction(cc_record)
return self
@staticmethod
async def create_wallet_for_cc(
wallet_state_manager: Any,
wallet: Wallet,
genesis_checker_hex: str,
) -> CCWallet:
self = CCWallet()
self.cost_of_single_tx = None
self.base_puzzle_program = None
self.base_inner_puzzle_hash = None
self.standard_wallet = wallet
self.log = logging.getLogger(__name__)
self.wallet_state_manager = wallet_state_manager
self.cc_info = CCInfo(Program.from_bytes(bytes.fromhex(genesis_checker_hex)), [])
info_as_string = bytes(self.cc_info).hex()
self.wallet_info = await wallet_state_manager.user_store.create_wallet(
"CC Wallet", WalletType.COLOURED_COIN, info_as_string
)
if self.wallet_info is None:
raise Exception("wallet_info is None")
await self.wallet_state_manager.add_new_wallet(self, self.id())
return self
@staticmethod
async def create(
wallet_state_manager: Any,
wallet: Wallet,
wallet_info: WalletInfo,
) -> CCWallet:
self = CCWallet()
self.log = logging.getLogger(__name__)
self.cost_of_single_tx = None
self.wallet_state_manager = wallet_state_manager
self.wallet_info = wallet_info
self.standard_wallet = wallet
self.cc_info = CCInfo.from_bytes(hexstr_to_bytes(self.wallet_info.data))
self.base_puzzle_program = None
self.base_inner_puzzle_hash = None
return self
    @classmethod
    def type(cls) -> uint8:
        """Return the WalletType tag (COLOURED_COIN) used by the wallet state manager."""
        return uint8(WalletType.COLOURED_COIN)
    def id(self) -> uint32:
        """Return this wallet's id as stored in the user store."""
        return self.wallet_info.id
async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint64:
if record_list is None:
record_list = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(self.id())
amount: uint64 = uint64(0)
for record in record_list:
lineage = await self.get_lineage_proof_for_coin(record.coin)
if lineage is not None:
amount = uint64(amount + record.coin.amount)
self.log.info(f"Confirmed balance for cc wallet {self.id()} is {amount}")
return uint64(amount)
async def get_unconfirmed_balance(self, unspent_records=None) -> uint128:
confirmed = await self.get_confirmed_balance(unspent_records)
unconfirmed_tx: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
self.id()
)
addition_amount = 0
removal_amount = 0
for record in unconfirmed_tx:
if TransactionType(record.type) is TransactionType.INCOMING_TX:
addition_amount += record.amount
else:
removal_amount += record.amount
result = confirmed - removal_amount + addition_amount
self.log.info(f"Unconfirmed balance for cc wallet {self.id()} is {result}")
return uint128(result)
    async def get_max_send_amount(self, records=None):
        """Largest amount sendable in one transaction given the block cost cap.

        Estimates the CLVM cost of a single coin spend (cached on first use),
        then greedily counts the largest coins until roughly half the max block
        cost would be consumed.
        """
        spendable: List[WalletCoinRecord] = list(
            await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id(), records)
        )
        if len(spendable) == 0:
            return 0
        # Largest coins first: fewer spends for a given amount.
        spendable.sort(reverse=True, key=lambda record: record.coin.amount)
        if self.cost_of_single_tx is None:
            # Build (but don't broadcast) a one-coin spend to measure its program cost.
            coin = spendable[0].coin
            tx = await self.generate_signed_transaction(
                [coin.amount], [coin.puzzle_hash], coins={coin}, ignore_max_send_amount=True
            )
            program: BlockGenerator = simple_solution_generator(tx.spend_bundle)
            result: NPCResult = get_name_puzzle_conditions(
                program,
                self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
                cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE,
                safe_mode=True,
            )
            cost_result: uint64 = calculate_cost_of_program(
                program.program, result, self.wallet_state_manager.constants.COST_PER_BYTE
            )
            self.cost_of_single_tx = cost_result
            self.log.info(f"Cost of a single tx for standard wallet: {self.cost_of_single_tx}")
        # Budget: only allow a transaction to use up to half a block's cost.
        max_cost = self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM / 2
        current_cost = 0
        total_amount = 0
        total_coin_count = 0
        for record in spendable:
            current_cost += self.cost_of_single_tx
            total_amount += record.coin.amount
            total_coin_count += 1
            # Stop once adding one more spend would exceed the budget.
            if current_cost + self.cost_of_single_tx > max_cost:
                break
        return total_amount
    async def get_name(self):
        """Return the wallet's display name from its WalletInfo record."""
        return self.wallet_info.name
async def set_name(self, new_name: str):
new_info = replace(self.wallet_info, name=new_name)
self.wallet_info = new_info
await self.wallet_state_manager.user_store.update_wallet(self.wallet_info, False)
    def get_colour(self) -> str:
        """Return this wallet's colour: the hex of its genesis checker program."""
        assert self.cc_info.my_genesis_checker is not None
        return bytes(self.cc_info.my_genesis_checker).hex()
    async def coin_added(self, coin: Coin, height: uint32):
        """Notification hook: a CC coin belonging to this wallet was confirmed.

        Records a lineage proof for the new coin and, when the parent's proof
        is unknown, queues an async request for the parent's puzzle/solution.
        The reply is routed to `puzzle_solution_received`.
        """
        self.log.info(f"CC wallet has been notified that {coin} was added")
        search_for_parent: bool = True
        inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
        lineage_proof = Program.to((1, [coin.parent_coin_info, inner_puzzle.get_tree_hash(), coin.amount]))
        await self.add_lineage(coin.name(), lineage_proof, True)
        # If we already hold a proof keyed by the parent's id, no lookup is needed.
        for name, lineage_proofs in self.cc_info.lineage_proofs:
            if coin.parent_coin_info == name:
                search_for_parent = False
                break
        if search_for_parent:
            data: Dict[str, Any] = {
                "data": {
                    "action_data": {
                        "api_name": "request_puzzle_solution",
                        "height": height,
                        "coin_name": coin.parent_coin_info,
                        "received_coin": coin.name(),
                    }
                }
            }
            data_str = dict_to_json_str(data)
            await self.wallet_state_manager.create_action(
                name="request_puzzle_solution",
                wallet_id=self.id(),
                wallet_type=self.type(),
                callback="puzzle_solution_received",
                done=False,
                data=data_str,
                in_transaction=True,
            )
    async def puzzle_solution_received(self, response: PuzzleSolutionResponse, action_id: int):
        """Callback for a parent-coin puzzle lookup queued by `coin_added`.

        Derives and stores the parent's lineage proof, then marks the action
        done. If the block is unknown (e.g. reorged away) the action is left
        pending.

        Raises:
            ValueError: if the parent coin is not among the block's removals.
        """
        coin_name = response.coin_name
        height = response.height
        puzzle: Program = response.puzzle
        r = uncurry_cc(puzzle)
        header_hash = self.wallet_state_manager.blockchain.height_to_hash(height)
        block: Optional[
            HeaderBlockRecord
        ] = await self.wallet_state_manager.blockchain.block_store.get_header_block_record(header_hash)
        if block is None:
            return None
        removals = block.removals
        if r is not None:
            # The puzzle is a CC puzzle: locate the parent among the block's removals.
            mod_hash, genesis_coin_checker, inner_puzzle = r
            self.log.info(f"parent: {coin_name} inner_puzzle for parent is {inner_puzzle}")
            parent_coin = None
            for coin in removals:
                if coin.name() == coin_name:
                    parent_coin = coin
            if parent_coin is None:
                raise ValueError("Error in finding parent")
            lineage_proof = get_lineage_proof_from_coin_and_puz(parent_coin, puzzle)
            await self.add_lineage(coin_name, lineage_proof)
        await self.wallet_state_manager.action_store.action_done(action_id)
    async def get_new_inner_hash(self) -> bytes32:
        """New inner (standard-wallet) puzzle hash to be wrapped in a CC puzzle."""
        return await self.standard_wallet.get_new_puzzlehash()
    async def get_new_inner_puzzle(self) -> Program:
        """New inner puzzle program, delegated to the standard wallet."""
        return await self.standard_wallet.get_new_puzzle()
    async def get_puzzle_hash(self, new: bool):
        """Delegate puzzle-hash retrieval (new or current) to the standard wallet."""
        return await self.standard_wallet.get_puzzle_hash(new)
    async def get_new_puzzlehash(self) -> bytes32:
        """New standard-wallet puzzle hash (not wrapped in a CC puzzle)."""
        return await self.standard_wallet.get_new_puzzlehash()
def puzzle_for_pk(self, pubkey) -> Program:
inner_puzzle = self.standard_wallet.puzzle_for_pk(bytes(pubkey))
cc_puzzle: Program = cc_puzzle_for_inner_puzzle(CC_MOD, self.cc_info.my_genesis_checker, inner_puzzle)
self.base_puzzle_program = bytes(cc_puzzle)
self.base_inner_puzzle_hash = inner_puzzle.get_tree_hash()
return cc_puzzle
    async def get_new_cc_puzzle_hash(self):
        """Reserve an unused derivation record and return its (CC) puzzle hash."""
        return (await self.wallet_state_manager.get_unused_derivation_record(self.id())).puzzle_hash
    async def generate_zero_val_coin(self, send=True, exclude: List[Coin] = None) -> SpendBundle:
        """Create a zero-value CC coin of this wallet's colour.

        Spends a standard coin (its full value returns as change) to create an
        "eve" CC coin of amount 0, recording lineage for the eve coin and its
        non-CC parent. If `send`, the spend is submitted via the wallet state
        manager.

        Raises:
            ValueError: if the wallet's genesis checker is not set.
        """
        if self.cc_info.my_genesis_checker is None:
            raise ValueError("My genesis checker is None")
        if exclude is None:
            exclude = []
        coins = await self.standard_wallet.select_coins(0, exclude)
        assert coins != set()
        origin = coins.copy().pop()
        origin_id = origin.name()
        cc_inner = await self.get_new_inner_hash()
        cc_puzzle_hash: Program = cc_puzzle_hash_for_inner_puzzle_hash(
            CC_MOD, self.cc_info.my_genesis_checker, cc_inner
        )
        tx: TransactionRecord = await self.standard_wallet.generate_signed_transaction(
            uint64(0), cc_puzzle_hash, uint64(0), origin_id, coins
        )
        assert tx.spend_bundle is not None
        full_spend: SpendBundle = tx.spend_bundle
        self.log.info(f"Generate zero val coin: cc_puzzle_hash is {cc_puzzle_hash}")
        # Record lineage for the new eve coin and for its (non-CC) parent.
        eve_coin = Coin(origin_id, cc_puzzle_hash, uint64(0))
        await self.add_lineage(
            eve_coin.name(),
            Program.to(
                (
                    1,
                    [eve_coin.parent_coin_info, cc_inner, eve_coin.amount],
                )
            ),
        )
        await self.add_lineage(eve_coin.parent_coin_info, Program.to((0, [origin.as_list(), 1])))
        if send:
            # Mirror records: one on the main wallet, one on this CC wallet.
            regular_record = TransactionRecord(
                confirmed_at_height=uint32(0),
                created_at_time=uint64(int(time.time())),
                to_puzzle_hash=cc_puzzle_hash,
                amount=uint64(0),
                fee_amount=uint64(0),
                confirmed=False,
                sent=uint32(10),
                spend_bundle=full_spend,
                additions=full_spend.additions(),
                removals=full_spend.removals(),
                wallet_id=uint32(1),
                sent_to=[],
                trade_id=None,
                type=uint32(TransactionType.INCOMING_TX.value),
                name=token_bytes(),
            )
            cc_record = TransactionRecord(
                confirmed_at_height=uint32(0),
                created_at_time=uint64(int(time.time())),
                to_puzzle_hash=cc_puzzle_hash,
                amount=uint64(0),
                fee_amount=uint64(0),
                confirmed=False,
                sent=uint32(0),
                spend_bundle=full_spend,
                additions=full_spend.additions(),
                removals=full_spend.removals(),
                wallet_id=self.id(),
                sent_to=[],
                trade_id=None,
                type=uint32(TransactionType.INCOMING_TX.value),
                name=full_spend.name(),
            )
            await self.wallet_state_manager.add_transaction(regular_record)
            await self.wallet_state_manager.add_pending_transaction(cc_record)
        return full_spend
async def get_spendable_balance(self, records=None) -> uint64:
coins = await self.get_cc_spendable_coins(records)
amount = 0
for record in coins:
amount += record.coin.amount
return uint64(amount)
    async def get_pending_change_balance(self) -> uint64:
        """Sum of additions coming back to this wallet from our own pending spends.

        Only mempool transactions count, and only those where at least one of
        our own coins is being removed (i.e. spends we initiated).
        """
        unconfirmed_tx = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(self.id())
        addition_amount = 0
        for record in unconfirmed_tx:
            if not record.is_in_mempool():
                continue
            our_spend = False
            for coin in record.removals:
                # Don't count eve spend as change
                if coin.parent_coin_info.hex() == self.get_colour():
                    continue
                if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
                    our_spend = True
                    break
            if our_spend is not True:
                continue
            for coin in record.additions:
                if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
                    addition_amount += coin.amount
        return uint64(addition_amount)
async def get_cc_spendable_coins(self, records=None) -> List[WalletCoinRecord]:
result: List[WalletCoinRecord] = []
record_list: Set[WalletCoinRecord] = await self.wallet_state_manager.get_spendable_coins_for_wallet(
self.id(), records
)
for record in record_list:
lineage = await self.get_lineage_proof_for_coin(record.coin)
if lineage is not None:
result.append(record)
return result
async def select_coins(self, amount: uint64) -> Set[Coin]:
spendable_am = await self.get_confirmed_balance()
if amount > spendable_am:
error_msg = f"Can't select amount higher than our spendable balance {amount}, spendable {spendable_am}"
self.log.warning(error_msg)
raise ValueError(error_msg)
self.log.info(f"About to select coins for amount {amount}")
spendable: List[WalletCoinRecord] = await self.get_cc_spendable_coins()
sum = 0
used_coins: Set = set()
# Use older coins first
spendable.sort(key=lambda r: r.confirmed_block_height)
# Try to use coins from the store, if there isn't enough of "unused"
unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet(
self.id()
)
for coinrecord in spendable:
if sum >= amount and len(used_coins) > 0:
break
if coinrecord.coin.name() in unconfirmed_removals:
continue
sum += coinrecord.coin.amount
used_coins.add(coinrecord.coin)
self.log.info(f"Selected coin: {coinrecord.coin.name()} at height {coinrecord.confirmed_block_height}!")
if sum < amount:
raise ValueError(
"Can't make this transaction at the moment. Waiting for the change from the previous transaction."
)
self.log.info(f"Successfully selected coins: {used_coins}")
return used_coins
async def get_sigs(self, innerpuz: Program, innersol: Program, coin_name: bytes32) -> List[G2Element]:
puzzle_hash = innerpuz.get_tree_hash()
pubkey, private = await self.wallet_state_manager.get_keys(puzzle_hash)
synthetic_secret_key = calculate_synthetic_secret_key(private, DEFAULT_HIDDEN_PUZZLE_HASH)
sigs: List[G2Element] = []
error, conditions, cost = conditions_dict_for_solution(
innerpuz, innersol, self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM
)
if conditions is not None:
for _, msg in pkm_pairs_for_conditions_dict(
conditions, coin_name, self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA
):
signature = AugSchemeMPL.sign(synthetic_secret_key, msg)
sigs.append(signature)
return sigs
async def inner_puzzle_for_cc_puzhash(self, cc_hash: bytes32) -> Program:
record: DerivationRecord = await self.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash(
cc_hash.hex()
)
inner_puzzle: Program = self.standard_wallet.puzzle_for_pk(bytes(record.pubkey))
return inner_puzzle
async def get_lineage_proof_for_coin(self, coin) -> Optional[Program]:
for name, proof in self.cc_info.lineage_proofs:
if name == coin.parent_coin_info:
return proof
return None
    async def generate_signed_transaction(
        self,
        amounts: List[uint64],
        puzzle_hashes: List[bytes32],
        fee: uint64 = uint64(0),
        origin_id: bytes32 = None,
        coins: Set[Coin] = None,
        ignore_max_send_amount: bool = False,
    ) -> TransactionRecord:
        """Build and sign a CC spend paying `amounts[i]` to `puzzle_hashes[i]`.

        Change, if any, goes to a fresh inner puzzle hash of this wallet. Only
        the first selected coin's inner solution creates the outputs (and pays
        the fee); the remaining coins contribute value with empty solutions.

        Raises:
            ValueError: if the total exceeds the max send amount (unless
                `ignore_max_send_amount`), if the genesis checker is unset, or
                (indirectly) if coin selection fails.
        """
        # Get coins and calculate amount of change required
        outgoing_amount = uint64(sum(amounts))
        total_outgoing = outgoing_amount + fee
        if not ignore_max_send_amount:
            max_send = await self.get_max_send_amount()
            if total_outgoing > max_send:
                raise ValueError(f"Can't send more than {max_send} in a single transaction")
        if coins is None:
            selected_coins: Set[Coin] = await self.select_coins(uint64(total_outgoing))
        else:
            selected_coins = coins
        total_amount = sum([x.amount for x in selected_coins])
        change = total_amount - total_outgoing
        primaries = []
        for amount, puzzle_hash in zip(amounts, puzzle_hashes):
            primaries.append({"puzzlehash": puzzle_hash, "amount": amount})
        if change > 0:
            changepuzzlehash = await self.get_new_inner_hash()
            primaries.append({"puzzlehash": changepuzzlehash, "amount": change})
        coin = list(selected_coins)[0]
        inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
        if self.cc_info.my_genesis_checker is None:
            raise ValueError("My genesis checker is None")
        genesis_id = genesis_coin_id_for_genesis_coin_checker(self.cc_info.my_genesis_checker)
        spendable_cc_list = []
        innersol_list = []
        sigs: List[G2Element] = []
        first = True
        for coin in selected_coins:
            coin_inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
            if first:
                # Only the first coin's solution carries the payment outputs and fee.
                first = False
                if fee > 0:
                    innersol = self.standard_wallet.make_solution(primaries=primaries, fee=fee)
                else:
                    innersol = self.standard_wallet.make_solution(primaries=primaries)
            else:
                innersol = self.standard_wallet.make_solution()
            innersol_list.append(innersol)
            lineage_proof = await self.get_lineage_proof_for_coin(coin)
            assert lineage_proof is not None
            # NOTE(review): `inner_puzzle` here is the FIRST coin's inner puzzle for
            # every SpendableCC, while signatures use each coin's own
            # `coin_inner_puzzle` — confirm this asymmetry is intentional.
            spendable_cc_list.append(SpendableCC(coin, genesis_id, inner_puzzle, lineage_proof))
            sigs = sigs + await self.get_sigs(coin_inner_puzzle, innersol, coin.name())
        spend_bundle = spend_bundle_for_spendable_ccs(
            CC_MOD,
            self.cc_info.my_genesis_checker,
            spendable_cc_list,
            innersol_list,
            sigs,
        )
        # NOTE(review): the record reports fee_amount 0 even when `fee` > 0 —
        # confirm whether the actual fee should be recorded here.
        return TransactionRecord(
            confirmed_at_height=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=puzzle_hashes[0],
            amount=uint64(outgoing_amount),
            fee_amount=uint64(0),
            confirmed=False,
            sent=uint32(0),
            spend_bundle=spend_bundle,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=self.id(),
            sent_to=[],
            trade_id=None,
            type=uint32(TransactionType.OUTGOING_TX.value),
            name=spend_bundle.name(),
        )
async def add_lineage(self, name: bytes32, lineage: Optional[Program], in_transaction=False):
self.log.info(f"Adding parent {name}: {lineage}")
current_list = self.cc_info.lineage_proofs.copy()
current_list.append((name, lineage))
cc_info: CCInfo = CCInfo(self.cc_info.my_genesis_checker, current_list)
await self.save_info(cc_info, in_transaction)
async def save_info(self, cc_info: CCInfo, in_transaction):
self.cc_info = cc_info
current_info = self.wallet_info
data_str = bytes(cc_info).hex()
wallet_info = WalletInfo(current_info.id, current_info.name, current_info.type, data_str)
self.wallet_info = wallet_info
await self.wallet_state_manager.user_store.update_wallet(wallet_info, in_transaction)
    async def generate_new_coloured_coin(self, amount: uint64) -> SpendBundle:
        """Mint `amount` units of a brand-new colour from standard coins.

        The colour's genesis checker is derived from the chosen origin coin id;
        this wallet's CCInfo is overwritten with the new colour's metadata.
        """
        coins = await self.standard_wallet.select_coins(amount)
        origin = coins.copy().pop()
        origin_id = origin.name()
        cc_inner_hash = await self.get_new_inner_hash()
        await self.add_lineage(origin_id, Program.to((0, [origin.as_list(), 0])))
        genesis_coin_checker = create_genesis_or_zero_coin_checker(origin_id)
        minted_cc_puzzle_hash = cc_puzzle_hash_for_inner_puzzle_hash(CC_MOD, genesis_coin_checker, cc_inner_hash)
        tx_record: TransactionRecord = await self.standard_wallet.generate_signed_transaction(
            amount, minted_cc_puzzle_hash, uint64(0), origin_id, coins
        )
        assert tx_record.spend_bundle is not None
        lineage_proof: Optional[Program] = lineage_proof_for_genesis(origin)
        lineage_proofs = [(origin_id, lineage_proof)]
        cc_info: CCInfo = CCInfo(genesis_coin_checker, lineage_proofs)
        await self.save_info(cc_info, False)
        return tx_record.spend_bundle
    async def create_spend_bundle_relative_amount(self, cc_amount, zero_coin: Coin = None) -> Optional[SpendBundle]:
        """Spend bundle changing this wallet's CC value by the signed `cc_amount`.

        Negative `cc_amount`: our coins are spent, offering value outward.
        Non-negative: a zero-value coin must be supplied to build upon;
        otherwise returns None.
        """
        # If we're gaining value then our amount doesn't matter
        if cc_amount < 0:
            cc_spends = await self.select_coins(abs(cc_amount))
        else:
            if zero_coin is None:
                return None
            cc_spends = set()
            cc_spends.add(zero_coin)
        # NOTE(review): cc_spends is always assigned above, so this check looks unreachable.
        if cc_spends is None:
            return None
        # Calculate output amount given relative difference and sum of actual values
        spend_value = sum([coin.amount for coin in cc_spends])
        cc_amount = spend_value + cc_amount
        # Loop through coins and create solution for innerpuzzle
        list_of_solutions = []
        output_created = None
        sigs: List[G2Element] = []
        for coin in cc_spends:
            if output_created is None:
                # First coin creates the single aggregated output.
                newinnerpuzhash = await self.get_new_inner_hash()
                innersol = self.standard_wallet.make_solution(
                    primaries=[{"puzzlehash": newinnerpuzhash, "amount": cc_amount}]
                )
                output_created = coin
            else:
                innersol = self.standard_wallet.make_solution(consumed=[output_created.name()])
            innerpuz: Program = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
            sigs = sigs + await self.get_sigs(innerpuz, innersol, coin.name())
            lineage_proof = await self.get_lineage_proof_for_coin(coin)
            puzzle_reveal = cc_puzzle_for_inner_puzzle(CC_MOD, self.cc_info.my_genesis_checker, innerpuz)
            # Use coin info to create solution and add coin and solution to list of CoinSpends
            solution = [
                innersol,
                coin.as_list(),
                lineage_proof,
                None,
                None,
                None,
                None,
                None,
            ]
            list_of_solutions.append(CoinSpend(coin, puzzle_reveal, Program.to(solution)))
        aggsig = AugSchemeMPL.aggregate(sigs)
        return SpendBundle(list_of_solutions, aggsig)
| true | true |
f72bd026e80c9b36f6569dca3e1e436f15855ecb | 417 | py | Python | tests/integration/test_segway_train.py | procha2/segway-pipeline | d0d3b8603eea9c9cbe92b56899c670dc41e89ca8 | [
"MIT"
] | 1 | 2021-03-13T11:34:45.000Z | 2021-03-13T11:34:45.000Z | tests/integration/test_segway_train.py | procha2/segway-pipeline | d0d3b8603eea9c9cbe92b56899c670dc41e89ca8 | [
"MIT"
] | null | null | null | tests/integration/test_segway_train.py | procha2/segway-pipeline | d0d3b8603eea9c9cbe92b56899c670dc41e89ca8 | [
"MIT"
] | 1 | 2020-10-01T11:48:17.000Z | 2020-10-01T11:48:17.000Z | from pathlib import Path
import pytest
@pytest.mark.workflow("test_segway_train")
def test_segway_train_traindirs_match(test_data_dir, workflow_dir, traindirs_match):
    """The workflow's produced traindir archive must match the reference archive."""
    produced = workflow_dir / Path("test-output/traindir.tar.gz")
    reference = test_data_dir / Path("segway_train_traindir.tar.gz")
    assert traindirs_match(produced, reference, workflow_dir)
| 37.909091 | 86 | 0.822542 | from pathlib import Path
import pytest
# NOTE(review): byte-identical duplicate of the test defined above — looks like
# merge/dataset residue; the second definition shadows the first at collection
# time. Confirm and delete one copy.
@pytest.mark.workflow("test_segway_train")
def test_segway_train_traindirs_match(test_data_dir, workflow_dir, traindirs_match):
    actual_traindir_path = workflow_dir / Path("test-output/traindir.tar.gz")
    expected_traindir_path = test_data_dir / Path("segway_train_traindir.tar.gz")
    assert traindirs_match(actual_traindir_path, expected_traindir_path, workflow_dir)
| true | true |
f72bd02dde43d0b610faacf919e7788a178ef0a9 | 1,359 | py | Python | vsts/vsts/service_endpoint/v4_1/models/service_endpoint_execution_owner.py | kenkuo/azure-devops-python-api | 9e920bd25e938fa89ff7f60153e5b9e113ca839d | [
"MIT"
] | null | null | null | vsts/vsts/service_endpoint/v4_1/models/service_endpoint_execution_owner.py | kenkuo/azure-devops-python-api | 9e920bd25e938fa89ff7f60153e5b9e113ca839d | [
"MIT"
] | null | null | null | vsts/vsts/service_endpoint/v4_1/models/service_endpoint_execution_owner.py | kenkuo/azure-devops-python-api | 9e920bd25e938fa89ff7f60153e5b9e113ca839d | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class ServiceEndpointExecutionOwner(Model):
"""ServiceEndpointExecutionOwner.
:param _links:
:type _links: :class:`ReferenceLinks <service-endpoint.v4_1.models.ReferenceLinks>`
:param id: Gets or sets the Id of service endpoint execution owner.
:type id: int
:param name: Gets or sets the name of service endpoint execution owner.
:type name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, _links=None, id=None, name=None):
super(ServiceEndpointExecutionOwner, self).__init__()
self._links = _links
self.id = id
self.name = name
| 39.970588 | 94 | 0.532009 |
from msrest.serialization import Model
class ServiceEndpointExecutionOwner(Model):
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, _links=None, id=None, name=None):
super(ServiceEndpointExecutionOwner, self).__init__()
self._links = _links
self.id = id
self.name = name
| true | true |
f72bd05b364336d0dfa60b4f0cb91cab8257f3a8 | 17,511 | py | Python | cirq-google/cirq_google/serialization/op_serializer_test.py | augustehirth/Cirq | e616710a0fa243524a9f6d7bc0d35e6b952fe3d0 | [
"Apache-2.0"
] | null | null | null | cirq-google/cirq_google/serialization/op_serializer_test.py | augustehirth/Cirq | e616710a0fa243524a9f6d7bc0d35e6b952fe3d0 | [
"Apache-2.0"
] | null | null | null | cirq-google/cirq_google/serialization/op_serializer_test.py | augustehirth/Cirq | e616710a0fa243524a9f6d7bc0d35e6b952fe3d0 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, List
import copy
import numpy as np
import pytest
import sympy
from google.protobuf import json_format
import cirq
import cirq_google as cg
from cirq_google.api import v2
DEFAULT_TOKEN = 'test_tag'
def op_proto(json: Dict) -> v2.program_pb2.Operation:
op = v2.program_pb2.Operation()
json_format.ParseDict(json, op)
return op
class GateWithAttribute(cirq.SingleQubitGate):
def __init__(self, val):
self.val = val
class GateWithProperty(cirq.SingleQubitGate):
def __init__(self, val, not_req=None):
self._val = val
self._not_req = not_req
@property
def val(self):
return self._val
class GateWithMethod(cirq.SingleQubitGate):
def __init__(self, val):
self._val = val
def get_val(self):
return self._val
class SubclassGate(GateWithAttribute):
pass
def get_val(op):
return op.gate.get_val()
TEST_CASES = (
(float, 1.0, {'arg_value': {'float_value': 1.0}}),
(str, 'abc', {'arg_value': {'string_value': 'abc'}}),
(float, 1, {'arg_value': {'float_value': 1.0}}),
(List[bool], [True, False], {'arg_value': {'bool_values': {'values': [True, False]}}}),
(List[bool], (True, False), {'arg_value': {'bool_values': {'values': [True, False]}}}),
(
List[bool],
np.array([True, False], dtype=bool),
{'arg_value': {'bool_values': {'values': [True, False]}}},
),
(sympy.Symbol, sympy.Symbol('x'), {'symbol': 'x'}),
(float, sympy.Symbol('x'), {'symbol': 'x'}),
(
float,
sympy.Symbol('x') - sympy.Symbol('y'),
{
'func': {
'type': 'add',
'args': [
{'symbol': 'x'},
{
'func': {
'type': 'mul',
'args': [{'arg_value': {'float_value': -1.0}}, {'symbol': 'y'}],
}
},
],
}
},
),
)
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_attribute(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithAttribute(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_property(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithProperty(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_callable(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithMethod,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter=get_val)
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithMethod(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
def test_to_proto_gate_predicate():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
can_serialize_predicate=lambda x: x.gate.val == 1,
)
q = cirq.GridQubit(1, 2)
assert serializer.to_proto(GateWithAttribute(0)(q)) is None
assert serializer.to_proto(GateWithAttribute(1)(q)) is not None
assert not serializer.can_serialize_operation(GateWithAttribute(0)(q))
assert serializer.can_serialize_operation(GateWithAttribute(1)(q))
def test_to_proto_gate_mismatch():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='GateWithAttribute.*GateWithProperty'):
serializer.to_proto(GateWithAttribute(1.0)(q))
def test_to_proto_unsupported_type():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=bytes, op_getter='val')],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='bytes'):
serializer.to_proto(GateWithProperty(b's')(q))
def test_to_proto_named_qubit_supported():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
q = cirq.NamedQubit('a')
arg_value = 1.0
result = serializer.to_proto(GateWithProperty(arg_value)(q))
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': arg_value}}},
'qubits': [{'id': 'a'}],
}
)
assert result == expected
def test_to_proto_line_qubit_supported():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
q = cirq.LineQubit('10')
arg_value = 1.0
result = serializer.to_proto(GateWithProperty(arg_value)(q))
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': arg_value}}},
'qubits': [{'id': '10'}],
}
)
assert result == expected
def test_to_proto_required_but_not_present():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(
serialized_name='my_val', serialized_type=float, op_getter=lambda x: None
)
],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='required'):
serializer.to_proto(GateWithProperty(1.0)(q))
def test_to_proto_no_getattr():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='nope')],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='does not have'):
serializer.to_proto(GateWithProperty(1.0)(q))
def test_to_proto_not_required_ok():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val'),
cg.SerializingArg(
serialized_name='not_req',
serialized_type=float,
op_getter='not_req',
required=False,
),
],
)
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
}
)
q = cirq.GridQubit(1, 2)
assert serializer.to_proto(GateWithProperty(0.125)(q)) == expected
@pytest.mark.parametrize(
('val_type', 'val'),
(
(float, 's'),
(str, 1.0),
(sympy.Symbol, 1.0),
(List[bool], [1.0]),
(List[bool], 'a'),
(List[bool], (1.0,)),
),
)
def test_to_proto_type_mismatch(val_type, val):
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match=str(type(val))):
serializer.to_proto(GateWithProperty(val)(q))
def test_can_serialize_operation_subclass():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
can_serialize_predicate=lambda x: x.gate.val == 1,
)
q = cirq.GridQubit(1, 1)
assert serializer.can_serialize_operation(SubclassGate(1)(q))
assert not serializer.can_serialize_operation(SubclassGate(0)(q))
def test_defaults_not_serialized():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(
serialized_name='my_val', serialized_type=float, default=1.0, op_getter='val'
)
],
)
q = cirq.GridQubit(1, 2)
no_default = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
}
)
assert no_default == serializer.to_proto(GateWithAttribute(0.125)(q))
with_default = op_proto({'gate': {'id': 'my_gate'}, 'qubits': [{'id': '1_2'}]})
assert with_default == serializer.to_proto(GateWithAttribute(1.0)(q))
def test_token_serialization():
    """A CalibrationTag on an operation is emitted as the proto's token_value."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithAttribute,
        serialized_gate_id='my_gate',
        args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
    )
    tagged_op = GateWithAttribute(0.125)(cirq.GridQubit(1, 2)).with_tags(
        cg.CalibrationTag('my_token')
    )
    expected = op_proto(
        {
            'gate': {'id': 'my_gate'},
            'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
            'qubits': [{'id': '1_2'}],
            'token_value': 'my_token',
        }
    )
    assert serializer.to_proto(tagged_op) == expected
ONE_CONSTANT = [v2.program_pb2.Constant(string_value='my_token')]
TWO_CONSTANTS = [
v2.program_pb2.Constant(string_value='other_token'),
v2.program_pb2.Constant(string_value='my_token'),
]
@pytest.mark.parametrize(
    ('constants', 'expected_index', 'expected_constants'),
    [
        ([], 0, ONE_CONSTANT),
        (ONE_CONSTANT, 0, ONE_CONSTANT),
        (TWO_CONSTANTS, 1, TWO_CONSTANTS),
    ],
)
def test_token_serialization_with_constant_reference(constants, expected_index, expected_constants):
    """With a constants table, a token serializes as an index into that table."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithAttribute,
        serialized_gate_id='my_gate',
        args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
    )
    # Work on a private copy: serialization appends missing tokens in place.
    constants = list(constants)
    tagged_op = GateWithAttribute(0.125)(cirq.GridQubit(1, 2)).with_tags(
        cg.CalibrationTag('my_token')
    )
    expected = op_proto(
        {
            'gate': {'id': 'my_gate'},
            'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
            'qubits': [{'id': '1_2'}],
            'token_constant_index': expected_index,
        }
    )
    assert serializer.to_proto(tagged_op, constants=constants) == expected
    # The token either already existed in the table or has been appended.
    assert constants == expected_constants
def default_circuit_proto():
    """Build a serialized circuit: one moment holding two x_pow operations."""
    # X on (1, 1) with a symbolic exponent 'k'.
    symbolic_x = v2.program_pb2.Operation()
    symbolic_x.gate.id = 'x_pow'
    symbolic_x.args['half_turns'].arg_value.string_value = 'k'
    symbolic_x.qubits.add().id = '1_1'
    # Full X on (1, 2) whose token is constants-table entry 0.
    tagged_x = v2.program_pb2.Operation()
    tagged_x.gate.id = 'x_pow'
    tagged_x.args['half_turns'].arg_value.float_value = 1.0
    tagged_x.qubits.add().id = '1_2'
    tagged_x.token_constant_index = 0
    moment = v2.program_pb2.Moment(operations=[symbolic_x, tagged_x])
    return v2.program_pb2.Circuit(
        scheduling_strategy=v2.program_pb2.Circuit.MOMENT_BY_MOMENT, moments=[moment]
    )
def default_circuit():
    # Fixture circuit: a symbolic X power, an X tagged with DEFAULT_TOKEN, and
    # a measurement keyed 'm'. Frozen so it is hashable and can be used as a
    # key in the raw_constants dicts built by the tests below.
    return cirq.FrozenCircuit(
        cirq.X(cirq.GridQubit(1, 1)) ** sympy.Symbol('k'),
        cirq.X(cirq.GridQubit(1, 2)).with_tags(DEFAULT_TOKEN),
        cirq.measure(cirq.GridQubit(1, 1), key='m'),
    )
def test_circuit_op_serializer_properties():
    """CircuitOpSerializer advertises FrozenCircuit and the id 'circuit'."""
    serializer = cg.CircuitOpSerializer()
    assert serializer.internal_type == cirq.FrozenCircuit
    assert serializer.serialized_id == 'circuit'
def test_can_serialize_circuit_op():
    """CircuitOpSerializer accepts CircuitOperations and rejects plain gate ops."""
    serializer = cg.CircuitOpSerializer()
    assert serializer.can_serialize_operation(cirq.CircuitOperation(default_circuit()))
    assert not serializer.can_serialize_operation(cirq.X(cirq.GridQubit(1, 1)))
def test_circuit_op_to_proto_errors():
    """Invalid CircuitOperation serialization requests raise ValueError."""
    serializer = cg.CircuitOpSerializer()
    to_serialize = cirq.CircuitOperation(default_circuit())
    constants = [
        v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
        v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
    ]
    raw_constants = {
        DEFAULT_TOKEN: 0,
        default_circuit(): 1,
    }
    # constants and raw_constants must both be supplied, together.
    with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
        serializer.to_proto(to_serialize)
    with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
        serializer.to_proto(to_serialize, constants=constants)
    with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
        serializer.to_proto(to_serialize, raw_constants=raw_constants)
    # Only CircuitOperation instances are accepted as input.
    with pytest.raises(ValueError, match='Serializer expected CircuitOperation'):
        serializer.to_proto(
            v2.program_pb2.Operation(), constants=constants, raw_constants=raw_constants
        )
    # The operation's circuit must already be present in raw_constants.
    bad_raw_constants = {cirq.FrozenCircuit(): 0}
    with pytest.raises(ValueError, match='Encountered a circuit not in the constants table'):
        serializer.to_proto(to_serialize, constants=constants, raw_constants=bad_raw_constants)
    # Symbolic repetition counts cannot be serialized.
    with pytest.raises(ValueError, match='Cannot serialize repetitions of type'):
        serializer.to_proto(
            to_serialize ** sympy.Symbol('a'), constants=constants, raw_constants=raw_constants
        )
@pytest.mark.parametrize('repetitions', [1, 5, ['a', 'b', 'c']])
def test_circuit_op_to_proto(repetitions):
    """A fully-mapped CircuitOperation serializes into the expected proto."""
    serializer = cg.CircuitOpSerializer()
    # A list parameter means explicit repetition ids; its length is the count.
    if isinstance(repetitions, int):
        repetition_ids = None
    else:
        repetition_ids = repetitions
        repetitions = len(repetition_ids)
    to_serialize = cirq.CircuitOperation(
        circuit=default_circuit(),
        qubit_map={cirq.GridQubit(1, 1): cirq.GridQubit(1, 2)},
        measurement_key_map={'m': 'results'},
        param_resolver={'k': 1.0},
        repetitions=repetitions,
        repetition_ids=repetition_ids,
    )
    constants = [
        v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
        v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
    ]
    raw_constants = {
        DEFAULT_TOKEN: 0,
        default_circuit(): 1,
    }
    # Expected repetition spec: a bare count or the explicit id list.
    repetition_spec = v2.program_pb2.RepetitionSpecification()
    if repetition_ids is None:
        repetition_spec.repetition_count = repetitions
    else:
        for rep_id in repetition_ids:
            repetition_spec.repetition_ids.ids.append(rep_id)
    # Expected qubit remapping (1, 1) -> (1, 2).
    qubit_map = v2.program_pb2.QubitMapping()
    q_p1 = qubit_map.entries.add()
    q_p1.key.id = '1_1'
    q_p1.value.id = '1_2'
    # Expected measurement key remapping 'm' -> 'results'.
    measurement_key_map = v2.program_pb2.MeasurementKeyMapping()
    meas_p1 = measurement_key_map.entries.add()
    meas_p1.key.string_key = 'm'
    meas_p1.value.string_key = 'results'
    # Expected parameter binding k -> 1.0.
    arg_map = v2.program_pb2.ArgMapping()
    arg_p1 = arg_map.entries.add()
    arg_p1.key.arg_value.string_value = 'k'
    arg_p1.value.arg_value.float_value = 1.0
    expected = v2.program_pb2.CircuitOperation(
        circuit_constant_index=1,
        repetition_specification=repetition_spec,
        qubit_map=qubit_map,
        measurement_key_map=measurement_key_map,
        arg_map=arg_map,
    )
    actual = serializer.to_proto(to_serialize, constants=constants, raw_constants=raw_constants)
    assert actual == expected
| 32.669776 | 100 | 0.640911 |
from typing import Dict, List
import copy
import numpy as np
import pytest
import sympy
from google.protobuf import json_format
import cirq
import cirq_google as cg
from cirq_google.api import v2
DEFAULT_TOKEN = 'test_tag'
def op_proto(json: Dict) -> v2.program_pb2.Operation:
op = v2.program_pb2.Operation()
json_format.ParseDict(json, op)
return op
class GateWithAttribute(cirq.SingleQubitGate):
def __init__(self, val):
self.val = val
class GateWithProperty(cirq.SingleQubitGate):
def __init__(self, val, not_req=None):
self._val = val
self._not_req = not_req
@property
def val(self):
return self._val
class GateWithMethod(cirq.SingleQubitGate):
def __init__(self, val):
self._val = val
def get_val(self):
return self._val
class SubclassGate(GateWithAttribute):
pass
def get_val(op):
return op.gate.get_val()
TEST_CASES = (
(float, 1.0, {'arg_value': {'float_value': 1.0}}),
(str, 'abc', {'arg_value': {'string_value': 'abc'}}),
(float, 1, {'arg_value': {'float_value': 1.0}}),
(List[bool], [True, False], {'arg_value': {'bool_values': {'values': [True, False]}}}),
(List[bool], (True, False), {'arg_value': {'bool_values': {'values': [True, False]}}}),
(
List[bool],
np.array([True, False], dtype=bool),
{'arg_value': {'bool_values': {'values': [True, False]}}},
),
(sympy.Symbol, sympy.Symbol('x'), {'symbol': 'x'}),
(float, sympy.Symbol('x'), {'symbol': 'x'}),
(
float,
sympy.Symbol('x') - sympy.Symbol('y'),
{
'func': {
'type': 'add',
'args': [
{'symbol': 'x'},
{
'func': {
'type': 'mul',
'args': [{'arg_value': {'float_value': -1.0}}, {'symbol': 'y'}],
}
},
],
}
},
),
)
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_attribute(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithAttribute(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_property(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithProperty(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_callable(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithMethod,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter=get_val)
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithMethod(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
def test_to_proto_gate_predicate():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
can_serialize_predicate=lambda x: x.gate.val == 1,
)
q = cirq.GridQubit(1, 2)
assert serializer.to_proto(GateWithAttribute(0)(q)) is None
assert serializer.to_proto(GateWithAttribute(1)(q)) is not None
assert not serializer.can_serialize_operation(GateWithAttribute(0)(q))
assert serializer.can_serialize_operation(GateWithAttribute(1)(q))
def test_to_proto_gate_mismatch():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='GateWithAttribute.*GateWithProperty'):
serializer.to_proto(GateWithAttribute(1.0)(q))
def test_to_proto_unsupported_type():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=bytes, op_getter='val')],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='bytes'):
serializer.to_proto(GateWithProperty(b's')(q))
def test_to_proto_named_qubit_supported():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
q = cirq.NamedQubit('a')
arg_value = 1.0
result = serializer.to_proto(GateWithProperty(arg_value)(q))
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': arg_value}}},
'qubits': [{'id': 'a'}],
}
)
assert result == expected
def test_to_proto_line_qubit_supported():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
q = cirq.LineQubit('10')
arg_value = 1.0
result = serializer.to_proto(GateWithProperty(arg_value)(q))
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': arg_value}}},
'qubits': [{'id': '10'}],
}
)
assert result == expected
def test_to_proto_required_but_not_present():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(
serialized_name='my_val', serialized_type=float, op_getter=lambda x: None
)
],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='required'):
serializer.to_proto(GateWithProperty(1.0)(q))
def test_to_proto_no_getattr():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='nope')],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='does not have'):
serializer.to_proto(GateWithProperty(1.0)(q))
def test_to_proto_not_required_ok():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val'),
cg.SerializingArg(
serialized_name='not_req',
serialized_type=float,
op_getter='not_req',
required=False,
),
],
)
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
}
)
q = cirq.GridQubit(1, 2)
assert serializer.to_proto(GateWithProperty(0.125)(q)) == expected
@pytest.mark.parametrize(
('val_type', 'val'),
(
(float, 's'),
(str, 1.0),
(sympy.Symbol, 1.0),
(List[bool], [1.0]),
(List[bool], 'a'),
(List[bool], (1.0,)),
),
)
def test_to_proto_type_mismatch(val_type, val):
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match=str(type(val))):
serializer.to_proto(GateWithProperty(val)(q))
def test_can_serialize_operation_subclass():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
can_serialize_predicate=lambda x: x.gate.val == 1,
)
q = cirq.GridQubit(1, 1)
assert serializer.can_serialize_operation(SubclassGate(1)(q))
assert not serializer.can_serialize_operation(SubclassGate(0)(q))
def test_defaults_not_serialized():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(
serialized_name='my_val', serialized_type=float, default=1.0, op_getter='val'
)
],
)
q = cirq.GridQubit(1, 2)
no_default = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
}
)
assert no_default == serializer.to_proto(GateWithAttribute(0.125)(q))
with_default = op_proto({'gate': {'id': 'my_gate'}, 'qubits': [{'id': '1_2'}]})
assert with_default == serializer.to_proto(GateWithAttribute(1.0)(q))
def test_token_serialization():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
q = cirq.GridQubit(1, 2)
tag = cg.CalibrationTag('my_token')
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
'token_value': 'my_token',
}
)
assert expected == serializer.to_proto(GateWithAttribute(0.125)(q).with_tags(tag))
ONE_CONSTANT = [v2.program_pb2.Constant(string_value='my_token')]
TWO_CONSTANTS = [
v2.program_pb2.Constant(string_value='other_token'),
v2.program_pb2.Constant(string_value='my_token'),
]
@pytest.mark.parametrize(
('constants', 'expected_index', 'expected_constants'),
(
([], 0, ONE_CONSTANT),
(ONE_CONSTANT, 0, ONE_CONSTANT),
(TWO_CONSTANTS, 1, TWO_CONSTANTS),
),
)
def test_token_serialization_with_constant_reference(constants, expected_index, expected_constants):
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
constants = copy.copy(constants)
q = cirq.GridQubit(1, 2)
tag = cg.CalibrationTag('my_token')
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
'token_constant_index': expected_index,
}
)
assert expected == serializer.to_proto(
GateWithAttribute(0.125)(q).with_tags(tag), constants=constants
)
assert constants == expected_constants
def default_circuit_proto():
op1 = v2.program_pb2.Operation()
op1.gate.id = 'x_pow'
op1.args['half_turns'].arg_value.string_value = 'k'
op1.qubits.add().id = '1_1'
op2 = v2.program_pb2.Operation()
op2.gate.id = 'x_pow'
op2.args['half_turns'].arg_value.float_value = 1.0
op2.qubits.add().id = '1_2'
op2.token_constant_index = 0
return v2.program_pb2.Circuit(
scheduling_strategy=v2.program_pb2.Circuit.MOMENT_BY_MOMENT,
moments=[
v2.program_pb2.Moment(
operations=[op1, op2],
),
],
)
def default_circuit():
return cirq.FrozenCircuit(
cirq.X(cirq.GridQubit(1, 1)) ** sympy.Symbol('k'),
cirq.X(cirq.GridQubit(1, 2)).with_tags(DEFAULT_TOKEN),
cirq.measure(cirq.GridQubit(1, 1), key='m'),
)
def test_circuit_op_serializer_properties():
serializer = cg.CircuitOpSerializer()
assert serializer.internal_type == cirq.FrozenCircuit
assert serializer.serialized_id == 'circuit'
def test_can_serialize_circuit_op():
serializer = cg.CircuitOpSerializer()
assert serializer.can_serialize_operation(cirq.CircuitOperation(default_circuit()))
assert not serializer.can_serialize_operation(cirq.X(cirq.GridQubit(1, 1)))
def test_circuit_op_to_proto_errors():
serializer = cg.CircuitOpSerializer()
to_serialize = cirq.CircuitOperation(default_circuit())
constants = [
v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
]
raw_constants = {
DEFAULT_TOKEN: 0,
default_circuit(): 1,
}
with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
serializer.to_proto(to_serialize)
with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
serializer.to_proto(to_serialize, constants=constants)
with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
serializer.to_proto(to_serialize, raw_constants=raw_constants)
with pytest.raises(ValueError, match='Serializer expected CircuitOperation'):
serializer.to_proto(
v2.program_pb2.Operation(), constants=constants, raw_constants=raw_constants
)
bad_raw_constants = {cirq.FrozenCircuit(): 0}
with pytest.raises(ValueError, match='Encountered a circuit not in the constants table'):
serializer.to_proto(to_serialize, constants=constants, raw_constants=bad_raw_constants)
with pytest.raises(ValueError, match='Cannot serialize repetitions of type'):
serializer.to_proto(
to_serialize ** sympy.Symbol('a'), constants=constants, raw_constants=raw_constants
)
@pytest.mark.parametrize('repetitions', [1, 5, ['a', 'b', 'c']])
def test_circuit_op_to_proto(repetitions):
serializer = cg.CircuitOpSerializer()
if isinstance(repetitions, int):
repetition_ids = None
else:
repetition_ids = repetitions
repetitions = len(repetition_ids)
to_serialize = cirq.CircuitOperation(
circuit=default_circuit(),
qubit_map={cirq.GridQubit(1, 1): cirq.GridQubit(1, 2)},
measurement_key_map={'m': 'results'},
param_resolver={'k': 1.0},
repetitions=repetitions,
repetition_ids=repetition_ids,
)
constants = [
v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
]
raw_constants = {
DEFAULT_TOKEN: 0,
default_circuit(): 1,
}
repetition_spec = v2.program_pb2.RepetitionSpecification()
if repetition_ids is None:
repetition_spec.repetition_count = repetitions
else:
for rep_id in repetition_ids:
repetition_spec.repetition_ids.ids.append(rep_id)
qubit_map = v2.program_pb2.QubitMapping()
q_p1 = qubit_map.entries.add()
q_p1.key.id = '1_1'
q_p1.value.id = '1_2'
measurement_key_map = v2.program_pb2.MeasurementKeyMapping()
meas_p1 = measurement_key_map.entries.add()
meas_p1.key.string_key = 'm'
meas_p1.value.string_key = 'results'
arg_map = v2.program_pb2.ArgMapping()
arg_p1 = arg_map.entries.add()
arg_p1.key.arg_value.string_value = 'k'
arg_p1.value.arg_value.float_value = 1.0
expected = v2.program_pb2.CircuitOperation(
circuit_constant_index=1,
repetition_specification=repetition_spec,
qubit_map=qubit_map,
measurement_key_map=measurement_key_map,
arg_map=arg_map,
)
actual = serializer.to_proto(to_serialize, constants=constants, raw_constants=raw_constants)
assert actual == expected
| true | true |
f72bd06d2f9f175effe7beb9a4509946d29b33e0 | 158 | py | Python | build.py | pengguanjun/zeno_learn | bd5298f14180f1b2ce1edb83305bbc2ce4c7d0c8 | [
"MIT"
] | 4 | 2021-08-03T16:26:52.000Z | 2022-03-30T10:32:23.000Z | build.py | pengguanjun/zeno_learn | bd5298f14180f1b2ce1edb83305bbc2ce4c7d0c8 | [
"MIT"
] | 1 | 2021-11-09T10:54:24.000Z | 2021-11-09T10:54:24.000Z | build.py | pengguanjun/zeno_learn | bd5298f14180f1b2ce1edb83305bbc2ce4c7d0c8 | [
"MIT"
] | 3 | 2021-11-09T10:48:16.000Z | 2021-11-09T15:18:02.000Z | #!/usr/bin/env python3
import subprocess
# Configure the CMake build tree into ./build; check_call raises
# CalledProcessError if cmake exits non-zero.
subprocess.check_call(['cmake', '-B', 'build'])
# Compile the configured tree, letting CMake choose a parallel job count.
subprocess.check_call(['cmake', '--build', 'build', '--parallel'])
| 22.571429 | 66 | 0.670886 |
import subprocess
subprocess.check_call(['cmake', '-B', 'build'])
subprocess.check_call(['cmake', '--build', 'build', '--parallel'])
| true | true |
f72bd075c6cfb39433750fb4807078e2c2bb2e91 | 4,623 | py | Python | SuperB/api/serializers.py | rialrustamov/SuperB-E-Commerce-RR | 099fcfb50bd1623237fd352a87d19926dda52904 | [
"MIT"
] | null | null | null | SuperB/api/serializers.py | rialrustamov/SuperB-E-Commerce-RR | 099fcfb50bd1623237fd352a87d19926dda52904 | [
"MIT"
] | null | null | null | SuperB/api/serializers.py | rialrustamov/SuperB-E-Commerce-RR | 099fcfb50bd1623237fd352a87d19926dda52904 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from django.db.models import fields
from product.models import Product, ProductVersion, Category, Review, Image
from order.models import ShoppingCart, Wishlist, CartItem
from blog.models import Category as BlogCategory, Blog
from django.contrib.auth import get_user_model
from user.models import *
from core.models import *
User = get_user_model()
class ProductOverviewSerializer(serializers.ModelSerializer):
    """Flat Product representation with its reviews nested under 'review'."""
    review = serializers.SerializerMethodField()
    class Meta:
        model = Product
        fields = '__all__'
    def get_review(self, obj):
        # All Review rows reachable through the 'product_review' reverse
        # relation, serialized in full.
        qs = obj.product_review.all()
        return ReviewSerializer(qs, many=True).data
class ProductSerializer(serializers.ModelSerializer):
    """Detail view of a Product: main version, other versions, image, reviews."""
    total_quantity = serializers.SerializerMethodField()
    main_product = serializers.SerializerMethodField()
    versions = serializers.SerializerMethodField()
    main_image = serializers.SerializerMethodField()
    product_review = serializers.SerializerMethodField()
    class Meta:
        model = Product
        fields = ['title', 'price', 'discount_price', 'category', 'brand', 'description',
                  'total_quantity', 'main_product', 'main_image', 'versions', 'product_tag', 'product_review']
    def get_total_quantity(self, obj):
        # Delegated to the model; presumably aggregates stock over versions -- TODO confirm.
        return obj.total_quantity
    def get_main_product(self, obj):
        # The designated primary ProductVersion, fully serialized.
        return ProductVersionSerializer(obj.main_product).data
    def get_versions(self, obj):
        # Every other version of this product, excluding the main one.
        qs = obj.versions.exclude(id=obj.main_product.id)
        return ProductVersionSerializer(qs, many=True).data
    def get_main_image(self, obj):
        # URL of the main version's main photo, or None when no file is set.
        if obj.main_product.main_photo.image:
            return obj.main_product.main_photo.image.url
        return None
    def get_product_review(self, obj):
        # Reviews attached via the 'product_review' reverse relation.
        qs = obj.product_review.all()
        return ReviewSerializer(qs, many=True).data
class ImageSerializer(serializers.ModelSerializer):
    """Serializes every field of an Image row."""
    class Meta:
        model = Image
        fields = "__all__"
class ProductVersionSerializer(serializers.ModelSerializer):
    """ProductVersion with its parent product overview and all of its photos."""
    product = ProductOverviewSerializer()
    image = serializers.SerializerMethodField()
    class Meta:
        model = ProductVersion
        fields = '__all__'
    def get_image(self, obj):
        # Every Image attached through the 'product_photo' reverse relation.
        qs = obj.product_photo.all()
        return ImageSerializer(qs, many=True).data
class ReviewSerializer(serializers.ModelSerializer):
    """Serializes every field of a Review row."""
    class Meta:
        model = Review
        fields = "__all__"
class UserSerializer(serializers.ModelSerializer):
    """Registration serializer: checks matching passwords and hashes on create.

    NOTE(review): a second ``UserSerializer`` defined later in this module
    shadows this class at import time -- confirm which one views actually use.
    """
    password_confirmation = serializers.CharField(
        style={'input_type': 'password'}, write_only=True)
    password = serializers.CharField(
        style={'input_type': 'password'}, write_only=True)
    def validate(self, attrs):
        # password_confirmation is popped so it never reaches model creation.
        password = attrs['password']
        password_confirmation = attrs.pop('password_confirmation')
        if password != password_confirmation:
            raise serializers.ValidationError(
                {'password': 'Passwords must match.'})
        return super().validate(attrs)
    def create(self, validated_data):
        # Username mirrors the email. The raw password is popped before the
        # model is created and stored only via set_password's hashing.
        password = validated_data.pop('password')
        validated_data['username'] = validated_data['email']
        user = super().create(validated_data=validated_data)
        user.set_password(password)
        user.save()
        return user
    class Meta:
        model = User
        fields = ('email', 'password', 'password_confirmation',
                  'first_name', 'last_name')
class CategorySerializer(serializers.ModelSerializer):
    """Serializes every field of a product Category row."""
    class Meta:
        model = Category
        fields = '__all__'
class BlogCategorySerializer(serializers.ModelSerializer):
    """Serializes every field of a blog Category row (aliased BlogCategory)."""
    class Meta:
        model = BlogCategory
        fields = '__all__'
class BlogSerializer(serializers.ModelSerializer):
    """Serializes every field of a Blog row."""
    class Meta:
        model = Blog
        fields = '__all__'
class UserSerializer(serializers.ModelSerializer):
    """Serializes every field of a User row.

    NOTE(review): this redefinition shadows the registration UserSerializer
    above, and ``fields = '__all__'`` includes the hashed password column --
    confirm both are intentional.
    """
    class Meta:
        model = User
        fields = '__all__'
class CardSerializer(serializers.ModelSerializer):
    """ShoppingCart with its products expanded as full version payloads."""
    # assumes ShoppingCart has a to-many 'product' relation -- TODO confirm
    product = ProductVersionSerializer(many=True)
    class Meta:
        model = ShoppingCart
        fields = '__all__'
class CardItemSerializer(serializers.ModelSerializer):
    """CartItem with its single product expanded as a full version payload."""
    product = ProductVersionSerializer()
    class Meta:
        model = CartItem
        fields = '__all__'
class WishlistSerializer(serializers.ModelSerializer):
    """Wishlist with its products expanded as full version payloads."""
    # assumes Wishlist has a to-many 'product' relation -- TODO confirm
    product = ProductVersionSerializer(many=True)
    class Meta:
        model = Wishlist
        fields = '__all__'
class SubscriberSerializer(serializers.ModelSerializer):
    """Serializes every field of a Subscriber row."""
    class Meta:
        model = Subscriber
        fields = '__all__'
from django.db.models import fields
from product.models import Product, ProductVersion, Category, Review, Image
from order.models import ShoppingCart, Wishlist, CartItem
from blog.models import Category as BlogCategory, Blog
from django.contrib.auth import get_user_model
from user.models import *
from core.models import *
User = get_user_model()
class ProductOverviewSerializer(serializers.ModelSerializer):
review = serializers.SerializerMethodField()
class Meta:
model = Product
fields = '__all__'
def get_review(self, obj):
qs = obj.product_review.all()
return ReviewSerializer(qs, many=True).data
class ProductSerializer(serializers.ModelSerializer):
total_quantity = serializers.SerializerMethodField()
main_product = serializers.SerializerMethodField()
versions = serializers.SerializerMethodField()
main_image = serializers.SerializerMethodField()
product_review = serializers.SerializerMethodField()
class Meta:
model = Product
fields = ['title', 'price', 'discount_price', 'category', 'brand', 'description',
'total_quantity', 'main_product', 'main_image', 'versions', 'product_tag', 'product_review']
def get_total_quantity(self, obj):
return obj.total_quantity
def get_main_product(self, obj):
return ProductVersionSerializer(obj.main_product).data
def get_versions(self, obj):
qs = obj.versions.exclude(id=obj.main_product.id)
return ProductVersionSerializer(qs, many=True).data
def get_main_image(self, obj):
if obj.main_product.main_photo.image:
return obj.main_product.main_photo.image.url
return None
def get_product_review(self, obj):
qs = obj.product_review.all()
return ReviewSerializer(qs, many=True).data
class ImageSerializer(serializers.ModelSerializer):
class Meta:
model = Image
fields = "__all__"
class ProductVersionSerializer(serializers.ModelSerializer):
product = ProductOverviewSerializer()
image = serializers.SerializerMethodField()
class Meta:
model = ProductVersion
fields = '__all__'
def get_image(self, obj):
qs = obj.product_photo.all()
return ImageSerializer(qs, many=True).data
class ReviewSerializer(serializers.ModelSerializer):
class Meta:
model = Review
fields = "__all__"
class UserSerializer(serializers.ModelSerializer):
password_confirmation = serializers.CharField(
style={'input_type': 'password'}, write_only=True)
password = serializers.CharField(
style={'input_type': 'password'}, write_only=True)
def validate(self, attrs):
password = attrs['password']
password_confirmation = attrs.pop('password_confirmation')
if password != password_confirmation:
raise serializers.ValidationError(
{'password': 'Passwords must match.'})
return super().validate(attrs)
def create(self, validated_data):
password = validated_data.pop('password')
validated_data['username'] = validated_data['email']
user = super().create(validated_data=validated_data)
user.set_password(password)
user.save()
return user
class Meta:
model = User
fields = ('email', 'password', 'password_confirmation',
'first_name', 'last_name')
class CategorySerializer(serializers.ModelSerializer):
class Meta:
model = Category
fields = '__all__'
class BlogCategorySerializer(serializers.ModelSerializer):
class Meta:
model = BlogCategory
fields = '__all__'
class BlogSerializer(serializers.ModelSerializer):
class Meta:
model = Blog
fields = '__all__'
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = '__all__'
class CardSerializer(serializers.ModelSerializer):
product = ProductVersionSerializer(many=True)
class Meta:
model = ShoppingCart
fields = '__all__'
class CardItemSerializer(serializers.ModelSerializer):
product = ProductVersionSerializer()
class Meta:
model = CartItem
fields = '__all__'
class WishlistSerializer(serializers.ModelSerializer):
product = ProductVersionSerializer(many=True)
class Meta:
model = Wishlist
fields = '__all__'
class SubscriberSerializer(serializers.ModelSerializer):
class Meta:
model = Subscriber
fields = '__all__' | true | true |
f72bd109a480d8206801746e5910512f5d37e064 | 444 | py | Python | tests/test_cpplibhub.py | iotanbo/cpplibhub | 0758d416c8d2c0b29d70300f25ccc898a7ad64df | [
"MIT"
] | null | null | null | tests/test_cpplibhub.py | iotanbo/cpplibhub | 0758d416c8d2c0b29d70300f25ccc898a7ad64df | [
"MIT"
] | null | null | null | tests/test_cpplibhub.py | iotanbo/cpplibhub | 0758d416c8d2c0b29d70300f25ccc898a7ad64df | [
"MIT"
] | null | null | null |
import pytest
from click.testing import CliRunner
from cpplibhub.cli import main
@pytest.fixture(scope="module")
def runner():
    # One CliRunner instance shared by every test in this module.
    return CliRunner()
def test_main(runner):
    """The CLI entry point exits successfully when invoked with no arguments."""
    result = runner.invoke(main)
    assert result.exit_code == 0
    # TODO: test more command line options and args
| 22.2 | 53 | 0.671171 |
import pytest
from click.testing import CliRunner
from cpplibhub.cli import main
@pytest.fixture(scope="module")
def runner():
return CliRunner()
def test_main(runner):
r.invoke(main)
assert result.exit_code == 0
| true | true |
f72bd1370d655bb4ca4fe412e26c161f507ca79b | 7,667 | py | Python | lib/dblatex-0.3.2/lib/dbtexmf/dblatex/grubber/plugins.py | jonathanmorley/HR-XSL | 799b1075cbec4cda3d686d588eea92a62d59963f | [
"Apache-2.0"
] | 1 | 2017-12-29T23:23:14.000Z | 2017-12-29T23:23:14.000Z | lib/dblatex-0.3.2/lib/dbtexmf/dblatex/grubber/plugins.py | jonathanmorley/HR-XSL | 799b1075cbec4cda3d686d588eea92a62d59963f | [
"Apache-2.0"
] | null | null | null | lib/dblatex-0.3.2/lib/dbtexmf/dblatex/grubber/plugins.py | jonathanmorley/HR-XSL | 799b1075cbec4cda3d686d588eea92a62d59963f | [
"Apache-2.0"
] | null | null | null | # This file is part of Rubber and thus covered by the GPL
# (c) Emmanuel Beffara, 2002--2006
"""
Mechanisms to dynamically load extra modules to help the LaTeX compilation.
All the modules must be derived from the TexModule class.
"""
import imp
from os.path import *
from msg import _, msg
import sys
class TexModule (object):
    """
    This is the base class for modules. Each module should define a class
    named 'Module' that derives from this one. The default implementation
    provides all required methods with no effects.
    """
    def __init__ (self, env, dict):
        """
        The constructor receives two arguments: 'env' is the compiling
        environment, 'dict' is a dictionary that describes the command that
        caused the module to load.
        """

    def pre_compile (self):
        """
        This method is called before the first LaTeX compilation. It is
        supposed to build any file that LaTeX would require to compile the
        document correctly. The method must return true on failure.
        """
        return 0

    def post_compile (self):
        """
        This method is called after each LaTeX compilation. It is supposed to
        process the compilation results and possibly request a new
        compilation. The method must return true on failure.
        """
        return 0

    def last_compile (self):
        """
        This method is called after the last LaTeX compilation.
        It is supposed to terminate the compilation for its specific needs.
        The method must return true on failure.
        """
        return 0

    def clean (self):
        """
        This method is called when cleaning the compiled files. It is supposed
        to remove all the files that this modules generates.
        """

    def command (self, cmd, args):
        """
        This is called when a directive for the module is found in the source.
        The method can raise 'AttributeError' when the directive does not
        exist and 'TypeError' if the syntax is wrong. By default, when called
        with argument "foo" it calls the method "do_foo" if it exists, and
        fails otherwise.
        """
        getattr(self, "do_" + cmd)(*args)

    def get_errors (self):
        """
        This is called if something has failed during an operation performed
        by this module. The method returns a generator with items of the same
        form as in LaTeXDep.get_errors.
        """
        # Empty generator: 'return' runs before the unreachable 'yield', whose
        # only purpose is to make this function a generator. This replaces the
        # original obscure 'if None:' guard (which relied on None being falsy).
        return
        yield None
class Plugins (object):
    """
    This class gathers operations related to the management of external
    Python modules.  Modules are requested through the `register' method
    and are searched for first in the current directory, then in the
    (possibly) specified Python package (using Python's path).
    """
    def __init__ (self, path=None):
        """
        Initialize the module set, possibly setting a path name in which
        modules will be searched for.  When no path is given, the
        directory of this file is used and appended to sys.path.
        """
        # name -> loaded module object
        self.modules = {}
        if not path:
            self.path = [dirname(__file__)]
            sys.path.append(self.path[0])
        else:
            self.path = path
    def __getitem__ (self, name):
        """
        Return the module object of the given name.

        Raises KeyError when the module was never registered.
        """
        return self.modules[name]
    def register (self, name):
        """
        Attempt to register a module with the specified name. If an
        appropriate module is found, load it and store it in the object's
        dictionary. Return 0 if no module was found, 1 if a module was found
        and loaded, and 2 if the module was found but already loaded.
        """
        # 'in' instead of dict.has_key so the code also runs on Python 3
        if name in self.modules:
            return 2
        try:
            # search the current directory first
            fp, pathname, descr = imp.find_module(name, [""])
        except ImportError:
            if not self.path:
                return 0
            try:
                fp, pathname, descr = imp.find_module(name, self.path)
            except ImportError:
                return 0
        module = imp.load_module(name, fp, pathname, descr)
        # find_module returns fp=None for packages; only close real files
        # (the original unconditionally called close() and would crash)
        if fp is not None:
            fp.close()
        self.modules[name] = module
        return 1
    def clear(self):
        """
        Empty the module table, unregistering every module registered. No
        modules are unloaded, however, but this has no other effect than
        speeding the registration if the modules are loaded again.
        """
        self.modules.clear()
class Modules (Plugins):
    """
    This class gathers all operations related to the management of modules.
    The modules are searched for first in the current directory, then as
    scripts in the 'modules' directory in the program's data directory, then
    as a Python module in the package `rubber.latex'.
    """
    def __init__ (self, env):
        #Plugins.__init__(self, rubber.rules.latex.__path__)
        Plugins.__init__(self)
        self.env = env
        # name -> instantiated module object
        self.objects = {}
        # name -> list of (cmd, args, vars) delayed until registration
        self.commands = {}
    def __getitem__ (self, name):
        """
        Return the module object of the given name.
        """
        return self.objects[name]
    def has_key (self, name):
        """
        Check if a given module is loaded.
        """
        # implemented with 'in' so the class also runs on Python 3
        return name in self.objects
    def register (self, name, dict=None):
        """
        Attempt to register a package with the specified name. If a module is
        found, create an object from the module's class called `Module',
        passing it the environment and `dict' as arguments, and execute all
        delayed commands for this module. The dictionary describes the
        command that caused the registration.
        """
        # None sentinel instead of a shared mutable default argument
        if dict is None:
            dict = {}
        if self.has_key(name):
            msg.debug(_("module %s already registered") % name)
            return 2
        # First look for a script
        moddir = ""
        mod = None
        for path in "", join(moddir, "modules"):
            # renamed from 'file' to avoid shadowing the builtin
            script = join(path, name + ".rub")
            if exists(script):
                mod = ScriptModule(self.env, script)
                msg.log(_("script module %s registered") % name)
                break
        # Then look for a Python module
        if not mod:
            if Plugins.register(self, name) == 0:
                msg.debug(_("no support found for %s") % name)
                return 0
            mod = self.modules[name].Module(self.env, dict)
            msg.log(_("built-in module %s registered") % name)
        # Run any delayed commands.
        if name in self.commands:
            for (cmd, args, vars) in self.commands[name]:
                msg.push_pos(vars)
                try:
                    mod.command(cmd, args)
                except AttributeError:
                    msg.warn(_("unknown directive '%s.%s'") % (name, cmd))
                except TypeError:
                    msg.warn(_("wrong syntax for '%s.%s'") % (name, cmd))
                msg.pop_pos()
            del self.commands[name]
        self.objects[name] = mod
        return 1
    def clear (self):
        """
        Unregister all modules.
        """
        Plugins.clear(self)
        self.objects = {}
        self.commands = {}
    def command (self, mod, cmd, args):
        """
        Send a command to a particular module. If this module is not loaded,
        store the command so that it will be sent when the module is
        registered.
        """
        if mod in self.objects:
            self.objects[mod].command(cmd, args)
        else:
            if mod not in self.commands:
                self.commands[mod] = []
            self.commands[mod].append((cmd, args, self.env.vars.copy()))
| 33.480349 | 78 | 0.589148 |
import imp
from os.path import *
from msg import _, msg
import sys
class TexModule (object):
def __init__ (self, env, dict):
def pre_compile (self):
return 0
def post_compile (self):
return 0
def last_compile (self):
return 0
def clean (self):
def command (self, cmd, args):
getattr(self, "do_" + cmd)(*args)
def get_errors (self):
if None:
yield None
class Plugins (object):
def __init__ (self, path=None):
self.modules = {}
if not path:
self.path = [dirname(__file__)]
sys.path.append(self.path[0])
else:
self.path = path
def __getitem__ (self, name):
return self.modules[name]
def register (self, name):
if self.modules.has_key(name):
return 2
try:
file, path, descr = imp.find_module(name, [""])
except ImportError:
if not self.path:
return 0
try:
file, path, descr = imp.find_module(name, self.path)
except ImportError:
return 0
module = imp.load_module(name, file, path, descr)
file.close()
self.modules[name] = module
return 1
def clear(self):
self.modules.clear()
class Modules (Plugins):
def __init__ (self, env):
Plugins.__init__(self)
self.env = env
self.objects = {}
self.commands = {}
def __getitem__ (self, name):
return self.objects[name]
def has_key (self, name):
return self.objects.has_key(name)
def register (self, name, dict={}):
if self.has_key(name):
msg.debug(_("module %s already registered") % name)
return 2
moddir = ""
mod = None
for path in "", join(moddir, "modules"):
file = join(path, name + ".rub")
if exists(file):
mod = ScriptModule(self.env, file)
msg.log(_("script module %s registered") % name)
break
if not mod:
if Plugins.register(self, name) == 0:
msg.debug(_("no support found for %s") % name)
return 0
mod = self.modules[name].Module(self.env, dict)
msg.log(_("built-in module %s registered") % name)
if self.commands.has_key(name):
for (cmd, args, vars) in self.commands[name]:
msg.push_pos(vars)
try:
mod.command(cmd, args)
except AttributeError:
msg.warn(_("unknown directive '%s.%s'") % (name, cmd))
except TypeError:
msg.warn(_("wrong syntax for '%s.%s'") % (name, cmd))
msg.pop_pos()
del self.commands[name]
self.objects[name] = mod
return 1
def clear (self):
Plugins.clear(self)
self.objects = {}
self.commands = {}
def command (self, mod, cmd, args):
if self.objects.has_key(mod):
self.objects[mod].command(cmd, args)
else:
if not self.commands.has_key(mod):
self.commands[mod] = []
self.commands[mod].append((cmd, args, self.env.vars.copy()))
| true | true |
f72bd2060174e51a551060a884d7fdfeb1276fa6 | 15,646 | py | Python | pandaharvester/harvesterbody/master.py | nikmagini/harvester | 1d62dd0e35b53a51919b0250fffec478778f460a | [
"Apache-2.0"
] | 11 | 2017-06-01T10:16:58.000Z | 2019-11-22T08:41:36.000Z | pandaharvester/harvesterbody/master.py | nikmagini/harvester | 1d62dd0e35b53a51919b0250fffec478778f460a | [
"Apache-2.0"
] | 34 | 2016-10-25T19:15:24.000Z | 2021-03-05T12:59:04.000Z | pandaharvester/harvesterbody/master.py | nikmagini/harvester | 1d62dd0e35b53a51919b0250fffec478778f460a | [
"Apache-2.0"
] | 17 | 2016-10-24T13:29:45.000Z | 2021-03-23T17:35:27.000Z | import os
import pwd
import grp
import sys
import socket
import signal
import logging
import daemon.pidfile
import argparse
import threading
import cProfile
import atexit
from future.utils import iteritems
try:
import pprofile
except Exception:
pass
from pandalogger import logger_config
from pandaharvester import commit_timestamp
from pandaharvester import panda_pkg_info
from pandaharvester.harvesterconfig import harvester_config
from pandaharvester.harvestercore import core_utils
from pandaharvester.harvestermisc.apfmon import Apfmon
# logger
_logger = core_utils.setup_logger('master')
# for singleton
master_instance = False
master_lock = threading.Lock()
# the master class which runs the main process
class Master(object):
    """Top-level harvester process.

    Boots the database/config layer in the constructor, then start()
    spawns one thread (or a configured pool of threads) per agent type
    and blocks until the stop event fires.
    """
    # constructor
    def __init__(self, single_mode=False, stop_event=None, daemon_mode=True):
        """Initialize shared pools and ensure DB tables exist.

        :param single_mode: run each agent once instead of looping
        :param stop_event: threading.Event used to signal shutdown
        :param daemon_mode: True when running daemonized (threads joined)
        """
        # initialize database and config
        self.singleMode = single_mode
        self.stopEvent = stop_event
        self.daemonMode = daemon_mode
        # imports are deferred so configuration is only touched when a
        # Master is actually constructed
        from pandaharvester.harvestercore.communicator_pool import CommunicatorPool
        self.communicatorPool = CommunicatorPool()
        from pandaharvester.harvestercore.queue_config_mapper import QueueConfigMapper
        self.queueConfigMapper = QueueConfigMapper()
        from pandaharvester.harvestercore.db_proxy_pool import DBProxyPool as DBProxy
        dbProxy = DBProxy()
        # create any missing DB tables for the configured queues
        dbProxy.make_tables(self.queueConfigMapper)
    # main loop
    def start(self):
        """Spawn all agent threads, report to APF Mon, then wait.

        Every agent thread receives the shared stop event; the final
        polling loop (rather than an immediate join) keeps the process
        responsive to signals under python 2.7.
        """
        # thread list
        thrList = []
        # Credential Manager
        from pandaharvester.harvesterbody.cred_manager import CredManager
        thr = CredManager(single_mode=self.singleMode)
        thr.set_stop_event(self.stopEvent)
        thr.execute()
        thr.start()
        thrList.append(thr)
        # Command manager
        from pandaharvester.harvesterbody.command_manager import CommandManager
        thr = CommandManager(self.communicatorPool, self.queueConfigMapper, single_mode=self.singleMode)
        thr.set_stop_event(self.stopEvent)
        thr.start()
        thrList.append(thr)
        # Cacher
        from pandaharvester.harvesterbody.cacher import Cacher
        thr = Cacher(self.communicatorPool, single_mode=self.singleMode)
        thr.set_stop_event(self.stopEvent)
        # prime the cache synchronously before the thread loop takes over
        thr.execute(force_update=True, skip_lock=True)
        thr.start()
        thrList.append(thr)
        # Watcher
        from pandaharvester.harvesterbody.watcher import Watcher
        thr = Watcher(single_mode=self.singleMode)
        thr.set_stop_event(self.stopEvent)
        thr.start()
        thrList.append(thr)
        # Job Fetcher
        from pandaharvester.harvesterbody.job_fetcher import JobFetcher
        nThr = harvester_config.jobfetcher.nThreads
        for iThr in range(nThr):
            thr = JobFetcher(self.communicatorPool,
                             self.queueConfigMapper,
                             single_mode=self.singleMode)
            thr.set_stop_event(self.stopEvent)
            thr.start()
            thrList.append(thr)
        # Propagator
        from pandaharvester.harvesterbody.propagator import Propagator
        nThr = harvester_config.propagator.nThreads
        for iThr in range(nThr):
            thr = Propagator(self.communicatorPool,
                             self.queueConfigMapper,
                             single_mode=self.singleMode)
            thr.set_stop_event(self.stopEvent)
            thr.start()
            thrList.append(thr)
        # Monitor
        from pandaharvester.harvesterbody.monitor import Monitor
        nThr = harvester_config.monitor.nThreads
        for iThr in range(nThr):
            thr = Monitor(self.queueConfigMapper,
                          single_mode=self.singleMode)
            thr.set_stop_event(self.stopEvent)
            thr.start()
            thrList.append(thr)
        # Preparator
        from pandaharvester.harvesterbody.preparator import Preparator
        nThr = harvester_config.preparator.nThreads
        for iThr in range(nThr):
            thr = Preparator(self.communicatorPool,
                             self.queueConfigMapper,
                             single_mode=self.singleMode)
            thr.set_stop_event(self.stopEvent)
            thr.start()
            thrList.append(thr)
        # Submitter
        from pandaharvester.harvesterbody.submitter import Submitter
        nThr = harvester_config.submitter.nThreads
        for iThr in range(nThr):
            thr = Submitter(self.queueConfigMapper,
                            single_mode=self.singleMode)
            thr.set_stop_event(self.stopEvent)
            thr.start()
            thrList.append(thr)
        # Stager
        from pandaharvester.harvesterbody.stager import Stager
        nThr = harvester_config.stager.nThreads
        for iThr in range(nThr):
            thr = Stager(self.queueConfigMapper,
                         single_mode=self.singleMode)
            thr.set_stop_event(self.stopEvent)
            thr.start()
            thrList.append(thr)
        # EventFeeder
        from pandaharvester.harvesterbody.event_feeder import EventFeeder
        nThr = harvester_config.eventfeeder.nThreads
        for iThr in range(nThr):
            thr = EventFeeder(self.communicatorPool,
                              self.queueConfigMapper,
                              single_mode=self.singleMode)
            thr.set_stop_event(self.stopEvent)
            thr.start()
            thrList.append(thr)
        # Sweeper
        from pandaharvester.harvesterbody.sweeper import Sweeper
        nThr = harvester_config.sweeper.nThreads
        for iThr in range(nThr):
            thr = Sweeper(self.queueConfigMapper,
                          single_mode=self.singleMode)
            thr.set_stop_event(self.stopEvent)
            thr.start()
            thrList.append(thr)
        # Service monitor (optional; config attribute may be absent)
        try:
            sm_active = harvester_config.service_monitor.active
        except:
            sm_active = False
        if sm_active:
            from pandaharvester.harvesterbody.service_monitor import ServiceMonitor
            # NOTE(review): reads the module-global 'options' populated in
            # main(); calling Master.start() without main() would fail here
            thr = ServiceMonitor(options.pid, single_mode=self.singleMode)
            thr.set_stop_event(self.stopEvent)
            thr.start()
            thrList.append(thr)
        # Report itself to APF Mon
        apf_mon = Apfmon(self.queueConfigMapper)
        apf_mon.create_factory()
        apf_mon.create_labels()
        ##################
        # loop on stop event to be interruptable since thr.join blocks signal capture in python 2.7
        while True:
            if self.singleMode or not self.daemonMode:
                break
            self.stopEvent.wait(1)
            if self.stopEvent.is_set():
                break
        ##################
        # join
        if self.daemonMode:
            for thr in thrList:
                thr.join()
# dummy context
class DummyContext(object):
    """No-op context manager used when harvester is not daemonized.

    Entering yields the object itself; exiting releases nothing and
    returns a falsy value so any exception keeps propagating.
    """
    def __enter__(self):
        # Nothing to acquire; hand back the instance for `as` bindings.
        return self
    def __exit__(self, *exc_info):
        # Nothing to release; returning None lets exceptions propagate.
        return None
# wrapper for stderr
class StdErrWrapper(object):
    """File-like stderr replacement that funnels writes into the logger.

    Installed as sys.stderr in daemon mode so that tracebacks land in
    the harvester log instead of a detached terminal.
    """
    def write(self, message):
        # Wrap the payload in START/END sentinels so multi-line
        # tracebacks can be reassembled when the log is parsed later.
        _logger.error('#####START#####\n{0}#####END#####\n'.format(message))
    def flush(self):
        # Delegate to the first handler's underlying stream.
        _logger.handlers[0].flush()
    def fileno(self):
        # Expose the real stream's descriptor for code that needs one.
        return _logger.handlers[0].stream.fileno()
    def isatty(self):
        # Mirror the underlying stream's terminal status.
        return _logger.handlers[0].stream.isatty()
# handle to the active profiler (cProfile/pprofile); set in main() and
# cleared by disable_profiler() once stats are dumped
prof = None
# parsed command-line options; module-global so other code (e.g. the
# ServiceMonitor branch in Master.start) can read them
options = None
# main
def main(daemon_mode=True):
    """Parse options, optionally daemonize, install signal handlers and
    run the Master until it terminates.

    :param daemon_mode: when True (CLI launch) the process may detach and
        signal handlers are installed; WSGI startup passes False.
    """
    global prof
    global options
    # parse option
    parser = argparse.ArgumentParser()
    parser.add_argument('--pid', action='store', dest='pid', default=None,
                        help='pid filename')
    parser.add_argument('--single', action='store_true', dest='singleMode', default=False,
                        help='use single mode')
    parser.add_argument('--hostname_file', action='store', dest='hostNameFile', default=None,
                        help='to record the hostname where harvester is launched')
    parser.add_argument('--rotate_log', action='store_true', dest='rotateLog', default=False,
                        help='rollover log files before launching harvester')
    parser.add_argument('--version', action='store_true', dest='showVersion', default=False,
                        help='show version information and exit')
    parser.add_argument('--profile_output', action='store', dest='profileOutput', default=None,
                        help='filename to save the results of profiler')
    parser.add_argument('--profile_mode', action='store', dest='profileMode', default='s',
                        help='profile mode. s (statistic), d (deterministic), or t (thread-aware)')
    parser.add_argument('--memory_logging', action='store_true', dest='memLogging', default=False,
                        help='add information of memory usage in each logging message')
    parser.add_argument('--foreground', action='store_true', dest='foreground', default=False,
                        help='run in the foreground not to be daemonized')
    options = parser.parse_args()
    # show version information
    if options.showVersion:
        print ("Version : {0}".format(panda_pkg_info.release_version))
        print ("Last commit : {0}".format(commit_timestamp.timestamp))
        return
    # check pid: refuse to start when a lock file is already present
    if options.pid is not None and os.path.exists(options.pid):
        print ("ERROR: Cannot start since lock file {0} already exists".format(options.pid))
        return
    # uid and gid taken from the configured service account
    uid = pwd.getpwnam(harvester_config.master.uname).pw_uid
    gid = grp.getgrnam(harvester_config.master.gname).gr_gid
    # get umask (read current value, then restore it)
    umask = os.umask(0)
    os.umask(umask)
    # memory logging
    if options.memLogging:
        core_utils.enable_memory_profiling()
    # hostname: record where this instance runs
    if options.hostNameFile is not None:
        with open(options.hostNameFile, 'w') as f:
            f.write(socket.getfqdn())
    # rollover log files
    if options.rotateLog:
        core_utils.do_log_rollover()
        if hasattr(_logger.handlers[0], 'doRollover'):
            _logger.handlers[0].doRollover()
    if daemon_mode and not options.foreground:
        # redirect messages to stdout
        stdoutHandler = logging.StreamHandler(sys.stdout)
        stdoutHandler.setFormatter(_logger.handlers[0].formatter)
        _logger.addHandler(stdoutHandler)
        # collect streams not to be closed by daemon
        files_preserve = []
        for loggerName, loggerObj in iteritems(logging.Logger.manager.loggerDict):
            if loggerName.startswith('panda'):
                for handler in loggerObj.handlers:
                    if hasattr(handler, 'stream'):
                        files_preserve.append(handler.stream)
        sys.stderr = StdErrWrapper()
        # make daemon context
        dc = daemon.DaemonContext(stdout=sys.stdout,
                                  stderr=sys.stderr,
                                  uid=uid,
                                  gid=gid,
                                  umask=umask,
                                  files_preserve=files_preserve,
                                  pidfile=daemon.pidfile.PIDLockFile(options.pid))
    else:
        # foreground / single / WSGI: no real daemonization
        dc = DummyContext()
    with dc:
        # remove pidfile to prevent child processes crashing in atexit
        if not options.singleMode:
            dc.pidfile = None
        if options.pid:
            core_utils.set_file_permission(options.pid)
        core_utils.set_file_permission(logger_config.daemon['logdir'])
        _logger.info("start : version = {0}, last_commit = {1}".format(panda_pkg_info.release_version,
                                                                       commit_timestamp.timestamp))
        # stop event shared by every agent thread
        stopEvent = threading.Event()
        # profiler
        prof = None
        if options.profileOutput is not None:
            # run with profiler
            if options.profileMode == 'd':
                # deterministic
                prof = pprofile.Profile()
            elif options.profileMode == 't':
                # thread-aware
                prof = pprofile.ThreadProfile()
            else:
                # statistic
                prof = cProfile.Profile()
        # post process for profiler
        def disable_profiler():
            global prof
            if prof is not None:
                # disable profiler
                prof.disable()
                # dump results
                prof.dump_stats(options.profileOutput)
                prof = None
        # delete PID (best-effort; missing file is fine)
        def delete_pid(pid):
            try:
                os.remove(pid)
            except Exception:
                pass
        # signal handlers: dump profile, drop pid file, then hard-kill
        def catch_sigkill(sig, frame):
            disable_profiler()
            _logger.info('got signal={0} to be killed'.format(sig))
            try:
                os.remove(options.pid)
            except Exception:
                pass
            try:
                # kill the whole process group when running under init
                if os.getppid() == 1:
                    os.killpg(os.getpgrp(), signal.SIGKILL)
                else:
                    os.kill(os.getpid(), signal.SIGKILL)
            except Exception:
                core_utils.dump_error_message(_logger)
                _logger.error('failed to be killed')
        '''
        def catch_sigterm(sig, frame):
            _logger.info('got signal={0} to be terminated'.format(sig))
            stopEvent.set()
        # register del function
        if os.getppid() == 1 and options.pid:
            atexit.register(delete_pid, options.pid)
        # set alarm just in case
        signal.alarm(30)
        '''
        # open a remote trepan debugger session on SIGUSR1
        def catch_debug(sig, frame):
            _logger.info('got signal={0} to go into debugger mode'.format(sig))
            from trepan.interfaces import server
            from trepan.api import debug
            try:
                portNum = harvester_config.master.debugger_port
            except Exception:
                portNum = 19550
            connection_opts = {'IO': 'TCP', 'PORT': portNum}
            interface = server.ServerInterface(connection_opts=connection_opts)
            dbg_opts = {'interface': interface}
            _logger.info('starting debugger on port {0}'.format(portNum))
            debug(dbg_opts=dbg_opts)
        # set handler
        if daemon_mode:
            signal.signal(signal.SIGINT, catch_sigkill)
            signal.signal(signal.SIGHUP, catch_sigkill)
            signal.signal(signal.SIGTERM, catch_sigkill)
            signal.signal(signal.SIGALRM, catch_sigkill)
            signal.signal(signal.SIGUSR1, catch_debug)
            signal.signal(signal.SIGUSR2, catch_sigkill)
        # start master
        master = Master(single_mode=options.singleMode, stop_event=stopEvent, daemon_mode=daemon_mode)
        # NOTE(review): Master() never returns None, so this branch looks
        # unreachable; kept for safety
        if master is None:
            prof = None
        else:
            # enable profiler
            if prof is not None:
                prof.enable()
            # run master
            master.start()
        # disable profiler
        disable_profiler()
    if daemon_mode:
        _logger.info('terminated')
# CLI launch: run the full daemon-capable entry point
if __name__ == "__main__":
    main()
else:
    # started by WSGI: run exactly once per process, without daemonizing
    with master_lock:
        if not master_instance:
            main(daemon_mode=False)
            master_instance = True
    # import application entry for WSGI
    from pandaharvester.harvestermessenger.apache_messenger import application
| 37.430622 | 104 | 0.604244 | import os
import pwd
import grp
import sys
import socket
import signal
import logging
import daemon.pidfile
import argparse
import threading
import cProfile
import atexit
from future.utils import iteritems
try:
import pprofile
except Exception:
pass
from pandalogger import logger_config
from pandaharvester import commit_timestamp
from pandaharvester import panda_pkg_info
from pandaharvester.harvesterconfig import harvester_config
from pandaharvester.harvestercore import core_utils
from pandaharvester.harvestermisc.apfmon import Apfmon
_logger = core_utils.setup_logger('master')
master_instance = False
master_lock = threading.Lock()
class Master(object):
def __init__(self, single_mode=False, stop_event=None, daemon_mode=True):
self.singleMode = single_mode
self.stopEvent = stop_event
self.daemonMode = daemon_mode
from pandaharvester.harvestercore.communicator_pool import CommunicatorPool
self.communicatorPool = CommunicatorPool()
from pandaharvester.harvestercore.queue_config_mapper import QueueConfigMapper
self.queueConfigMapper = QueueConfigMapper()
from pandaharvester.harvestercore.db_proxy_pool import DBProxyPool as DBProxy
dbProxy = DBProxy()
dbProxy.make_tables(self.queueConfigMapper)
def start(self):
thrList = []
from pandaharvester.harvesterbody.cred_manager import CredManager
thr = CredManager(single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.execute()
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.command_manager import CommandManager
thr = CommandManager(self.communicatorPool, self.queueConfigMapper, single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.cacher import Cacher
thr = Cacher(self.communicatorPool, single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.execute(force_update=True, skip_lock=True)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.watcher import Watcher
thr = Watcher(single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.job_fetcher import JobFetcher
nThr = harvester_config.jobfetcher.nThreads
for iThr in range(nThr):
thr = JobFetcher(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.propagator import Propagator
nThr = harvester_config.propagator.nThreads
for iThr in range(nThr):
thr = Propagator(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.monitor import Monitor
nThr = harvester_config.monitor.nThreads
for iThr in range(nThr):
thr = Monitor(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.preparator import Preparator
nThr = harvester_config.preparator.nThreads
for iThr in range(nThr):
thr = Preparator(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.submitter import Submitter
nThr = harvester_config.submitter.nThreads
for iThr in range(nThr):
thr = Submitter(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.stager import Stager
nThr = harvester_config.stager.nThreads
for iThr in range(nThr):
thr = Stager(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.event_feeder import EventFeeder
nThr = harvester_config.eventfeeder.nThreads
for iThr in range(nThr):
thr = EventFeeder(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.sweeper import Sweeper
nThr = harvester_config.sweeper.nThreads
for iThr in range(nThr):
thr = Sweeper(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
try:
sm_active = harvester_config.service_monitor.active
except:
sm_active = False
if sm_active:
from pandaharvester.harvesterbody.service_monitor import ServiceMonitor
thr = ServiceMonitor(options.pid, single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
apf_mon = Apfmon(self.queueConfigMapper)
apf_mon.create_factory()
apf_mon.create_labels()
if self.stopEvent.is_set():
break
return self
def __exit__(self, *x):
pass
class StdErrWrapper(object):
def write(self, message):
wrapped_message = '#####START#####\n{0}#####END#####\n'.format(message)
_logger.error(wrapped_message)
def flush(self):
_logger.handlers[0].flush()
def fileno(self):
return _logger.handlers[0].stream.fileno()
def isatty(self):
return _logger.handlers[0].stream.isatty()
prof = None
options = None
def main(daemon_mode=True):
global prof
global options
parser = argparse.ArgumentParser()
parser.add_argument('--pid', action='store', dest='pid', default=None,
help='pid filename')
parser.add_argument('--single', action='store_true', dest='singleMode', default=False,
help='use single mode')
parser.add_argument('--hostname_file', action='store', dest='hostNameFile', default=None,
help='to record the hostname where harvester is launched')
parser.add_argument('--rotate_log', action='store_true', dest='rotateLog', default=False,
help='rollover log files before launching harvester')
parser.add_argument('--version', action='store_true', dest='showVersion', default=False,
help='show version information and exit')
parser.add_argument('--profile_output', action='store', dest='profileOutput', default=None,
help='filename to save the results of profiler')
parser.add_argument('--profile_mode', action='store', dest='profileMode', default='s',
help='profile mode. s (statistic), d (deterministic), or t (thread-aware)')
parser.add_argument('--memory_logging', action='store_true', dest='memLogging', default=False,
help='add information of memory usage in each logging message')
parser.add_argument('--foreground', action='store_true', dest='foreground', default=False,
help='run in the foreground not to be daemonized')
options = parser.parse_args()
if options.showVersion:
print ("Version : {0}".format(panda_pkg_info.release_version))
print ("Last commit : {0}".format(commit_timestamp.timestamp))
return
if options.pid is not None and os.path.exists(options.pid):
print ("ERROR: Cannot start since lock file {0} already exists".format(options.pid))
return
uid = pwd.getpwnam(harvester_config.master.uname).pw_uid
gid = grp.getgrnam(harvester_config.master.gname).gr_gid
umask = os.umask(0)
os.umask(umask)
if options.memLogging:
core_utils.enable_memory_profiling()
if options.hostNameFile is not None:
with open(options.hostNameFile, 'w') as f:
f.write(socket.getfqdn())
if options.rotateLog:
core_utils.do_log_rollover()
if hasattr(_logger.handlers[0], 'doRollover'):
_logger.handlers[0].doRollover()
if daemon_mode and not options.foreground:
stdoutHandler = logging.StreamHandler(sys.stdout)
stdoutHandler.setFormatter(_logger.handlers[0].formatter)
_logger.addHandler(stdoutHandler)
files_preserve = []
for loggerName, loggerObj in iteritems(logging.Logger.manager.loggerDict):
if loggerName.startswith('panda'):
for handler in loggerObj.handlers:
if hasattr(handler, 'stream'):
files_preserve.append(handler.stream)
sys.stderr = StdErrWrapper()
dc = daemon.DaemonContext(stdout=sys.stdout,
stderr=sys.stderr,
uid=uid,
gid=gid,
umask=umask,
files_preserve=files_preserve,
pidfile=daemon.pidfile.PIDLockFile(options.pid))
else:
dc = DummyContext()
with dc:
if not options.singleMode:
dc.pidfile = None
if options.pid:
core_utils.set_file_permission(options.pid)
core_utils.set_file_permission(logger_config.daemon['logdir'])
_logger.info("start : version = {0}, last_commit = {1}".format(panda_pkg_info.release_version,
commit_timestamp.timestamp))
stopEvent = threading.Event()
prof = None
if options.profileOutput is not None:
if options.profileMode == 'd':
prof = pprofile.Profile()
elif options.profileMode == 't':
prof = pprofile.ThreadProfile()
else:
prof = cProfile.Profile()
def disable_profiler():
global prof
if prof is not None:
prof.disable()
prof.dump_stats(options.profileOutput)
prof = None
def delete_pid(pid):
try:
os.remove(pid)
except Exception:
pass
def catch_sigkill(sig, frame):
disable_profiler()
_logger.info('got signal={0} to be killed'.format(sig))
try:
os.remove(options.pid)
except Exception:
pass
try:
if os.getppid() == 1:
os.killpg(os.getpgrp(), signal.SIGKILL)
else:
os.kill(os.getpid(), signal.SIGKILL)
except Exception:
core_utils.dump_error_message(_logger)
_logger.error('failed to be killed')
def catch_debug(sig, frame):
_logger.info('got signal={0} to go into debugger mode'.format(sig))
from trepan.interfaces import server
from trepan.api import debug
try:
portNum = harvester_config.master.debugger_port
except Exception:
portNum = 19550
connection_opts = {'IO': 'TCP', 'PORT': portNum}
interface = server.ServerInterface(connection_opts=connection_opts)
dbg_opts = {'interface': interface}
_logger.info('starting debugger on port {0}'.format(portNum))
debug(dbg_opts=dbg_opts)
if daemon_mode:
signal.signal(signal.SIGINT, catch_sigkill)
signal.signal(signal.SIGHUP, catch_sigkill)
signal.signal(signal.SIGTERM, catch_sigkill)
signal.signal(signal.SIGALRM, catch_sigkill)
signal.signal(signal.SIGUSR1, catch_debug)
signal.signal(signal.SIGUSR2, catch_sigkill)
master = Master(single_mode=options.singleMode, stop_event=stopEvent, daemon_mode=daemon_mode)
if master is None:
prof = None
else:
if prof is not None:
prof.enable()
master.start()
disable_profiler()
if daemon_mode:
_logger.info('terminated')
if __name__ == "__main__":
main()
else:
with master_lock:
if not master_instance:
main(daemon_mode=False)
master_instance = True
from pandaharvester.harvestermessenger.apache_messenger import application
| true | true |
f72bd257412e17141e60758b6a2232418acfb73b | 895 | py | Python | sdks/python/test/test_PurgeResponse.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | null | null | null | sdks/python/test/test_PurgeResponse.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 6 | 2019-10-23T06:38:53.000Z | 2022-01-22T07:57:58.000Z | sdks/python/test/test_PurgeResponse.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 2 | 2019-10-23T06:31:05.000Z | 2021-08-21T17:32:47.000Z | # coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: benedetto.abbenanti@gmail.com
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
from __future__ import absolute_import
import unittest
import appcenter_sdk
from PurgeResponse.clsPurgeResponse import PurgeResponse # noqa: E501
from appcenter_sdk.rest import ApiException
class TestPurgeResponse(unittest.TestCase):
    """Unit-test scaffolding for the PurgeResponse model.

    The real assertions are still to be written; these stubs keep the
    generated suite importable and runnable.
    """
    def setUp(self):
        """No fixtures are required yet."""
    def tearDown(self):
        """Nothing to clean up yet."""
    def testPurgeResponse(self):
        """Placeholder test for PurgeResponse.

        FIXME: construct the model with example values for every
        mandatory attribute, e.g.
        ``model = appcenter_sdk.models.clsPurgeResponse.PurgeResponse()``.
        """
# allow running this test module directly: python test_PurgeResponse.py
if __name__ == '__main__':
    unittest.main()
| 22.375 | 85 | 0.709497 |
from __future__ import absolute_import
import unittest
import appcenter_sdk
from PurgeResponse.clsPurgeResponse import PurgeResponse
from appcenter_sdk.rest import ApiException
class TestPurgeResponse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testPurgeResponse(self):
s
if __name__ == '__main__':
unittest.main()
| true | true |
f72bd2eca21eed63e0f6123f34b5568c86396f4a | 176,314 | py | Python | mkt/reviewers/tests/test_views.py | jasonthomas/zamboni | 948247609cb4b2ed72e6daa4da5257927bfe0c17 | [
"BSD-3-Clause"
] | null | null | null | mkt/reviewers/tests/test_views.py | jasonthomas/zamboni | 948247609cb4b2ed72e6daa4da5257927bfe0c17 | [
"BSD-3-Clause"
] | null | null | null | mkt/reviewers/tests/test_views.py | jasonthomas/zamboni | 948247609cb4b2ed72e6daa4da5257927bfe0c17 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import json
import re
import time
from datetime import datetime, timedelta
from itertools import cycle
from os import path
from django import test
from django.conf import settings
from django.core import mail
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.utils import translation
import mock
import requests
import waffle
from cache_nuggets.lib import Token
from jingo.helpers import urlparams
from nose import SkipTest
from nose.tools import eq_, ok_
from post_request_task import task as post_request_task
from pyquery import PyQuery as pq
from requests.structures import CaseInsensitiveDict
import mkt
import mkt.ratings
import mkt.site.tests
from lib.crypto import packaged
from lib.crypto.tests import mock_sign
from mkt.abuse.models import AbuseReport
from mkt.api.tests.test_oauth import RestOAuth
from mkt.comm.tests.test_views import CommTestMixin
from mkt.comm.utils import create_comm_note
from mkt.constants import MANIFEST_CONTENT_TYPE, comm
from mkt.developers.models import ActivityLog, AppLog
from mkt.files.models import File
from mkt.ratings.models import Review, ReviewFlag
from mkt.reviewers.models import (SHOWCASE_TAG, CannedResponse,
EscalationQueue, RereviewQueue,
ReviewerScore)
from mkt.reviewers.utils import ReviewersQueuesHelper
from mkt.reviewers.views import (_progress, app_review, queue_apps,
route_reviewer)
from mkt.site.fixtures import fixture
from mkt.site.helpers import absolutify, isotime
from mkt.site.storage_utils import private_storage, public_storage
from mkt.site.tests import (check_links, days_ago, formset, initial,
req_factory_factory, user_factory)
from mkt.site.utils import app_factory, make_rated, paginate, version_factory
from mkt.submit.tests.test_views import BasePackagedAppTest, SetupFilesMixin
from mkt.tags.models import Tag
from mkt.users.models import UserProfile
from mkt.versions.models import Version
from mkt.webapps.indexers import WebappIndexer
from mkt.webapps.models import AddonDeviceType, Webapp
from mkt.webapps.tasks import unindex_webapps
from mkt.websites.utils import website_factory
from mkt.zadmin.models import get_config, set_config
# strftime/strptime pattern for the ISO-8601 UTC timestamps used in payloads.
TIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
# Directory containing this test module.
TEST_PATH = path.dirname(path.abspath(__file__))
# Shared attachment fixtures live in mkt/comm/tests/attachments.
ATTACHMENTS_DIR = path.abspath(path.join(TEST_PATH, '..', '..', 'comm',
                                         'tests', 'attachments'))
class AttachmentManagementMixin(object):
    """Helpers producing formset POST data for reviewer note attachments."""

    def _attachment_management_form(self, num=1):
        """Return management-form data for a formset of `num` attachments."""
        total_forms = max(1, num)
        return {'attachment-TOTAL_FORMS': total_forms,
                'attachment-INITIAL_FORMS': 0,
                'attachment-MAX_NUM_FORMS': 1000}

    def _attachments(self, num):
        """Return formset data (open file handles included) for `num`
        attachments, alternating between the two fixture files."""
        data = {}
        if num > 0:
            filenames = ['bacon.jpg', 'bacon.txt']
            descriptions = ['mmm, bacon', '']
            for idx in xrange(num):
                # Even indices pick the text file, odd ones the image,
                # matching the original alternation.
                which = 0 if idx % 2 else 1
                handle = open(path.join(ATTACHMENTS_DIR, filenames[which]),
                              'r+')
                data['attachment-%d-attachment' % idx] = handle
                data['attachment-%d-description' % idx] = descriptions[which]
        return data
class TestedonManagementMixin(object):
    """Helpers producing formset POST data for 'tested on' platform rows."""

    def _testedon_management_form(self, num=0):
        """
        Generate and return data for a management form for `num` tested on
        platforms.
        """
        return {'testedon-TOTAL_FORMS': max(1, num),
                'testedon-INITIAL_FORMS': 0,
                'testedon-MAX_NUM_FORMS': 1000}

    def _platforms(self, num, device_types=(u'\xd0esktop', u'FirefoxOS'),
                   devices=(u'PC ', u'ZT\xc8 Open'),
                   versions=(u'34', u'1.3<')):
        """Generate and return data for `num` tested on platforms.

        Rows cycle through `device_types`/`devices`/`versions`, which are
        expected to have equal lengths.  Defaults are tuples (not lists) to
        avoid the mutable-default-argument pitfall.
        """
        data = {}
        if num > 0:
            # `range` instead of `xrange`: identical iteration here and
            # keeps the helper Python 3 friendly; counts are tiny.
            for n in range(num):
                i = n % len(device_types)
                data.update({
                    'testedon-%d-device_type' % n: device_types[i],
                    'testedon-%d-device' % n: devices[i],
                    'testedon-%d-version' % n: versions[i],
                })
        return data
class AppReviewerTest(mkt.site.tests.TestCase):
    """Base class for reviewer-tool tests.

    Creates the standard cast of users (reviewer, senior reviewer, admin,
    regular user, app contact) and logs in as the plain reviewer by default.
    """

    def setUp(self):
        super(AppReviewerTest, self).setUp()
        # NOTE: user creation order is significant — some tests (e.g.
        # test_reviewer_leaders) rely on UserProfile.objects.all() ordering.
        self.reviewer_user = user_factory(email='editor')
        self.grant_permission(self.reviewer_user, 'Apps:Review')
        self.snr_reviewer_user = user_factory(email='snrreviewer')
        self.grant_permission(self.snr_reviewer_user, 'Apps:Review,Apps:Edit,'
                              'Apps:ReviewEscalated,Apps:ReviewPrivileged',
                              name='Senior App Reviewers')
        self.admin_user = user_factory(email='admin')
        self.grant_permission(self.admin_user, '*:*')
        self.regular_user = user_factory(email='regular')
        self.contact_user = user_factory(email='contact')
        # Default session: plain reviewer; tests switch users as needed.
        self.login_as_editor()

    def login_as_admin(self):
        # Switch the test client session to the admin user.
        self.login(self.admin_user)

    def login_as_editor(self):
        # Switch the test client session to the plain reviewer.
        self.login(self.reviewer_user)

    def login_as_senior_reviewer(self):
        # Switch the test client session to the senior reviewer.
        self.login(self.snr_reviewer_user)

    def check_actions(self, expected, elements):
        """Check the action buttons on the review page.

        `expected` is a list of tuples containing action name and action form
        value. `elements` is a PyQuery list of input elements.
        """
        for idx, item in enumerate(expected):
            text, form_value = item
            e = elements.eq(idx)
            eq_(e.parent().text(), text)
            eq_(e.attr('name'), 'action')
            eq_(e.val(), form_value)

    def uses_es(self):
        # True when reviewer queues are served from Elasticsearch
        # (waffle switch flipped by the *ES test subclasses).
        return waffle.switch_is_active('reviewer-tools-elasticsearch')
class AccessMixin(object):
    """Shared access-control checks: queue pages must reject non-reviewers."""

    def test_403_for_non_editor(self, *args, **kwargs):
        # An ordinary authenticated user gets a hard 403.
        self.login('regular@mozilla.com')
        response = self.client.head(self.url)
        eq_(response.status_code, 403)

    def test_302_for_anonymous(self, *args, **kwargs):
        # Anonymous visitors are redirected away.
        self.client.logout()
        response = self.client.head(self.url)
        eq_(response.status_code, 302)
class SearchMixin(object):
    """Smoke test that queue views accept a text search query."""

    def test_search_query(self):
        # Light test to make sure queues can handle search queries.
        response = self.client.get(self.url, {'text_query': 'test'})
        eq_(response.status_code, 200)
@mock.patch('mkt.webapps.models.Webapp.get_cached_manifest', mock.Mock)
class TestReviewersHome(AppReviewerTest, AccessMixin):
    """Dashboard tests: reviewer routing, queue progress stats and the
    reviewer leaderboard tables."""

    def setUp(self):
        super(TestReviewersHome, self).setUp()
        self.url = reverse('reviewers.home')
        # Three plain pending apps that count towards the pending queue.
        self.apps = [app_factory(name='Antelope',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='Bear',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='Cougar',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING})]
        # A public packaged app with a pending version: the updates queue.
        self.packaged_app = app_factory(name='Dinosaur',
                                        status=mkt.STATUS_PUBLIC,
                                        is_packaged=True)
        version_factory(addon=self.packaged_app,
                        file_kw={'status': mkt.STATUS_PENDING})
        # Add a disabled app for good measure.
        app_factory(name='Elephant', disabled_by_user=True,
                    status=mkt.STATUS_PENDING)
        # Escalate one app to make sure it doesn't affect stats.
        escalated = app_factory(name='Eyelash Pit Viper',
                                status=mkt.STATUS_PENDING)
        EscalationQueue.objects.create(addon=escalated)
        # Add a public app under re-review.
        rereviewed = app_factory(name='Finch', status=mkt.STATUS_PUBLIC)
        rq = RereviewQueue.objects.create(addon=rereviewed)
        rq.update(created=self.days_ago(1))
        # Add an app with latest update deleted. It shouldn't affect anything.
        app = app_factory(name='Great White Shark',
                          status=mkt.STATUS_PUBLIC,
                          version_kw={'version': '1.0'},
                          is_packaged=True)
        v = version_factory(addon=app,
                            version='2.1',
                            file_kw={'status': mkt.STATUS_PENDING})
        v.update(deleted=True)

    def test_route_reviewer(self):
        # App reviewers go to apps home.
        req = mkt.site.tests.req_factory_factory(
            reverse('reviewers'),
            user=UserProfile.objects.get(email='editor@mozilla.com'))
        r = route_reviewer(req)
        self.assert3xx(r, reverse('reviewers.home'))

    def test_progress_pending(self):
        # One app in each age bucket (new / med / old) -> 1/3 each.
        self.apps[0].latest_version.update(nomination=self.days_ago(1))
        self.apps[1].latest_version.update(nomination=self.days_ago(8))
        self.apps[2].latest_version.update(nomination=self.days_ago(15))
        counts, percentages = _progress()
        eq_(counts['pending']['week'], 1)
        eq_(counts['pending']['new'], 1)
        eq_(counts['pending']['old'], 1)
        eq_(counts['pending']['med'], 1)
        self.assertAlmostEqual(percentages['pending']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['pending']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['pending']['med'], 33.333333333333)

    def test_progress_rereview(self):
        # Age buckets for re-reviews are keyed off the queue entry's created.
        rq = RereviewQueue.objects.create(addon=self.apps[0])
        rq.update(created=self.days_ago(8))
        rq = RereviewQueue.objects.create(addon=self.apps[1])
        rq.update(created=self.days_ago(15))
        counts, percentages = _progress()
        eq_(counts['rereview']['week'], 1)
        eq_(counts['rereview']['new'], 1)
        eq_(counts['rereview']['old'], 1)
        eq_(counts['rereview']['med'], 1)
        self.assertAlmostEqual(percentages['rereview']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['rereview']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['rereview']['med'], 33.333333333333)

    def test_progress_updated(self):
        # Two more packaged apps with pending updates of different ages.
        extra_app = app_factory(name='Jackalope',
                                status=mkt.STATUS_PUBLIC,
                                is_packaged=True,
                                created=self.days_ago(35))
        version_factory(addon=extra_app,
                        file_kw={'status': mkt.STATUS_PENDING},
                        created=self.days_ago(25),
                        nomination=self.days_ago(8))
        extra_app = app_factory(name='Jackrabbit',
                                status=mkt.STATUS_PUBLIC,
                                is_packaged=True,
                                created=self.days_ago(35))
        version_factory(addon=extra_app,
                        file_kw={'status': mkt.STATUS_PENDING},
                        created=self.days_ago(25),
                        nomination=self.days_ago(25))
        counts, percentages = _progress()
        eq_(counts['updates']['week'], 1)
        eq_(counts['updates']['new'], 1)
        eq_(counts['updates']['old'], 1)
        eq_(counts['updates']['med'], 1)
        self.assertAlmostEqual(percentages['updates']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['updates']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['updates']['med'], 33.333333333333)

    def test_stats_waiting(self):
        # Verify the rendered dashboard numbers and progress-bar titles.
        self.apps[0].latest_version.update(nomination=self.days_ago(1))
        self.apps[1].latest_version.update(nomination=self.days_ago(5))
        self.apps[2].latest_version.update(nomination=self.days_ago(15))
        self.packaged_app.update(created=self.days_ago(1))
        doc = pq(self.client.get(self.url).content)
        anchors = doc('.editor-stats-title a')
        eq_(anchors.eq(0).text(), '3 Pending App Reviews')
        eq_(anchors.eq(1).text(), '1 Re-review')
        eq_(anchors.eq(2).text(), '1 Update Review')
        divs = doc('.editor-stats-table > div')
        # Pending review.
        eq_(divs.eq(0).text(), '2 unreviewed app submissions this week.')
        # Re-reviews.
        eq_(divs.eq(2).text(), '1 unreviewed app submission this week.')
        # Update review.
        eq_(divs.eq(4).text(), '1 unreviewed app submission this week.')
        # Maths.
        # Pending review.
        eq_(doc('.waiting_new').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_med').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_old').eq(0).attr('title')[-3:], '33%')
        # Re-reviews.
        eq_(doc('.waiting_new').eq(1).attr('title')[-4:], '100%')
        eq_(doc('.waiting_med').eq(1).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_old').eq(1).attr('title')[-3:], ' 0%')
        # Update review.
        eq_(doc('.waiting_new').eq(2).attr('title')[-4:], '100%')
        eq_(doc('.waiting_med').eq(2).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_old').eq(2).attr('title')[-3:], ' 0%')

    def test_reviewer_leaders(self):
        reviewers = UserProfile.objects.all()[:2]
        # 1st user reviews 2, 2nd user only 1.
        users = cycle(reviewers)
        for app in self.apps:
            mkt.log(mkt.LOG.APPROVE_VERSION, app, app.latest_version,
                    user=users.next(), details={'comments': 'hawt'})
        doc = pq(self.client.get(self.url).content.decode('utf-8'))
        # Top Reviews.
        table = doc('#editors-stats .editor-stats-table').eq(0)
        eq_(table.find('td').eq(0).text(), reviewers[0].email)
        eq_(table.find('td').eq(1).text(), u'2')
        eq_(table.find('td').eq(2).text(), reviewers[1].email)
        eq_(table.find('td').eq(3).text(), u'1')
        # Top Reviews this month.
        table = doc('#editors-stats .editor-stats-table').eq(1)
        eq_(table.find('td').eq(0).text(), reviewers[0].email)
        eq_(table.find('td').eq(1).text(), u'2')
        eq_(table.find('td').eq(2).text(), reviewers[1].email)
        eq_(table.find('td').eq(3).text(), u'1')
class FlagsMixin(object):
    """Checks that queue rows show the correct flag sprite for each app
    property: packaged, premium variants, info request, editor comment."""

    def test_flag_packaged_app(self):
        self.apps[0].update(is_packaged=True)
        if self.uses_es():
            self.reindex(Webapp)
        eq_(self.apps[0].is_packaged, True)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        # Only the first row should carry the packaged-app sprite.
        td = pq(res.content)('#addon-queue tbody tr td.flags').eq(0)
        flag = td('div.sprite-reviewer-packaged-app')
        eq_(flag.length, 1)

    def test_flag_premium_app(self):
        self.apps[0].update(premium_type=mkt.ADDON_PREMIUM)
        if self.uses_es():
            self.reindex(Webapp)
        eq_(self.apps[0].is_premium(), True)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-premium')
        eq_(flags.length, 1)

    def test_flag_free_inapp_app(self):
        # Free app with in-app payments gets the premium sprite with
        # extra 'inapp' and 'free' classes.
        self.apps[0].update(premium_type=mkt.ADDON_FREE_INAPP)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        eq_(tds('div.sprite-reviewer-premium.inapp.free').length, 1)

    def test_flag_premium_inapp_app(self):
        self.apps[0].update(premium_type=mkt.ADDON_PREMIUM_INAPP)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        eq_(tds('div.sprite-reviewer-premium.inapp').length, 1)

    def test_flag_info(self):
        # 'More information requested' flag on the latest version.
        self.apps[0].latest_version.update(has_info_request=True)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-info')
        eq_(flags.length, 1)

    def test_flag_comment(self):
        # 'Has editor comment' flag on the latest version.
        self.apps[0].latest_version.update(has_editor_comment=True)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-editor')
        eq_(flags.length, 1)
class XSSMixin(object):
    """Ensure app names are HTML-escaped when rendered in the queue table."""

    def test_xss_in_queue(self):
        a = self.apps[0]
        a.name = '<script>alert("xss")</script>'
        a.save()
        if self.uses_es():
            self.refresh(doctypes=('homescreen', 'webapp'))
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tbody = pq(res.content)('#addon-queue tbody').html()
        # The name must appear entity-escaped in the markup...
        assert '&lt;script&gt;' in tbody
        # ...and never as a live, executable tag.
        assert '<script>' not in tbody
class TestAppQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
                   XSSMixin):
    """Tests for the pending-apps review queue: listing, ordering, tab
    counts, flags and the per-app action buttons."""

    def setUp(self):
        super(TestAppQueue, self).setUp()
        yesterday = self.days_ago(1)
        long_ago = self.days_ago(2)
        # Two pending apps (nominated at different times) and one public
        # app that sits in the re-review queue instead.
        self.apps = [app_factory(name='XXX',
                                 status=mkt.STATUS_PENDING,
                                 version_kw={'nomination': long_ago},
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='YYY',
                                 status=mkt.STATUS_PENDING,
                                 version_kw={'nomination': yesterday},
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='ZZZ')]
        self.apps[0].update(created=self.days_ago(12))
        self.apps[1].update(created=self.days_ago(11))
        # Quick sanity check.
        eq_(self.apps[0].latest_version.nomination, long_ago)
        eq_(self.apps[1].latest_version.nomination, yesterday)
        RereviewQueue.objects.create(addon=self.apps[2])
        self.url = reverse('reviewers.apps.queue_pending')

    def tearDown(self):
        # ES subclasses index apps; remove them so tests stay isolated.
        if self.uses_es():
            unindex_webapps([app.id for app in self.apps])
        super(TestAppQueue, self).tearDown()

    def review_url(self, app):
        # Review-page URL for an app.
        return reverse('reviewers.apps.review', args=[app.app_slug])

    def test_queue_viewing_ping(self):
        # The "I'm looking at this queue" heartbeat endpoint.
        eq_(self.client.post(reverse('reviewers.queue_viewing')).status_code,
            200)

    def test_template_links(self):
        # Queue rows link to the review pages, oldest first.
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
        apps = Webapp.objects.filter(
            status=mkt.STATUS_PENDING).order_by('created')
        expected = [
            (unicode(apps[0].name), self.review_url(apps[0])),
            (unicode(apps[1].name), self.review_url(apps[1])),
        ]
        check_links(expected, links, verify=False)

    def test_action_buttons_pending(self):
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Approve', 'public'),
            (u'Reject', 'reject'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_action_buttons_rejected(self):
        # Check action buttons for a previously rejected app.
        self.apps[0].update(status=mkt.STATUS_REJECTED)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        # No 'Reject': the app is already rejected.
        expected = [
            (u'Approve', 'public'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    @mock.patch('mkt.versions.models.Version.is_privileged', True)
    def test_action_buttons_privileged_cantreview(self):
        # Plain reviewers cannot approve/reject privileged apps.
        self.apps[0].update(is_packaged=True)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    @mock.patch('mkt.versions.models.Version.is_privileged', True)
    def test_action_buttons_privileged_canreview(self):
        # Senior reviewers get the full action set for privileged apps.
        self.login_as_senior_reviewer()
        self.apps[0].update(is_packaged=True)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Approve', 'public'),
            (u'Reject', 'reject'),
            (u'Ban app', 'disable'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_devices(self):
        # Supported device types render as non-greyed list items.
        AddonDeviceType.objects.create(addon=self.apps[0], device_type=1)
        AddonDeviceType.objects.create(addon=self.apps[0], device_type=2)
        if self.uses_es():
            self.reindex(Webapp)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(5)')
        eq_(tds('ul li:not(.unavailable)').length, 2)

    def test_payments(self):
        # The payments column shows the human-readable premium type.
        self.apps[0].update(premium_type=mkt.ADDON_PREMIUM)
        self.apps[1].update(premium_type=mkt.ADDON_FREE_INAPP)
        if self.uses_es():
            self.reindex(Webapp)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(6)')
        eq_(tds.eq(0).text(),
            unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_PREMIUM]))
        eq_(tds.eq(1).text(),
            unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_FREE_INAPP]))

    def test_invalid_page(self):
        # An out-of-range page falls back to page 1 instead of 404ing.
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)

    def test_queue_count(self):
        if self.uses_es():
            self.refresh(doctypes=('webapp', 'homescreen'))
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (2)')
        eq_(links[1].text, u'Re-reviews (1)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[4].text, u'Homescreens (0)')

    def test_homescreen_count(self):
        # Tagging an app 'homescreen' moves it to the homescreens tab.
        Tag(tag_text='homescreen').save_tag(self.apps[1])
        self.apps[1].save()
        if self.uses_es():
            WebappIndexer.unindex(self.apps[1].id)
            self.refresh(('homescreen', 'webapp'))
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (1)')
        eq_(links[1].text, u'Re-reviews (1)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[4].text, u'Homescreens (1)')

    def test_queue_count_senior_reviewer(self):
        # Senior reviewers additionally see the escalations tab.
        self.login_as_senior_reviewer()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (2)')
        eq_(links[1].text, u'Re-reviews (1)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (0)')

    def test_escalated_not_in_queue(self):
        self.login_as_senior_reviewer()
        EscalationQueue.objects.create(addon=self.apps[0])
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        # self.apps[2] is not pending so doesn't show up either.
        eq_([a.app.id for a in res.context['addons']], [self.apps[1].id])
        doc = pq(res.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (1)')
        eq_(links[1].text, u'Re-reviews (1)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (1)')

    def test_incomplete_no_in_queue(self):
        # Incomplete (STATUS_NULL) apps never appear in the queue.
        [app.update(status=mkt.STATUS_NULL) for app in self.apps]
        if self.uses_es():
            self.reindex(Webapp)
        req = req_factory_factory(
            self.url,
            user=UserProfile.objects.get(email='editor@mozilla.com'))
        doc = pq(queue_apps(req).content)
        assert not doc('#addon-queue tbody tr').length

    def test_waiting_time(self):
        """Check objects show queue objects' created."""
        res = self.client.get(self.url)
        waiting_times = [wait.attrib['isotime'] for wait in
                         pq(res.content)('td time')]
        expected_waiting_times = [isotime(app.latest_version.nomination)
                                  for app in self.apps[0:2]]
        self.assertSetEqual(expected_waiting_times, waiting_times)
class TestAppQueueES(mkt.site.tests.ESTestCase, TestAppQueue):
    """Re-run all TestAppQueue tests against the Elasticsearch-backed
    queue views."""

    def setUp(self):
        super(TestAppQueueES, self).setUp()
        # Flip the switch so queue views read from ES, then index the apps.
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestRereviewQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
                        XSSMixin):
    """Tests for the re-review queue: listing order, tab counts and action
    buttons for apps flagged for re-review."""

    def setUp(self):
        super(TestRereviewQueue, self).setUp()
        # Three public apps, each flagged for re-review at different times.
        self.apps = [app_factory(name='XXX'),
                     app_factory(name='YYY'),
                     app_factory(name='ZZZ')]
        RereviewQueue.objects.create(addon=self.apps[0]).update(
            created=self.days_ago(5))
        RereviewQueue.objects.create(addon=self.apps[1]).update(
            created=self.days_ago(3))
        RereviewQueue.objects.create(addon=self.apps[2]).update(
            created=self.days_ago(1))
        self.apps[0].update(created=self.days_ago(15))
        self.apps[1].update(created=self.days_ago(13))
        self.apps[2].update(created=self.days_ago(11))
        if self.uses_es():
            self.refresh(doctypes=('homescreen', 'webapp'))
        self.url = reverse('reviewers.apps.queue_rereview')

    def tearDown(self):
        # ES subclasses index apps; remove them so tests stay isolated.
        if self.uses_es():
            unindex_webapps([app.id for app in self.apps])
        super(TestRereviewQueue, self).tearDown()

    def review_url(self, app):
        # Review-page URL for an app.
        return reverse('reviewers.apps.review', args=[app.app_slug])

    def test_template_links(self):
        # Rows link to review pages, ordered by queue-entry creation.
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
        apps = [rq.addon for rq in
                RereviewQueue.objects.all().order_by('created')]
        expected = [
            (unicode(apps[0].name), self.review_url(apps[0])),
            (unicode(apps[1].name), self.review_url(apps[1])),
            (unicode(apps[2].name), self.review_url(apps[2])),
        ]
        check_links(expected, links, verify=False)

    def test_waiting_time(self):
        """Check objects show queue objects' created."""
        r = self.client.get(self.url)
        waiting_times = [wait.attrib['isotime'] for wait in
                         pq(r.content)('td time')]
        expected_waiting_times = [
            isotime(app.rereviewqueue_set.all()[0].created)
            for app in self.apps]
        self.assertSetEqual(expected_waiting_times, waiting_times)

    def test_action_buttons_public_senior_reviewer(self):
        # Senior reviewers also get 'Ban app'.
        self.login_as_senior_reviewer()
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Ban app', 'disable'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_action_buttons_public(self):
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_action_buttons_reject(self):
        # A rejected app under re-review can be approved, not re-rejected.
        self.apps[0].update(status=mkt.STATUS_REJECTED)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Approve', 'public'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_invalid_page(self):
        # An out-of-range page falls back to page 1 instead of 404ing.
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)

    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (3)')
        eq_(links[2].text, u'Updates (0)')

    def test_queue_count_senior_reviewer(self):
        # Senior reviewers additionally see the escalations tab.
        self.login_as_senior_reviewer()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (3)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (0)')

    def test_escalated_not_in_queue(self):
        # Escalated apps leave the re-review queue for the escalations tab.
        self.login_as_senior_reviewer()
        EscalationQueue.objects.create(addon=self.apps[0])
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        self.assertSetEqual([a.app.id for a in res.context['addons']],
                            [a.id for a in self.apps[1:]])
        doc = pq(res.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (2)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (1)')

    def test_addon_deleted(self):
        # Deleting an app removes its re-review queue entry.
        app = self.apps[0]
        app.delete()
        eq_(RereviewQueue.objects.filter(addon=app).exists(), False)
class TestRereviewQueueES(mkt.site.tests.ESTestCase, TestRereviewQueue):
    """Re-run all TestRereviewQueue tests against the Elasticsearch-backed
    queue views."""

    def setUp(self):
        super(TestRereviewQueueES, self).setUp()
        # Flip the switch so queue views read from ES, then index the apps.
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestUpdateQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
XSSMixin):
# Prevent update_cached_manifests at setUp() since it gets called and tries
# to access files when we add versions.
@mock.patch('mkt.webapps.tasks.update_cached_manifests', False)
def setUp(self):
super(TestUpdateQueue, self).setUp()
post_request_task._start_queuing_tasks()
app1 = app_factory(is_packaged=True, name='XXX',
version_kw={'version': '1.0',
'created': self.days_ago(2),
'nomination': self.days_ago(2)})
app2 = app_factory(is_packaged=True, name='YYY',
version_kw={'version': '1.0',
'created': self.days_ago(2),
'nomination': self.days_ago(2)})
version_factory(addon=app1, version='1.1', created=self.days_ago(1),
nomination=self.days_ago(1),
file_kw={'status': mkt.STATUS_PENDING})
version_factory(addon=app2, version='1.1', created=self.days_ago(1),
nomination=self.days_ago(1),
file_kw={'status': mkt.STATUS_PENDING})
post_request_task._send_tasks_and_stop_queuing()
self.apps = list(Webapp.objects.order_by('id'))
self.url = reverse('reviewers.apps.queue_updates')
def tearDown(self):
if self.uses_es():
unindex_webapps([app.id for app in self.apps])
super(TestUpdateQueue, self).tearDown()
def review_url(self, app):
return reverse('reviewers.apps.review', args=[app.app_slug])
def test_template_links(self):
self.apps[0].versions.latest().update(nomination=self.days_ago(2))
self.apps[1].versions.latest().update(nomination=self.days_ago(1))
if self.uses_es():
self.reindex(Webapp)
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
expected = [
(unicode(self.apps[0].name), self.review_url(self.apps[0])),
(unicode(self.apps[1].name), self.review_url(self.apps[1])),
]
check_links(expected, links, verify=False)
def test_action_buttons_public_senior_reviewer(self):
self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC)
self.login_as_senior_reviewer()
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Reject', 'reject'),
(u'Ban app', 'disable'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_public(self):
self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Reject', 'reject'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_reject(self):
self.apps[0].versions.latest().files.update(status=mkt.STATUS_DISABLED)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_invalid_page(self):
r = self.client.get(self.url, {'page': 999})
eq_(r.status_code, 200)
eq_(r.context['pager'].number, 1)
def test_queue_count(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (2)')
def test_homescreen(self):
Tag(tag_text='homescreen').save_tag(self.apps[1])
self.apps[1].save()
if self.uses_es():
WebappIndexer.unindex(self.apps[1].id)
self.refresh(doctypes=('homescreen', 'webapp'))
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (1)')
eq_(links[3].text, u'Reviewing (0)')
eq_(links[4].text, u'Homescreens (1)')
def test_queue_count_senior_reviewer(self):
self.login_as_senior_reviewer()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (2)')
eq_(links[3].text, u'Escalations (0)')
def test_escalated_not_in_queue(self):
self.login_as_senior_reviewer()
EscalationQueue.objects.create(addon=self.apps[0])
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_([a.app.id for a in res.context['addons']],
[app.id for app in self.apps[1:]])
doc = pq(res.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (1)')
eq_(links[3].text, u'Escalations (1)')
def test_order(self):
self.apps[0].update(created=self.days_ago(10))
self.apps[1].update(created=self.days_ago(5))
self.apps[0].versions.latest().update(nomination=self.days_ago(1))
self.apps[1].versions.latest().update(nomination=self.days_ago(4))
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
apps = list(res.context['addons'])
eq_(apps[0].app.id, self.apps[1].id)
eq_(apps[1].app.id, self.apps[0].id)
def test_only_updates_in_queue(self):
# Add new packaged app, which should only show up in the pending queue.
app = app_factory(is_packaged=True, name='ZZZ',
status=mkt.STATUS_PENDING,
version_kw={'version': '1.0'},
file_kw={'status': mkt.STATUS_PENDING})
self.apps.append(app)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
apps = [a.app for a in res.context['addons']]
assert app not in apps, (
'Unexpected: Found a new packaged app in the updates queue.')
eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (2)')
def test_approved_update_in_queue(self):
app = app_factory(is_packaged=True, name='YYY',
status=mkt.STATUS_APPROVED,
version_kw={'version': '1.0',
'created': self.days_ago(2),
'nomination': self.days_ago(2)})
self.apps.append(app)
File.objects.filter(version__addon=app).update(status=app.status)
version_factory(addon=app, version='1.1', created=self.days_ago(1),
nomination=self.days_ago(1),
file_kw={'status': mkt.STATUS_PENDING})
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
assert app.id in [a.app.id for a in res.context['addons']]
eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)')
    def test_update_queue_with_empty_nomination(self):
        """An app whose versions have nomination=None must still show up in
        the updates queue once it has a pending update."""
        app = app_factory(is_packaged=True, name='YYY',
                          status=mkt.STATUS_NULL,
                          version_kw={'version': '1.0',
                                      'created': self.days_ago(2),
                                      'nomination': None})
        self.apps.append(app)
        first_version = app.latest_version
        version_factory(addon=app, version='1.1', created=self.days_ago(1),
                        nomination=None,
                        file_kw={'status': mkt.STATUS_PENDING})
        # Now that we have a version with nomination=None, reset app status.
        app.update(status=mkt.STATUS_APPROVED)
        File.objects.filter(version=first_version).update(status=app.status)
        # Safeguard: we /really/ want to test with nomination=None.
        eq_(app.latest_version.reload().nomination, None)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        assert app.id in [a.app.id for a in res.context['addons']]
        eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)')
    def test_deleted_version_not_in_queue(self):
        """
        This tests that an app with a prior pending version that got
        deleted doesn't trigger the app to remain in the review queue.
        """
        app = self.apps[0]
        # File is PENDING and delete current version.
        old_ver = app.versions.order_by('id')[0]
        old_ver.files.latest().update(status=mkt.STATUS_PENDING)
        old_ver.delete()
        # "Approve" the app.
        app.versions.latest().files.latest().update(status=mkt.STATUS_PUBLIC)
        eq_(app.reload().status, mkt.STATUS_PUBLIC)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        # Verify that our app has 2 versions (the deleted one is retained
        # by the soft-delete manager).
        eq_(Version.with_deleted.filter(addon=app).count(), 2)
        # Verify the apps in the context are what we expect: only the other
        # app remains queued.
        doc = pq(res.content)
        eq_(doc('.tabnav li a')[2].text, u'Updates (1)')
        apps = [a.app.id for a in res.context['addons']]
        ok_(app.id not in apps)
        ok_(self.apps[1].id in apps)
def test_waiting_time(self):
"""Check objects show queue objects' created."""
r = self.client.get(self.url)
waiting_times = [wait.attrib['isotime'] for wait in
pq(r.content)('td time')]
expected_waiting_times = [isotime(app.latest_version.nomination)
for app in self.apps]
self.assertSetEqual(expected_waiting_times, waiting_times)
class TestUpdateQueueES(mkt.site.tests.ESTestCase, TestUpdateQueue):
    """Re-run the TestUpdateQueue suite against the Elasticsearch-backed
    reviewer queue, enabled via the 'reviewer-tools-elasticsearch' switch."""

    def setUp(self):
        super(TestUpdateQueueES, self).setUp()
        self.create_switch('reviewer-tools-elasticsearch')
        # Make the fixture apps visible to the ES-backed queue views.
        self.refresh(doctypes=('homescreen', 'webapp'))
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestEscalationQueue(AppReviewerTest, AccessMixin, FlagsMixin,
                          SearchMixin, XSSMixin):
    """Tests for the senior-reviewer escalation queue listing and the
    review actions available on escalated apps."""

    def setUp(self):
        """Create three escalated apps, escalated 5/3/1 days ago, and log
        in as a senior reviewer (regular reviewers lack access)."""
        super(TestEscalationQueue, self).setUp()
        self.apps = [app_factory(name='XXX'),
                     app_factory(name='YYY'),
                     app_factory(name='ZZZ')]
        EscalationQueue.objects.create(addon=self.apps[0]).update(
            created=self.days_ago(5))
        EscalationQueue.objects.create(addon=self.apps[1]).update(
            created=self.days_ago(3))
        EscalationQueue.objects.create(addon=self.apps[2]).update(
            created=self.days_ago(1))
        self.apps[0].update(created=self.days_ago(15))
        self.apps[1].update(created=self.days_ago(13))
        self.apps[2].update(created=self.days_ago(11))
        self.login_as_senior_reviewer()
        self.url = reverse('reviewers.apps.queue_escalated')

    def tearDown(self):
        # Remove the apps from the ES index so they don't leak into other
        # ES-backed tests.
        if self.uses_es():
            unindex_webapps([app.id for app in self.apps])
        super(TestEscalationQueue, self).tearDown()

    def review_url(self, app):
        """Return the review-page URL for `app`."""
        return reverse('reviewers.apps.review', args=[app.app_slug])

    def test_flag_blocked(self):
        # Blocklisted apps should only be in the update queue, so this flag
        # check is here rather than in FlagsMixin.
        self.apps[0].update(status=mkt.STATUS_BLOCKED)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-blocked')
        eq_(flags.length, 1)

    def test_no_access_regular_reviewer(self):
        """Regular reviewers must get a 403 on the escalation queue."""
        self.login_as_editor()
        res = self.client.get(self.url)
        eq_(res.status_code, 403)

    def test_template_links(self):
        """Queue rows link to each app's review page, oldest app first."""
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
        apps = [rq.addon for rq in
                EscalationQueue.objects.all().order_by('addon__created')]
        expected = [
            (unicode(apps[0].name), self.review_url(apps[0])),
            (unicode(apps[1].name), self.review_url(apps[1])),
            (unicode(apps[2].name), self.review_url(apps[2])),
        ]
        check_links(expected, links, verify=False)

    def test_waiting_time(self):
        """Check objects show queue objects' created."""
        r = self.client.get(self.url)
        waiting_times = [wait.attrib['isotime'] for wait in
                         pq(r.content)('td time')]
        # Waiting time is measured from the escalation, not the nomination.
        expected_waiting_times = [
            isotime(app.escalationqueue_set.all()[0].created)
            for app in self.apps]
        self.assertSetEqual(expected_waiting_times, waiting_times)

    def test_action_buttons_public(self):
        """Public escalated apps offer reject/ban/rereview/clear actions."""
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Ban app', 'disable'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Clear Escalation', 'clear_escalation'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_action_buttons_reject(self):
        """Rejected escalated apps offer Approve instead of Reject."""
        self.apps[0].update(status=mkt.STATUS_REJECTED)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Approve', 'public'),
            (u'Ban app', 'disable'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Clear Escalation', 'clear_escalation'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_invalid_page(self):
        """An out-of-range page number falls back to page 1, not a 404."""
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)

    def test_queue_count(self):
        """Tab labels show the three escalations and empty other queues."""
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (3)')

    def test_addon_deleted(self):
        """Deleting an app must remove its escalation-queue entry."""
        app = self.apps[0]
        app.delete()
        eq_(EscalationQueue.objects.filter(addon=app).exists(), False)
class TestEscalationQueueES(mkt.site.tests.ESTestCase, TestEscalationQueue):
    """Re-run the TestEscalationQueue suite against the Elasticsearch-backed
    reviewer queue, enabled via the 'reviewer-tools-elasticsearch' switch."""

    def setUp(self):
        super(TestEscalationQueueES, self).setUp()
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
class TestReviewTransaction(AttachmentManagementMixin,
                            mkt.site.tests.MockEsMixin,
                            mkt.site.tests.MockBrowserIdMixin,
                            test.TransactionTestCase,
                            TestedonManagementMixin):
    """Approval tests that need real transaction semantics (hence
    TransactionTestCase): signing happens post-commit, so a signing failure
    must leave the app pending while success publishes it."""

    fixtures = fixture('webapp_337141')

    def setUp(self):
        super(TestReviewTransaction, self).setUp()
        mkt.site.tests.TestCase.grant_permission(
            user_factory(email='editor'), 'Apps:Review')
        self.mock_browser_id()

    def get_app(self):
        """Fetch a fresh copy of the fixture app from the database."""
        return Webapp.objects.get(id=337141)

    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    @mock.patch('lib.crypto.packaged.sign_app')
    def test_public_sign(self, sign_mock, json_mock, update_cached_manifests):
        """Approving a pending packaged app signs it and makes it public."""
        self.app = self.get_app()
        self.version = self.app.latest_version
        self.version.files.all().update(status=mkt.STATUS_PENDING)
        # Provide a real (dummy) file on disk for the signing step to read,
        # and make sure no stale signed copy exists.
        with private_storage.open(
                self.version.files.all()[0].file_path, 'w') as f:
            f.write('.')
        public_storage.delete(self.version.files.all()[0].signed_file_path)
        self.app.update(status=mkt.STATUS_PENDING, is_packaged=True,
                        _current_version=None, _signal=False)
        eq_(self.get_app().status, mkt.STATUS_PENDING)
        update_cached_manifests.reset_mock()
        sign_mock.return_value = None  # Didn't fail.
        json_mock.return_value = {'name': 'Something'}
        self.login('editor@mozilla.com')
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        resp = self.client.post(
            reverse('reviewers.apps.review', args=[self.app.app_slug]), data)
        eq_(resp.status_code, 302)
        eq_(self.get_app().status, mkt.STATUS_PUBLIC)
        eq_(update_cached_manifests.delay.call_count, 1)

    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    @mock.patch('lib.crypto.packaged.sign')
    def test_public_sign_failure(self, sign_mock, json_mock,
                                 update_cached_manifests):
        """A SigningError during approval must leave the app pending and
        must not refresh cached manifests."""
        self.app = self.get_app()
        self.version = self.app.latest_version
        self.version.files.all().update(status=mkt.STATUS_PENDING)
        self.app.update(status=mkt.STATUS_PENDING, is_packaged=True,
                        _current_version=None, _signal=False)
        eq_(self.get_app().status, mkt.STATUS_PENDING)
        sign_mock.side_effect = packaged.SigningError
        json_mock.return_value = {'name': 'Something'}
        self.login('editor@mozilla.com')
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        resp = self.client.post(
            reverse('reviewers.apps.review', args=[self.app.app_slug]), data)
        eq_(resp.status_code, 302)
        eq_(self.get_app().status, mkt.STATUS_PENDING)
        eq_(update_cached_manifests.delay.call_count, 0)
class TestReviewMixin(object):
    """Shared helpers for review-action tests: posting review forms and
    asserting on emails, communication threads, activity logs and reviewer
    scores."""

    # E.g commreply+12e0caffc4ca4174a6f62300c0ff180a@marketplace.firefox.com .
    COMM_REPLY_RE = r'^commreply\+[a-f0-9]+\@marketplace\.firefox\.com$'

    def post(self, data, queue='pending'):
        """POST `data` to the review page and assert the redirect back to
        the named reviewer queue."""
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))

    def _check_email(self, msg, subject, to=None):
        """Assert `msg` recipients, Reply-To, sender and subject.

        With to=None the recipients default to the app's authors; with
        subject=None the subject check is skipped.
        """
        if to:
            eq_(msg.to, to)
        else:
            eq_(msg.to, list(self.app.authors.values_list('email', flat=True)))
        # Replies must route back through the communication dashboard.
        assert re.match(self.COMM_REPLY_RE, msg.extra_headers['Reply-To'])
        eq_(msg.cc, [])
        eq_(msg.from_email, settings.MKT_REVIEWERS_EMAIL)
        if subject:
            eq_(msg.subject, '%s: %s' % (subject, self.app.name))

    def _get_mail(self, email):
        """Return the first outbox message whose recipient starts with
        `email` (raises IndexError if none matched)."""
        return filter(lambda x: x.to[0].startswith(email), mail.outbox)[0]

    def _check_email_dev_and_contact(self, subject, outbox_len=2):
        """
        Helper for checking developer and Mozilla contact get emailed.
        """
        eq_(len(mail.outbox), outbox_len)
        # Developer.
        self._check_email(self._get_mail('steamcube'), subject)
        # Mozilla contact.
        self._check_email(self._get_mail('contact'), subject,
                          to=[self.mozilla_contact])

    def _check_thread(self):
        """Assert exactly one comm thread exists, readable by developer,
        reviewer and staff."""
        thread = self.app.threads
        eq_(thread.count(), 1)
        thread = thread.get()
        perms = ('developer', 'reviewer', 'staff')
        for key in perms:
            assert getattr(thread, 'read_permission_%s' % key)

    def _check_email_body(self, msg=None):
        """Assert the app's detail URL appears in the email body
        (defaults to the first outbox message)."""
        if not msg:
            msg = mail.outbox[0]
        body = msg.message().as_string()
        url = self.app.get_url_path()
        assert url in body, 'Could not find apps detail URL in %s' % msg

    def _check_log(self, action):
        """Assert an AppLog entry exists for `action` on self.app."""
        assert AppLog.objects.filter(
            addon=self.app, activity_log__action=action.id).exists(), (
                "Didn't find `%s` action in logs." % action.short)

    def _check_score(self, reviewed_type):
        """Assert a ReviewerScore was awarded with the points and note key
        matching `reviewed_type`."""
        scores = ReviewerScore.objects.all()
        assert len(scores) > 0
        eq_(scores[0].score, mkt.REVIEWED_SCORES[reviewed_type])
        eq_(scores[0].note_key, reviewed_type)
class TestReviewApp(SetupFilesMixin, AppReviewerTest, TestReviewMixin,
AccessMixin, AttachmentManagementMixin,
TestedonManagementMixin):
fixtures = fixture('webapp_337141')
    def setUp(self):
        """Create a rated, pending hosted app with a Mozilla contact and
        point self.url at its review page."""
        super(TestReviewApp, self).setUp()
        self.mozilla_contact = 'contact@mozilla.com'
        self.app = self.get_app()
        make_rated(self.app)
        self.app.update(status=mkt.STATUS_PENDING,
                        mozilla_contact=self.mozilla_contact)
        self.version = self.app.latest_version
        self.version.files.all().update(status=mkt.STATUS_PENDING)
        self.file = self.version.all_files[0]
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
        self.setup_files()
def get_app(self):
return Webapp.objects.get(id=337141)
def test_review_viewing_ping(self):
eq_(self.client.post(reverse('reviewers.review_viewing')).status_code,
200)
    @mock.patch('mkt.webapps.models.Webapp.in_rereview_queue')
    def test_rereview(self, is_rereview_queue):
        """The review page must flag apps sitting in the re-review queue."""
        is_rereview_queue.return_value = True
        content = pq(self.client.get(self.url).content)
        assert content('#queue-rereview').length
    @mock.patch('mkt.webapps.models.Webapp.in_escalation_queue')
    def test_escalated(self, in_escalation_queue):
        """The review page must flag apps sitting in the escalation queue."""
        in_escalation_queue.return_value = True
        content = pq(self.client.get(self.url).content)
        assert content('#queue-escalation').length
    def test_cannot_review_my_app(self):
        """A reviewer who authored the app is redirected away from its
        review page when self-reviews are disallowed."""
        with self.settings(ALLOW_SELF_REVIEWS=False):
            self.app.addonuser_set.create(
                user=UserProfile.objects.get(email='editor@mozilla.com'))
            res = self.client.head(self.url)
            self.assert3xx(res, reverse('reviewers.home'))
            res = self.client.post(self.url)
            self.assert3xx(res, reverse('reviewers.home'))
    def test_cannot_review_blocklisted_app(self):
        """Regular reviewers are redirected away from blocklisted apps."""
        self.app.update(status=mkt.STATUS_BLOCKED)
        res = self.client.get(self.url)
        self.assert3xx(res, reverse('reviewers.home'))
        res = self.client.post(self.url)
        self.assert3xx(res, reverse('reviewers.home'))
    def test_review_no_latest_version(self):
        """An app with no versions still renders the review page, but with
        no review-action buttons."""
        self.app.versions.all().delete()
        self.app.reload()
        eq_(self.app.latest_version, None)
        eq_(self.app.current_version, None)
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        doc = pq(response.content)
        assert not doc('input[name=action][value=info]').length
        assert not doc('input[name=action][value=comment]').length
        assert not doc('input[name=action][value=public]').length
        assert not doc('input[name=action][value=reject]').length
        # Also try with a packaged app.
        self.app.update(is_packaged=True)
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
    def test_sr_can_review_blocklisted_app(self):
        """Senior reviewers, unlike regular ones, can review (and approve)
        a blocklisted app."""
        self.app.update(status=mkt.STATUS_BLOCKED)
        self.login_as_senior_reviewer()
        eq_(self.client.get(self.url).status_code, 200)
        data = {'action': 'public', 'comments': 'yo'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_pending'))
    def test_pending_to_reject_w_device_overrides(self):
        """Rejecting must ignore any device_override submitted with the
        form — device types stay as the developer set them."""
        # This shouldn't be possible unless there's form hacking.
        AddonDeviceType.objects.create(addon=self.app,
                                       device_type=mkt.DEVICE_DESKTOP.id)
        AddonDeviceType.objects.create(addon=self.app,
                                       device_type=mkt.DEVICE_TABLET.id)
        eq_(self.app.publish_type, mkt.PUBLISH_IMMEDIATE)
        data = {'action': 'reject', 'comments': 'something',
                'device_override': [mkt.DEVICE_DESKTOP.id]}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
        eq_(app.status, mkt.STATUS_REJECTED)
        # Both original device types survive the (ignored) override.
        eq_(set([o.id for o in app.device_types]),
            set([mkt.DEVICE_DESKTOP.id, mkt.DEVICE_TABLET.id]))
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
    def test_pending_to_public_w_requirements_overrides(self):
        """Approving while adding a feature requirement applies the
        override but downgrades publishing to private."""
        data = {'action': 'public', 'comments': 'something',
                'has_packaged_apps': True}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        assert not self.app.latest_version.features.has_packaged_apps
        self.post(data)
        app = self.get_app()
        assert app.latest_version.features.has_packaged_apps
        # Since features have been changed by the reviewer, the app should not
        # be immediately published.
        eq_(app.publish_type, mkt.PUBLISH_PRIVATE)
        eq_(app.status, mkt.STATUS_APPROVED)
        self._check_log(mkt.LOG.REVIEW_FEATURES_OVERRIDE)
        # A reviewer changing features shouldn't generate a re-review.
        eq_(RereviewQueue.objects.count(), 0)
    def test_pending_to_public_w_requirements_removed(self):
        """Approving while removing a feature requirement applies the
        override but downgrades publishing to private."""
        self.app.latest_version.features.update(has_packaged_apps=True)
        data = {'action': 'public', 'comments': 'something',
                'has_packaged_apps': False}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        assert self.app.latest_version.features.has_packaged_apps
        self.post(data)
        app = self.get_app()
        assert not app.latest_version.features.has_packaged_apps
        # Since features have been changed by the reviewer, the app should not
        # be immediately published.
        eq_(app.publish_type, mkt.PUBLISH_PRIVATE)
        eq_(app.status, mkt.STATUS_APPROVED)
        self._check_log(mkt.LOG.REVIEW_FEATURES_OVERRIDE)
        # A reviewer changing features shouldn't generate a re-review.
        eq_(RereviewQueue.objects.count(), 0)
    def test_pending_to_reject_w_requirements_overrides(self):
        # Rejecting an app doesn't let you override features requirements.
        data = {'action': 'reject', 'comments': 'something',
                'has_packaged_apps': True}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        assert not self.app.latest_version.features.has_packaged_apps
        self.post(data)
        app = self.get_app()
        # The submitted override was ignored on reject.
        assert not app.latest_version.features.has_packaged_apps
        eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
        eq_(app.status, mkt.STATUS_REJECTED)
    def test_pending_to_public_w_requirements_overrides_nothing_changed(self):
        """Submitting feature values identical to the existing ones must
        publish immediately and log no features-override action."""
        self.version.features.update(has_packaged_apps=True)
        data = {'action': 'public', 'comments': 'something',
                'has_packaged_apps': True}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        assert self.app.latest_version.features.has_packaged_apps
        self.post(data)
        app = self.get_app()
        assert app.latest_version.features.has_packaged_apps
        eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
        eq_(app.status, mkt.STATUS_PUBLIC)
        action_id = mkt.LOG.REVIEW_FEATURES_OVERRIDE.id
        assert not AppLog.objects.filter(
            addon=self.app, activity_log__action=action_id).exists()
    @mock.patch('mkt.reviewers.views.messages.success', new=mock.Mock)
    def test_incomplete_cant_approve(self):
        """Approving an incomplete (STATUS_NULL) app must be a no-op."""
        self.app.update(status=mkt.STATUS_NULL)
        self.app.latest_version.files.update(status=mkt.STATUS_NULL)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        # Still incomplete.
        eq_(self.get_app().status, mkt.STATUS_NULL)
def test_notification_email_translation(self):
# https://bugzilla.mozilla.org/show_bug.cgi?id=1127790
raise SkipTest
"""Test that the app name is translated with the app's default_locale
and not the reviewer's when we are sending notification emails."""
original_name = unicode(self.app.name)
fr_translation = u'Mais allô quoi!'
es_translation = u'¿Dónde está la biblioteca?'
self.app.name = {
'fr': fr_translation,
'es': es_translation,
}
self.app.default_locale = 'fr'
self.app.save()
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.client.post(self.url, data, HTTP_ACCEPT_LANGUAGE='es')
eq_(translation.get_language(), 'es')
eq_(len(mail.outbox), 2)
msg = mail.outbox[0]
assert original_name not in msg.subject
assert es_translation not in msg.subject
assert fr_translation in msg.subject
assert original_name not in msg.body
assert es_translation not in msg.body
assert fr_translation in msg.body
    @mock.patch('lib.crypto.packaged.sign')
    def test_require_sig_for_public(self, sign):
        """A packaged app whose signing fails must stay pending."""
        sign.side_effect = packaged.SigningError
        self.get_app().update(is_packaged=True)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.client.post(self.url, data)
        eq_(self.get_app().status, mkt.STATUS_PENDING)
    def _test_pending_to_public(self):
        """Shared body: approving a pending hosted app (with no Mozilla
        contact) publishes it, logs, emails the developer and scores."""
        self.app.update(mozilla_contact='')
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        # Only one email: no Mozilla contact is set.
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], ('Approved'))
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
    def test_pending_to_public(self):
        """Approving a pending hosted app makes it public."""
        self._test_pending_to_public()
    @mock.patch('mkt.reviewers.views.messages.success')
    def test_pending_to_escalation(self, messages):
        """Escalating queues the app and emails both developer and admin."""
        data = {'action': 'escalate', 'comments': 'soup her man'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        eq_(EscalationQueue.objects.count(), 1)
        self._check_log(mkt.LOG.ESCALATE_MANUAL)
        # Test 2 emails: 1 to dev, 1 to admin.
        eq_(len(mail.outbox), 2)
        self._check_email(self._get_mail('steamcube'), 'Escalated')
        self._check_email(
            self._get_mail('snrreviewer'), 'Escalated',
            to=[self.snr_reviewer_user.email])
        eq_(messages.call_args_list[0][0][1], 'Review successfully processed.')
    def test_pending_to_disable_senior_reviewer(self):
        """Senior reviewers can ban a public app, disabling its files."""
        self.login_as_senior_reviewer()
        self.app.update(status=mkt.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
        data = {'action': 'disable', 'comments': 'banned ur app'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_DISABLED)
        eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED)
        self._check_log(mkt.LOG.APP_DISABLED)
        self._check_email_dev_and_contact('Banned')
    def test_pending_to_disable(self):
        # Only senior reviewers can ban apps.
        self.app.update(status=mkt.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
        data = {'action': 'disable', 'comments': 'banned ur app'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        res = self.client.post(self.url, data)
        # Form re-renders with an error on 'action'; nothing changes and no
        # mail is sent.
        eq_(res.status_code, 200)
        ok_('action' in res.context['form'].errors)
        eq_(self.get_app().status, mkt.STATUS_PUBLIC)
        eq_(len(mail.outbox), 0)
    def _test_escalation_to_public(self):
        """Shared body: approving an escalated pending app publishes it and
        clears the escalation."""
        EscalationQueue.objects.create(addon=self.app)
        eq_(self.app.status, mkt.STATUS_PENDING)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data, queue='escalated')
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        eq_(EscalationQueue.objects.count(), 0)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
    def test_escalation_to_public(self):
        """Approving an escalated app publishes it."""
        self._test_escalation_to_public()
    def test_escalation_to_reject(self):
        """Rejecting an escalated app disables its files, clears the
        escalation and scores the review."""
        EscalationQueue.objects.create(addon=self.app)
        eq_(self.app.status, mkt.STATUS_PENDING)
        files = list(self.version.files.values_list('id', flat=True))
        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data, queue='escalated')
        app = self.get_app()
        eq_(app.status, mkt.STATUS_REJECTED)
        eq_(File.objects.filter(id__in=files)[0].status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        eq_(EscalationQueue.objects.count(), 0)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
    def test_escalation_to_disable_senior_reviewer(self):
        """Senior reviewers can ban an escalated app, which also clears
        the escalation."""
        self.login_as_senior_reviewer()
        EscalationQueue.objects.create(addon=self.app)
        self.app.update(status=mkt.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
        data = {'action': 'disable', 'comments': 'banned ur app'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data, queue='escalated')
        app = self.get_app()
        eq_(app.status, mkt.STATUS_DISABLED)
        eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED)
        self._check_log(mkt.LOG.APP_DISABLED)
        eq_(EscalationQueue.objects.count(), 0)
        self._check_email_dev_and_contact('Banned')
def test_escalation_to_disable(self):
EscalationQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
res = self.client.post(self.url, data, queue='escalated')
eq_(res.status_code, 200)
ok_('action' in res.context['form'].errors)
eq_(self.get_app().status, mkt.STATUS_PUBLIC)
eq_(EscalationQueue.objects.count(), 1)
eq_(len(mail.outbox), 0)
    def test_clear_escalation(self):
        """Clearing an escalation removes the queue entry and notifies only
        the Mozilla contact, not the developer."""
        self.app.update(status=mkt.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
        EscalationQueue.objects.create(addon=self.app)
        data = {'action': 'clear_escalation', 'comments': 'all clear'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data, queue='escalated')
        eq_(EscalationQueue.objects.count(), 0)
        self._check_log(mkt.LOG.ESCALATION_CLEARED)
        # Ensure we don't send email to developer on clearing escalations.
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
    def test_rereview_to_reject(self):
        """Rejecting from the re-review queue clears the queue entry,
        emails dev+contact and awards the re-review score."""
        RereviewQueue.objects.create(addon=self.app)
        self.app.update(status=mkt.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data, queue='rereview')
        eq_(self.get_app().status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        eq_(RereviewQueue.objects.count(), 0)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
    def test_rereview_to_disable_senior_reviewer(self):
        """Senior reviewers can ban an app from the re-review queue, which
        also clears its queue entry."""
        self.login_as_senior_reviewer()
        RereviewQueue.objects.create(addon=self.app)
        self.app.update(status=mkt.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
        data = {'action': 'disable', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data, queue='rereview')
        eq_(self.get_app().status, mkt.STATUS_DISABLED)
        self._check_log(mkt.LOG.APP_DISABLED)
        eq_(RereviewQueue.objects.filter(addon=self.app).count(), 0)
        self._check_email_dev_and_contact('Banned')
def test_rereview_to_disable(self):
RereviewQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
res = self.client.post(self.url, data, queue='rereview')
eq_(res.status_code, 200)
ok_('action' in res.context['form'].errors)
eq_(self.get_app().status, mkt.STATUS_PUBLIC)
eq_(RereviewQueue.objects.filter(addon=self.app).count(), 1)
eq_(len(mail.outbox), 0)
    def test_manual_rereview(self):
        """Requesting a manual re-review queues the app without changing
        its status and notifies only the Mozilla contact."""
        self.app.update(status=mkt.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
        data = {'action': 'manual_rereview', 'comments': 'man dem'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        # The app status shouldn't change.
        eq_(self.get_app().status, mkt.STATUS_PUBLIC)
        eq_(RereviewQueue.objects.count(), 1)
        self._check_log(mkt.LOG.REREVIEW_MANUAL)
        # Ensure we don't send email to developer on manual rereviews.
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
    def test_clear_rereview(self):
        """Clearing a re-review removes the queue entry, notifies only the
        Mozilla contact and awards the re-review score."""
        self.app.update(status=mkt.STATUS_PUBLIC)
        self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
        RereviewQueue.objects.create(addon=self.app)
        data = {'action': 'clear_rereview', 'comments': 'all clear'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data, queue='rereview')
        eq_(RereviewQueue.objects.count(), 0)
        self._check_log(mkt.LOG.REREVIEW_CLEARED)
        # Ensure we don't send emails to the developer on clearing re-reviews.
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
        self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
    def test_clear_rereview_unlisted(self):
        """Clearing a re-review behaves the same for unlisted apps."""
        self.app.update(status=mkt.STATUS_UNLISTED)
        self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
        RereviewQueue.objects.create(addon=self.app)
        data = {'action': 'clear_rereview', 'comments': 'all clear'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data, queue='rereview')
        eq_(RereviewQueue.objects.count(), 0)
        self._check_log(mkt.LOG.REREVIEW_CLEARED)
        # Ensure we don't send emails to the developer on clearing re-reviews.
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
        self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
    def test_rereview_to_escalation(self):
        """Escalating from the re-review queue creates an escalation and
        emails developer and senior reviewer."""
        RereviewQueue.objects.create(addon=self.app)
        data = {'action': 'escalate', 'comments': 'soup her man'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data, queue='rereview')
        eq_(EscalationQueue.objects.count(), 1)
        self._check_log(mkt.LOG.ESCALATE_MANUAL)
        # Test 2 emails: 1 to dev, 1 to admin.
        eq_(len(mail.outbox), 2)
        self._check_email(self._get_mail('steamcube'), 'Escalated')
        self._check_email(
            self._get_mail('snrreviewer'), 'Escalated',
            to=[self.snr_reviewer_user.email])
    def test_more_information(self):
        """An info request keeps the app pending, flags the version with
        has_info_request and emails dev+contact."""
        # Test the same for all queues.
        data = {'action': 'info', 'comments': 'Knead moor in faux'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        eq_(self.get_app().status, mkt.STATUS_PENDING)
        self._check_log(mkt.LOG.REQUEST_INFORMATION)
        vqs = self.get_app().versions.all()
        eq_(vqs.count(), 1)
        eq_(vqs.filter(has_info_request=True).count(), 1)
        self._check_email_dev_and_contact('Reviewer comment')
    def test_multi_cc_email(self):
        # Test multiple mozilla_contact emails via more information.
        # Contacts are stored as a single comma-separated string; each one
        # must receive its own message.
        contacts = [user_factory(email=u'á').email,
                    user_factory(email=u'ç').email]
        self.mozilla_contact = ', '.join(contacts)
        self.app.update(mozilla_contact=self.mozilla_contact)
        data = {'action': 'info', 'comments': 'Knead moor in faux'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        eq_(len(mail.outbox), 3)
        subject = 'Reviewer comment'
        self._check_email(self._get_mail('steamcube'), subject)
        self._check_email(self._get_mail(contacts[0]), subject,
                          to=[contacts[0]])
        self._check_email(self._get_mail(contacts[1]), subject,
                          to=[contacts[1]])
    def test_comment(self):
        """A private comment emails only the Mozilla contact and is
        logged, leaving the developer out of the loop."""
        # Test the same for all queues.
        data = {'action': 'comment', 'comments': 'mmm, nice app'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
        self._check_log(mkt.LOG.COMMENT_VERSION)
def test_receipt_no_node(self):
res = self.client.get(self.url)
eq_(len(pq(res.content)('#receipt-check-result')), 0)
    def test_receipt_has_node(self):
        """Premium apps render the receipt-check widget in both the desktop
        and mobile reviewer layouts."""
        self.get_app().update(premium_type=mkt.ADDON_PREMIUM)
        res = self.client.get(self.url)
        eq_(len(pq(res.content)('.reviewers-desktop #receipt-check-result')),
            1)
        eq_(len(pq(res.content)('.reviewers-mobile #receipt-check-result')),
            1)
    @mock.patch('mkt.reviewers.views.requests.get')
    def test_manifest_json(self, mock_get):
        """The manifest view returns the fetched content and headers
        verbatim (escaping is the template's job, hence the <script>
        markers survive in the JSON payload)."""
        m = mock.Mock()
        m.content = 'the manifest contents <script>'
        m.headers = CaseInsensitiveDict(
            {'content-type': 'application/x-web-app-manifest+json <script>'})
        mock_get.return_value = m
        expected = {
            'content': 'the manifest contents <script>',
            'headers': {'content-type':
                        'application/x-web-app-manifest+json <script>'},
            'success': True,
            'permissions': {}
        }
        r = self.client.get(reverse('reviewers.apps.review.manifest',
                                    args=[self.app.app_slug]))
        eq_(r.status_code, 200)
        eq_(json.loads(r.content), expected)
    @mock.patch('mkt.reviewers.views.requests.get')
    def test_manifest_json_unicode(self, mock_get):
        """Non-ASCII manifest content round-trips through the view."""
        m = mock.Mock()
        m.content = u'كك some foreign ish'
        m.headers = CaseInsensitiveDict({})
        mock_get.return_value = m
        r = self.client.get(reverse('reviewers.apps.review.manifest',
                                    args=[self.app.app_slug]))
        eq_(r.status_code, 200)
        eq_(json.loads(r.content), {'content': u'كك some foreign ish',
                                    'headers': {}, 'success': True,
                                    'permissions': {}})
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_encoding(self, mock_get):
m = mock.Mock()
m.content = open(self.manifest_path('non-utf8.webapp')).read()
m.headers = CaseInsensitiveDict({})
mock_get.return_value = m
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
data = json.loads(r.content)
assert u'"name": "W2MO\u017d"' in data['content']
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_encoding_empty(self, mock_get):
m = mock.Mock()
m.content = ''
m.headers = CaseInsensitiveDict({})
mock_get.return_value = m
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
eq_(json.loads(r.content), {'content': u'', 'headers': {},
'success': True, 'permissions': {}})
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_traceback_in_response(self, mock_get):
m = mock.Mock()
m.content = {'name': 'Some name'}
m.headers = CaseInsensitiveDict({})
mock_get.side_effect = requests.exceptions.SSLError
mock_get.return_value = m
# We should not 500 on a traceback.
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
data = json.loads(r.content)
assert data['content'], 'There should be a content with the traceback'
eq_(data['headers'], {})
@mock.patch('mkt.reviewers.views.json.dumps')
def test_manifest_json_packaged(self, mock_):
# Test that when the app is packaged, _mini_manifest is called.
mock_.return_value = '{}'
self.get_app().update(is_packaged=True)
res = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(res.status_code, 200)
assert mock_.called
    @mock.patch('mkt.reviewers.views._get_manifest_json')
    def test_manifest_json_perms(self, mock_):
        """Manifest permissions are annotated with their type: plain 'web'
        permissions vs privileged ('priv') ones such as camera."""
        mock_.return_value = {
            'permissions': {
                "foo": {"description": "foo"},
                "camera": {"description": "<script>"}
            }
        }
        self.get_app().update(is_packaged=True)
        r = self.client.get(reverse('reviewers.apps.review.manifest',
                                    args=[self.app.app_slug]))
        eq_(r.status_code, 200)
        # NOTE(review): as in test_manifest_json, expecting the raw
        # '<script>' back implies descriptions are not HTML-escaped by the
        # view -- confirm; otherwise this should be '&lt;script&gt;'.
        eq_(json.loads(r.content)['permissions'],
            {'foo': {'description': 'foo', 'type': 'web'},
             'camera': {'description': '<script>', 'type': 'priv'}})
def test_abuse(self):
AbuseReport.objects.create(addon=self.app, message='!@#$')
res = self.client.get(self.url)
doc = pq(res.content)
dd = doc('.reviewers-desktop #summary dd.abuse-reports')
eq_(dd.text(), u'1')
eq_(dd.find('a').attr('href'), reverse('reviewers.apps.review.abuse',
args=[self.app.app_slug]))
dd = doc('.reviewers-mobile #summary dd.abuse-reports')
eq_(dd.text(), u'1')
eq_(dd.find('a').attr('href'), reverse('reviewers.apps.review.abuse',
args=[self.app.app_slug]))
def _attachment_form_data(self, num=1, action='comment'):
data = {'action': action,
'comments': 'mmm, nice app'}
data.update(self._attachment_management_form(num=num))
data.update(self._attachments(num))
return data
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
@mock.patch('mkt.site.storage_utils.LocalFileStorage.save')
def test_no_attachments(self, save_mock):
""" Test addition of no attachment """
data = self._attachment_form_data(num=0, action='public')
data.update(self._testedon_management_form())
self.post(data)
eq_(save_mock.called, False, save_mock.call_args_list)
def test_idn_app_domain(self):
response = self.client.get(self.url)
assert 'IDN domain!' not in response.content
self.get_app().update(app_domain=u'http://www.allïzom.org')
response = self.client.get(self.url)
assert 'IDN domain!' in response.content
def test_xss_domain(self):
# It shouldn't be possible to have this in app domain, it will never
# validate, but better safe than sorry.
self.get_app().update(app_domain=u'<script>alert(42)</script>')
response = self.client.get(self.url)
assert '<script>alert(42)</script>' not in response.content
assert '<script>alert(42)</script>' in response.content
def test_priority_flag_cleared_for_public(self):
self.get_app().update(priority_review=True)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(self.get_app().priority_review, False)
def test_priority_flag_uncleared_for_reject(self):
self.get_app().update(priority_review=True)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(self.get_app().priority_review, True)
def test_is_showcase_checkbox(self):
res = self.client.get(self.url)
eq_(pq(res.content)('#id_is_showcase:checked').length, 0)
app = self.get_app()
Tag(tag_text=SHOWCASE_TAG).save_tag(app)
res = self.client.get(self.url)
eq_(pq(res.content)('#id_is_showcase:checked').length, 1)
def test_is_showcase_on(self):
# Note: Using action=comment b/c it does less and keeps test faster.
data = {'action': 'comment', 'comments': 'blah', 'is_showcase': 'on'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
tags = self.get_app().tags.values_list('tag_text', flat=True)
assert SHOWCASE_TAG in tags
# Check email is sent to curation board.
msg = self._get_mail('appcurationboard')
eq_(msg.to, [settings.APP_CURATION_BOARD_EMAIL])
eq_(msg.subject,
u'App [%s] nominated to be featured' % self.get_app().name)
def test_is_showcase_off(self):
# Clearing contact so we don't get a superflous email below.
self.app.update(mozilla_contact='')
# Note: Using action=comment b/c it does less and keeps test faster.
# Note: `is_showcase` isn't passed b/c checkboxes.
data = {'action': 'comment', 'comments': 'blah'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
tags = self.get_app().tags.values_list('tag_text', flat=True)
assert SHOWCASE_TAG not in tags
# Check no email is sent.
eq_(len(mail.outbox), 0)
    def test_versions_history_pagination(self):
        """The version-history table paginates with working next/prev links.

        `paginate` is monkeypatched to 2 items per page so only three
        versions are needed to produce two pages.
        """
        self.app.update(is_packaged=True)
        version_factory(addon=self.app, version='2.0')
        version_factory(addon=self.app, version='3.0')
        # Mock paginate to paginate with only 2 versions to limit the
        # number of versions this test has to create.
        with mock.patch('mkt.reviewers.views.paginate',
                        lambda req, objs, limit: paginate(req, objs, 2)):
            content = pq(self.client.get(self.url).content)
        # Page 1: two rows, a "next" link, no "prev" link.
        eq_(len(content('#review-files tr.listing-body')), 2)
        eq_(len(content('#review-files-paginate a[rel=next]')), 1)
        eq_(len(content('#review-files-paginate a[rel=prev]')), 0)
        link = content('#review-files-paginate a[rel=next]')[0].attrib['href']
        eq_(link, '%s?page=2#history' % self.url)
        # Look at page 2: one remaining row, a "prev" link back to page 1.
        with mock.patch('mkt.reviewers.views.paginate',
                        lambda req, objs, limit: paginate(req, objs, 2)):
            content = pq(self.client.get(link).content)
        eq_(len(content('#review-files tr.listing-body')), 1)
        eq_(len(content('#review-files-paginate a[rel=next]')), 0)
        eq_(len(content('#review-files-paginate a[rel=prev]')), 1)
        eq_(content('#review-files-paginate a[rel=prev]')[0].attrib['href'],
            '%s?page=1#history' % self.url)
class TestCannedResponses(AppReviewerTest):
    """Canned responses show up grouped in the review form."""

    def setUp(self):
        super(TestCannedResponses, self).setUp()
        self.login_as_editor()
        self.app = app_factory(name='XXX', status=mkt.STATUS_PENDING)
        self.cr = CannedResponse.objects.create(
            name=u'app reason', response=u'app reason body',
            sort_group=u'public')
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def test_ok(self):
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        # Choices are grouped by sort_group. Index 0 is the default
        # "Choose a response..." entry, so [1][1] is the first real
        # group's list of (response, name) pairs.
        form = res.context['form']
        group_choices = form.fields['canned_response'].choices[1][1]
        eq_(len(group_choices), 1)
        assert self.cr.response in group_choices[0]
# Stacked @mock.patch decorators inject mocks bottom-up, so each test
# receives (update_name, update_locales, update_cached_manifests,
# index_webapps, messages) in that order.
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApproveHostedApp(AppReviewerTest, TestReviewMixin,
                           AttachmentManagementMixin, TestedonManagementMixin):
    """
    A separate test class for apps going to an approved state. All other state
    transitions are tested above.
    We're doing this to make the mocks easier to handle.
    """
    fixtures = fixture('webapp_337141')
    def setUp(self):
        # Put the fixture app and its latest file into the PENDING state so
        # each test starts from an unreviewed hosted app.
        super(TestApproveHostedApp, self).setUp()
        self.mozilla_contact = 'contact@mozilla.com'
        self.app = self.get_app()
        self.file = self.app.latest_version.files.all()[0]
        self.file.update(status=mkt.STATUS_PENDING)
        self.app.update(status=mkt.STATUS_PENDING,
                        mozilla_contact=self.mozilla_contact,
                        _current_version=None)
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
    def get_app(self):
        # Always re-fetch so assertions see fresh DB state.
        return Webapp.objects.get(id=337141)
    def _check_message(self, msg):
        # `msg` is the mocked messages.success; a hosted-app review is
        # worth 60 points.
        eq_(msg.call_args_list[0][0][1],
            '"Web App Review" successfully processed (+60 points, 60 total).')
    def test_pending_to_public(self, update_name, update_locales,
                               update_cached_manifests,
                               index_webapps, messages):
        """Approving a pending hosted app makes both app and file PUBLIC."""
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_message(messages)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        eq_(update_name.call_count, 0)  # Not a packaged app.
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        # App is not packaged, no need to call update_cached_manifests.
        eq_(update_cached_manifests.delay.call_count, 0)
    def test_pending_to_hidden(self, update_name, update_locales,
                               update_cached_manifests, index_webapps,
                               messages):
        """With PUBLISH_HIDDEN the app lands UNLISTED but its file is still
        PUBLIC (installable, just not listed)."""
        self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        self._check_message(messages)
        eq_(update_name.call_count, 0)  # Not a packaged app.
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        # App is not packaged, no need to call update_cached_manifests.
        eq_(update_cached_manifests.delay.call_count, 0)
    def test_pending_to_approved(self, update_name, update_locales,
                                 update_cached_manifests, index_webapps,
                                 messages):
        """With PUBLISH_PRIVATE the app lands APPROVED (private) and logs
        APPROVE_VERSION_PRIVATE."""
        self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE)
        index_webapps.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        # File status is PUBLIC since it is the only version.
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_message(messages)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        # The app is approved-private but can still be installed by team
        # members, so the locale bookkeeping still runs:
        eq_(update_name.call_count, 0)  # Not a packaged app.
        eq_(update_locales.call_count, 1)
        # App is not packaged, no need to call update_cached_manifests.
        eq_(update_cached_manifests.delay.call_count, 0)
        # Reindexing is still triggered once for the approved-private app.
        eq_(index_webapps.delay.call_count, 1)
    def test_pending_to_reject(self, update_name, update_locales,
                               update_cached_manifests, index_webapps,
                               messages):
        """Rejecting a pending hosted app marks app and file REJECTED and
        skips all publish-time bookkeeping except reindexing."""
        index_webapps.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'reject', 'comments': 'suxor'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        eq_(index_webapps.delay.call_count, 1)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_REJECTED)
        eq_(self.file.reload().status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_message(messages)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        eq_(update_name.call_count, 0)  # Not a packaged app.
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
# Stacked @mock.patch decorators inject mocks bottom-up, so each test
# receives (update_name, update_locales, update_cached_manifests,
# index_webapps, messages, sign_mock) in that order.
@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedApp(AppReviewerTest, TestReviewMixin,
                             AttachmentManagementMixin,
                             TestedonManagementMixin):
    """
    A separate test class for packaged apps going to an approved state.
    We're doing this to make the mocks easier to handle.
    """
    fixtures = fixture('webapp_337141')
    def setUp(self):
        # Same as the hosted-app setup, but the app is marked packaged.
        super(TestApprovePackagedApp, self).setUp()
        self.mozilla_contact = 'contact@mozilla.com'
        self.app = self.get_app()
        self.file = self.app.latest_version.files.all()[0]
        self.file.update(status=mkt.STATUS_PENDING)
        self.app.update(status=mkt.STATUS_PENDING,
                        mozilla_contact=self.mozilla_contact,
                        _current_version=None, is_packaged=True)
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
    def get_app(self):
        # Always re-fetch so assertions see fresh DB state.
        return Webapp.objects.get(id=337141)
    def _check_message(self, msg):
        # A first packaged-app review is worth 60 points.
        eq_(msg.call_args_list[0][0][1],
            '"Packaged App Review" successfully processed '
            '(+60 points, 60 total).')
    def test_pending_to_public(self, update_name, update_locales,
                               update_cached_manifests, index_webapps,
                               messages, sign_mock):
        """Approval makes the app PUBLIC and, since it is packaged, also
        updates name/locales, refreshes the cached mini-manifest and signs
        the new current version."""
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
    def test_pending_to_hidden(self, update_name, update_locales,
                               update_cached_manifests, index_webapps,
                               messages, sign_mock):
        """With PUBLISH_HIDDEN the app lands UNLISTED; packaged-app
        bookkeeping (manifest refresh, signing) still runs."""
        self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
    def test_pending_to_approved(self, update_name, update_locales,
                                 update_cached_manifests, index_webapps,
                                 messages, sign_mock):
        """With PUBLISH_PRIVATE the app lands APPROVED (private), logging
        APPROVE_VERSION_PRIVATE; bookkeeping and signing still run."""
        self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        # File status is PUBLIC since it is the only version.
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
    def test_pending_to_rejected(self, update_name, update_locales,
                                 update_cached_manifests, index_webapps,
                                 messages, sign_mock):
        """Rejection marks app and file REJECTED; nothing is signed and no
        manifest/name/locale bookkeeping runs, only reindexing."""
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_REJECTED)
        eq_(self.file.reload().status, mkt.STATUS_REJECTED)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)
    def test_pending_to_approved_app_private_prior_version_rejected(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """
        Test that everything works out ok when v1.0 was rejected and developer
        submitted v1.1 that is then approved. This should still be considered a
        packaged review (not an update) and set the approved version to PUBLIC
        since the prior version is DISABLED. See bug 1075042.
        """
        self.app.update(status=mkt.STATUS_REJECTED,
                        publish_type=mkt.PUBLISH_PRIVATE)
        self.file.update(status=mkt.STATUS_DISABLED)
        self.new_version = version_factory(
            addon=self.app, version='1.1',
            file_kw={'status': mkt.STATUS_PENDING})
        index_webapps.delay.reset_mock()
        update_cached_manifests.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(self.app.current_version, None)
        eq_(self.app.latest_version, self.new_version)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        eq_(app.latest_version, self.new_version)
        eq_(app.current_version, self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.new_version.pk)
# Stacked @mock.patch decorators inject mocks bottom-up, so each test
# receives (update_name, update_locales, update_cached_manifests,
# index_webapps, messages, sign_mock) in that order.
@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedVersions(AppReviewerTest, TestReviewMixin,
                                  AttachmentManagementMixin,
                                  TestedonManagementMixin):
    """
    A separate test class for packaged apps with a 2nd version going to an
    approved state.
    We're doing this to make the mocks easier to handle.
    """
    fixtures = fixture('webapp_337141')
    def setUp(self):
        # The fixture app is already PUBLIC with version 1; a pending 2.0
        # version is added so every test reviews an *update*.
        super(TestApprovePackagedVersions, self).setUp()
        self.mozilla_contact = 'contact@mozilla.com'
        self.app = self.get_app()
        self.file = self.app.latest_version.files.all()[0]
        self.app.update(status=mkt.STATUS_PUBLIC,
                        mozilla_contact=self.mozilla_contact,
                        is_packaged=True)
        self.new_version = version_factory(
            addon=self.app, version='2.0',
            file_kw={'status': mkt.STATUS_PENDING})
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
    def get_app(self):
        # Always re-fetch so assertions see fresh DB state.
        return Webapp.objects.get(id=337141)
    def _check_message(self, msg):
        # Reviewing an update of a packaged app is worth 40 points.
        eq_(msg.call_args_list[0][0][1],
            '"Updated Packaged App Review" successfully processed '
            '(+40 points, 40 total).')
    def test_version_pending_to_public(self, update_name, update_locales,
                                       update_cached_manifests, index_webapps,
                                       messages, sign_mock):
        """Approving the 2.0 version makes it the PUBLIC current version."""
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        eq_(app.current_version, self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], app.current_version.pk)
    def test_version_pending_to_approved(self, update_name, update_locales,
                                         update_cached_manifests,
                                         index_webapps, messages,
                                         sign_mock):
        """With PUBLISH_PRIVATE the 2.0 version ends up APPROVED and does
        NOT become the current version; the old public one stays current."""
        self.app.update(publish_type=mkt.PUBLISH_PRIVATE)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.new_version.pk)
    def test_version_pending_to_public_app_unlisted(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """For an UNLISTED app, approving 2.0 keeps the app UNLISTED while
        the version goes PUBLIC and becomes current."""
        self.app.update(status=mkt.STATUS_UNLISTED)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        eq_(app.current_version, self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], app.current_version.pk)
    def test_version_pending_to_approved_app_unlisted(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """UNLISTED app + PUBLISH_PRIVATE: the 2.0 version is APPROVED but
        the previous version stays current."""
        self.app.update(status=mkt.STATUS_UNLISTED,
                        publish_type=mkt.PUBLISH_PRIVATE)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.new_version.pk)
    def test_version_pending_to_public_app_private(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """For an APPROVED (private) app, approving 2.0 keeps the app
        APPROVED while the version goes PUBLIC and becomes current."""
        self.app.update(status=mkt.STATUS_APPROVED)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        eq_(app.current_version, self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], app.current_version.pk)
    def test_version_pending_to_approved_app_private(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """APPROVED (private) app + PUBLISH_PRIVATE: the 2.0 version is
        APPROVED but the previous version stays current."""
        self.app.update(status=mkt.STATUS_APPROVED,
                        publish_type=mkt.PUBLISH_PRIVATE)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.new_version.pk)
    def test_version_pending_to_rejected_app_public(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Rejecting 2.0 of a PUBLIC app leaves the app and its current
        version untouched; only the new version is REJECTED."""
        self.app.update(status=mkt.STATUS_PUBLIC)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)
    def test_version_pending_to_rejected_app_unlisted(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Same as above for an UNLISTED app."""
        self.app.update(status=mkt.STATUS_UNLISTED)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)
    def test_version_pending_to_rejected_app_private(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Same as above for an APPROVED (private) app."""
        self.app.update(status=mkt.STATUS_APPROVED)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)
class TestReviewLog(AppReviewerTest, AccessMixin):
    def setUp(self):
        super(TestReviewLog, self).setUp()
        # Note: if `created` is not specified, `app_factory` uses a randomly
        # generated timestamp.
        self.apps = [app_factory(name='XXX', created=days_ago(3),
                                 status=mkt.STATUS_PENDING),
                     app_factory(name='YYY', created=days_ago(2),
                                 status=mkt.STATUS_PENDING)]
        self.url = reverse('reviewers.apps.logs')
        # Pin TASK_USER_ID to the admin user so task-generated log entries
        # are attributed consistently; automatically undone at teardown.
        patcher = mock.patch.object(settings, 'TASK_USER_ID',
                                    self.admin_user.id)
        patcher.start()
        self.addCleanup(patcher.stop)
    def get_user(self):
        # The user to whom the logged review actions are attributed.
        return self.reviewer_user
def make_approvals(self):
d = 1
for app in self.apps:
days_ago = self.days_ago(d)
mkt.log(mkt.LOG.REJECT_VERSION, app, app.latest_version,
user=self.get_user(), details={'comments': 'youwin'},
created=days_ago)
# Throw in a few tasks logs that shouldn't get queried.
mkt.log(mkt.LOG.REREVIEW_MANIFEST_CHANGE, app, app.latest_version,
user=self.admin_user, details={'comments': 'foo'},
created=days_ago)
d += 1
def make_an_approval(self, action, comment='youwin', user=None, app=None):
if not user:
user = self.get_user()
if not app:
app = self.apps[0]
mkt.log(action, app, app.latest_version, user=user,
details={'comments': comment})
def test_basic(self):
self.make_approvals()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
assert doc('#log-filter button'), 'No filters.'
# Should have 2 showing.
rows = doc('tbody tr')
logs = rows.filter(':not(.hide)')
eq_(logs.length, 2)
# Ensure that the app links are valid.
eq_(logs.find('.name .app-link').eq(0).attr('href'),
self.apps[0].get_url_path())
eq_(logs.find('.name .app-link').eq(1).attr('href'),
self.apps[1].get_url_path())
eq_(rows.filter('.hide').eq(0).text(), 'youwin')
def test_search_app_soft_deleted(self):
self.make_approvals()
self.apps[0].update(status=mkt.STATUS_DELETED)
res = self.client.get(self.url)
eq_(res.status_code, 200)
doc = pq(res.content)
all_reviews = [d.attrib.get('data-addonid')
for d in doc('#log-listing tbody tr')]
assert str(self.apps[0].pk) in all_reviews, (
'Soft deleted review did not show up in listing')
def test_xss(self):
a = self.apps[0]
a.name = '<script>alert("xss")</script>'
a.save()
mkt.log(mkt.LOG.REJECT_VERSION, a, a.latest_version,
user=self.get_user(), details={'comments': 'xss!'})
r = self.client.get(self.url)
eq_(r.status_code, 200)
inner_html = pq(r.content)('#log-listing tbody td').eq(1).html()
assert '<script>' in inner_html
assert '<script>' not in inner_html
def test_end_filter(self):
"""
Let's use today as an end-day filter and make sure we see stuff if we
filter.
"""
self.make_approvals()
# Make sure we show the stuff we just made.
date = time.strftime('%Y-%m-%d')
r = self.client.get(self.url, dict(end=date))
eq_(r.status_code, 200)
doc = pq(r.content)('#log-listing tbody')
eq_(doc('tr:not(.hide)').length, 2)
eq_(doc('tr.hide').eq(0).text(), 'youwin')
def test_end_filter_wrong(self):
"""
Let's use today as an end-day filter and make sure we see stuff if we
filter.
"""
self.make_approvals()
r = self.client.get(self.url, dict(end='wrong!'))
# If this is broken, we'll get a traceback.
eq_(r.status_code, 200)
eq_(pq(r.content)('#log-listing tr:not(.hide)').length, 3)
def test_search_comment_exists(self):
"""Search by comment."""
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello')
r = self.client.get(self.url, dict(search='hello'))
eq_(r.status_code, 200)
eq_(pq(r.content)('#log-listing tbody tr.hide').eq(0).text(), 'hello')
def test_search_comment_doesnt_exist(self):
"""Search by comment, with no results."""
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello')
r = self.client.get(self.url, dict(search='bye'))
eq_(r.status_code, 200)
eq_(pq(r.content)('.no-results').length, 1)
def test_search_author_exists(self):
"""Search by author."""
self.make_approvals()
user = UserProfile.objects.get(email='regular@mozilla.com')
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user, comment='hi')
r = self.client.get(self.url, dict(search='regular'))
eq_(r.status_code, 200)
rows = pq(r.content)('#log-listing tbody tr')
eq_(rows.filter(':not(.hide)').length, 1)
eq_(rows.filter('.hide').eq(0).text(), 'hi')
def test_search_author_doesnt_exist(self):
"""Search by author, with no results."""
self.make_approvals()
user = UserProfile.objects.get(email='editor@mozilla.com')
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user)
r = self.client.get(self.url, dict(search='wrong'))
eq_(r.status_code, 200)
eq_(pq(r.content)('.no-results').length, 1)
def test_search_addon_exists(self):
"""Search by add-on name."""
self.make_approvals()
app = self.apps[0]
r = self.client.get(self.url, dict(search=app.name))
eq_(r.status_code, 200)
tr = pq(r.content)('#log-listing tr[data-addonid="%s"]' % app.id)
eq_(tr.length, 1)
eq_(tr.siblings('.comments').text(), 'youwin')
def test_search_addon_by_slug_exists(self):
"""Search by app slug."""
app = self.apps[0]
app.app_slug = 'a-fox-was-sly'
app.save()
self.make_approvals()
r = self.client.get(self.url, dict(search='fox'))
eq_(r.status_code, 200)
tr = pq(r.content)('#log-listing tr[data-addonid="%s"]' % app.id)
eq_(tr.length, 1)
eq_(tr.siblings('.comments').text(), 'youwin')
def test_search_addon_doesnt_exist(self):
"""Search by add-on name, with no results."""
self.make_approvals()
r = self.client.get(self.url, dict(search='zzz'))
eq_(r.status_code, 200)
eq_(pq(r.content)('.no-results').length, 1)
@mock.patch('mkt.developers.models.ActivityLog.arguments', new=mock.Mock)
def test_addon_missing(self):
self.make_approvals()
r = self.client.get(self.url)
eq_(pq(r.content)('#log-listing tr td').eq(1).text(),
'App has been deleted.')
def test_request_info_logs(self):
self.make_an_approval(mkt.LOG.REQUEST_INFORMATION)
r = self.client.get(self.url)
eq_(pq(r.content)('#log-listing tr td a').eq(1).text(),
'More information requested')
def test_escalate_logs(self):
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL)
r = self.client.get(self.url)
eq_(pq(r.content)('#log-listing tr td a').eq(1).text(),
'Reviewer escalation')
def test_no_double_encode(self):
version = self.apps[0].latest_version
version.update(version='<foo>')
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL)
r = self.client.get(self.url)
assert '<foo>' in pq(r.content)('#log-listing tr td').eq(1).text(), (
'Double-encoded string was found in reviewer log.')
class TestMotd(AppReviewerTest, AccessMixin):
    """The reviewer "message of the day" page: visible to any reviewer,
    editable only with the AppReviewerMOTD:Edit permission."""

    def setUp(self):
        super(TestMotd, self).setUp()
        self.url = reverse('reviewers.apps.motd')
        # Config key under which the MOTD text is stored.
        self.key = u'mkt_reviewers_motd'
        set_config(self.key, u'original value')

    def test_perms_not_editor(self):
        # Anonymous users are redirected to login; non-reviewers get a 403.
        self.client.logout()
        req = self.client.get(self.url, follow=True)
        self.assert3xx(req, '%s?to=%s' % (reverse('users.login'), self.url))
        self.client.login('regular@mozilla.com')
        eq_(self.client.get(self.url).status_code, 403)

    def test_perms_not_motd(self):
        # Any type of reviewer can see the MOTD.
        self.login_as_editor()
        req = self.client.get(self.url)
        eq_(req.status_code, 200)
        # Without the edit permission no form is rendered...
        eq_(req.context['form'], None)
        # No redirect means it didn't save.
        eq_(self.client.post(self.url, dict(motd='motd')).status_code, 200)
        eq_(get_config(self.key), u'original value')

    def test_motd_change(self):
        # Only users in the MOTD group can POST.
        user = self.reviewer_user
        self.grant_permission(user, 'AppReviewerMOTD:Edit')
        self.login_as_editor()
        # Get is a 200 with a form.
        req = self.client.get(self.url)
        eq_(req.status_code, 200)
        eq_(req.context['form'].initial['motd'], u'original value')
        # Empty post throws an error.
        req = self.client.post(self.url, dict(motd=''))
        eq_(req.status_code, 200)  # Didn't redirect after save.
        eq_(pq(req.content)('#editor-motd .errorlist').text(),
            'This field is required.')
        # A real post now.
        req = self.client.post(self.url, dict(motd='new motd'))
        self.assert3xx(req, self.url)
        eq_(get_config(self.key), u'new motd')
class TestReviewAppComm(AppReviewerTest, AttachmentManagementMixin,
                        TestReviewMixin, TestedonManagementMixin):
    """
    Integration test that notes are created and that emails are
    sent to the right groups of people.
    """
    def setUp(self):
        super(TestReviewAppComm, self).setUp()
        self.app = app_factory(rated=True, status=mkt.STATUS_PENDING,
                               mozilla_contact='contact@mozilla.com')
        self.app.addonuser_set.create(user=user_factory(email='steamcube'))
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
        self.mozilla_contact = 'contact@mozilla.com'

    def _post(self, data, queue='pending'):
        # POST a review action and expect a redirect back to the queue.
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))

    def _get_note(self):
        # Exactly one thread with exactly one note is expected after a
        # single review action.
        eq_(self.app.threads.count(), 1)
        thread = self.app.threads.all()[0]
        eq_(thread.notes.count(), 1)
        return thread.notes.all()[0]

    def test_email_cc(self):
        """
        Emailed cc'ed people (those who have posted on the thread).
        """
        poster = user_factory()
        thread, note = create_comm_note(
            self.app, self.app.latest_version, poster, 'lgtm')
        data = {'action': 'public', 'comments': 'gud jerb'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test emails.
        self._check_email_dev_and_contact(None, outbox_len=5)
        # Some person who joined the thread.
        self._check_email(
            self._get_mail(poster.email), 'Approved', to=[poster.email])

    def test_approve(self):
        """
        On approval, send an email to [developer, mozilla contact].
        """
        data = {'action': 'public', 'comments': 'gud jerb'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.APPROVAL)
        eq_(note.body, 'gud jerb')
        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_reject(self):
        """
        On rejection, send an email to [developer, mozilla contact].
        """
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REJECTION)
        eq_(note.body, 'rubesh')
        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_info(self):
        """
        On info request, send an email to [developer, mozilla contact].
        """
        data = {'action': 'info', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.MORE_INFO_REQUIRED)
        eq_(note.body, 'huh')
        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_escalate(self):
        """
        On escalation, send an email to senior reviewers and developer.
        """
        data = {'action': 'escalate', 'comments': 'soup her man'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.ESCALATION)
        eq_(note.body, 'soup her man')
        # Test emails.
        eq_(len(mail.outbox), 2)
        self._check_email(  # Senior reviewer.
            self._get_mail(self.snr_reviewer_user.email), 'Escalated',
            to=[self.snr_reviewer_user.email])
        self._check_email(self._get_mail('steamcube'), 'Escalated')

    def test_comment(self):
        """
        On reviewer comment, send an email to those but developers.
        """
        data = {'action': 'comment', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REVIEWER_COMMENT)
        eq_(note.body, 'huh')
        # Test emails.  Private comments go only to the mozilla contact.
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], 'Private reviewer comment',
                          to=[self.mozilla_contact])

    def test_disable(self):
        """
        On banning, send an email to [developer, mozilla contact].
        """
        self.login_as_admin()
        data = {'action': 'disable', 'comments': 'u dun it'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.DISABLED)
        eq_(note.body, 'u dun it')
        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_attachments(self):
        data = {'action': 'comment', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=2))
        data.update(self._attachments(num=2))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test attachments.
        note = self._get_note()
        eq_(note.attachments.count(), 2)

    def test_tested_on_one(self):
        """Tested 'Tested on' message appended to note body."""
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form(num=1))
        data.update(self._platforms(1))
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REJECTION)
        eq_(note.body, u'rubesh\n\n'
            u'Tested on \xd0esktop platform on PC with version 34')

    def test_tested_on_two(self):
        """Tested two 'Tested on' messages appended to note body."""
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form(num=2))
        data.update(self._platforms(2))
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REJECTION)
        eq_(note.body, u'rubesh\n\n'
            u'Tested on \xd0esktop platform on PC with version 34; '
            u'FirefoxOS platform on ZT\xc8 Open with version 1.3<')
class TestModeratedQueue(mkt.site.tests.TestCase, AccessMixin):
    """Queue of user ratings flagged for moderation (skip/delete/keep)."""

    def setUp(self):
        super(TestModeratedQueue, self).setUp()
        self.app = app_factory()
        self.moderator_user = user_factory(email='moderator')
        self.grant_permission(self.moderator_user, 'Apps:ModerateReview')
        user_factory(email='regular')
        user1 = user_factory()
        user2 = user_factory()
        self.url = reverse('reviewers.apps.queue_moderated')
        # Two flagged reviews: one SPAM, one SUPPORT.
        self.review1 = Review.objects.create(addon=self.app, body='body',
                                             user=user1, rating=3,
                                             editorreview=True)
        ReviewFlag.objects.create(review=self.review1, flag=ReviewFlag.SPAM,
                                  user=user1)
        self.review2 = Review.objects.create(addon=self.app, body='body',
                                             user=user2, rating=4,
                                             editorreview=True)
        ReviewFlag.objects.create(review=self.review2, flag=ReviewFlag.SUPPORT,
                                  user=user2)
        self.login(self.moderator_user)

    def _post(self, action):
        # Submit the moderation formset with `action` on the first review.
        ctx = self.client.get(self.url).context
        data_formset = formset(initial(ctx['reviews_formset'].forms[0]))
        data_formset['form-0-action'] = action
        res = self.client.post(self.url, data_formset)
        self.assert3xx(res, self.url)

    def _get_logs(self, action):
        return ActivityLog.objects.filter(action=action.id)

    def test_anonymous_flagger(self):
        ReviewFlag.objects.all()[0].update(user=None)
        ReviewFlag.objects.all()[1].delete()
        res = self.client.get(self.url)
        txt = pq(res.content)('.reviews-flagged-reasons li div span').text()
        teststring = u'Flagged by an anonymous user on'
        ok_(txt.startswith(teststring),
            '"%s" doesn\'t start with "%s"' % (txt, teststring))

    def test_setup(self):
        eq_(Review.objects.filter(editorreview=True).count(), 2)
        eq_(ReviewFlag.objects.filter(flag=ReviewFlag.SPAM).count(), 1)
        res = self.client.get(self.url)
        doc = pq(res.content)('#reviews-flagged')
        # Test the default action is "skip".
        eq_(doc('.reviewers-desktop #id_form-0-action_1:checked').length, 1)

    def test_skip(self):
        # Skip the first review, which still leaves two.
        self._post(mkt.ratings.REVIEW_MODERATE_SKIP)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 2)

    def test_delete(self):
        # Delete the first review, which leaves one.
        self._post(mkt.ratings.REVIEW_MODERATE_DELETE)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        eq_(self._get_logs(mkt.LOG.DELETE_REVIEW).count(), 1)

    def test_keep(self):
        # Keep the first review, which leaves one.
        self._post(mkt.ratings.REVIEW_MODERATE_KEEP)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        eq_(self._get_logs(mkt.LOG.APPROVE_REVIEW).count(), 1)

    def test_no_reviews(self):
        Review.objects.all().delete()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(pq(res.content)('#reviews-flagged .no-results').length, 1)

    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a')[0].text, u'Moderated Reviews (2)')

    def test_queue_count_reviewer_and_moderator(self):
        # With both permissions, all reviewer tabs show up.
        self.grant_permission(self.moderator_user, 'Apps:Review')
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Reviewing (0)')
        eq_(links[4].text, u'Homescreens (0)')
        eq_(links[5].text, u'Moderated Reviews (2)')

    def test_deleted_app(self):
        "Test that a deleted app doesn't break the queue."
        self.app.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)

    def test_queue_count_deleted_app(self):
        self.app.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a')[0].text, u'Moderated Reviews (0)')
class AbuseQueueMixin(object):
    """Shared tests for the app/website abuse-report queues.

    Subclasses provide `perm` (permission string), `view_name` (queue URL
    name) and `log_const` (mark-read activity-log constant), and create two
    reported objects (plus extras) in setUpTestData().
    """

    def _setUp(self):
        self.abuseviewer_user = user_factory(email='abuser')
        self.grant_permission(self.abuseviewer_user, self.perm)
        self.login(self.abuseviewer_user)
        user_factory(email='regular')
        self.url = reverse(self.view_name)

    def _post(self, action, form_index=0):
        # Submit the abuse formset with `action` on the form at `form_index`.
        ctx = self.client.get(self.url).context
        data_formset = formset(initial(ctx['abuse_formset'].forms[0]))
        data_formset['form-%s-action' % (form_index)] = action
        res = self.client.post(self.url, data_formset)
        self.assert3xx(res, self.url)

    def _get_logs(self, action):
        return ActivityLog.objects.filter(action=action.id)

    def test_anonymous_flagger(self):
        AbuseReport.objects.all()[0].update(reporter=None)
        res = self.client.get(self.url)
        txt = pq(res.content)('.abuse-reports-reports li div span').text()
        teststring = u'Submitted by an anonymous user on'
        ok_(txt.startswith(teststring),
            '"%s" doesn\'t start with "%s"' % (txt, teststring))

    def test_no_reviews(self):
        AbuseReport.objects.all().delete()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(pq(res.content)('#abuse-reports .no-results').length, 1)

    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        txt = pq(r.content)('.tabnav li a')[0].text
        teststring = u'Abuse Reports (2)'
        # Failure message fixed: this check uses endswith, not startswith.
        ok_(txt.endswith(teststring),
            '"%s" doesn\'t end with "%s"' % (txt, teststring))

    def test_skip(self):
        # Skip the first xxx's reports, which still leaves 2 apps/sites.
        self._post(mkt.abuse.forms.ABUSE_REPORT_SKIP)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 2)

    def test_first_read(self):
        # Mark read the first xxx's reports, which leaves one.
        self._post(mkt.abuse.forms.ABUSE_REPORT_READ)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        # There are two abuse reports for app1/website1, so two log entries.
        eq_(self._get_logs(self.log_const).count(), 2)
        # Check the remaining abuse report remains unread.
        eq_(AbuseReport.objects.filter(read=False).count(), 1)

    def test_first_flag(self):
        # Flag the first xxx's reports.
        self._post(mkt.abuse.forms.ABUSE_REPORT_FLAG)
        res = self.client.get(self.url)
        # Check one is left.
        eq_(len(res.context['page'].object_list), 1)
        # Check the object is flagged.
        eq_(RereviewQueue.objects.count(), 1)
        # As flagging marks read too, there should be 2 log entries.
        eq_(self._get_logs(self.log_const).count(), 2)
        # Check the remaining abuse report remains unread.
        eq_(AbuseReport.objects.filter(read=False).count(), 1)

    def test_xss(self):
        xss = '<script>alert("xss")</script>'
        AbuseReport.objects.all()[0].update(message=xss)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tbody = pq(res.content)(
            '#abuse-reports .abuse-reports-reports').html()
        # The message must appear HTML-escaped in the raw markup, never
        # verbatim.  (Asserting '<script>' both in and not in the same
        # string, as before, was contradictory — the first assertion must
        # look for the escaped form.)
        assert '&lt;script&gt;' in tbody
        assert '<script>' not in tbody

    def test_deleted_website(self):
        "Test that a deleted app/website doesn't break the queue."
        AbuseReport.objects.all()[0].object.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        txt = pq(r.content)('.tabnav li a')[0].text
        teststring = u'Abuse Reports (1)'
        # Failure message fixed: this check uses endswith, not startswith.
        ok_(txt.endswith(teststring),
            '"%s" doesn\'t end with "%s"' % (txt, teststring))
class TestAppAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
                        AbuseQueueMixin):
    """App flavour of the abuse-report queue tests from AbuseQueueMixin."""

    perm = 'Apps:ReadAbuse'
    view_name = 'reviewers.apps.queue_abuse'
    log_const = mkt.LOG.APP_ABUSE_MARKREAD

    def setUp(self):
        super(TestAppAbuseQueue, self).setUp()
        self._setUp()

    @classmethod
    def setUpTestData(cls):
        # Two apps with reports (app1 has two, app2 has one)...
        app1 = app_factory()
        app2 = app_factory()
        # Add some extra apps, which shouldn't show up.
        app_factory()
        app_factory()
        user1 = user_factory()
        user2 = user_factory()
        AbuseReport.objects.create(reporter=user1, ip_address='123.45.67.89',
                                   addon=app1, message='bad')
        AbuseReport.objects.create(reporter=user2, ip_address='123.01.67.89',
                                   addon=app1, message='terrible')
        AbuseReport.objects.create(reporter=user1, ip_address='123.01.02.89',
                                   addon=app2, message='the worst')

    def test_setup(self):
        eq_(AbuseReport.objects.filter(read=False).count(), 3)
        eq_(AbuseReport.objects.filter(addon=Webapp.objects.all()[0]).count(),
            2)
        res = self.client.get(self.url)
        # Check there are 2 apps listed.
        eq_(len(res.context['page'].object_list), 2)

    def test_queue_count_reviewer_and_moderator(self):
        # With both permissions, all reviewer tabs show up.
        self.grant_permission(self.abuseviewer_user, 'Apps:Review')
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Reviewing (0)')
        eq_(links[4].text, u'Homescreens (0)')
        eq_(links[5].text, u'Abuse Reports (2)')
class TestWebsiteAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
                            AbuseQueueMixin):
    """Website flavour of the abuse-report queue tests from
    AbuseQueueMixin."""

    perm = 'Websites:ReadAbuse'
    view_name = 'reviewers.websites.queue_abuse'
    log_const = mkt.LOG.WEBSITE_ABUSE_MARKREAD

    def setUp(self):
        super(TestWebsiteAbuseQueue, self).setUp()
        self._setUp()

    @classmethod
    def setUpTestData(cls):
        # Two websites with reports (website1 has two, website2 has one)...
        website1 = website_factory()
        website2 = website_factory()
        # Add some extra sites, which shouldn't show up.
        website_factory()
        website_factory()
        user1 = user_factory()
        user2 = user_factory()
        AbuseReport.objects.create(reporter=user1, ip_address='123.45.67.89',
                                   website=website1, message='bad')
        AbuseReport.objects.create(reporter=user2, ip_address='123.01.67.89',
                                   website=website1, message='terrible')
        AbuseReport.objects.create(reporter=user1, ip_address='123.01.02.89',
                                   website=website2, message='the worst')
        cls.website1 = website1

    def test_setup(self):
        eq_(AbuseReport.objects.filter(read=False).count(), 3)
        eq_(AbuseReport.objects.filter(website=self.website1).count(), 2)
        res = self.client.get(self.url)
        # Check there are 2 websites listed.
        eq_(len(res.context['page'].object_list), 2)

    def test_first_flag(self):
        # Override of the mixin test: no re-review flagging for Websites
        # yet - no re-review queue!
        raise SkipTest()
class TestGetSigned(BasePackagedAppTest, mkt.site.tests.TestCase):
    """Serving reviewer-signed packages, including one-time token access."""

    def setUp(self):
        super(TestGetSigned, self).setUp()
        self.url = reverse('reviewers.signed', args=[self.app.app_slug,
                                                     self.version.pk])
        self.grant_permission(user_factory(email='editor'), 'Apps:Review')
        self.login('editor@mozilla.com')

    def test_not_logged_in(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_not_reviewer(self):
        self.client.logout()
        self.login(user_factory())
        eq_(self.client.get(self.url).status_code, 403)

    @override_settings(
        DEFAULT_FILE_STORAGE='mkt.site.storage_utils.LocalFileStorage')
    @mock.patch('lib.crypto.packaged.sign')
    def test_reviewer_sign_arguments_local(self, sign_mock):
        # With local storage, the signed file is served directly (200).
        sign_mock.side_effect = mock_sign
        self.setup_files()
        res = self.client.get(self.url)
        sign_mock.assert_called_with(self.version.pk, reviewer=True)
        eq_(res.status_code, 200)

    @override_settings(
        DEFAULT_FILE_STORAGE='mkt.site.storage_utils.S3BotoPrivateStorage')
    @mock.patch('lib.crypto.packaged.sign')
    def test_reviewer_sign_arguments_storage(self, sign_mock):
        # With remote (S3) storage, the view redirects to the storage URL.
        sign_mock.side_effect = mock_sign
        self.setup_files()
        res = self.client.get(self.url)
        sign_mock.assert_called_with(self.version.pk, reviewer=True)
        self.assert3xx(res, private_storage.url(
            self.file.signed_reviewer_file_path))

    @mock.patch.object(packaged, 'sign', mock_sign)
    def test_reviewer(self):
        # Only meaningful when x-sendfile delivery is enabled.
        if not settings.XSENDFILE:
            raise SkipTest()
        self.setup_files()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        file_ = self.app.current_version.all_files[0]
        eq_(res['x-sendfile'], file_.signed_reviewer_file_path)
        eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1])

    def test_not_packaged(self):
        self.app.update(is_packaged=False)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_wrong_version(self):
        self.url = reverse('reviewers.signed', args=[self.app.app_slug, 0])
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_token_good(self):
        # A valid token grants one (and only one) anonymous download.
        if not settings.XSENDFILE:
            raise SkipTest()
        token = Token(data={'app_id': self.app.id})
        token.save()
        self.setup_files()
        self.client.logout()
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 200)
        file_ = self.app.current_version.all_files[0]
        eq_(res['x-sendfile'], file_.signed_reviewer_file_path)
        eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1])
        # Test token doesn't work the 2nd time.
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)

    def test_token_bad(self):
        # A token for a different app id is rejected.
        token = Token(data={'app_id': 'abcdef'})
        token.save()
        self.setup_files()
        self.client.logout()
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)
class TestMiniManifestView(BasePackagedAppTest):
    """The reviewer-facing mini-manifest: a manifest pointing at the
    reviewer-signed package, served only to reviewers (or via token)."""

    def setUp(self):
        super(TestMiniManifestView, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.app.update(is_packaged=True)
        self.version = self.app.versions.latest()
        self.file = self.version.all_files[0]
        self.file.update(filename='mozball.zip')
        self.url = reverse('reviewers.mini_manifest', args=[self.app.app_slug,
                                                            self.version.pk])
        self.grant_permission(user_factory(email='editor'), 'Apps:Review')
        self.login('editor@mozilla.com')

    def test_not_logged_in(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_not_reviewer(self):
        self.client.logout()
        self.login(user_factory())
        eq_(self.client.get(self.url).status_code, 403)

    def test_not_packaged(self):
        self.app.update(is_packaged=False)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_wrong_version(self):
        url = reverse('reviewers.mini_manifest', args=[self.app.app_slug, 0])
        res = self.client.get(url)
        eq_(res.status_code, 404)

    def test_reviewer(self):
        self.setup_files()
        manifest = self.app.get_manifest_json(self.file)
        res = self.client.get(self.url)
        eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])
        eq_(data['developer']['name'], 'Mozilla Marketplace')
        # package_path must point at the reviewer-signed package view.
        eq_(data['package_path'],
            absolutify(reverse('reviewers.signed',
                               args=[self.app.app_slug, self.version.id])))

    def test_rejected(self):
        # Rejected sets file.status to DISABLED and moves to a guarded path.
        self.setup_files()
        self.app.update(status=mkt.STATUS_REJECTED)
        self.file.update(status=mkt.STATUS_DISABLED)
        manifest = self.app.get_manifest_json(self.file)
        res = self.client.get(self.url)
        eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])
        eq_(data['developer']['name'], 'Mozilla Marketplace')
        eq_(data['package_path'],
            absolutify(reverse('reviewers.signed',
                               args=[self.app.app_slug,
                                     self.version.id])))

    def test_minifest_name_matches_manifest_name(self):
        self.setup_files()
        self.app.name = 'XXX'
        self.app.save()
        manifest = self.app.get_manifest_json(self.file)
        res = self.client.get(self.url)
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])

    def test_token_good(self):
        # A valid token grants one (and only one) anonymous fetch, and the
        # package_path it hands out carries a token too.
        token = Token(data={'app_id': self.app.id})
        token.save()
        self.setup_files()
        self.client.logout()
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 200)
        eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
        data = json.loads(res.content)
        ok_('token=' in data['package_path'])
        # Test token doesn't work the 2nd time.
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)

    def test_token_bad(self):
        # A token for a different app id is rejected.
        token = Token(data={'app_id': 'abcdef'})
        token.save()
        self.setup_files()
        self.client.logout()
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)
class TestReviewersScores(AppReviewerTest, AccessMixin):
    """Reviewer performance page: profile lookup by email, default to the
    logged-in reviewer, and the empty state with no points yet."""

    def setUp(self):
        super(TestReviewersScores, self).setUp()
        self.user = self.reviewer_user
        self.url = reverse('reviewers.performance', args=[self.user.email])

    def test_404(self):
        # An unknown email segment yields a 404.
        unknown_url = reverse('reviewers.performance', args=['poop'])
        response = self.client.get(unknown_url)
        eq_(response.status_code, 404)

    def test_with_email(self):
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        eq_(response.context['profile'].id, self.user.id)

    def test_without_email(self):
        # Without an email the page defaults to the logged-in reviewer.
        response = self.client.get(reverse('reviewers.performance'))
        eq_(response.status_code, 200)
        eq_(response.context['profile'].id, self.user.id)

    def test_no_reviews(self):
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        assert u'No review points awarded yet' in response.content
class TestQueueSort(AppReviewerTest):
def setUp(self):
super(TestQueueSort, self).setUp()
"""Create and set up apps for some filtering fun."""
self.apps = [app_factory(name='Lillard',
status=mkt.STATUS_PENDING,
is_packaged=False,
version_kw={'version': '1.0'},
file_kw={'status': mkt.STATUS_PENDING},
premium_type=mkt.ADDON_FREE),
app_factory(name='Batum',
status=mkt.STATUS_PENDING,
is_packaged=True,
version_kw={'version': '1.0',
'has_editor_comment': True,
'has_info_request': True},
file_kw={'status': mkt.STATUS_PENDING},
premium_type=mkt.ADDON_PREMIUM)]
# Set up app attributes.
self.apps[0].update(created=self.days_ago(2))
self.apps[1].update(created=self.days_ago(5))
self.apps[0].addonuser_set.create(user=user_factory(email='XXX'))
self.apps[1].addonuser_set.create(user=user_factory(email='illmatic'))
self.apps[0].addondevicetype_set.create(
device_type=mkt.DEVICE_DESKTOP.id)
self.apps[1].addondevicetype_set.create(
device_type=mkt.DEVICE_MOBILE.id)
self.url = reverse('reviewers.apps.queue_pending')
    def test_do_sort_webapp(self):
        """
        Test that apps are sorted in order specified in GET params.
        """
        rf = RequestFactory()
        qs = Webapp.objects.all()
        # Test apps are sorted by created/asc by default (an unrecognized
        # sort key falls back to the default ordering).
        req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[1], self.apps[0]])
        # Test sorting by created, descending.
        req = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[0], self.apps[1]])
        # Test sorting by app name.
        req = rf.get(self.url, {'sort': 'name', 'order': 'asc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[1], self.apps[0]])
        req = rf.get(self.url, {'sort': 'name', 'order': 'desc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[0], self.apps[1]])
    def test_do_sort_version_nom(self):
        """Tests version nomination sort order."""
        url = reverse('reviewers.apps.queue_pending')
        user = UserProfile.objects.get(email='editor@mozilla.com')
        version_0 = self.apps[0].versions.get()
        version_0.update(nomination=days_ago(1))
        version_1 = self.apps[1].versions.get()
        version_1.update(nomination=days_ago(2))
        # Throw in some disabled versions, they shouldn't affect order.
        version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[0],
                        nomination=days_ago(10))
        version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1],
                        nomination=days_ago(1))
        version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1],
                        nomination=days_ago(20))
        # Ascending: the older nomination (version_1) comes first.
        req = mkt.site.tests.req_factory_factory(
            url, user=user, data={'sort': 'nomination'})
        res = queue_apps(req)
        doc = pq(res.content)
        # Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
        eq_(doc('tbody tr')[0].get('data-addon'), str(version_1.addon.id))
        eq_(doc('tbody tr')[2].get('data-addon'), str(version_0.addon.id))
        # Descending: the order flips.
        req = mkt.site.tests.req_factory_factory(
            url, user=user, data={'sort': 'nomination', 'order': 'desc'})
        res = queue_apps(req)
        doc = pq(res.content)
        # Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
        eq_(doc('tbody tr')[0].get('data-addon'), str(version_0.addon.id))
        eq_(doc('tbody tr')[2].get('data-addon'), str(version_1.addon.id))
def test_do_sort_queue_object(self):
    """Tests sorting queue object."""
    factory = RequestFactory()
    queue_url = reverse('reviewers.apps.queue_rereview')
    first_rrq = RereviewQueue.objects.create(addon=self.apps[0])
    second_rrq = RereviewQueue.objects.create(addon=self.apps[1])
    second_rrq.created += timedelta(days=1)
    second_rrq.save()

    def sorted_addons(params):
        # Run the helper's sort over the full re-review queue for the
        # given querystring parameters.
        req = factory.get(queue_url, params)
        return list(
            ReviewersQueuesHelper(req).sort(RereviewQueue.objects.all()))

    # Creation order of the RereviewQueue objects is maintained for the
    # ascending created sort.
    eq_([first_rrq.addon, second_rrq.addon],
        sorted_addons({'sort': 'created'}))
    eq_([second_rrq.addon, first_rrq.addon],
        sorted_addons({'sort': 'created', 'order': 'desc'}))
    eq_([second_rrq.addon, first_rrq.addon],
        sorted_addons({'sort': 'name', 'order': 'asc'}))
    eq_([first_rrq.addon, second_rrq.addon],
        sorted_addons({'sort': 'name', 'order': 'desc'}))
def test_sort_with_priority_review(self):
    """Tests the sorts are correct with a priority review flagged app."""
    # Set up the priority review flagged app.
    self.apps.append(app_factory(name='Foxkeh',
                                 status=mkt.STATUS_PENDING,
                                 is_packaged=False,
                                 version_kw={'version': '1.0'},
                                 file_kw={'status': mkt.STATUS_PENDING},
                                 premium_type=mkt.ADDON_FREE,
                                 priority_review=True))
    # Set up app attributes.
    self.apps[2].update(created=self.days_ago(1))
    self.apps[2].addonuser_set.create(
        user=user_factory(email='redpanda@mozilla.com'))
    self.apps[2].addondevicetype_set.create(
        device_type=mkt.DEVICE_DESKTOP.id)
    # And check it also comes out top of waiting time with Webapp model.
    rf = RequestFactory()
    qs = Webapp.objects.all()
    # Test apps are sorted by created/asc by default.
    req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
    sorted_qs = ReviewersQueuesHelper(req).sort(qs)
    # The priority-flagged app sorts first despite being newest.
    eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
    # Test sorting by created, descending.
    req = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
    sorted_qs = ReviewersQueuesHelper(req).sort(qs)
    eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
    # And with Version model.
    version_0 = self.apps[0].versions.get()
    version_0.update(nomination=days_ago(1))
    version_1 = self.apps[1].versions.get()
    version_1.update(nomination=days_ago(2))
    qs = (Version.objects.filter(
        files__status=mkt.STATUS_PENDING,
        addon__disabled_by_user=False,
        addon__status=mkt.STATUS_PENDING)
        .order_by('nomination', 'created')
        .select_related('addon', 'files').no_transforms())
    req = rf.get(self.url, {'sort': 'nomination'})
    sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination')
    eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
    req = rf.get(self.url, {'sort': 'nomination', 'order': 'desc'})
    sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination')
    # The priority app stays first even on a descending sort.
    eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
    # And with Rereview model.
    url = reverse('reviewers.apps.queue_rereview')
    earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0])
    earlier_rrq.created += timedelta(days=1)
    earlier_rrq.save()
    later_rrq = RereviewQueue.objects.create(addon=self.apps[1])
    later_rrq.created += timedelta(days=2)
    later_rrq.save()
    pri_rrq = RereviewQueue.objects.create(addon=self.apps[2])
    pri_rrq.save()
    request = rf.get(url, {'sort': 'created'})
    apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
    eq_([pri_rrq.addon, earlier_rrq.addon, later_rrq.addon], list(apps))
    request = rf.get(url, {'sort': 'created', 'order': 'desc'})
    apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
    eq_([pri_rrq.addon, later_rrq.addon, earlier_rrq.addon], list(apps))
class TestAppsReviewing(AppReviewerTest, AccessMixin):
    """Tests for the "apps currently being reviewed" listing."""

    def setUp(self):
        super(TestAppsReviewing, self).setUp()
        self.url = reverse('reviewers.apps.apps_reviewing')
        self.apps = [app_factory(name='Antelope',
                                 status=mkt.STATUS_PENDING),
                     app_factory(name='Bear',
                                 status=mkt.STATUS_PENDING),
                     app_factory(name='Cougar',
                                 status=mkt.STATUS_PENDING)]

    def _view_app(self, app_id):
        # Mark an app as being viewed by the currently logged-in reviewer.
        self.client.post(reverse('reviewers.review_viewing'), {
            'addon_id': app_id})

    def test_no_apps_reviewing(self):
        res = self.client.get(self.url)
        eq_(len(res.context['apps']), 0)

    def test_apps_reviewing(self):
        self._view_app(self.apps[0].id)
        res = self.client.get(self.url)
        eq_(len(res.context['apps']), 1)

    def test_multiple_reviewers_no_cross_streams(self):
        # Each reviewer only sees the apps they themselves are viewing.
        self._view_app(self.apps[0].id)
        self._view_app(self.apps[1].id)
        res = self.client.get(self.url)
        eq_(len(res.context['apps']), 2)
        # Now view an app as another user and verify app.
        self.login('admin@mozilla.com')
        self._view_app(self.apps[2].id)
        res = self.client.get(self.url)
        eq_(len(res.context['apps']), 1)
        # Check original user again to make sure app list didn't increment.
        self.login_as_editor()
        res = self.client.get(self.url)
        eq_(len(res.context['apps']), 2)
class TestLeaderboard(AppReviewerTest):
    """Tests for the reviewer points leaderboard page."""

    def setUp(self):
        super(TestLeaderboard, self).setUp()
        self.url = reverse('reviewers.leaderboard')
        mkt.set_user(self.reviewer_user)

    def _award_points(self, user, score):
        # Record a manual-review score entry worth `score` points.
        ReviewerScore.objects.create(user=user, note_key=mkt.REVIEWED_MANUAL,
                                     score=score, note='Thing.')

    def test_leaderboard_ranks(self):
        users = (self.reviewer_user,
                 self.regular_user,
                 user_factory(email='clouserw'))
        # users[0] sits just below the first level threshold; the others
        # are just above it.
        self._award_points(users[0], mkt.REVIEWED_LEVELS[0]['points'] - 1)
        self._award_points(users[1], mkt.REVIEWED_LEVELS[0]['points'] + 1)
        self._award_points(users[2], mkt.REVIEWED_LEVELS[0]['points'] + 2)

        def get_cells():
            # Collect the name/level cells of the leaderboard in
            # display order.
            doc = pq(self.client.get(self.url).content.decode('utf-8'))
            cells = doc('#leaderboard > tbody > tr > .name, '
                        '#leaderboard > tbody > tr > .level')
            return [cells.eq(i).text() for i in range(0, cells.length)]

        # A level row separates users above the threshold from users[0].
        eq_(get_cells(),
            [users[2].display_name,
             users[1].display_name,
             mkt.REVIEWED_LEVELS[0]['name'],
             users[0].display_name])
        # One more point pushes users[0] over the first level.
        self._award_points(users[0], 1)
        eq_(get_cells(),
            [users[2].display_name,
             users[1].display_name,
             users[0].display_name,
             mkt.REVIEWED_LEVELS[0]['name']])
        # Drop users[0] back below, and push users[2] to the second level.
        self._award_points(users[0], -1)
        self._award_points(users[2], (mkt.REVIEWED_LEVELS[1]['points'] -
                                      mkt.REVIEWED_LEVELS[0]['points']))
        eq_(get_cells(),
            [users[2].display_name,
             mkt.REVIEWED_LEVELS[1]['name'],
             users[1].display_name,
             mkt.REVIEWED_LEVELS[0]['name'],
             users[0].display_name])
class TestReviewPage(mkt.site.tests.TestCase):
    """Tests for the individual app review page."""

    def setUp(self):
        super(TestReviewPage, self).setUp()
        self.app = app_factory(status=mkt.STATUS_PENDING)
        self.reviewer = user_factory(email='editor')
        self.grant_permission(self.reviewer, 'Apps:Review')
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def test_status_null_disable_approve_btn(self):
        # An incomplete (STATUS_NULL) app can be rejected but the approve
        # action must be disabled.
        self.app.update(status=mkt.STATUS_NULL)
        req = req_factory_factory(self.url, user=self.reviewer)
        res = app_review(req, app_slug=self.app.app_slug)
        doc = pq(res.content)
        assert (doc('#review-actions input[value=public]')
                .parents('li').hasClass('disabled'))
        assert not (doc('#review-actions input[value=reject]')
                    .parents('li').hasClass('disabled'))
class TestAbusePage(AppReviewerTest):
    """Tests for the abuse-reports tab of the review pages."""

    def setUp(self):
        super(TestAbusePage, self).setUp()
        # The app name embeds markup to exercise output escaping.
        self.app = app_factory(name=u'My app é <script>alert(5)</script>')
        self.url = reverse('reviewers.apps.review.abuse',
                           args=[self.app.app_slug])
        AbuseReport.objects.create(addon=self.app, message=self.app.name)

    def testXSS(self):
        from django.utils.encoding import smart_unicode
        from jinja2.utils import escape
        content = smart_unicode(self.client.get(self.url).content)
        # The raw name must never be rendered; only its escaped form may.
        ok_(not unicode(self.app.name) in content)
        ok_(unicode(escape(self.app.name)) in content)
class TestReviewTranslate(RestOAuth):
    """Tests for the review translation endpoint (reviews-translate switch)."""

    def setUp(self):
        super(TestReviewTranslate, self).setUp()
        self.grant_permission(self.profile, 'Apps:ModerateReview')
        self.create_switch('reviews-translate')
        user = user_factory(email='diego')
        app = app_factory(app_slug='myapp~-_')
        self.review = app.reviews.create(title=u'yes', body=u'oui',
                                         addon=app, user=user,
                                         editorreview=True, rating=4)

    def test_regular_call(self):
        # Non-AJAX requests are redirected to Google Translate.
        res = self.client.get(reverse('reviewers.review_translate',
                                      args=[self.review.addon.app_slug,
                                            self.review.id, 'fr']))
        self.assert3xx(res, 'https://translate.google.com/#auto/fr/oui', 302)

    @mock.patch('mkt.reviewers.views.requests')
    def test_ajax_call(self, requests):
        # Mock requests.
        response = mock.Mock(status_code=200)
        response.json.return_value = {
            u'data': {
                u'translations': [{
                    u'translatedText': u'oui',
                    u'detectedSourceLanguage': u'fr'
                }]
            }
        }
        requests.get.return_value = response
        # Call translation.
        review = self.review
        url = reverse('reviewers.review_translate',
                      args=[review.addon.app_slug, review.id, 'fr'])
        res = self.client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        eq_(res.status_code, 200)
        eq_(res.content, '{"body": "oui", "title": "oui"}')

    @mock.patch('mkt.reviewers.views.requests')
    def test_invalid_api_key(self, requests):
        # Mock requests.
        response = mock.Mock(status_code=400)
        response.json.return_value = {
            'error': {
                'code': 400,
                'errors': [
                    {'domain': 'usageLimits',
                     'message': 'Bad Request',
                     'reason': 'keyInvalid'}
                ],
                'message': 'Bad Request'
            }
        }
        requests.get.return_value = response
        # Call translation.
        review = self.review
        res = self.client.get(
            reverse('reviewers.review_translate',
                    args=[review.addon.app_slug, review.id, 'fr']),
            HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        # The upstream error status is propagated to the client.
        eq_(res.status_code, 400)
class TestReviewHistory(mkt.site.tests.TestCase, CommTestMixin):
    """Tests for the communication history widget on the review page."""

    def setUp(self):
        super(TestReviewHistory, self).setUp()
        self.app = self.addon = app_factory()
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
        self.grant_permission(user_factory(email='editor'), 'Apps:Review')
        self.login('editor@mozilla.com')
        self._thread_factory()

    def test_comm_url(self):
        r = self.client.get(self.url)
        doc = pq(r.content)
        eq_(doc('#history .item-history').attr('data-comm-app-url'),
            reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
            '?limit=1&serializer=simple')

    def test_comm_url_multiple_thread(self):
        # A second thread bumps the limit in the comm API URL.
        self._thread_factory()
        r = self.client.get(self.url)
        doc = pq(r.content)
        eq_(doc('#history .item-history').attr('data-comm-app-url'),
            reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
            '?limit=2&serializer=simple')

    def test_comm_url_no_encode(self):
        # Non-ASCII slugs must not end up double-encoded in the URL.
        self.addon = app_factory(app_slug='台北')
        self._thread_factory()
        url = reverse('reviewers.apps.review', args=[self.addon.app_slug])
        r = self.client.get(url)
        doc = pq(r.content)
        eq_(doc('#history .item-history').attr('data-comm-app-url'),
            reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
            '?limit=1&serializer=simple')
class ModerateLogTest(mkt.site.tests.TestCase):
    """Shared fixtures for the moderation-log tests below."""

    def setUp(self):
        super(ModerateLogTest, self).setUp()
        self.review = Review.objects.create(addon=app_factory(), body='body',
                                            user=user_factory(), rating=4,
                                            editorreview=True)
        # Tests run logged in as this moderator by default.
        self.moderator_user = user_factory(email='moderator')
        self.grant_permission(self.moderator_user, 'Apps:ModerateReview')
        mkt.set_user(self.moderator_user)
        self.login(self.moderator_user)
        self.admin_user = user_factory(email='admin')
        self.grant_permission(self.admin_user, '*:*')
        # Plain user, needed by AccessMixin's permission checks.
        user_factory(email='regular')
class TestModerateLog(ModerateLogTest, AccessMixin):
    """Tests for the moderation log listing and its filters."""

    def setUp(self):
        super(TestModerateLog, self).setUp()
        self.url = reverse('reviewers.apps.moderatelog')

    def test_log(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)

    def test_start_filter(self):
        r = self.client.get(self.url, dict(start='2011-01-01'))
        eq_(r.status_code, 200)

    def test_enddate_filter(self):
        """
        Make sure that if our end date is 1/1/2011, that we include items from
        1/1/2011.
        """
        mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon,
                created=datetime(2011, 1, 1))
        r = self.client.get(self.url, dict(end='2011-01-01'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('tbody td').eq(0).text(), 'Jan 1, 2011, 12:00:00 AM')

    def test_action_filter(self):
        """
        Based on setup we should see only two items if we filter for deleted
        reviews.
        """
        for i in xrange(2):
            mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review)
            mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review)
        r = self.client.get(self.url, dict(search='deleted'))
        eq_(pq(r.content)('tbody tr').length, 2)

    def test_no_results(self):
        r = self.client.get(self.url, dict(end='2004-01-01'))
        no_results = 'No events found for this period.'
        assert no_results in r.content, 'Expected no results to be found.'

    def test_display_name_xss(self):
        mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon,
                user=self.admin_user)
        self.admin_user.display_name = '<script>alert("xss")</script>'
        self.admin_user.save()
        assert '<script>' in self.admin_user.display_name, (
            'Expected <script> to be in display name')
        r = self.client.get(self.url)
        # NOTE(review): the .html() result is discarded; presumably only
        # kept to force rendering of that cell — confirm it can be dropped.
        pq(r.content)('#log-listing tbody td').eq(1).html()
        # The raw payload must never appear in the rendered page; only the
        # HTML-escaped form may. The second assertion previously re-checked
        # the unescaped string, directly contradicting the first.
        assert '<script>' not in r.content
        assert '&lt;script&gt;' in r.content
class TestModerateLogDetail(ModerateLogTest, AccessMixin):
    """Tests for a single moderation-log entry and its undelete action."""

    def setUp(self):
        super(TestModerateLogDetail, self).setUp()
        # AccessMixin needs a url property.
        self.url = self._url(0)

    def _url(self, id):
        return reverse('reviewers.apps.moderatelog.detail', args=[id])

    def test_detail_page(self):
        mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review)
        e_id = ActivityLog.objects.editor_events()[0].id
        r = self.client.get(self._url(e_id))
        eq_(r.status_code, 200)

    def test_undelete_selfmoderation(self):
        # A moderator may undelete a review they deleted themselves.
        e_id = mkt.log(
            mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id
        self.review.delete()
        r = self.client.post(self._url(e_id), {'action': 'undelete'})
        eq_(r.status_code, 302)
        self.review = Review.objects.get(id=self.review.id)
        assert not self.review.deleted, 'Review should be undeleted now.'

    def test_undelete_admin(self):
        # Admins may undelete regardless of who performed the deletion.
        e_id = mkt.log(
            mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id
        self.review.delete()
        self.client.logout()
        self.login(self.admin_user)
        r = self.client.post(self._url(e_id), {'action': 'undelete'})
        eq_(r.status_code, 302)
        self.review = Review.objects.get(id=self.review.id)
        assert not self.review.deleted, 'Review should be undeleted now.'

    def test_undelete_unauthorized(self):
        # Delete as admin (or any other user than the reviewer).
        e_id = mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review,
                       user=self.admin_user).id
        self.review.delete()
        # Try to undelete as normal reviewer.
        r = self.client.post(self._url(e_id), {'action': 'undelete'})
        eq_(r.status_code, 403)
        self.review = Review.with_deleted.get(id=self.review.id)
        assert self.review.deleted, 'Review shouldn`t have been undeleted.'
| 40.79454 | 79 | 0.620518 |
import json
import re
import time
from datetime import datetime, timedelta
from itertools import cycle
from os import path
from django import test
from django.conf import settings
from django.core import mail
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.utils import translation
import mock
import requests
import waffle
from cache_nuggets.lib import Token
from jingo.helpers import urlparams
from nose import SkipTest
from nose.tools import eq_, ok_
from post_request_task import task as post_request_task
from pyquery import PyQuery as pq
from requests.structures import CaseInsensitiveDict
import mkt
import mkt.ratings
import mkt.site.tests
from lib.crypto import packaged
from lib.crypto.tests import mock_sign
from mkt.abuse.models import AbuseReport
from mkt.api.tests.test_oauth import RestOAuth
from mkt.comm.tests.test_views import CommTestMixin
from mkt.comm.utils import create_comm_note
from mkt.constants import MANIFEST_CONTENT_TYPE, comm
from mkt.developers.models import ActivityLog, AppLog
from mkt.files.models import File
from mkt.ratings.models import Review, ReviewFlag
from mkt.reviewers.models import (SHOWCASE_TAG, CannedResponse,
EscalationQueue, RereviewQueue,
ReviewerScore)
from mkt.reviewers.utils import ReviewersQueuesHelper
from mkt.reviewers.views import (_progress, app_review, queue_apps,
route_reviewer)
from mkt.site.fixtures import fixture
from mkt.site.helpers import absolutify, isotime
from mkt.site.storage_utils import private_storage, public_storage
from mkt.site.tests import (check_links, days_ago, formset, initial,
req_factory_factory, user_factory)
from mkt.site.utils import app_factory, make_rated, paginate, version_factory
from mkt.submit.tests.test_views import BasePackagedAppTest, SetupFilesMixin
from mkt.tags.models import Tag
from mkt.users.models import UserProfile
from mkt.versions.models import Version
from mkt.webapps.indexers import WebappIndexer
from mkt.webapps.models import AddonDeviceType, Webapp
from mkt.webapps.tasks import unindex_webapps
from mkt.websites.utils import website_factory
from mkt.zadmin.models import get_config, set_config
TIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
TEST_PATH = path.dirname(path.abspath(__file__))
ATTACHMENTS_DIR = path.abspath(path.join(TEST_PATH, '..', '..', 'comm',
'tests', 'attachments'))
class AttachmentManagementMixin(object):
    """Helpers that build attachment formset POST data for tests."""

    def _attachment_management_form(self, num=1):
        """Return management-form fields declaring ``num`` attachment forms."""
        # Django formsets always need at least one form slot.
        total_forms = max(1, num)
        return {'attachment-TOTAL_FORMS': total_forms,
                'attachment-INITIAL_FORMS': 0,
                'attachment-MAX_NUM_FORMS': 1000}

    def _attachments(self, num):
        """Return POST data carrying ``num`` uploads, alternating fixtures."""
        fixture_files = ['bacon.jpg', 'bacon.txt']
        fixture_descriptions = ['mmm, bacon', '']
        data = {}
        if num > 0:
            for idx in range(num):
                pick = 0 if idx % 2 else 1
                handle = open(
                    path.join(ATTACHMENTS_DIR, fixture_files[pick]), 'r+')
                data['attachment-%d-attachment' % idx] = handle
                data['attachment-%d-description' % idx] = (
                    fixture_descriptions[pick])
        return data
class TestedonManagementMixin(object):
    """Helpers that build "tested on" formset POST data for tests."""

    def _testedon_management_form(self, num=0):
        """Return management-form fields declaring ``num`` tested-on forms."""
        # Django formsets always need at least one form slot.
        total_forms = max(1, num)
        return {'testedon-TOTAL_FORMS': total_forms,
                'testedon-INITIAL_FORMS': 0,
                'testedon-MAX_NUM_FORMS': 1000}

    def _platforms(self, num, device_types=[u'\xd0esktop', u'FirefoxOS'],
                   devices=[u'PC ', u'ZT\xc8 Open'],
                   versions=[u'34', u'1.3<']):
        """Return POST data for ``num`` tested-on rows, cycling fixtures."""
        data = {}
        if num > 0:
            for idx in range(num):
                pick = idx % len(device_types)
                data['testedon-%d-device_type' % idx] = device_types[pick]
                data['testedon-%d-device' % idx] = devices[pick]
                data['testedon-%d-version' % idx] = versions[pick]
        return data
class AppReviewerTest(mkt.site.tests.TestCase):
    """Base test case that provisions reviewer accounts and logs in."""

    def setUp(self):
        super(AppReviewerTest, self).setUp()
        self.reviewer_user = user_factory(email='editor')
        self.grant_permission(self.reviewer_user, 'Apps:Review')
        self.snr_reviewer_user = user_factory(email='snrreviewer')
        self.grant_permission(self.snr_reviewer_user, 'Apps:Review,Apps:Edit,'
                              'Apps:ReviewEscalated,Apps:ReviewPrivileged',
                              name='Senior App Reviewers')
        self.admin_user = user_factory(email='admin')
        self.grant_permission(self.admin_user, '*:*')
        self.regular_user = user_factory(email='regular')
        self.contact_user = user_factory(email='contact')
        # Tests run as the plain reviewer unless they switch login.
        self.login_as_editor()

    def login_as_admin(self):
        self.login(self.admin_user)

    def login_as_editor(self):
        self.login(self.reviewer_user)

    def login_as_senior_reviewer(self):
        self.login(self.snr_reviewer_user)

    def check_actions(self, expected, elements):
        # `expected` is a list of (label text, form value) pairs that must
        # match the review-action inputs in display order.
        for idx, item in enumerate(expected):
            text, form_value = item
            e = elements.eq(idx)
            eq_(e.parent().text(), text)
            eq_(e.attr('name'), 'action')
            eq_(e.val(), form_value)

    def uses_es(self):
        # True when the ES-backed reviewer tools waffle switch is on.
        return waffle.switch_is_active('reviewer-tools-elasticsearch')
class AccessMixin(object):
    """Shared checks that self.url rejects unprivileged and anonymous users."""

    def test_403_for_non_editor(self, *args, **kwargs):
        # A logged-in user without reviewer permissions is forbidden.
        self.login('regular@mozilla.com')
        response = self.client.head(self.url)
        eq_(response.status_code, 403)

    def test_302_for_anonymous(self, *args, **kwargs):
        # Anonymous users get redirected away.
        self.client.logout()
        response = self.client.head(self.url)
        eq_(response.status_code, 302)
class SearchMixin(object):
    """Shared smoke test that the queue search accepts a text query."""

    def test_search_query(self):
        res = self.client.get(self.url, {'text_query': 'test'})
        eq_(res.status_code, 200)
@mock.patch('mkt.webapps.models.Webapp.get_cached_manifest', mock.Mock)
class TestReviewersHome(AppReviewerTest, AccessMixin):
    """Tests for the reviewers dashboard: queue counts, progress, leaders."""

    def setUp(self):
        super(TestReviewersHome, self).setUp()
        self.url = reverse('reviewers.home')
        # Three pending apps that count toward the pending queue.
        self.apps = [app_factory(name='Antelope',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='Bear',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='Cougar',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING})]
        # A public packaged app with a pending update (updates queue).
        self.packaged_app = app_factory(name='Dinosaur',
                                        status=mkt.STATUS_PUBLIC,
                                        is_packaged=True)
        version_factory(addon=self.packaged_app,
                        file_kw={'status': mkt.STATUS_PENDING})
        # Disabled-by-user apps must not appear in any queue.
        app_factory(name='Elephant', disabled_by_user=True,
                    status=mkt.STATUS_PENDING)
        # Escalated apps leave the regular pending queue.
        escalated = app_factory(name='Eyelash Pit Viper',
                                status=mkt.STATUS_PENDING)
        EscalationQueue.objects.create(addon=escalated)
        # Add a public app under re-review.
        rereviewed = app_factory(name='Finch', status=mkt.STATUS_PUBLIC)
        rq = RereviewQueue.objects.create(addon=rereviewed)
        rq.update(created=self.days_ago(1))
        # Add an app with latest update deleted. It shouldn't affect anything.
        app = app_factory(name='Great White Shark',
                          status=mkt.STATUS_PUBLIC,
                          version_kw={'version': '1.0'},
                          is_packaged=True)
        v = version_factory(addon=app,
                            version='2.1',
                            file_kw={'status': mkt.STATUS_PENDING})
        v.update(deleted=True)

    def test_route_reviewer(self):
        # The bare /reviewers URL routes app reviewers to the home page.
        req = mkt.site.tests.req_factory_factory(
            reverse('reviewers'),
            user=UserProfile.objects.get(email='editor@mozilla.com'))
        r = route_reviewer(req)
        self.assert3xx(r, reverse('reviewers.home'))

    def test_progress_pending(self):
        # One app per age bucket: new (1d), medium (8d), old (15d).
        self.apps[0].latest_version.update(nomination=self.days_ago(1))
        self.apps[1].latest_version.update(nomination=self.days_ago(8))
        self.apps[2].latest_version.update(nomination=self.days_ago(15))
        counts, percentages = _progress()
        eq_(counts['pending']['week'], 1)
        eq_(counts['pending']['new'], 1)
        eq_(counts['pending']['old'], 1)
        eq_(counts['pending']['med'], 1)
        self.assertAlmostEqual(percentages['pending']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['pending']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['pending']['med'], 33.333333333333)

    def test_progress_rereview(self):
        rq = RereviewQueue.objects.create(addon=self.apps[0])
        rq.update(created=self.days_ago(8))
        rq = RereviewQueue.objects.create(addon=self.apps[1])
        rq.update(created=self.days_ago(15))
        counts, percentages = _progress()
        eq_(counts['rereview']['week'], 1)
        eq_(counts['rereview']['new'], 1)
        eq_(counts['rereview']['old'], 1)
        eq_(counts['rereview']['med'], 1)
        self.assertAlmostEqual(percentages['rereview']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['rereview']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['rereview']['med'], 33.333333333333)

    def test_progress_updated(self):
        extra_app = app_factory(name='Jackalope',
                                status=mkt.STATUS_PUBLIC,
                                is_packaged=True,
                                created=self.days_ago(35))
        version_factory(addon=extra_app,
                        file_kw={'status': mkt.STATUS_PENDING},
                        created=self.days_ago(25),
                        nomination=self.days_ago(8))
        extra_app = app_factory(name='Jackrabbit',
                                status=mkt.STATUS_PUBLIC,
                                is_packaged=True,
                                created=self.days_ago(35))
        version_factory(addon=extra_app,
                        file_kw={'status': mkt.STATUS_PENDING},
                        created=self.days_ago(25),
                        nomination=self.days_ago(25))
        counts, percentages = _progress()
        eq_(counts['updates']['week'], 1)
        eq_(counts['updates']['new'], 1)
        eq_(counts['updates']['old'], 1)
        eq_(counts['updates']['med'], 1)
        self.assertAlmostEqual(percentages['updates']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['updates']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['updates']['med'], 33.333333333333)

    def test_stats_waiting(self):
        self.apps[0].latest_version.update(nomination=self.days_ago(1))
        self.apps[1].latest_version.update(nomination=self.days_ago(5))
        self.apps[2].latest_version.update(nomination=self.days_ago(15))
        self.packaged_app.update(created=self.days_ago(1))
        doc = pq(self.client.get(self.url).content)
        # Queue headings carry the totals.
        anchors = doc('.editor-stats-title a')
        eq_(anchors.eq(0).text(), '3 Pending App Reviews')
        eq_(anchors.eq(1).text(), '1 Re-review')
        eq_(anchors.eq(2).text(), '1 Update Review')
        divs = doc('.editor-stats-table > div')
        eq_(divs.eq(0).text(), '2 unreviewed app submissions this week.')
        eq_(divs.eq(2).text(), '1 unreviewed app submission this week.')
        eq_(divs.eq(4).text(), '1 unreviewed app submission this week.')
        # Age-bucket bars carry their percentage at the end of the title.
        eq_(doc('.waiting_new').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_med').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_old').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_new').eq(1).attr('title')[-4:], '100%')
        eq_(doc('.waiting_med').eq(1).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_old').eq(1).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_new').eq(2).attr('title')[-4:], '100%')
        eq_(doc('.waiting_med').eq(2).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_old').eq(2).attr('title')[-3:], ' 0%')

    def test_reviewer_leaders(self):
        reviewers = UserProfile.objects.all()[:2]
        # Alternate approvals over three apps: 2 for the first reviewer,
        # 1 for the second.
        users = cycle(reviewers)
        for app in self.apps:
            mkt.log(mkt.LOG.APPROVE_VERSION, app, app.latest_version,
                    user=users.next(), details={'comments': 'hawt'})
        doc = pq(self.client.get(self.url).content.decode('utf-8'))
        table = doc('#editors-stats .editor-stats-table').eq(0)
        eq_(table.find('td').eq(0).text(), reviewers[0].email)
        eq_(table.find('td').eq(1).text(), u'2')
        eq_(table.find('td').eq(2).text(), reviewers[1].email)
        eq_(table.find('td').eq(3).text(), u'1')
        table = doc('#editors-stats .editor-stats-table').eq(1)
        eq_(table.find('td').eq(0).text(), reviewers[0].email)
        eq_(table.find('td').eq(1).text(), u'2')
        eq_(table.find('td').eq(2).text(), reviewers[1].email)
        eq_(table.find('td').eq(3).text(), u'1')
class FlagsMixin(object):
    """Shared tests for the flag icons shown in queue rows."""

    def test_flag_packaged_app(self):
        self.apps[0].update(is_packaged=True)
        if self.uses_es():
            self.reindex(Webapp)
        eq_(self.apps[0].is_packaged, True)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        td = pq(res.content)('#addon-queue tbody tr td.flags').eq(0)
        flag = td('div.sprite-reviewer-packaged-app')
        eq_(flag.length, 1)

    def test_flag_premium_app(self):
        self.apps[0].update(premium_type=mkt.ADDON_PREMIUM)
        if self.uses_es():
            self.reindex(Webapp)
        eq_(self.apps[0].is_premium(), True)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-premium')
        eq_(flags.length, 1)

    def test_flag_free_inapp_app(self):
        self.apps[0].update(premium_type=mkt.ADDON_FREE_INAPP)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        eq_(tds('div.sprite-reviewer-premium.inapp.free').length, 1)

    def test_flag_premium_inapp_app(self):
        self.apps[0].update(premium_type=mkt.ADDON_PREMIUM_INAPP)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        eq_(tds('div.sprite-reviewer-premium.inapp').length, 1)

    def test_flag_info(self):
        # Info flag: shown when more information was requested.
        self.apps[0].latest_version.update(has_info_request=True)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-info')
        eq_(flags.length, 1)

    def test_flag_comment(self):
        # Editor flag: shown when a version has an editor comment.
        self.apps[0].latest_version.update(has_editor_comment=True)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-editor')
        eq_(flags.length, 1)
class XSSMixin(object):
    """Shared test asserting queue pages HTML-escape app names."""

    def test_xss_in_queue(self):
        a = self.apps[0]
        a.name = '<script>alert("xss")</script>'
        a.save()
        if self.uses_es():
            self.refresh(doctypes=('homescreen', 'webapp'))
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tbody = pq(res.content)('#addon-queue tbody').html()
        # Only the HTML-escaped form of the name may be rendered; the raw
        # payload must never appear. The first assertion previously used
        # the unescaped string, directly contradicting the second.
        assert '&lt;script&gt;' in tbody
        assert '<script>' not in tbody
class TestAppQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
XSSMixin):
def setUp(self):
    super(TestAppQueue, self).setUp()
    yesterday = self.days_ago(1)
    long_ago = self.days_ago(2)
    # Two pending apps (XXX nominated before YYY) plus one app that is
    # only in the re-review queue.
    self.apps = [app_factory(name='XXX',
                             status=mkt.STATUS_PENDING,
                             version_kw={'nomination': long_ago},
                             file_kw={'status': mkt.STATUS_PENDING}),
                 app_factory(name='YYY',
                             status=mkt.STATUS_PENDING,
                             version_kw={'nomination': yesterday},
                             file_kw={'status': mkt.STATUS_PENDING}),
                 app_factory(name='ZZZ')]
    self.apps[0].update(created=self.days_ago(12))
    self.apps[1].update(created=self.days_ago(11))
    eq_(self.apps[0].latest_version.nomination, long_ago)
    eq_(self.apps[1].latest_version.nomination, yesterday)
    RereviewQueue.objects.create(addon=self.apps[2])
    self.url = reverse('reviewers.apps.queue_pending')
def tearDown(self):
    # Remove the fixture apps from the ES index so later tests start clean.
    if self.uses_es():
        unindex_webapps([app.id for app in self.apps])
    super(TestAppQueue, self).tearDown()
def review_url(self, app):
    # URL of the review page for `app`.
    return reverse('reviewers.apps.review', args=[app.app_slug])
def test_queue_viewing_ping(self):
eq_(self.client.post(reverse('reviewers.queue_viewing')).status_code,
200)
def test_template_links(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
apps = Webapp.objects.filter(
status=mkt.STATUS_PENDING).order_by('created')
expected = [
(unicode(apps[0].name), self.review_url(apps[0])),
(unicode(apps[1].name), self.review_url(apps[1])),
]
check_links(expected, links, verify=False)
def test_action_buttons_pending(self):
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Reject', 'reject'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_rejected(self):
self.apps[0].update(status=mkt.STATUS_REJECTED)
self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
@mock.patch('mkt.versions.models.Version.is_privileged', True)
def test_action_buttons_privileged_cantreview(self):
self.apps[0].update(is_packaged=True)
self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
@mock.patch('mkt.versions.models.Version.is_privileged', True)
def test_action_buttons_privileged_canreview(self):
self.login_as_senior_reviewer()
self.apps[0].update(is_packaged=True)
self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Reject', 'reject'),
(u'Ban app', 'disable'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_devices(self):
AddonDeviceType.objects.create(addon=self.apps[0], device_type=1)
AddonDeviceType.objects.create(addon=self.apps[0], device_type=2)
if self.uses_es():
self.reindex(Webapp)
r = self.client.get(self.url)
eq_(r.status_code, 200)
tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(5)')
eq_(tds('ul li:not(.unavailable)').length, 2)
def test_payments(self):
self.apps[0].update(premium_type=mkt.ADDON_PREMIUM)
self.apps[1].update(premium_type=mkt.ADDON_FREE_INAPP)
if self.uses_es():
self.reindex(Webapp)
r = self.client.get(self.url)
eq_(r.status_code, 200)
tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(6)')
eq_(tds.eq(0).text(),
unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_PREMIUM]))
eq_(tds.eq(1).text(),
unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_FREE_INAPP]))
def test_invalid_page(self):
    # An out-of-range page number falls back to page 1 instead of erroring.
    r = self.client.get(self.url, {'page': 999})
    eq_(r.status_code, 200)
    eq_(r.context['pager'].number, 1)
def test_queue_count(self):
if self.uses_es():
self.refresh(doctypes=('webapp', 'homescreen'))
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (2)')
eq_(links[1].text, u'Re-reviews (1)')
eq_(links[2].text, u'Updates (0)')
eq_(links[4].text, u'Homescreens (0)')
def test_homescreen_count(self):
Tag(tag_text='homescreen').save_tag(self.apps[1])
self.apps[1].save()
if self.uses_es():
WebappIndexer.unindex(self.apps[1].id)
self.refresh(('homescreen', 'webapp'))
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (1)')
eq_(links[1].text, u'Re-reviews (1)')
eq_(links[2].text, u'Updates (0)')
eq_(links[4].text, u'Homescreens (1)')
def test_queue_count_senior_reviewer(self):
self.login_as_senior_reviewer()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (2)')
eq_(links[1].text, u'Re-reviews (1)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (0)')
def test_escalated_not_in_queue(self):
self.login_as_senior_reviewer()
EscalationQueue.objects.create(addon=self.apps[0])
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_([a.app.id for a in res.context['addons']], [self.apps[1].id])
doc = pq(res.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (1)')
eq_(links[1].text, u'Re-reviews (1)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (1)')
def test_incomplete_no_in_queue(self):
[app.update(status=mkt.STATUS_NULL) for app in self.apps]
if self.uses_es():
self.reindex(Webapp)
req = req_factory_factory(
self.url,
user=UserProfile.objects.get(email='editor@mozilla.com'))
doc = pq(queue_apps(req).content)
assert not doc('
def test_waiting_time(self):
res = self.client.get(self.url)
waiting_times = [wait.attrib['isotime'] for wait in
pq(res.content)('td time')]
expected_waiting_times = [isotime(app.latest_version.nomination)
for app in self.apps[0:2]]
self.assertSetEqual(expected_waiting_times, waiting_times)
class TestAppQueueES(mkt.site.tests.ESTestCase, TestAppQueue):
    """Re-run the app queue tests with the Elasticsearch backend enabled."""
    def setUp(self):
        super(TestAppQueueES, self).setUp()
        # Flip the waffle switch so the views hit ES instead of the DB.
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestRereviewQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
                        XSSMixin):
    """Coverage for the re-review queue: listing order, waiting times, tab
    counts, and the action buttons offered on the review page."""
    def setUp(self):
        super(TestRereviewQueue, self).setUp()
        # Three apps enter the re-review queue at staggered times so queue
        # ordering and waiting-time columns can be asserted below.
        self.apps = [app_factory(name='XXX'),
                     app_factory(name='YYY'),
                     app_factory(name='ZZZ')]
        RereviewQueue.objects.create(addon=self.apps[0]).update(
            created=self.days_ago(5))
        RereviewQueue.objects.create(addon=self.apps[1]).update(
            created=self.days_ago(3))
        RereviewQueue.objects.create(addon=self.apps[2]).update(
            created=self.days_ago(1))
        self.apps[0].update(created=self.days_ago(15))
        self.apps[1].update(created=self.days_ago(13))
        self.apps[2].update(created=self.days_ago(11))
        if self.uses_es():
            self.refresh(doctypes=('homescreen', 'webapp'))
        self.url = reverse('reviewers.apps.queue_rereview')
    def tearDown(self):
        # ES-backed subclasses must drop the docs they indexed.
        if self.uses_es():
            unindex_webapps([app.id for app in self.apps])
        super(TestRereviewQueue, self).tearDown()
    def review_url(self, app):
        """Return the review-page URL for *app*."""
        return reverse('reviewers.apps.review', args=[app.app_slug])
    def test_template_links(self):
        """Queue rows link to each app's review page, oldest entry first."""
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        links = pq(r.content)('
        apps = [rq.addon for rq in
                RereviewQueue.objects.all().order_by('created')]
        expected = [
            (unicode(apps[0].name), self.review_url(apps[0])),
            (unicode(apps[1].name), self.review_url(apps[1])),
            (unicode(apps[2].name), self.review_url(apps[2])),
        ]
        check_links(expected, links, verify=False)
    def test_waiting_time(self):
        """Waiting times come from the re-review queue entry creation
        datetimes, not the apps' own timestamps."""
        r = self.client.get(self.url)
        waiting_times = [wait.attrib['isotime'] for wait in
                         pq(r.content)('td time')]
        expected_waiting_times = [
            isotime(app.rereviewqueue_set.all()[0].created)
            for app in self.apps]
        self.assertSetEqual(expected_waiting_times, waiting_times)
    def test_action_buttons_public_senior_reviewer(self):
        """Senior reviewers get reject/ban/clear-rereview for a public app."""
        self.login_as_senior_reviewer()
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        actions = pq(r.content)('
        expected = [
            (u'Reject', 'reject'),
            (u'Ban app', 'disable'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_action_buttons_public(self):
        """Regular reviewers get the same set minus the ban action."""
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        actions = pq(r.content)('
        expected = [
            (u'Reject', 'reject'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_action_buttons_reject(self):
        """A rejected app offers Approve instead of Reject."""
        self.apps[0].update(status=mkt.STATUS_REJECTED)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        actions = pq(r.content)('
        expected = [
            (u'Approve', 'public'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_invalid_page(self):
        """An out-of-range page number falls back to page 1."""
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)
    def test_queue_count(self):
        """Tab labels carry per-queue counts for a regular reviewer."""
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (3)')
        eq_(links[2].text, u'Updates (0)')
    def test_queue_count_senior_reviewer(self):
        """Senior reviewers additionally see the Escalations tab count."""
        self.login_as_senior_reviewer()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (3)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (0)')
    def test_escalated_not_in_queue(self):
        """Escalated apps drop out of the re-review listing."""
        self.login_as_senior_reviewer()
        EscalationQueue.objects.create(addon=self.apps[0])
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        self.assertSetEqual([a.app.id for a in res.context['addons']],
                            [a.id for a in self.apps[1:]])
        doc = pq(res.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (2)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (1)')
    def test_addon_deleted(self):
        """Deleting an app removes its re-review queue entry."""
        app = self.apps[0]
        app.delete()
        eq_(RereviewQueue.objects.filter(addon=app).exists(), False)
class TestRereviewQueueES(mkt.site.tests.ESTestCase, TestRereviewQueue):
    """Re-run the re-review queue tests against the Elasticsearch backend."""
    def setUp(self):
        super(TestRereviewQueueES, self).setUp()
        # Flip the waffle switch so the views hit ES instead of the DB.
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestUpdateQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
                      XSSMixin):
    """Coverage for the Updates queue: already-approved packaged apps whose
    latest version is pending review."""
    # Prevent update_cached_manifests at setUp() since it gets called and tries
    # to access files when we add versions.
    @mock.patch('mkt.webapps.tasks.update_cached_manifests', False)
    def setUp(self):
        super(TestUpdateQueue, self).setUp()
        # Queue post-request tasks until both apps and their 1.1 pending
        # versions exist, then flush them in one go.
        post_request_task._start_queuing_tasks()
        app1 = app_factory(is_packaged=True, name='XXX',
                           version_kw={'version': '1.0',
                                       'created': self.days_ago(2),
                                       'nomination': self.days_ago(2)})
        app2 = app_factory(is_packaged=True, name='YYY',
                           version_kw={'version': '1.0',
                                       'created': self.days_ago(2),
                                       'nomination': self.days_ago(2)})
        version_factory(addon=app1, version='1.1', created=self.days_ago(1),
                        nomination=self.days_ago(1),
                        file_kw={'status': mkt.STATUS_PENDING})
        version_factory(addon=app2, version='1.1', created=self.days_ago(1),
                        nomination=self.days_ago(1),
                        file_kw={'status': mkt.STATUS_PENDING})
        post_request_task._send_tasks_and_stop_queuing()
        self.apps = list(Webapp.objects.order_by('id'))
        self.url = reverse('reviewers.apps.queue_updates')
    def tearDown(self):
        # ES-backed subclasses must drop the docs they indexed.
        if self.uses_es():
            unindex_webapps([app.id for app in self.apps])
        super(TestUpdateQueue, self).tearDown()
    def review_url(self, app):
        """Return the review-page URL for *app*."""
        return reverse('reviewers.apps.review', args=[app.app_slug])
    def test_template_links(self):
        """Queue rows link to each app's review page."""
        self.apps[0].versions.latest().update(nomination=self.days_ago(2))
        self.apps[1].versions.latest().update(nomination=self.days_ago(1))
        if self.uses_es():
            self.reindex(Webapp)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        links = pq(r.content)('
        expected = [
            (unicode(self.apps[0].name), self.review_url(self.apps[0])),
            (unicode(self.apps[1].name), self.review_url(self.apps[1])),
        ]
        check_links(expected, links, verify=False)
    def test_action_buttons_public_senior_reviewer(self):
        """Senior reviewers get reject/ban for a public update."""
        self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC)
        self.login_as_senior_reviewer()
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        actions = pq(r.content)('
        expected = [
            (u'Reject', 'reject'),
            (u'Ban app', 'disable'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_action_buttons_public(self):
        """Regular reviewers get the same set minus the ban action."""
        self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        actions = pq(r.content)('
        expected = [
            (u'Reject', 'reject'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_action_buttons_reject(self):
        """A disabled update offers Approve instead of Reject."""
        self.apps[0].versions.latest().files.update(status=mkt.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        actions = pq(r.content)('
        expected = [
            (u'Approve', 'public'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_invalid_page(self):
        """An out-of-range page number falls back to page 1."""
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)
    def test_queue_count(self):
        """Tab labels carry per-queue counts for a regular reviewer."""
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (2)')
    def test_homescreen(self):
        """A homescreen-tagged app moves its update out of the Updates count
        and into Homescreens."""
        Tag(tag_text='homescreen').save_tag(self.apps[1])
        self.apps[1].save()
        if self.uses_es():
            WebappIndexer.unindex(self.apps[1].id)
            self.refresh(doctypes=('homescreen', 'webapp'))
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (1)')
        eq_(links[3].text, u'Reviewing (0)')
        eq_(links[4].text, u'Homescreens (1)')
    def test_queue_count_senior_reviewer(self):
        """Senior reviewers additionally see the Escalations tab count."""
        self.login_as_senior_reviewer()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (2)')
        eq_(links[3].text, u'Escalations (0)')
    def test_escalated_not_in_queue(self):
        """Escalated apps drop out of the updates listing."""
        self.login_as_senior_reviewer()
        EscalationQueue.objects.create(addon=self.apps[0])
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_([a.app.id for a in res.context['addons']],
            [app.id for app in self.apps[1:]])
        doc = pq(res.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (1)')
        eq_(links[3].text, u'Escalations (1)')
    def test_order(self):
        """Queue is ordered by version nomination date, not app creation."""
        self.apps[0].update(created=self.days_ago(10))
        self.apps[1].update(created=self.days_ago(5))
        self.apps[0].versions.latest().update(nomination=self.days_ago(1))
        self.apps[1].versions.latest().update(nomination=self.days_ago(4))
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        apps = list(res.context['addons'])
        eq_(apps[0].app.id, self.apps[1].id)
        eq_(apps[1].app.id, self.apps[0].id)
    def test_only_updates_in_queue(self):
        """Brand-new pending apps belong to the pending queue, not here."""
        # Add new packaged app, which should only show up in the pending queue.
        app = app_factory(is_packaged=True, name='ZZZ',
                          status=mkt.STATUS_PENDING,
                          version_kw={'version': '1.0'},
                          file_kw={'status': mkt.STATUS_PENDING})
        self.apps.append(app)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        apps = [a.app for a in res.context['addons']]
        assert app not in apps, (
            'Unexpected: Found a new packaged app in the updates queue.')
        eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (2)')
    def test_approved_update_in_queue(self):
        """A pending version on an approved (non-public) app still queues."""
        app = app_factory(is_packaged=True, name='YYY',
                          status=mkt.STATUS_APPROVED,
                          version_kw={'version': '1.0',
                                      'created': self.days_ago(2),
                                      'nomination': self.days_ago(2)})
        self.apps.append(app)
        File.objects.filter(version__addon=app).update(status=app.status)
        version_factory(addon=app, version='1.1', created=self.days_ago(1),
                        nomination=self.days_ago(1),
                        file_kw={'status': mkt.STATUS_PENDING})
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        assert app.id in [a.app.id for a in res.context['addons']]
        eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)')
    def test_update_queue_with_empty_nomination(self):
        """A pending version with nomination=None still appears."""
        app = app_factory(is_packaged=True, name='YYY',
                          status=mkt.STATUS_NULL,
                          version_kw={'version': '1.0',
                                      'created': self.days_ago(2),
                                      'nomination': None})
        self.apps.append(app)
        first_version = app.latest_version
        version_factory(addon=app, version='1.1', created=self.days_ago(1),
                        nomination=None,
                        file_kw={'status': mkt.STATUS_PENDING})
        # Now that we have a version with nomination=None, reset app status.
        app.update(status=mkt.STATUS_APPROVED)
        File.objects.filter(version=first_version).update(status=app.status)
        # Safeguard: we /really/ want to test with nomination=None.
        eq_(app.latest_version.reload().nomination, None)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        assert app.id in [a.app.id for a in res.context['addons']]
        eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)')
    def test_deleted_version_not_in_queue(self):
        """Deleting the pending version removes the app from this queue."""
        app = self.apps[0]
        # File is PENDING and delete current version.
        old_ver = app.versions.order_by('id')[0]
        old_ver.files.latest().update(status=mkt.STATUS_PENDING)
        old_ver.delete()
        # "Approve" the app.
        app.versions.latest().files.latest().update(status=mkt.STATUS_PUBLIC)
        eq_(app.reload().status, mkt.STATUS_PUBLIC)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        # Verify that our app has 2 versions.
        eq_(Version.with_deleted.filter(addon=app).count(), 2)
        # Verify the apps in the context are what we expect.
        doc = pq(res.content)
        eq_(doc('.tabnav li a')[2].text, u'Updates (1)')
        apps = [a.app.id for a in res.context['addons']]
        ok_(app.id not in apps)
        ok_(self.apps[1].id in apps)
    def test_waiting_time(self):
        """Waiting times reflect the latest versions' nomination datetimes."""
        r = self.client.get(self.url)
        waiting_times = [wait.attrib['isotime'] for wait in
                         pq(r.content)('td time')]
        expected_waiting_times = [isotime(app.latest_version.nomination)
                                  for app in self.apps]
        self.assertSetEqual(expected_waiting_times, waiting_times)
class TestUpdateQueueES(mkt.site.tests.ESTestCase, TestUpdateQueue):
    """Re-run the updates queue tests against the Elasticsearch backend."""
    def setUp(self):
        super(TestUpdateQueueES, self).setUp()
        # Flip the waffle switch so the views hit ES instead of the DB.
        self.create_switch('reviewer-tools-elasticsearch')
        self.refresh(doctypes=('homescreen', 'webapp'))
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestEscalationQueue(AppReviewerTest, AccessMixin, FlagsMixin,
                          SearchMixin, XSSMixin):
    """Coverage for the senior-reviewer-only escalation queue."""
    def setUp(self):
        super(TestEscalationQueue, self).setUp()
        # Three escalated apps with staggered queue-entry times so ordering
        # and waiting times can be asserted below.
        self.apps = [app_factory(name='XXX'),
                     app_factory(name='YYY'),
                     app_factory(name='ZZZ')]
        EscalationQueue.objects.create(addon=self.apps[0]).update(
            created=self.days_ago(5))
        EscalationQueue.objects.create(addon=self.apps[1]).update(
            created=self.days_ago(3))
        EscalationQueue.objects.create(addon=self.apps[2]).update(
            created=self.days_ago(1))
        self.apps[0].update(created=self.days_ago(15))
        self.apps[1].update(created=self.days_ago(13))
        self.apps[2].update(created=self.days_ago(11))
        self.login_as_senior_reviewer()
        self.url = reverse('reviewers.apps.queue_escalated')
    def tearDown(self):
        # ES-backed subclasses must drop the docs they indexed.
        if self.uses_es():
            unindex_webapps([app.id for app in self.apps])
        super(TestEscalationQueue, self).tearDown()
    def review_url(self, app):
        """Return the review-page URL for *app*."""
        return reverse('reviewers.apps.review', args=[app.app_slug])
    def test_flag_blocked(self):
        # Blocklisted apps should only be in the update queue, so this flag
        # check is here rather than in FlagsMixin.
        self.apps[0].update(status=mkt.STATUS_BLOCKED)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        tds = pq(res.content)('
        flags = tds('div.sprite-reviewer-blocked')
        eq_(flags.length, 1)
    def test_no_access_regular_reviewer(self):
        """Regular reviewers are denied access to the escalation queue."""
        self.login_as_editor()
        res = self.client.get(self.url)
        eq_(res.status_code, 403)
    def test_template_links(self):
        """Queue rows link to each app's review page."""
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        links = pq(r.content)('
        apps = [rq.addon for rq in
                EscalationQueue.objects.all().order_by('addon__created')]
        expected = [
            (unicode(apps[0].name), self.review_url(apps[0])),
            (unicode(apps[1].name), self.review_url(apps[1])),
            (unicode(apps[2].name), self.review_url(apps[2])),
        ]
        check_links(expected, links, verify=False)
    def test_waiting_time(self):
        """Waiting times come from the escalation queue entry creation
        datetimes."""
        r = self.client.get(self.url)
        waiting_times = [wait.attrib['isotime'] for wait in
                         pq(r.content)('td time')]
        expected_waiting_times = [
            isotime(app.escalationqueue_set.all()[0].created)
            for app in self.apps]
        self.assertSetEqual(expected_waiting_times, waiting_times)
    def test_action_buttons_public(self):
        """Escalated public apps offer reject/ban/clear-escalation."""
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        actions = pq(r.content)('
        expected = [
            (u'Reject', 'reject'),
            (u'Ban app', 'disable'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Clear Escalation', 'clear_escalation'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_action_buttons_reject(self):
        """Escalated rejected apps offer Approve instead of Reject."""
        self.apps[0].update(status=mkt.STATUS_REJECTED)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        actions = pq(r.content)('
        expected = [
            (u'Approve', 'public'),
            (u'Ban app', 'disable'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Clear Escalation', 'clear_escalation'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)
    def test_invalid_page(self):
        """An out-of-range page number falls back to page 1."""
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)
    def test_queue_count(self):
        """Tab labels carry per-queue counts."""
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (3)')
    def test_addon_deleted(self):
        """Deleting an app removes its escalation queue entry."""
        app = self.apps[0]
        app.delete()
        eq_(EscalationQueue.objects.filter(addon=app).exists(), False)
class TestEscalationQueueES(mkt.site.tests.ESTestCase, TestEscalationQueue):
    """Re-run the escalation queue tests against the Elasticsearch backend."""
    def setUp(self):
        super(TestEscalationQueueES, self).setUp()
        # Flip the waffle switch so the views hit ES instead of the DB.
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
class TestReviewTransaction(AttachmentManagementMixin,
                            mkt.site.tests.MockEsMixin,
                            mkt.site.tests.MockBrowserIdMixin,
                            test.TransactionTestCase,
                            TestedonManagementMixin):
    """Approval flows exercised under real DB transactions
    (TransactionTestCase): packaged-app signing success and failure."""
    fixtures = fixture('webapp_337141')
    def setUp(self):
        super(TestReviewTransaction, self).setUp()
        # Grant a fresh user review permission; tests log in as this user.
        mkt.site.tests.TestCase.grant_permission(
            user_factory(email='editor'), 'Apps:Review')
        self.mock_browser_id()
    def get_app(self):
        """Fetch a fresh copy of the fixture app from the database."""
        return Webapp.objects.get(id=337141)
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    @mock.patch('lib.crypto.packaged.sign_app')
    def test_public_sign(self, sign_mock, json_mock, update_cached_manifests):
        """Successful signing: the app goes public and the cached manifest
        is regenerated exactly once."""
        self.app = self.get_app()
        self.version = self.app.latest_version
        self.version.files.all().update(status=mkt.STATUS_PENDING)
        # Put a real (dummy) file on disk and drop any stale signed copy so
        # the signing path actually runs.
        with private_storage.open(
                self.version.files.all()[0].file_path, 'w') as f:
            f.write('.')
        public_storage.delete(self.version.files.all()[0].signed_file_path)
        self.app.update(status=mkt.STATUS_PENDING, is_packaged=True,
                        _current_version=None, _signal=False)
        eq_(self.get_app().status, mkt.STATUS_PENDING)
        update_cached_manifests.reset_mock()
        sign_mock.return_value = None  # Didn't fail.
        json_mock.return_value = {'name': 'Something'}
        self.login('editor@mozilla.com')
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        resp = self.client.post(
            reverse('reviewers.apps.review', args=[self.app.app_slug]), data)
        eq_(resp.status_code, 302)
        eq_(self.get_app().status, mkt.STATUS_PUBLIC)
        eq_(update_cached_manifests.delay.call_count, 1)
    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    @mock.patch('lib.crypto.packaged.sign')
    def test_public_sign_failure(self, sign_mock, json_mock,
                                 update_cached_manifests):
        """Signing failure: the transaction rolls back, the app stays
        pending and no manifest regeneration is queued."""
        self.app = self.get_app()
        self.version = self.app.latest_version
        self.version.files.all().update(status=mkt.STATUS_PENDING)
        self.app.update(status=mkt.STATUS_PENDING, is_packaged=True,
                        _current_version=None, _signal=False)
        eq_(self.get_app().status, mkt.STATUS_PENDING)
        sign_mock.side_effect = packaged.SigningError
        json_mock.return_value = {'name': 'Something'}
        self.login('editor@mozilla.com')
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        resp = self.client.post(
            reverse('reviewers.apps.review', args=[self.app.app_slug]), data)
        eq_(resp.status_code, 302)
        eq_(self.get_app().status, mkt.STATUS_PENDING)
        eq_(update_cached_manifests.delay.call_count, 0)
class TestReviewMixin(object):
    """Shared helpers for review-action tests: posting the review form and
    asserting on outgoing mail, comm threads, logs and reviewer scores."""
    COMM_REPLY_RE = r'^commreply\+[a-f0-9]+\@marketplace\.firefox\.com$'
    def post(self, data, queue='pending'):
        """POST the review form and expect a redirect back to *queue*."""
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))
    def _check_email(self, msg, subject, to=None):
        """Assert recipients, Reply-To, sender and subject of *msg*."""
        expected_to = to if to else list(
            self.app.authors.values_list('email', flat=True))
        eq_(msg.to, expected_to)
        assert re.match(self.COMM_REPLY_RE, msg.extra_headers['Reply-To'])
        eq_(msg.cc, [])
        eq_(msg.from_email, settings.MKT_REVIEWERS_EMAIL)
        if subject:
            eq_(msg.subject, '%s: %s' % (subject, self.app.name))
    def _get_mail(self, email):
        """Return the first outbox message addressed to *email*."""
        return [m for m in mail.outbox if m.to[0].startswith(email)][0]
    def _check_email_dev_and_contact(self, subject, outbox_len=2):
        """Both the developer and the Mozilla contact must get the mail."""
        eq_(len(mail.outbox), outbox_len)
        self._check_email(self._get_mail('steamcube'), subject)
        self._check_email(self._get_mail('contact'), subject,
                          to=[self.mozilla_contact])
    def _check_thread(self):
        """Exactly one comm thread, readable by dev, reviewer and staff."""
        threads = self.app.threads
        eq_(threads.count(), 1)
        thread = threads.get()
        for role in ('developer', 'reviewer', 'staff'):
            assert getattr(thread, 'read_permission_%s' % role)
    def _check_email_body(self, msg=None):
        """The app's detail URL must appear in the message body."""
        if not msg:
            msg = mail.outbox[0]
        body = msg.message().as_string()
        url = self.app.get_url_path()
        assert url in body, 'Could not find apps detail URL in %s' % msg
    def _check_log(self, action):
        """An AppLog entry for *action* must have been recorded."""
        assert AppLog.objects.filter(
            addon=self.app, activity_log__action=action.id).exists(), (
            "Didn't find `%s` action in logs." % action.short)
    def _check_score(self, reviewed_type):
        """A reviewer score of the expected type and value was awarded."""
        scores = ReviewerScore.objects.all()
        assert scores
        eq_(scores[0].score, mkt.REVIEWED_SCORES[reviewed_type])
        eq_(scores[0].note_key, reviewed_type)
class TestReviewApp(SetupFilesMixin, AppReviewerTest, TestReviewMixin,
AccessMixin, AttachmentManagementMixin,
TestedonManagementMixin):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestReviewApp, self).setUp()
self.mozilla_contact = 'contact@mozilla.com'
self.app = self.get_app()
make_rated(self.app)
self.app.update(status=mkt.STATUS_PENDING,
mozilla_contact=self.mozilla_contact)
self.version = self.app.latest_version
self.version.files.all().update(status=mkt.STATUS_PENDING)
self.file = self.version.all_files[0]
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
self.setup_files()
def get_app(self):
return Webapp.objects.get(id=337141)
def test_review_viewing_ping(self):
eq_(self.client.post(reverse('reviewers.review_viewing')).status_code,
200)
    @mock.patch('mkt.webapps.models.Webapp.in_rereview_queue')
    def test_rereview(self, is_rereview_queue):
        """The review page flags an app sitting in the re-review queue."""
        is_rereview_queue.return_value = True
        content = pq(self.client.get(self.url).content)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        assert content('
    @mock.patch('mkt.webapps.models.Webapp.in_escalation_queue')
    def test_escalated(self, in_escalation_queue):
        """The review page flags an app sitting in the escalation queue."""
        in_escalation_queue.return_value = True
        content = pq(self.client.get(self.url).content)
        # NOTE(review): the selector literal below is truncated in this copy
        # of the file -- restore the original selector before running.
        assert content('
def test_cannot_review_my_app(self):
with self.settings(ALLOW_SELF_REVIEWS=False):
self.app.addonuser_set.create(
user=UserProfile.objects.get(email='editor@mozilla.com'))
res = self.client.head(self.url)
self.assert3xx(res, reverse('reviewers.home'))
res = self.client.post(self.url)
self.assert3xx(res, reverse('reviewers.home'))
def test_cannot_review_blocklisted_app(self):
self.app.update(status=mkt.STATUS_BLOCKED)
res = self.client.get(self.url)
self.assert3xx(res, reverse('reviewers.home'))
res = self.client.post(self.url)
self.assert3xx(res, reverse('reviewers.home'))
def test_review_no_latest_version(self):
self.app.versions.all().delete()
self.app.reload()
eq_(self.app.latest_version, None)
eq_(self.app.current_version, None)
response = self.client.get(self.url)
eq_(response.status_code, 200)
doc = pq(response.content)
assert not doc('input[name=action][value=info]').length
assert not doc('input[name=action][value=comment]').length
assert not doc('input[name=action][value=public]').length
assert not doc('input[name=action][value=reject]').length
# Also try with a packaged app.
self.app.update(is_packaged=True)
response = self.client.get(self.url)
eq_(response.status_code, 200)
def test_sr_can_review_blocklisted_app(self):
self.app.update(status=mkt.STATUS_BLOCKED)
self.login_as_senior_reviewer()
eq_(self.client.get(self.url).status_code, 200)
data = {'action': 'public', 'comments': 'yo'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
res = self.client.post(self.url, data)
self.assert3xx(res, reverse('reviewers.apps.queue_pending'))
def test_pending_to_reject_w_device_overrides(self):
# This shouldn't be possible unless there's form hacking.
AddonDeviceType.objects.create(addon=self.app,
device_type=mkt.DEVICE_DESKTOP.id)
AddonDeviceType.objects.create(addon=self.app,
device_type=mkt.DEVICE_TABLET.id)
eq_(self.app.publish_type, mkt.PUBLISH_IMMEDIATE)
data = {'action': 'reject', 'comments': 'something',
'device_override': [mkt.DEVICE_DESKTOP.id]}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
eq_(app.status, mkt.STATUS_REJECTED)
eq_(set([o.id for o in app.device_types]),
set([mkt.DEVICE_DESKTOP.id, mkt.DEVICE_TABLET.id]))
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
def test_pending_to_public_w_requirements_overrides(self):
data = {'action': 'public', 'comments': 'something',
'has_packaged_apps': True}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
assert not self.app.latest_version.features.has_packaged_apps
self.post(data)
app = self.get_app()
assert app.latest_version.features.has_packaged_apps
# Since features have been changed by the reviewer, the app should not
# be immediately published.
eq_(app.publish_type, mkt.PUBLISH_PRIVATE)
eq_(app.status, mkt.STATUS_APPROVED)
self._check_log(mkt.LOG.REVIEW_FEATURES_OVERRIDE)
# A reviewer changing features shouldn't generate a re-review.
eq_(RereviewQueue.objects.count(), 0)
def test_pending_to_public_w_requirements_removed(self):
self.app.latest_version.features.update(has_packaged_apps=True)
data = {'action': 'public', 'comments': 'something',
'has_packaged_apps': False}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
assert self.app.latest_version.features.has_packaged_apps
self.post(data)
app = self.get_app()
assert not app.latest_version.features.has_packaged_apps
eq_(app.publish_type, mkt.PUBLISH_PRIVATE)
eq_(app.status, mkt.STATUS_APPROVED)
self._check_log(mkt.LOG.REVIEW_FEATURES_OVERRIDE)
eq_(RereviewQueue.objects.count(), 0)
def test_pending_to_reject_w_requirements_overrides(self):
# Rejecting an app doesn't let you override features requirements.
data = {'action': 'reject', 'comments': 'something',
'has_packaged_apps': True}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
assert not self.app.latest_version.features.has_packaged_apps
self.post(data)
app = self.get_app()
assert not app.latest_version.features.has_packaged_apps
eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
eq_(app.status, mkt.STATUS_REJECTED)
def test_pending_to_public_w_requirements_overrides_nothing_changed(self):
self.version.features.update(has_packaged_apps=True)
data = {'action': 'public', 'comments': 'something',
'has_packaged_apps': True}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
assert self.app.latest_version.features.has_packaged_apps
self.post(data)
app = self.get_app()
assert app.latest_version.features.has_packaged_apps
eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
eq_(app.status, mkt.STATUS_PUBLIC)
action_id = mkt.LOG.REVIEW_FEATURES_OVERRIDE.id
assert not AppLog.objects.filter(
addon=self.app, activity_log__action=action_id).exists()
@mock.patch('mkt.reviewers.views.messages.success', new=mock.Mock)
def test_incomplete_cant_approve(self):
self.app.update(status=mkt.STATUS_NULL)
self.app.latest_version.files.update(status=mkt.STATUS_NULL)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(self.get_app().status, mkt.STATUS_NULL)
    def test_notification_email_translation(self):
        """Notification mail should use the app's default locale (fr here),
        not the requesting reviewer's (es).
        Currently skipped: everything below the raise is unreachable until
        the SkipTest is removed."""
        raise SkipTest
        original_name = unicode(self.app.name)
        fr_translation = u'Mais allô quoi!'
        es_translation = u'¿Dónde está la biblioteca?'
        self.app.name = {
            'fr': fr_translation,
            'es': es_translation,
        }
        self.app.default_locale = 'fr'
        self.app.save()
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.client.post(self.url, data, HTTP_ACCEPT_LANGUAGE='es')
        eq_(translation.get_language(), 'es')
        eq_(len(mail.outbox), 2)
        msg = mail.outbox[0]
        # Subject and body must use the French (default-locale) name even
        # though the request was made in Spanish.
        assert original_name not in msg.subject
        assert es_translation not in msg.subject
        assert fr_translation in msg.subject
        assert original_name not in msg.body
        assert es_translation not in msg.body
        assert fr_translation in msg.body
@mock.patch('lib.crypto.packaged.sign')
def test_require_sig_for_public(self, sign):
sign.side_effect = packaged.SigningError
self.get_app().update(is_packaged=True)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.client.post(self.url, data)
eq_(self.get_app().status, mkt.STATUS_PENDING)
def _test_pending_to_public(self):
self.app.update(mozilla_contact='')
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], ('Approved'))
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
def test_pending_to_public(self):
self._test_pending_to_public()
@mock.patch('mkt.reviewers.views.messages.success')
def test_pending_to_escalation(self, messages):
    """Escalating queues the app and mails both dev and senior reviewer."""
    form = {'action': 'escalate', 'comments': 'soup her man'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    self.post(form)
    eq_(EscalationQueue.objects.count(), 1)
    self._check_log(mkt.LOG.ESCALATE_MANUAL)
    # Two mails: one to the developer, one to the senior reviewer.
    eq_(len(mail.outbox), 2)
    self._check_email(self._get_mail('steamcube'), 'Escalated')
    self._check_email(self._get_mail('snrreviewer'), 'Escalated',
                      to=[self.snr_reviewer_user.email])
    eq_(messages.call_args_list[0][0][1], 'Review successfully processed.')
def test_pending_to_disable_senior_reviewer(self):
    """Senior reviewers can ban (disable) a public app."""
    self.login_as_senior_reviewer()
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    form = {'action': 'disable', 'comments': 'banned ur app'}
    for extra in (self._attachment_management_form(num=0),
                  self._testedon_management_form()):
        form.update(extra)
    self.post(form)
    app = self.get_app()
    eq_(app.status, mkt.STATUS_DISABLED)
    eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED)
    self._check_log(mkt.LOG.APP_DISABLED)
    self._check_email_dev_and_contact('Banned')
def test_pending_to_disable(self):
    """Regular reviewers may not disable: form errors, nothing changes."""
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    form = {'action': 'disable', 'comments': 'banned ur app'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    res = self.client.post(self.url, form)
    eq_(res.status_code, 200)
    ok_('action' in res.context['form'].errors)
    eq_(self.get_app().status, mkt.STATUS_PUBLIC)
    eq_(len(mail.outbox), 0)
def _test_escalation_to_public(self):
    """Shared helper: approving from the escalated queue publishes the
    app and empties the queue."""
    EscalationQueue.objects.create(addon=self.app)
    eq_(self.app.status, mkt.STATUS_PENDING)
    form = {'action': 'public', 'comments': 'something'}
    for extra in (self._attachment_management_form(num=0),
                  self._testedon_management_form()):
        form.update(extra)
    self.post(form, queue='escalated')
    app = self.get_app()
    eq_(app.status, mkt.STATUS_PUBLIC)
    eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC)
    self._check_log(mkt.LOG.APPROVE_VERSION)
    eq_(EscalationQueue.objects.count(), 0)
    self._check_email_dev_and_contact('Approved')
    self._check_email_body()
def test_escalation_to_public(self):
    """Escalated → public approval (delegates to the shared helper)."""
    self._test_escalation_to_public()
def test_escalation_to_reject(self):
    """Rejecting from the escalated queue rejects files and clears it."""
    EscalationQueue.objects.create(addon=self.app)
    eq_(self.app.status, mkt.STATUS_PENDING)
    file_ids = list(self.version.files.values_list('id', flat=True))
    form = {'action': 'reject', 'comments': 'something'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    self.post(form, queue='escalated')
    app = self.get_app()
    eq_(app.status, mkt.STATUS_REJECTED)
    eq_(File.objects.filter(id__in=file_ids)[0].status, mkt.STATUS_REJECTED)
    self._check_log(mkt.LOG.REJECT_VERSION)
    eq_(EscalationQueue.objects.count(), 0)
    self._check_email_dev_and_contact('Rejected')
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
def test_escalation_to_disable_senior_reviewer(self):
    """Senior reviewers can disable straight from the escalated queue."""
    self.login_as_senior_reviewer()
    EscalationQueue.objects.create(addon=self.app)
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    form = {'action': 'disable', 'comments': 'banned ur app'}
    for extra in (self._attachment_management_form(num=0),
                  self._testedon_management_form()):
        form.update(extra)
    self.post(form, queue='escalated')
    app = self.get_app()
    eq_(app.status, mkt.STATUS_DISABLED)
    eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED)
    self._check_log(mkt.LOG.APP_DISABLED)
    eq_(EscalationQueue.objects.count(), 0)
    self._check_email_dev_and_contact('Banned')
def test_escalation_to_disable(self):
    """Non-senior reviewers cannot disable from the escalated queue."""
    EscalationQueue.objects.create(addon=self.app)
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    form = {'action': 'disable', 'comments': 'banned ur app'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    res = self.client.post(self.url, form, queue='escalated')
    eq_(res.status_code, 200)
    ok_('action' in res.context['form'].errors)
    eq_(self.get_app().status, mkt.STATUS_PUBLIC)
    eq_(EscalationQueue.objects.count(), 1)
    eq_(len(mail.outbox), 0)
def test_clear_escalation(self):
    """Clearing an escalation logs it and mails only the mozilla contact."""
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    EscalationQueue.objects.create(addon=self.app)
    form = {'action': 'clear_escalation', 'comments': 'all clear'}
    for extra in (self._attachment_management_form(num=0),
                  self._testedon_management_form()):
        form.update(extra)
    self.post(form, queue='escalated')
    eq_(EscalationQueue.objects.count(), 0)
    self._check_log(mkt.LOG.ESCALATION_CLEARED)
    eq_(len(mail.outbox), 1)
    self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
def test_rereview_to_reject(self):
    """Rejecting from the re-review queue clears it and scores the review."""
    RereviewQueue.objects.create(addon=self.app)
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    form = {'action': 'reject', 'comments': 'something'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    self.post(form, queue='rereview')
    eq_(self.get_app().status, mkt.STATUS_REJECTED)
    self._check_log(mkt.LOG.REJECT_VERSION)
    eq_(RereviewQueue.objects.count(), 0)
    self._check_email_dev_and_contact('Rejected')
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_rereview_to_disable_senior_reviewer(self):
    """Senior reviewers can disable from the re-review queue."""
    self.login_as_senior_reviewer()
    RereviewQueue.objects.create(addon=self.app)
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    form = {'action': 'disable', 'comments': 'something'}
    for extra in (self._attachment_management_form(num=0),
                  self._testedon_management_form()):
        form.update(extra)
    self.post(form, queue='rereview')
    eq_(self.get_app().status, mkt.STATUS_DISABLED)
    self._check_log(mkt.LOG.APP_DISABLED)
    eq_(RereviewQueue.objects.filter(addon=self.app).count(), 0)
    self._check_email_dev_and_contact('Banned')
def test_rereview_to_disable(self):
    """Non-senior reviewers cannot disable from the re-review queue."""
    RereviewQueue.objects.create(addon=self.app)
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    form = {'action': 'disable', 'comments': 'banned ur app'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    res = self.client.post(self.url, form, queue='rereview')
    eq_(res.status_code, 200)
    ok_('action' in res.context['form'].errors)
    eq_(self.get_app().status, mkt.STATUS_PUBLIC)
    eq_(RereviewQueue.objects.filter(addon=self.app).count(), 1)
    eq_(len(mail.outbox), 0)
def test_manual_rereview(self):
    """Manually flagging for re-review queues without changing status."""
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    form = {'action': 'manual_rereview', 'comments': 'man dem'}
    for extra in (self._attachment_management_form(num=0),
                  self._testedon_management_form()):
        form.update(extra)
    self.post(form)
    # The app status shouldn't change.
    eq_(self.get_app().status, mkt.STATUS_PUBLIC)
    eq_(RereviewQueue.objects.count(), 1)
    self._check_log(mkt.LOG.REREVIEW_MANUAL)
    eq_(len(mail.outbox), 1)
    self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
def test_clear_rereview(self):
    """Clearing a re-review notifies only the mozilla contact, not dev."""
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    RereviewQueue.objects.create(addon=self.app)
    form = {'action': 'clear_rereview', 'comments': 'all clear'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    self.post(form, queue='rereview')
    eq_(RereviewQueue.objects.count(), 0)
    self._check_log(mkt.LOG.REREVIEW_CLEARED)
    # No developer email on clearing re-reviews: a single contact mail.
    eq_(len(mail.outbox), 1)
    self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
    self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_clear_rereview_unlisted(self):
    """Clearing a re-review works the same for an unlisted app."""
    self.app.update(status=mkt.STATUS_UNLISTED)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    RereviewQueue.objects.create(addon=self.app)
    form = {'action': 'clear_rereview', 'comments': 'all clear'}
    for extra in (self._attachment_management_form(num=0),
                  self._testedon_management_form()):
        form.update(extra)
    self.post(form, queue='rereview')
    eq_(RereviewQueue.objects.count(), 0)
    self._check_log(mkt.LOG.REREVIEW_CLEARED)
    eq_(len(mail.outbox), 1)
    self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
    self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_rereview_to_escalation(self):
    """Escalating from the re-review queue mails dev and senior reviewer."""
    RereviewQueue.objects.create(addon=self.app)
    form = {'action': 'escalate', 'comments': 'soup her man'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    self.post(form, queue='rereview')
    eq_(EscalationQueue.objects.count(), 1)
    self._check_log(mkt.LOG.ESCALATE_MANUAL)
    # Two emails: one to the dev, one to the senior reviewer.
    eq_(len(mail.outbox), 2)
    self._check_email(self._get_mail('steamcube'), 'Escalated')
    self._check_email(self._get_mail('snrreviewer'), 'Escalated',
                      to=[self.snr_reviewer_user.email])
def test_more_information(self):
    """An info request flags the version and mails dev plus contact.

    Behaviour is identical across all review queues.
    """
    form = {'action': 'info', 'comments': 'Knead moor in faux'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    self.post(form)
    eq_(self.get_app().status, mkt.STATUS_PENDING)
    self._check_log(mkt.LOG.REQUEST_INFORMATION)
    versions = self.get_app().versions.all()
    eq_(versions.count(), 1)
    eq_(versions.filter(has_info_request=True).count(), 1)
    self._check_email_dev_and_contact('Reviewer comment')
def test_multi_cc_email(self):
    """An info request mails every comma-separated mozilla_contact."""
    contacts = [user_factory(email=u'á').email,
                user_factory(email=u'ç').email]
    self.mozilla_contact = ', '.join(contacts)
    self.app.update(mozilla_contact=self.mozilla_contact)
    form = {'action': 'info', 'comments': 'Knead moor in faux'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    self.post(form)
    eq_(len(mail.outbox), 3)
    subject = 'Reviewer comment'
    self._check_email(self._get_mail('steamcube'), subject)
    for contact in contacts:
        self._check_email(self._get_mail(contact), subject, to=[contact])
def test_comment(self):
    """A private reviewer comment mails only the mozilla contact.

    Behaviour is identical across all review queues.
    """
    form = {'action': 'comment', 'comments': 'mmm, nice app'}
    for extra in (self._attachment_management_form(num=0),
                  self._testedon_management_form()):
        form.update(extra)
    self.post(form)
    eq_(len(mail.outbox), 1)
    self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
    self._check_log(mkt.LOG.COMMENT_VERSION)
def test_receipt_no_node(self):
# Free app: expects no receipt-check node in the rendered review page.
# NOTE(review): the pq() selector below is truncated — presumably a
# '#...'-style CSS id selector was stripped along with comments.
# Restore the selector (and expected count) from upstream before running.
res = self.client.get(self.url)
eq_(len(pq(res.content)('
def test_receipt_has_node(self):
# Premium app: expects the receipt-check node once per layout variant
# (desktop and mobile). Selectors below are likewise truncated after
# '.reviewers-desktop' / '.reviewers-mobile' — TODO restore from upstream.
self.get_app().update(premium_type=mkt.ADDON_PREMIUM)
res = self.client.get(self.url)
eq_(len(pq(res.content)('.reviewers-desktop
1)
eq_(len(pq(res.content)('.reviewers-mobile
1)
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json(self, mock_get):
    """The manifest proxy view passes content and headers through as-is."""
    fake_response = mock.Mock()
    fake_response.content = 'the manifest contents <script>'
    fake_response.headers = CaseInsensitiveDict(
        {'content-type': 'application/x-web-app-manifest+json <script>'})
    mock_get.return_value = fake_response
    expected = {
        'content': 'the manifest contents <script>',
        'headers': {'content-type':
                    'application/x-web-app-manifest+json <script>'},
        'success': True,
        'permissions': {},
    }
    res = self.client.get(reverse('reviewers.apps.review.manifest',
                                  args=[self.app.app_slug]))
    eq_(res.status_code, 200)
    eq_(json.loads(res.content), expected)
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_unicode(self, mock_get):
    """Non-ASCII manifest content round-trips through the proxy view."""
    fake_response = mock.Mock()
    fake_response.content = u'كك some foreign ish'
    fake_response.headers = CaseInsensitiveDict({})
    mock_get.return_value = fake_response
    res = self.client.get(reverse('reviewers.apps.review.manifest',
                                  args=[self.app.app_slug]))
    eq_(res.status_code, 200)
    eq_(json.loads(res.content), {'content': u'كك some foreign ish',
                                  'headers': {}, 'success': True,
                                  'permissions': {}})
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_encoding(self, mock_get):
# Serves a non-UTF-8 manifest fixture through the proxy view.
m = mock.Mock()
m.content = open(self.manifest_path('non-utf8.webapp')).read()
m.headers = CaseInsensitiveDict({})
mock_get.return_value = m
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
data = json.loads(r.content)
# NOTE(review): the next line is garbled — it appears to fuse this
# test's final assertion (an HTML-entity check on `data['content']`)
# with the decorator and signature of a following "empty manifest"
# test whose body continues below. Restore both from upstream.
assert u'&t):
m = mock.Mock()
m.content = ''
m.headers = CaseInsensitiveDict({})
mock_get.return_value = m
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
# Empty manifest content should still produce a successful payload.
eq_(json.loads(r.content), {'content': u'', 'headers': {},
'success': True, 'permissions': {}})
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_traceback_in_response(self, mock_get):
    """An SSL error while fetching the manifest must not 500 the view."""
    fake_response = mock.Mock()
    fake_response.content = {'name': 'Some name'}
    fake_response.headers = CaseInsensitiveDict({})
    mock_get.side_effect = requests.exceptions.SSLError
    mock_get.return_value = fake_response
    # We should not 500 on a traceback.
    res = self.client.get(reverse('reviewers.apps.review.manifest',
                                  args=[self.app.app_slug]))
    eq_(res.status_code, 200)
    payload = json.loads(res.content)
    assert payload['content'], 'There should be a content with the traceback'
    eq_(payload['headers'], {})
@mock.patch('mkt.reviewers.views.json.dumps')
def test_manifest_json_packaged(self, mock_):
    """For a packaged app the view builds a mini-manifest (json.dumps
    is exercised) instead of fetching a hosted manifest."""
    mock_.return_value = '{}'
    self.get_app().update(is_packaged=True)
    res = self.client.get(reverse('reviewers.apps.review.manifest',
                                  args=[self.app.app_slug]))
    eq_(res.status_code, 200)
    assert mock_.called
@mock.patch('mkt.reviewers.views._get_manifest_json')
def test_manifest_json_perms(self, mock_):
    """Manifest permissions are annotated with their web/privileged type."""
    mock_.return_value = {
        'permissions': {
            "foo": {"description": "foo"},
            "camera": {"description": "<script>"}
        }
    }
    self.get_app().update(is_packaged=True)
    res = self.client.get(reverse('reviewers.apps.review.manifest',
                                  args=[self.app.app_slug]))
    eq_(res.status_code, 200)
    eq_(json.loads(res.content)['permissions'],
        {'foo': {'description': 'foo', 'type': 'web'},
         'camera': {'description': '<script>', 'type': 'priv'}})
def test_abuse(self):
# One abuse report should render a count of 1 in both the desktop and
# mobile layouts, each linking to the app's abuse-report page.
# NOTE(review): several literals below are truncated — the abuse
# `message` string and two pq() selectors that presumably continued
# with a '#...'-style id selector stripped along with comments.
# Restore them from upstream before running.
AbuseReport.objects.create(addon=self.app, message='!@
res = self.client.get(self.url)
doc = pq(res.content)
dd = doc('.reviewers-desktop
eq_(dd.text(), u'1')
eq_(dd.find('a').attr('href'), reverse('reviewers.apps.review.abuse',
args=[self.app.app_slug]))
dd = doc('.reviewers-mobile
eq_(dd.text(), u'1')
eq_(dd.find('a').attr('href'), reverse('reviewers.apps.review.abuse',
args=[self.app.app_slug]))
def _attachment_form_data(self, num=1, action='comment'):
    """Build review POST data carrying `num` attachment forms."""
    form = {'action': action, 'comments': 'mmm, nice app'}
    form.update(self._attachment_management_form(num=num))
    form.update(self._attachments(num))
    return form
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
@mock.patch('mkt.site.storage_utils.LocalFileStorage.save')
def test_no_attachments(self, save_mock):
    """Posting with zero attachments never touches file storage."""
    form = self._attachment_form_data(num=0, action='public')
    form.update(self._testedon_management_form())
    self.post(form)
    eq_(save_mock.called, False, save_mock.call_args_list)
def test_idn_app_domain(self):
    """An internationalized app domain gets flagged on the review page."""
    res = self.client.get(self.url)
    assert 'IDN domain!' not in res.content
    self.get_app().update(app_domain=u'http://www.allïzom.org')
    res = self.client.get(self.url)
    assert 'IDN domain!' in res.content
def test_xss_domain(self):
    """A script tag in app_domain must be HTML-escaped in the page.

    It shouldn't be possible to have this in an app domain — it would
    never validate at submission — but better safe than sorry.
    """
    self.get_app().update(app_domain=u'<script>alert(42)</script>')
    response = self.client.get(self.url)
    # Fix: the original asserted the identical raw string both absent and
    # present (self-contradictory — the escaped entities were mangled).
    # The raw payload must not appear; only the HTML-escaped form may.
    assert '<script>alert(42)</script>' not in response.content
    assert '&lt;script&gt;alert(42)&lt;/script&gt;' in response.content
def test_priority_flag_cleared_for_public(self):
    """Approval clears the priority-review flag."""
    self.get_app().update(priority_review=True)
    form = {'action': 'public', 'comments': 'something'}
    for extra in (self._attachment_management_form(num=0),
                  self._testedon_management_form()):
        form.update(extra)
    self.post(form)
    eq_(self.get_app().priority_review, False)
def test_priority_flag_uncleared_for_reject(self):
    """Rejection leaves the priority-review flag set."""
    self.get_app().update(priority_review=True)
    form = {'action': 'reject', 'comments': 'something'}
    for extra in (self._attachment_management_form(num=0),
                  self._testedon_management_form()):
        form.update(extra)
    self.post(form)
    eq_(self.get_app().priority_review, True)
def test_is_showcase_checkbox(self):
    """The checkbox reflects presence of the showcase tag on the app."""
    res = self.client.get(self.url)
    eq_(pq(res.content)('#id_is_showcase:checked').length, 0)
    Tag(tag_text=SHOWCASE_TAG).save_tag(self.get_app())
    res = self.client.get(self.url)
    eq_(pq(res.content)('#id_is_showcase:checked').length, 1)
def test_is_showcase_on(self):
    """Checking is_showcase tags the app and mails the curation board."""
    form = {'action': 'comment', 'comments': 'blah', 'is_showcase': 'on'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    self.post(form)
    tag_texts = self.get_app().tags.values_list('tag_text', flat=True)
    assert SHOWCASE_TAG in tag_texts
    msg = self._get_mail('appcurationboard')
    eq_(msg.to, [settings.APP_CURATION_BOARD_EMAIL])
    eq_(msg.subject,
        u'App [%s] nominated to be featured' % self.get_app().name)
def test_is_showcase_off(self):
    """An unchecked is_showcase box (key absent) neither tags nor mails.

    Uses action=comment because it does less and keeps the test fast.
    """
    self.app.update(mozilla_contact='')
    form = {'action': 'comment', 'comments': 'blah'}
    form.update(self._attachment_management_form(num=0))
    form.update(self._testedon_management_form())
    self.post(form)
    tag_texts = self.get_app().tags.values_list('tag_text', flat=True)
    assert SHOWCASE_TAG not in tag_texts
    eq_(len(mail.outbox), 0)
def test_versions_history_pagination(self):
# Three versions with a forced page size of 2 means two pages in the
# review history: 2 rows + "next" link on page 1, 1 row + "prev" on
# page 2, with page links anchored at #history.
self.app.update(is_packaged=True)
version_factory(addon=self.app, version='2.0')
version_factory(addon=self.app, version='3.0')
# Patch the view's paginate() to force a page size of 2.
with mock.patch('mkt.reviewers.views.paginate',
lambda req, objs, limit: paginate(req, objs, 2)):
content = pq(self.client.get(self.url).content)
eq_(len(content('#review-files tr.listing-body')), 2)
eq_(len(content('#review-files-paginate a[rel=next]')), 1)
eq_(len(content('#review-files-paginate a[rel=prev]')), 0)
link = content('#review-files-paginate a[rel=next]')[0].attrib['href']
eq_(link, '%s?page=2#history' % self.url)
# Follow the "next" link under the same forced page size.
with mock.patch('mkt.reviewers.views.paginate',
lambda req, objs, limit: paginate(req, objs, 2)):
content = pq(self.client.get(link).content)
eq_(len(content('#review-files tr.listing-body')), 1)
eq_(len(content('#review-files-paginate a[rel=next]')), 0)
eq_(len(content('#review-files-paginate a[rel=prev]')), 1)
eq_(content('#review-files-paginate a[rel=prev]')[0].attrib['href'],
'%s?page=1#history' % self.url)
class TestCannedResponses(AppReviewerTest):
    """Canned responses appear grouped in the review form's choices."""

    def setUp(self):
        super(TestCannedResponses, self).setUp()
        self.login_as_editor()
        self.app = app_factory(name='XXX', status=mkt.STATUS_PENDING)
        self.cr = CannedResponse.objects.create(
            name=u'app reason', response=u'app reason body',
            sort_group=u'public')
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def test_ok(self):
        """The canned response shows up in the first real choice group."""
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        review_form = res.context['form']
        # choices[1][1] is the first real group's list of entries
        # (choices[0] is the empty/default option).
        group_entries = review_form.fields['canned_response'].choices[1][1]
        eq_(len(group_entries), 1)
        assert self.cr.response in group_entries[0]
# Review-action outcomes for a *hosted* (non-packaged) pending app.
# The class-level patches are injected into each test method bottom-up:
# update_name, update_locales, update_cached_manifests, index_webapps,
# messages.
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApproveHostedApp(AppReviewerTest, TestReviewMixin,
AttachmentManagementMixin, TestedonManagementMixin):
fixtures = fixture('webapp_337141')
def setUp(self):
# Fixture app reset to pending with a pending file and no current version.
super(TestApproveHostedApp, self).setUp()
self.mozilla_contact = 'contact@mozilla.com'
self.app = self.get_app()
self.file = self.app.latest_version.files.all()[0]
self.file.update(status=mkt.STATUS_PENDING)
self.app.update(status=mkt.STATUS_PENDING,
mozilla_contact=self.mozilla_contact,
_current_version=None)
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
def get_app(self):
# Re-fetch so post-request state is observed.
return Webapp.objects.get(id=337141)
def _check_message(self, msg):
# The mocked messages.success call should report the hosted-review score.
eq_(msg.call_args_list[0][0][1],
'"Web App Review" successfully processed (+60 points, 60 total).')
def test_pending_to_public(self, update_name, update_locales,
update_cached_manifests,
index_webapps, messages):
# Approving publishes; no manifest-cache/name update for hosted apps.
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_message(messages)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
def test_pending_to_hidden(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages):
# PUBLISH_HIDDEN apps land in STATUS_UNLISTED on approval.
self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
def test_pending_to_approved(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages):
# PUBLISH_PRIVATE apps land in STATUS_APPROVED ("approved but private").
self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE)
index_webapps.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_message(messages)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
def test_pending_to_reject(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages):
# Rejecting marks app and file rejected; still re-indexes the webapp.
index_webapps.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'reject', 'comments': 'suxor'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(index_webapps.delay.call_count, 1)
app = self.get_app()
eq_(app.status, mkt.STATUS_REJECTED)
eq_(self.file.reload().status, mkt.STATUS_REJECTED)
self._check_log(mkt.LOG.REJECT_VERSION)
self._check_message(messages)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
eq_(update_name.call_count, 0) # Not a packaged app.
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
# Review-action outcomes for a *packaged* pending app: approval paths must
# additionally update the name from the package manifest, refresh the cached
# mini-manifest and sign the approved version.
# Patches are injected bottom-up: update_name, update_locales,
# update_cached_manifests, index_webapps, messages, sign_mock.
@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedApp(AppReviewerTest, TestReviewMixin,
AttachmentManagementMixin,
TestedonManagementMixin):
fixtures = fixture('webapp_337141')
def setUp(self):
# Fixture app reset to a pending, packaged state with no current version.
super(TestApprovePackagedApp, self).setUp()
self.mozilla_contact = 'contact@mozilla.com'
self.app = self.get_app()
self.file = self.app.latest_version.files.all()[0]
self.file.update(status=mkt.STATUS_PENDING)
self.app.update(status=mkt.STATUS_PENDING,
mozilla_contact=self.mozilla_contact,
_current_version=None, is_packaged=True)
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
def get_app(self):
# Re-fetch so post-request state is observed.
return Webapp.objects.get(id=337141)
def _check_message(self, msg):
# The mocked messages.success call should report the packaged-review score.
eq_(msg.call_args_list[0][0][1],
'"Packaged App Review" successfully processed '
'(+60 points, 60 total).')
def test_pending_to_public(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, sign_mock):
# Approval publishes, refreshes name/locales/manifest and signs.
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
def test_pending_to_hidden(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, sign_mock):
# PUBLISH_HIDDEN apps land in STATUS_UNLISTED; still signed.
self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
def test_pending_to_approved(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, sign_mock):
# PUBLISH_PRIVATE apps land in STATUS_APPROVED; still signed.
self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
def test_pending_to_rejected(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, sign_mock):
# Rejection: no name/locale/manifest updates and no signing.
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_REJECTED)
eq_(self.file.reload().status, mkt.STATUS_REJECTED)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(sign_mock.call_count, 0)
def test_pending_to_approved_app_private_prior_version_rejected(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, sign_mock):
# A previously-rejected private app with a new pending version:
# approving makes the new version current and signs it.
self.app.update(status=mkt.STATUS_REJECTED,
publish_type=mkt.PUBLISH_PRIVATE)
self.file.update(status=mkt.STATUS_DISABLED)
self.new_version = version_factory(
addon=self.app, version='1.1',
file_kw={'status': mkt.STATUS_PENDING})
index_webapps.delay.reset_mock()
update_cached_manifests.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(self.app.current_version, None)
eq_(self.app.latest_version, self.new_version)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
eq_(app.latest_version, self.new_version)
eq_(app.current_version, self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], self.new_version.pk)
@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedVersions(AppReviewerTest, TestReviewMixin,
AttachmentManagementMixin,
TestedonManagementMixin):
fixtures = fixture('webapp_337141')
def setUp(self):
# Start from a public packaged app and add a pending 2.0 version,
# so each test reviews a *version update* rather than a new app.
super(TestApprovePackagedVersions, self).setUp()
self.mozilla_contact = 'contact@mozilla.com'
self.app = self.get_app()
self.file = self.app.latest_version.files.all()[0]
self.app.update(status=mkt.STATUS_PUBLIC,
mozilla_contact=self.mozilla_contact,
is_packaged=True)
self.new_version = version_factory(
addon=self.app, version='2.0',
file_kw={'status': mkt.STATUS_PENDING})
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
def get_app(self):
# Re-fetch the fixture webapp so post-request state is observed.
return Webapp.objects.get(id=337141)
def _check_message(self, msg):
# The mocked messages.success call should report the update-review score.
eq_(msg.call_args_list[0][0][1],
'"Updated Packaged App Review" successfully processed '
'(+40 points, 40 total).')
def test_version_pending_to_public(self, update_name, update_locales,
                                   update_cached_manifests, index_webapps,
                                   messages, sign_mock):
    """Approving a pending version of a public app makes it current,
    refreshes name/locales/manifest and signs the new version."""
    index_webapps.delay.reset_mock()
    eq_(update_name.call_count, 0)
    eq_(update_locales.call_count, 0)
    eq_(update_cached_manifests.delay.call_count, 0)
    form = {'action': 'public', 'comments': 'something'}
    for extra in (self._attachment_management_form(num=0),
                  self._testedon_management_form()):
        form.update(extra)
    self.post(form)
    app = self.get_app()
    eq_(app.status, mkt.STATUS_PUBLIC)
    eq_(app.current_version, self.new_version)
    eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
    self._check_log(mkt.LOG.APPROVE_VERSION)
    self._check_email_dev_and_contact('Approved')
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
    self._check_message(messages)
    eq_(update_name.call_count, 1)
    eq_(update_locales.call_count, 1)
    eq_(index_webapps.delay.call_count, 1)
    eq_(update_cached_manifests.delay.call_count, 1)
    eq_(sign_mock.call_args[0][0], app.current_version.pk)
def test_version_pending_to_approved(self, update_name, update_locales,
update_cached_manifests,
index_webapps, messages,
sign_mock):
self.app.update(publish_type=mkt.PUBLISH_PRIVATE)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], self.new_version.pk)
def test_version_pending_to_public_app_unlisted(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, sign_mock):
self.app.update(status=mkt.STATUS_UNLISTED)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
eq_(app.current_version, self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], app.current_version.pk)
def test_version_pending_to_approved_app_unlisted(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, sign_mock):
self.app.update(status=mkt.STATUS_UNLISTED,
publish_type=mkt.PUBLISH_PRIVATE)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], self.new_version.pk)
def test_version_pending_to_public_app_private(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, sign_mock):
self.app.update(status=mkt.STATUS_APPROVED)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
eq_(app.current_version, self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], app.current_version.pk)
def test_version_pending_to_approved_app_private(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, sign_mock):
self.app.update(status=mkt.STATUS_APPROVED,
publish_type=mkt.PUBLISH_PRIVATE)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], self.new_version.pk)
def test_version_pending_to_rejected_app_public(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, sign_mock):
self.app.update(status=mkt.STATUS_PUBLIC)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
self._check_log(mkt.LOG.REJECT_VERSION)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(sign_mock.call_count, 0)
def test_version_pending_to_rejected_app_unlisted(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, sign_mock):
self.app.update(status=mkt.STATUS_UNLISTED)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
self._check_log(mkt.LOG.REJECT_VERSION)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(sign_mock.call_count, 0)
def test_version_pending_to_rejected_app_private(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, sign_mock):
self.app.update(status=mkt.STATUS_APPROVED)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
self._check_log(mkt.LOG.REJECT_VERSION)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(sign_mock.call_count, 0)
class TestReviewLog(AppReviewerTest, AccessMixin):
    """Tests for the reviewer activity-log page: listing, date filter and
    free-text search by comment, author, app name and app slug.

    NOTE(review): several lines in this class were truncated in this copy
    of the file -- pq() CSS selectors that began with '#' were stripped by
    a comment-removal pass. They are flagged individually below and must
    be restored from VCS history before this module can even be imported.
    """

    def setUp(self):
        super(TestReviewLog, self).setUp()
        # Note: if `created` is not specified, `app_factory` uses a randomly
        # generated timestamp.
        self.apps = [app_factory(name='XXX', created=days_ago(3),
                                 status=mkt.STATUS_PENDING),
                     app_factory(name='YYY', created=days_ago(2),
                                 status=mkt.STATUS_PENDING)]
        self.url = reverse('reviewers.apps.logs')
        # Entries logged by TASK_USER_ID count as automated task logs;
        # point it at the admin user so make_approvals() can create entries
        # the listing must exclude.
        patcher = mock.patch.object(settings, 'TASK_USER_ID',
                                    self.admin_user.id)
        patcher.start()
        self.addCleanup(patcher.stop)

    def get_user(self):
        # The acting reviewer whose entries should appear in the log.
        return self.reviewer_user

    def make_approvals(self):
        """Create one rejection log per app (plus one automated task log per
        app that the listing must NOT show), each on a different day."""
        d = 1
        for app in self.apps:
            # NOTE(review): this local shadows the module-level `days_ago`
            # helper used in setUp() above -- harmless, but worth renaming.
            days_ago = self.days_ago(d)
            mkt.log(mkt.LOG.REJECT_VERSION, app, app.latest_version,
                    user=self.get_user(), details={'comments': 'youwin'},
                    created=days_ago)
            # Throw in a few tasks logs that shouldn't get queried.
            mkt.log(mkt.LOG.REREVIEW_MANIFEST_CHANGE, app, app.latest_version,
                    user=self.admin_user, details={'comments': 'foo'},
                    created=days_ago)
            d += 1

    def make_an_approval(self, action, comment='youwin', user=None, app=None):
        # Single log entry with an overridable actor/app/comment.
        if not user:
            user = self.get_user()
        if not app:
            app = self.apps[0]
        mkt.log(action, app, app.latest_version, user=user,
                details={'comments': comment})

    def test_basic(self):
        self.make_approvals()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        assert doc('#log-filter button'), 'No filters.'
        # Two reviewer entries visible; task logs are excluded.
        rows = doc('tbody tr')
        logs = rows.filter(':not(.hide)')
        eq_(logs.length, 2)
        eq_(logs.find('.name .app-link').eq(0).attr('href'),
            self.apps[0].get_url_path())
        eq_(logs.find('.name .app-link').eq(1).attr('href'),
            self.apps[1].get_url_path())
        # The hidden sibling rows carry the review comments.
        eq_(rows.filter('.hide').eq(0).text(), 'youwin')

    def test_search_app_soft_deleted(self):
        # Soft-deleted apps must still show their past review history.
        self.make_approvals()
        self.apps[0].update(status=mkt.STATUS_DELETED)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        all_reviews = [d.attrib.get('data-addonid')
                       for d in doc('#log-listing tbody tr')]
        assert str(self.apps[0].pk) in all_reviews, (
            'Soft deleted review did not show up in listing')

    def test_xss(self):
        a = self.apps[0]
        a.name = '<script>alert("xss")</script>'
        a.save()
        mkt.log(mkt.LOG.REJECT_VERSION, a, a.latest_version,
                user=self.get_user(), details={'comments': 'xss!'})
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        inner_html = pq(r.content)('#log-listing tbody td').eq(1).html()
        # NOTE(review): as written these two asserts contradict each other;
        # the first almost certainly checked for the escaped entity form
        # (&lt;script&gt;) originally and was unescaped when this copy was
        # produced. Restore from VCS history.
        assert '<script>' in inner_html
        assert '<script>' not in inner_html

    def test_end_filter(self):
        # Filtering up to today must show all entries.
        self.make_approvals()
        date = time.strftime('%Y-%m-%d')
        r = self.client.get(self.url, dict(end=date))
        eq_(r.status_code, 200)
        doc = pq(r.content)('#log-listing tbody')
        eq_(doc('tr:not(.hide)').length, 2)
        eq_(doc('tr.hide').eq(0).text(), 'youwin')

    def test_end_filter_wrong(self):
        # A malformed `end` date must not break the page.
        self.make_approvals()
        r = self.client.get(self.url, dict(end='wrong!'))
        eq_(r.status_code, 200)
        # NOTE(review): truncated line -- the pq() selector (began with '#')
        # and the expected value were stripped; restore from VCS history.
        eq_(pq(r.content)('

    def test_search_comment_exists(self):
        # Search by comment text.
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello')
        r = self.client.get(self.url, dict(search='hello'))
        eq_(r.status_code, 200)
        # NOTE(review): truncated line -- selector stripped; see class note.
        eq_(pq(r.content)('

    def test_search_comment_doesnt_exist(self):
        # Non-matching comment search yields the empty-results row.
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello')
        r = self.client.get(self.url, dict(search='bye'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('.no-results').length, 1)

    def test_search_author_exists(self):
        # Search by the acting user's name.
        self.make_approvals()
        user = UserProfile.objects.get(email='regular@mozilla.com')
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user, comment='hi')
        r = self.client.get(self.url, dict(search='regular'))
        eq_(r.status_code, 200)
        # NOTE(review): truncated line -- selector stripped; see class note.
        rows = pq(r.content)('
        eq_(rows.filter(':not(.hide)').length, 1)
        eq_(rows.filter('.hide').eq(0).text(), 'hi')

    def test_search_author_doesnt_exist(self):
        self.make_approvals()
        user = UserProfile.objects.get(email='editor@mozilla.com')
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user)
        r = self.client.get(self.url, dict(search='wrong'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('.no-results').length, 1)

    def test_search_addon_exists(self):
        # Search by app name.
        self.make_approvals()
        app = self.apps[0]
        r = self.client.get(self.url, dict(search=app.name))
        eq_(r.status_code, 200)
        # NOTE(review): truncated line -- selector stripped; see class note.
        tr = pq(r.content)('
        eq_(tr.length, 1)
        eq_(tr.siblings('.comments').text(), 'youwin')

    def test_search_addon_by_slug_exists(self):
        # Search matches the app slug as well as the name.
        app = self.apps[0]
        app.app_slug = 'a-fox-was-sly'
        app.save()
        self.make_approvals()
        r = self.client.get(self.url, dict(search='fox'))
        eq_(r.status_code, 200)
        # NOTE(review): truncated line -- selector stripped; see class note.
        tr = pq(r.content)('
        eq_(tr.length, 1)
        eq_(tr.siblings('.comments').text(), 'youwin')

    def test_search_addon_doesnt_exist(self):
        self.make_approvals()
        r = self.client.get(self.url, dict(search='zzz'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('.no-results').length, 1)

    @mock.patch('mkt.developers.models.ActivityLog.arguments', new=mock.Mock)
    def test_addon_missing(self):
        # If the logged app can no longer be resolved, show a placeholder.
        self.make_approvals()
        r = self.client.get(self.url)
        # NOTE(review): truncated line -- selector stripped; see class note.
        eq_(pq(r.content)('
            'App has been deleted.')

    def test_request_info_logs(self):
        self.make_an_approval(mkt.LOG.REQUEST_INFORMATION)
        r = self.client.get(self.url)
        # NOTE(review): truncated line -- selector stripped; see class note.
        eq_(pq(r.content)('
            'More information requested')

    def test_escalate_logs(self):
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL)
        r = self.client.get(self.url)
        # NOTE(review): truncated line -- selector stripped; see class note.
        eq_(pq(r.content)('
            'Reviewer escalation')

    def test_no_double_encode(self):
        # Version strings must be escaped exactly once in the rendered log.
        version = self.apps[0].latest_version
        version.update(version='<foo>')
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL)
        r = self.client.get(self.url)
        # NOTE(review): truncated line -- selector stripped; the '<foo>'
        # needle may also have been an escaped entity originally.
        assert '<foo>' in pq(r.content)('
            'Double-encoded string was found in reviewer log.')
class TestMotd(AppReviewerTest, AccessMixin):
    """Message-of-the-day page: any reviewer may view it, but editing
    requires the AppReviewerMOTD:Edit permission."""

    def setUp(self):
        super(TestMotd, self).setUp()
        self.url = reverse('reviewers.apps.motd')
        # Config key backing the MOTD text.
        self.key = u'mkt_reviewers_motd'
        set_config(self.key, u'original value')

    def test_perms_not_editor(self):
        # Anonymous users are redirected to login; plain users get a 403.
        self.client.logout()
        req = self.client.get(self.url, follow=True)
        self.assert3xx(req, '%s?to=%s' % (reverse('users.login'), self.url))
        self.client.login('regular@mozilla.com')
        eq_(self.client.get(self.url).status_code, 403)

    def test_perms_not_motd(self):
        # Any type of reviewer can see the MOTD.
        self.login_as_editor()
        req = self.client.get(self.url)
        eq_(req.status_code, 200)
        # ...but without the edit permission no form is rendered.
        eq_(req.context['form'], None)
        # No redirect means it didn't save.
        eq_(self.client.post(self.url, dict(motd='motd')).status_code, 200)
        eq_(get_config(self.key), u'original value')

    def test_motd_change(self):
        # With the edit permission the form is shown, validated and saved.
        user = self.reviewer_user
        self.grant_permission(user, 'AppReviewerMOTD:Edit')
        self.login_as_editor()
        req = self.client.get(self.url)
        eq_(req.status_code, 200)
        eq_(req.context['form'].initial['motd'], u'original value')
        # An empty MOTD is rejected with a validation error.
        req = self.client.post(self.url, dict(motd=''))
        eq_(req.status_code, 200)
        # NOTE(review): truncated line -- the pq() CSS selector (it began
        # with '#') was stripped from this copy; restore from VCS history.
        eq_(pq(req.content)('
            'This field is required.')
        # A real post now.
        req = self.client.post(self.url, dict(motd='new motd'))
        self.assert3xx(req, self.url)
        eq_(get_config(self.key), u'new motd')
class TestReviewAppComm(AppReviewerTest, AttachmentManagementMixin,
                        TestReviewMixin, TestedonManagementMixin):
    """Review actions create communication-dashboard notes and send the
    right emails to the developer, the Mozilla contact and thread CCs."""

    def setUp(self):
        super(TestReviewAppComm, self).setUp()
        self.app = app_factory(rated=True, status=mkt.STATUS_PENDING,
                               mozilla_contact='contact@mozilla.com')
        self.app.addonuser_set.create(user=user_factory(email='steamcube'))
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
        self.mozilla_contact = 'contact@mozilla.com'

    def _post(self, data, queue='pending'):
        """POST the review form and expect a redirect back to *queue*."""
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))

    def _get_note(self):
        """Return the single note on the app's single thread."""
        eq_(self.app.threads.count(), 1)
        thread = self.app.threads.all()[0]
        eq_(thread.notes.count(), 1)
        return thread.notes.all()[0]

    def _action_data(self, action, comments):
        """Build a review POST for *action* with the mandatory (empty)
        attachment and tested-on inline formsets."""
        data = {'action': action, 'comments': comments}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        return data

    def _check_note(self, note_type, body):
        """Assert the single created note's type and body; return it."""
        note = self._get_note()
        eq_(note.note_type, note_type)
        eq_(note.body, body)
        return note

    def test_email_cc(self):
        poster = user_factory()
        thread, note = create_comm_note(
            self.app, self.app.latest_version, poster, 'lgtm')
        self._post(self._action_data('public', 'gud jerb'))
        # Test emails.
        self._check_email_dev_and_contact(None, outbox_len=5)
        # Some person who joined the thread.
        self._check_email(
            self._get_mail(poster.email), 'Approved', to=[poster.email])

    def test_approve(self):
        self._post(self._action_data('public', 'gud jerb'))
        self._check_note(comm.APPROVAL, 'gud jerb')
        self._check_email_dev_and_contact(None)

    def test_reject(self):
        self._post(self._action_data('reject', 'rubesh'))
        self._check_note(comm.REJECTION, 'rubesh')
        self._check_email_dev_and_contact(None)

    def test_info(self):
        self._post(self._action_data('info', 'huh'))
        self._check_note(comm.MORE_INFO_REQUIRED, 'huh')
        self._check_email_dev_and_contact(None)

    def test_escalate(self):
        self._post(self._action_data('escalate', 'soup her man'))
        self._check_note(comm.ESCALATION, 'soup her man')
        # Escalations notify senior reviewers and the developer, not the
        # Mozilla contact.
        eq_(len(mail.outbox), 2)
        self._check_email(  # Senior reviewer.
            self._get_mail(self.snr_reviewer_user.email), 'Escalated',
            to=[self.snr_reviewer_user.email])
        self._check_email(self._get_mail('steamcube'), 'Escalated')

    def test_comment(self):
        self._post(self._action_data('comment', 'huh'))
        self._check_note(comm.REVIEWER_COMMENT, 'huh')
        # Private comments only go to the Mozilla contact.
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], 'Private reviewer comment',
                          to=[self.mozilla_contact])

    def test_disable(self):
        self.login_as_admin()
        self._post(self._action_data('disable', 'u dun it'))
        self._check_note(comm.DISABLED, 'u dun it')
        self._check_email_dev_and_contact(None)

    def test_attachments(self):
        data = {'action': 'comment', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=2))
        data.update(self._attachments(num=2))
        data.update(self._testedon_management_form())
        self._post(data)
        # Both uploaded attachments end up on the note.
        note = self._get_note()
        eq_(note.attachments.count(), 2)

    def test_tested_on_one(self):
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form(num=1))
        data.update(self._platforms(1))
        self._post(data)
        # Tested-on details are appended to the note body.
        self._check_note(
            comm.REJECTION,
            u'rubesh\n\n'
            u'Tested on \xd0esktop platform on PC with version 34')

    def test_tested_on_two(self):
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form(num=2))
        data.update(self._platforms(2))
        self._post(data)
        # Multiple tested-on entries are joined with '; '.
        self._check_note(
            comm.REJECTION,
            u'rubesh\n\n'
            u'Tested on \xd0esktop platform on PC with version 34; '
            u'FirefoxOS platform on ZT\xc8 Open with version 1.3<')
class TestModeratedQueue(mkt.site.tests.TestCase, AccessMixin):
    """Moderation queue for user reviews that were flagged for an editor."""

    def setUp(self):
        super(TestModeratedQueue, self).setUp()
        self.app = app_factory()
        self.moderator_user = user_factory(email='moderator')
        self.grant_permission(self.moderator_user, 'Apps:ModerateReview')
        user_factory(email='regular')
        author_one = user_factory()
        author_two = user_factory()
        self.url = reverse('reviewers.apps.queue_moderated')
        # Two flagged reviews: one flagged as spam, one as a support
        # request.
        self.review1 = Review.objects.create(addon=self.app, body='body',
                                             user=author_one, rating=3,
                                             editorreview=True)
        ReviewFlag.objects.create(review=self.review1, flag=ReviewFlag.SPAM,
                                  user=author_one)
        self.review2 = Review.objects.create(addon=self.app, body='body',
                                             user=author_two, rating=4,
                                             editorreview=True)
        ReviewFlag.objects.create(review=self.review2,
                                  flag=ReviewFlag.SUPPORT, user=author_two)
        self.login(self.moderator_user)

    def _post(self, action):
        """Submit the moderation formset with *action* on the first form."""
        ctx = self.client.get(self.url).context
        payload = formset(initial(ctx['reviews_formset'].forms[0]))
        payload['form-0-action'] = action
        self.assert3xx(self.client.post(self.url, payload), self.url)

    def _get_logs(self, action):
        """Activity-log entries recorded for *action*."""
        return ActivityLog.objects.filter(action=action.id)

    def test_anonymous_flagger(self):
        ReviewFlag.objects.all()[0].update(user=None)
        ReviewFlag.objects.all()[1].delete()
        response = self.client.get(self.url)
        text = pq(response.content)(
            '.reviews-flagged-reasons li div span').text()
        prefix = u'Flagged by an anonymous user on'
        ok_(text.startswith(prefix),
            '"%s" doesn\'t start with "%s"' % (text, prefix))

    def test_setup(self):
        eq_(Review.objects.filter(editorreview=True).count(), 2)
        eq_(ReviewFlag.objects.filter(flag=ReviewFlag.SPAM).count(), 1)
        response = self.client.get(self.url)
        flagged = pq(response.content)('#reviews-flagged')
        # "Skip" is pre-selected for each review.
        eq_(flagged('.reviewers-desktop #id_form-0-action_1:checked').length,
            1)

    def test_skip(self):
        self._post(mkt.ratings.REVIEW_MODERATE_SKIP)
        # Skipping leaves both items in the queue.
        eq_(len(self.client.get(self.url).context['page'].object_list), 2)

    def test_delete(self):
        self._post(mkt.ratings.REVIEW_MODERATE_DELETE)
        eq_(len(self.client.get(self.url).context['page'].object_list), 1)
        eq_(self._get_logs(mkt.LOG.DELETE_REVIEW).count(), 1)

    def test_keep(self):
        self._post(mkt.ratings.REVIEW_MODERATE_KEEP)
        eq_(len(self.client.get(self.url).context['page'].object_list), 1)
        eq_(self._get_logs(mkt.LOG.APPROVE_REVIEW).count(), 1)

    def test_no_reviews(self):
        Review.objects.all().delete()
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        eq_(pq(response.content)('#reviews-flagged .no-results').length, 1)

    def test_queue_count(self):
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        eq_(pq(response.content)('.tabnav li a')[0].text,
            u'Moderated Reviews (2)')

    def test_queue_count_reviewer_and_moderator(self):
        self.grant_permission(self.moderator_user, 'Apps:Review')
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        links = pq(response.content)('.tabnav li a')
        expected = [u'Apps (0)', u'Re-reviews (0)', u'Updates (0)',
                    u'Reviewing (0)', u'Homescreens (0)',
                    u'Moderated Reviews (2)']
        for index, label in enumerate(expected):
            eq_(links[index].text, label)

    def test_deleted_app(self):
        self.app.delete()
        eq_(self.client.get(self.url).status_code, 200)

    def test_queue_count_deleted_app(self):
        self.app.delete()
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        eq_(pq(response.content)('.tabnav li a')[0].text,
            u'Moderated Reviews (0)')
class AbuseQueueMixin(object):
    """Shared tests for abuse-report queues.

    Concrete subclasses must provide:
      - ``perm``: permission required to view the queue,
      - ``view_name``: url name of the queue view,
      - ``log_const``: activity-log constant for "report marked read",
    and create 3 unread reports over 2 objects in ``setUpTestData()``.
    """

    def _setUp(self):
        # Called from the subclass's setUp(): log in a user holding the
        # queue permission, and create an unprivileged bystander.
        self.abuseviewer_user = user_factory(email='abuser')
        self.grant_permission(self.abuseviewer_user, self.perm)
        self.login(self.abuseviewer_user)
        user_factory(email='regular')
        self.url = reverse(self.view_name)

    def _post(self, action, form_index=0):
        """Submit *action* for the form at *form_index* in the queue
        formset and expect a redirect back to the queue."""
        ctx = self.client.get(self.url).context
        data_formset = formset(initial(ctx['abuse_formset'].forms[0]))
        data_formset['form-%s-action' % (form_index)] = action
        res = self.client.post(self.url, data_formset)
        self.assert3xx(res, self.url)

    def _get_logs(self, action):
        # Activity-log entries recorded for *action*.
        return ActivityLog.objects.filter(action=action.id)

    def test_anonymous_flagger(self):
        # Reports without a reporter display as anonymous.
        AbuseReport.objects.all()[0].update(reporter=None)
        res = self.client.get(self.url)
        txt = pq(res.content)('.abuse-reports-reports li div span').text()
        teststring = u'Submitted by an anonymous user on'
        ok_(txt.startswith(teststring),
            '"%s" doesn\'t start with "%s"' % (txt, teststring))

    def test_no_reviews(self):
        AbuseReport.objects.all().delete()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        # NOTE(review): truncated line -- the pq() CSS selector (it began
        # with '#') and the expected value were stripped from this copy;
        # restore from VCS history.
        eq_(pq(res.content)('

    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        txt = pq(r.content)('.tabnav li a')[0].text
        teststring = u'Abuse Reports (2)'
        # NOTE(review): the failure message says "start with" but the
        # check is endswith -- cosmetic only.
        ok_(txt.endswith(teststring),
            '"%s" doesn\'t start with "%s"' % (txt, teststring))

    def test_skip(self):
        # Skipping leaves both queue entries in place.
        self._post(mkt.abuse.forms.ABUSE_REPORT_SKIP)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 2)

    def test_first_read(self):
        # Mark read the first xxx's reports, which leaves one.
        self._post(mkt.abuse.forms.ABUSE_REPORT_READ)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        # Both reports on the first object were marked read => 2 log rows.
        eq_(self._get_logs(self.log_const).count(), 2)
        eq_(AbuseReport.objects.filter(read=False).count(), 1)

    def test_first_flag(self):
        self._post(mkt.abuse.forms.ABUSE_REPORT_FLAG)
        res = self.client.get(self.url)
        # Check one is left.
        eq_(len(res.context['page'].object_list), 1)
        # Check the object is flagged.
        eq_(RereviewQueue.objects.count(), 1)
        # As flagging marks read too, there should be 2 log entries.
        eq_(self._get_logs(self.log_const).count(), 2)
        # Check the remaining abuse report remains unread.
        eq_(AbuseReport.objects.filter(read=False).count(), 1)

    def test_xss(self):
        xss = '<script>alert("xss")</script>'
        AbuseReport.objects.all()[0].update(message=xss)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        # NOTE(review): truncated selector string below -- it was stripped
        # from this copy; restore from VCS history.
        tbody = pq(res.content)(
            '
        # NOTE(review): as written these two asserts contradict each other;
        # the first almost certainly checked for the escaped entity form
        # (&lt;script&gt;) originally and was mangled in this copy.
        assert '<script>' in tbody
        assert '<script>' not in tbody

    def test_deleted_website(self):
        # Reports on deleted objects drop out of the queue count.
        AbuseReport.objects.all()[0].object.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        txt = pq(r.content)('.tabnav li a')[0].text
        teststring = u'Abuse Reports (1)'
        ok_(txt.endswith(teststring),
            '"%s" doesn\'t start with "%s"' % (txt, teststring))
class TestAppAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
                        AbuseQueueMixin):
    """Abuse-report queue for apps; shared tests live in AbuseQueueMixin."""

    perm = 'Apps:ReadAbuse'
    view_name = 'reviewers.apps.queue_abuse'
    log_const = mkt.LOG.APP_ABUSE_MARKREAD

    def setUp(self):
        super(TestAppAbuseQueue, self).setUp()
        self._setUp()

    @classmethod
    def setUpTestData(cls):
        reported_one = app_factory()
        reported_two = app_factory()
        # Extra apps without reports; they must stay out of the queue.
        app_factory()
        app_factory()
        reporter_one = user_factory()
        reporter_two = user_factory()
        # Two reports against the first app, one against the second.
        AbuseReport.objects.create(reporter=reporter_one,
                                   ip_address='123.45.67.89',
                                   addon=reported_one, message='bad')
        AbuseReport.objects.create(reporter=reporter_two,
                                   ip_address='123.01.67.89',
                                   addon=reported_one, message='terrible')
        AbuseReport.objects.create(reporter=reporter_one,
                                   ip_address='123.01.02.89',
                                   addon=reported_two, message='the worst')

    def test_setup(self):
        eq_(AbuseReport.objects.filter(read=False).count(), 3)
        eq_(AbuseReport.objects.filter(addon=Webapp.objects.all()[0]).count(),
            2)
        response = self.client.get(self.url)
        # Reports are grouped per app, so two rows are listed.
        eq_(len(response.context['page'].object_list), 2)

    def test_queue_count_reviewer_and_moderator(self):
        self.grant_permission(self.abuseviewer_user, 'Apps:Review')
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        links = pq(response.content)('.tabnav li a')
        expected = [u'Apps (0)', u'Re-reviews (0)', u'Updates (0)',
                    u'Reviewing (0)', u'Homescreens (0)',
                    u'Abuse Reports (2)']
        for index, label in enumerate(expected):
            eq_(links[index].text, label)
class TestWebsiteAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
                            AbuseQueueMixin):
    """Abuse-report queue for websites; shared tests in AbuseQueueMixin."""

    perm = 'Websites:ReadAbuse'
    view_name = 'reviewers.websites.queue_abuse'
    log_const = mkt.LOG.WEBSITE_ABUSE_MARKREAD

    def setUp(self):
        super(TestWebsiteAbuseQueue, self).setUp()
        self._setUp()

    @classmethod
    def setUpTestData(cls):
        reported_one = website_factory()
        reported_two = website_factory()
        # Add some extra sites, which shouldn't show up.
        website_factory()
        website_factory()
        reporter_one = user_factory()
        reporter_two = user_factory()
        # Two reports against the first site, one against the second.
        AbuseReport.objects.create(reporter=reporter_one,
                                   ip_address='123.45.67.89',
                                   website=reported_one, message='bad')
        AbuseReport.objects.create(reporter=reporter_two,
                                   ip_address='123.01.67.89',
                                   website=reported_one, message='terrible')
        AbuseReport.objects.create(reporter=reporter_one,
                                   ip_address='123.01.02.89',
                                   website=reported_two, message='the worst')
        cls.website1 = reported_one

    def test_setup(self):
        eq_(AbuseReport.objects.filter(read=False).count(), 3)
        eq_(AbuseReport.objects.filter(website=self.website1).count(), 2)
        response = self.client.get(self.url)
        # Reports are grouped per website, so two rows are listed.
        eq_(len(response.context['page'].object_list), 2)

    def test_first_flag(self):
        # Websites have no re-review queue; flagging is not applicable, so
        # skip the mixin test.
        raise SkipTest()
class TestGetSigned(BasePackagedAppTest, mkt.site.tests.TestCase):
    """Serving the reviewer-signed package for a given app version."""

    def setUp(self):
        super(TestGetSigned, self).setUp()
        self.url = reverse('reviewers.signed',
                           args=[self.app.app_slug, self.version.pk])
        self.grant_permission(user_factory(email='editor'), 'Apps:Review')
        self.login('editor@mozilla.com')

    def test_not_logged_in(self):
        self.client.logout()
        response = self.client.get(self.url)
        self.assertLoginRequired(response)

    def test_not_reviewer(self):
        self.client.logout()
        self.login(user_factory())
        response = self.client.get(self.url)
        eq_(response.status_code, 403)

    @override_settings(
        DEFAULT_FILE_STORAGE='mkt.site.storage_utils.LocalFileStorage')
    @mock.patch('lib.crypto.packaged.sign')
    def test_reviewer_sign_arguments_local(self, sign_mock):
        # With local storage the signed file is served directly.
        sign_mock.side_effect = mock_sign
        self.setup_files()
        response = self.client.get(self.url)
        sign_mock.assert_called_with(self.version.pk, reviewer=True)
        eq_(response.status_code, 200)

    @override_settings(
        DEFAULT_FILE_STORAGE='mkt.site.storage_utils.S3BotoPrivateStorage')
    @mock.patch('lib.crypto.packaged.sign')
    def test_reviewer_sign_arguments_storage(self, sign_mock):
        # With remote storage we redirect to the private storage URL.
        sign_mock.side_effect = mock_sign
        self.setup_files()
        response = self.client.get(self.url)
        sign_mock.assert_called_with(self.version.pk, reviewer=True)
        self.assert3xx(response, private_storage.url(
            self.file.signed_reviewer_file_path))

    @mock.patch.object(packaged, 'sign', mock_sign)
    def test_reviewer(self):
        if not settings.XSENDFILE:
            raise SkipTest()
        self.setup_files()
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        signed_file = self.app.current_version.all_files[0]
        eq_(response['x-sendfile'], signed_file.signed_reviewer_file_path)
        eq_(response['etag'], '"%s"' % signed_file.hash.split(':')[-1])

    def test_not_packaged(self):
        self.app.update(is_packaged=False)
        response = self.client.get(self.url)
        eq_(response.status_code, 404)

    def test_wrong_version(self):
        self.url = reverse('reviewers.signed', args=[self.app.app_slug, 0])
        response = self.client.get(self.url)
        eq_(response.status_code, 404)

    def test_token_good(self):
        if not settings.XSENDFILE:
            raise SkipTest()
        token = Token(data={'app_id': self.app.id})
        token.save()
        self.setup_files()
        self.client.logout()
        # First fetch with a valid token succeeds...
        response = self.client.get(urlparams(self.url, token=token.token))
        eq_(response.status_code, 200)
        signed_file = self.app.current_version.all_files[0]
        eq_(response['x-sendfile'], signed_file.signed_reviewer_file_path)
        eq_(response['etag'], '"%s"' % signed_file.hash.split(':')[-1])
        # ...but tokens are single-use: the second fetch is rejected.
        response = self.client.get(urlparams(self.url, token=token.token))
        eq_(response.status_code, 403)

    def test_token_bad(self):
        token = Token(data={'app_id': 'abcdef'})
        token.save()
        self.setup_files()
        self.client.logout()
        response = self.client.get(urlparams(self.url, token=token.token))
        eq_(response.status_code, 403)
class TestMiniManifestView(BasePackagedAppTest):
def setUp(self):
super(TestMiniManifestView, self).setUp()
self.app = Webapp.objects.get(pk=337141)
self.app.update(is_packaged=True)
self.version = self.app.versions.latest()
self.file = self.version.all_files[0]
self.file.update(filename='mozball.zip')
self.url = reverse('reviewers.mini_manifest', args=[self.app.app_slug,
self.version.pk])
self.grant_permission(user_factory(email='editor'), 'Apps:Review')
self.login('editor@mozilla.com')
def test_not_logged_in(self):
self.client.logout()
self.assertLoginRequired(self.client.get(self.url))
def test_not_reviewer(self):
self.client.logout()
self.login(user_factory())
eq_(self.client.get(self.url).status_code, 403)
def test_not_packaged(self):
self.app.update(is_packaged=False)
res = self.client.get(self.url)
eq_(res.status_code, 404)
def test_wrong_version(self):
url = reverse('reviewers.mini_manifest', args=[self.app.app_slug, 0])
res = self.client.get(url)
eq_(res.status_code, 404)
def test_reviewer(self):
self.setup_files()
manifest = self.app.get_manifest_json(self.file)
res = self.client.get(self.url)
eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
data = json.loads(res.content)
eq_(data['name'], manifest['name'])
eq_(data['developer']['name'], 'Mozilla Marketplace')
eq_(data['package_path'],
absolutify(reverse('reviewers.signed',
args=[self.app.app_slug, self.version.id])))
def test_rejected(self):
# Rejected sets file.status to DISABLED and moves to a guarded path.
self.setup_files()
self.app.update(status=mkt.STATUS_REJECTED)
self.file.update(status=mkt.STATUS_DISABLED)
manifest = self.app.get_manifest_json(self.file)
res = self.client.get(self.url)
eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
data = json.loads(res.content)
eq_(data['name'], manifest['name'])
eq_(data['developer']['name'], 'Mozilla Marketplace')
eq_(data['package_path'],
absolutify(reverse('reviewers.signed',
args=[self.app.app_slug,
self.version.id])))
def test_minifest_name_matches_manifest_name(self):
self.setup_files()
self.app.name = 'XXX'
self.app.save()
manifest = self.app.get_manifest_json(self.file)
res = self.client.get(self.url)
data = json.loads(res.content)
eq_(data['name'], manifest['name'])
def test_token_good(self):
token = Token(data={'app_id': self.app.id})
token.save()
self.setup_files()
self.client.logout()
res = self.client.get(urlparams(self.url, token=token.token))
eq_(res.status_code, 200)
eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
data = json.loads(res.content)
ok_('token=' in data['package_path'])
# Test token doesn't work the 2nd time.
res = self.client.get(urlparams(self.url, token=token.token))
eq_(res.status_code, 403)
def test_token_bad(self):
token = Token(data={'app_id': 'abcdef'})
token.save()
self.setup_files()
self.client.logout()
res = self.client.get(urlparams(self.url, token=token.token))
eq_(res.status_code, 403)
class TestReviewersScores(AppReviewerTest, AccessMixin):
def setUp(self):
super(TestReviewersScores, self).setUp()
self.user = self.reviewer_user
self.url = reverse('reviewers.performance', args=[self.user.email])
def test_404(self):
res = self.client.get(reverse('reviewers.performance', args=['poop']))
eq_(res.status_code, 404)
def test_with_email(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
eq_(res.context['profile'].id, self.user.id)
def test_without_email(self):
res = self.client.get(reverse('reviewers.performance'))
eq_(res.status_code, 200)
eq_(res.context['profile'].id, self.user.id)
def test_no_reviews(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
assert u'No review points awarded yet' in res.content
class TestQueueSort(AppReviewerTest):
def setUp(self):
super(TestQueueSort, self).setUp()
self.apps = [app_factory(name='Lillard',
status=mkt.STATUS_PENDING,
is_packaged=False,
version_kw={'version': '1.0'},
file_kw={'status': mkt.STATUS_PENDING},
premium_type=mkt.ADDON_FREE),
app_factory(name='Batum',
status=mkt.STATUS_PENDING,
is_packaged=True,
version_kw={'version': '1.0',
'has_editor_comment': True,
'has_info_request': True},
file_kw={'status': mkt.STATUS_PENDING},
premium_type=mkt.ADDON_PREMIUM)]
self.apps[0].update(created=self.days_ago(2))
self.apps[1].update(created=self.days_ago(5))
self.apps[0].addonuser_set.create(user=user_factory(email='XXX'))
self.apps[1].addonuser_set.create(user=user_factory(email='illmatic'))
self.apps[0].addondevicetype_set.create(
device_type=mkt.DEVICE_DESKTOP.id)
self.apps[1].addondevicetype_set.create(
device_type=mkt.DEVICE_MOBILE.id)
self.url = reverse('reviewers.apps.queue_pending')
def test_do_sort_webapp(self):
rf = RequestFactory()
qs = Webapp.objects.all()
req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[1], self.apps[0]])
req = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[0], self.apps[1]])
req = rf.get(self.url, {'sort': 'name', 'order': 'asc'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[1], self.apps[0]])
req = rf.get(self.url, {'sort': 'name', 'order': 'desc'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[0], self.apps[1]])
def test_do_sort_version_nom(self):
url = reverse('reviewers.apps.queue_pending')
user = UserProfile.objects.get(email='editor@mozilla.com')
version_0 = self.apps[0].versions.get()
version_0.update(nomination=days_ago(1))
version_1 = self.apps[1].versions.get()
version_1.update(nomination=days_ago(2))
version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[0],
nomination=days_ago(10))
version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1],
nomination=days_ago(1))
version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1],
nomination=days_ago(20))
req = mkt.site.tests.req_factory_factory(
url, user=user, data={'sort': 'nomination'})
res = queue_apps(req)
doc = pq(res.content)
# Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
eq_(doc('tbody tr')[0].get('data-addon'), str(version_1.addon.id))
eq_(doc('tbody tr')[2].get('data-addon'), str(version_0.addon.id))
req = mkt.site.tests.req_factory_factory(
url, user=user, data={'sort': 'nomination', 'order': 'desc'})
res = queue_apps(req)
doc = pq(res.content)
# Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
eq_(doc('tbody tr')[0].get('data-addon'), str(version_0.addon.id))
eq_(doc('tbody tr')[2].get('data-addon'), str(version_1.addon.id))
def test_do_sort_queue_object(self):
rf = RequestFactory()
url = reverse('reviewers.apps.queue_rereview')
earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0])
later_rrq = RereviewQueue.objects.create(addon=self.apps[1])
later_rrq.created += timedelta(days=1)
later_rrq.save()
request = rf.get(url, {'sort': 'created'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
# Assert the order that RereviewQueue objects were created is
# maintained.
eq_([earlier_rrq.addon, later_rrq.addon], list(apps))
request = rf.get(url, {'sort': 'created', 'order': 'desc'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
eq_([later_rrq.addon, earlier_rrq.addon], list(apps))
request = rf.get(url, {'sort': 'name', 'order': 'asc'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
eq_([later_rrq.addon, earlier_rrq.addon], list(apps))
request = rf.get(url, {'sort': 'name', 'order': 'desc'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
eq_([earlier_rrq.addon, later_rrq.addon], list(apps))
def test_sort_with_priority_review(self):
# Set up the priority review flagged app.
self.apps.append(app_factory(name='Foxkeh',
status=mkt.STATUS_PENDING,
is_packaged=False,
version_kw={'version': '1.0'},
file_kw={'status': mkt.STATUS_PENDING},
premium_type=mkt.ADDON_FREE,
priority_review=True))
# Set up app attributes.
self.apps[2].update(created=self.days_ago(1))
self.apps[2].addonuser_set.create(
user=user_factory(email='redpanda@mozilla.com'))
self.apps[2].addondevicetype_set.create(
device_type=mkt.DEVICE_DESKTOP.id)
# And check it also comes out top of waiting time with Webapp model.
rf = RequestFactory()
qs = Webapp.objects.all()
# Test apps are sorted by created/asc by default.
req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
# Test sorting by created, descending.
req = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
# And with Version model.
version_0 = self.apps[0].versions.get()
version_0.update(nomination=days_ago(1))
version_1 = self.apps[1].versions.get()
version_1.update(nomination=days_ago(2))
qs = (Version.objects.filter(
files__status=mkt.STATUS_PENDING,
addon__disabled_by_user=False,
addon__status=mkt.STATUS_PENDING)
.order_by('nomination', 'created')
.select_related('addon', 'files').no_transforms())
req = rf.get(self.url, {'sort': 'nomination'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination')
eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
req = rf.get(self.url, {'sort': 'nomination', 'order': 'desc'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination')
eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
# And with Rereview model.
url = reverse('reviewers.apps.queue_rereview')
earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0])
earlier_rrq.created += timedelta(days=1)
earlier_rrq.save()
later_rrq = RereviewQueue.objects.create(addon=self.apps[1])
later_rrq.created += timedelta(days=2)
later_rrq.save()
pri_rrq = RereviewQueue.objects.create(addon=self.apps[2])
pri_rrq.save()
request = rf.get(url, {'sort': 'created'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
eq_([pri_rrq.addon, earlier_rrq.addon, later_rrq.addon], list(apps))
request = rf.get(url, {'sort': 'created', 'order': 'desc'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
eq_([pri_rrq.addon, later_rrq.addon, earlier_rrq.addon], list(apps))
class TestAppsReviewing(AppReviewerTest, AccessMixin):
def setUp(self):
super(TestAppsReviewing, self).setUp()
self.url = reverse('reviewers.apps.apps_reviewing')
self.apps = [app_factory(name='Antelope',
status=mkt.STATUS_PENDING),
app_factory(name='Bear',
status=mkt.STATUS_PENDING),
app_factory(name='Cougar',
status=mkt.STATUS_PENDING)]
def _view_app(self, app_id):
self.client.post(reverse('reviewers.review_viewing'), {
'addon_id': app_id})
def test_no_apps_reviewing(self):
res = self.client.get(self.url)
eq_(len(res.context['apps']), 0)
def test_apps_reviewing(self):
self._view_app(self.apps[0].id)
res = self.client.get(self.url)
eq_(len(res.context['apps']), 1)
def test_multiple_reviewers_no_cross_streams(self):
self._view_app(self.apps[0].id)
self._view_app(self.apps[1].id)
res = self.client.get(self.url)
eq_(len(res.context['apps']), 2)
# Now view an app as another user and verify app.
self.login('admin@mozilla.com')
self._view_app(self.apps[2].id)
res = self.client.get(self.url)
eq_(len(res.context['apps']), 1)
# Check original user again to make sure app list didn't increment.
self.login_as_editor()
res = self.client.get(self.url)
eq_(len(res.context['apps']), 2)
class TestLeaderboard(AppReviewerTest):
def setUp(self):
super(TestLeaderboard, self).setUp()
self.url = reverse('reviewers.leaderboard')
mkt.set_user(self.reviewer_user)
def _award_points(self, user, score):
ReviewerScore.objects.create(user=user, note_key=mkt.REVIEWED_MANUAL,
score=score, note='Thing.')
def test_leaderboard_ranks(self):
users = (self.reviewer_user,
self.regular_user,
user_factory(email='clouserw'))
self._award_points(users[0], mkt.REVIEWED_LEVELS[0]['points'] - 1)
self._award_points(users[1], mkt.REVIEWED_LEVELS[0]['points'] + 1)
self._award_points(users[2], mkt.REVIEWED_LEVELS[0]['points'] + 2)
def get_cells():
doc = pq(self.client.get(self.url).content.decode('utf-8'))
cells = doc('#leaderboard > tbody > tr > .name, '
'#leaderboard > tbody > tr > .level')
return [cells.eq(i).text() for i in range(0, cells.length)]
eq_(get_cells(),
[users[2].display_name,
users[1].display_name,
mkt.REVIEWED_LEVELS[0]['name'],
users[0].display_name])
self._award_points(users[0], 1)
eq_(get_cells(),
[users[2].display_name,
users[1].display_name,
users[0].display_name,
mkt.REVIEWED_LEVELS[0]['name']])
self._award_points(users[0], -1)
self._award_points(users[2], (mkt.REVIEWED_LEVELS[1]['points'] -
mkt.REVIEWED_LEVELS[0]['points']))
eq_(get_cells(),
[users[2].display_name,
mkt.REVIEWED_LEVELS[1]['name'],
users[1].display_name,
mkt.REVIEWED_LEVELS[0]['name'],
users[0].display_name])
class TestReviewPage(mkt.site.tests.TestCase):
def setUp(self):
super(TestReviewPage, self).setUp()
self.app = app_factory(status=mkt.STATUS_PENDING)
self.reviewer = user_factory(email='editor')
self.grant_permission(self.reviewer, 'Apps:Review')
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
def test_status_null_disable_approve_btn(self):
self.app.update(status=mkt.STATUS_NULL)
req = req_factory_factory(self.url, user=self.reviewer)
res = app_review(req, app_slug=self.app.app_slug)
doc = pq(res.content)
assert (doc('#review-actions input[value=public]')
.parents('li').hasClass('disabled'))
assert not (doc('#review-actions input[value=reject]')
.parents('li').hasClass('disabled'))
class TestAbusePage(AppReviewerTest):
def setUp(self):
super(TestAbusePage, self).setUp()
self.app = app_factory(name=u'My app é <script>alert(5)</script>')
self.url = reverse('reviewers.apps.review.abuse',
args=[self.app.app_slug])
AbuseReport.objects.create(addon=self.app, message=self.app.name)
def testXSS(self):
from django.utils.encoding import smart_unicode
from jinja2.utils import escape
content = smart_unicode(self.client.get(self.url).content)
ok_(not unicode(self.app.name) in content)
ok_(unicode(escape(self.app.name)) in content)
class TestReviewTranslate(RestOAuth):
def setUp(self):
super(TestReviewTranslate, self).setUp()
self.grant_permission(self.profile, 'Apps:ModerateReview')
self.create_switch('reviews-translate')
user = user_factory(email='diego')
app = app_factory(app_slug='myapp~-_')
self.review = app.reviews.create(title=u'yes', body=u'oui',
addon=app, user=user,
editorreview=True, rating=4)
def test_regular_call(self):
res = self.client.get(reverse('reviewers.review_translate',
args=[self.review.addon.app_slug,
self.review.id, 'fr']))
self.assert3xx(res, 'https://translate.google.com/#auto/fr/oui', 302)
@mock.patch('mkt.reviewers.views.requests')
def test_ajax_call(self, requests):
response = mock.Mock(status_code=200)
response.json.return_value = {
u'data': {
u'translations': [{
u'translatedText': u'oui',
u'detectedSourceLanguage': u'fr'
}]
}
}
requests.get.return_value = response
review = self.review
url = reverse('reviewers.review_translate',
args=[review.addon.app_slug, review.id, 'fr'])
res = self.client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
eq_(res.status_code, 200)
eq_(res.content, '{"body": "oui", "title": "oui"}')
@mock.patch('mkt.reviewers.views.requests')
def test_invalid_api_key(self, requests):
response = mock.Mock(status_code=400)
response.json.return_value = {
'error': {
'code': 400,
'errors': [
{'domain': 'usageLimits',
'message': 'Bad Request',
'reason': 'keyInvalid'}
],
'message': 'Bad Request'
}
}
requests.get.return_value = response
review = self.review
res = self.client.get(
reverse('reviewers.review_translate',
args=[review.addon.app_slug, review.id, 'fr']),
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
eq_(res.status_code, 400)
class TestReviewHistory(mkt.site.tests.TestCase, CommTestMixin):
def setUp(self):
super(TestReviewHistory, self).setUp()
self.app = self.addon = app_factory()
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
self.grant_permission(user_factory(email='editor'), 'Apps:Review')
self.login('editor@mozilla.com')
self._thread_factory()
def test_comm_url(self):
r = self.client.get(self.url)
doc = pq(r.content)
eq_(doc('#history .item-history').attr('data-comm-app-url'),
reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
'?limit=1&serializer=simple')
def test_comm_url_multiple_thread(self):
self._thread_factory()
r = self.client.get(self.url)
doc = pq(r.content)
eq_(doc('#history .item-history').attr('data-comm-app-url'),
reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
'?limit=2&serializer=simple')
def test_comm_url_no_encode(self):
self.addon = app_factory(app_slug='台北')
self._thread_factory()
url = reverse('reviewers.apps.review', args=[self.addon.app_slug])
r = self.client.get(url)
doc = pq(r.content)
eq_(doc('#history .item-history').attr('data-comm-app-url'),
reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
'?limit=1&serializer=simple')
class ModerateLogTest(mkt.site.tests.TestCase):
def setUp(self):
super(ModerateLogTest, self).setUp()
self.review = Review.objects.create(addon=app_factory(), body='body',
user=user_factory(), rating=4,
editorreview=True)
self.moderator_user = user_factory(email='moderator')
self.grant_permission(self.moderator_user, 'Apps:ModerateReview')
mkt.set_user(self.moderator_user)
self.login(self.moderator_user)
self.admin_user = user_factory(email='admin')
self.grant_permission(self.admin_user, '*:*')
user_factory(email='regular')
class TestModerateLog(ModerateLogTest, AccessMixin):
def setUp(self):
super(TestModerateLog, self).setUp()
self.url = reverse('reviewers.apps.moderatelog')
def test_log(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
def test_start_filter(self):
r = self.client.get(self.url, dict(start='2011-01-01'))
eq_(r.status_code, 200)
def test_enddate_filter(self):
mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon,
created=datetime(2011, 1, 1))
r = self.client.get(self.url, dict(end='2011-01-01'))
eq_(r.status_code, 200)
eq_(pq(r.content)('tbody td').eq(0).text(), 'Jan 1, 2011, 12:00:00 AM')
def test_action_filter(self):
for i in xrange(2):
mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review)
mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review)
r = self.client.get(self.url, dict(search='deleted'))
eq_(pq(r.content)('tbody tr').length, 2)
def test_no_results(self):
r = self.client.get(self.url, dict(end='2004-01-01'))
no_results = 'No events found for this period.'
assert no_results in r.content, 'Expected no results to be found.'
def test_display_name_xss(self):
mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon,
user=self.admin_user)
self.admin_user.display_name = '<script>alert("xss")</script>'
self.admin_user.save()
assert '<script>' in self.admin_user.display_name, (
'Expected <script> to be in display name')
r = self.client.get(self.url)
pq(r.content)('#log-listing tbody td').eq(1).html()
assert '<script>' not in r.content
assert '<script>' in r.content
class TestModerateLogDetail(ModerateLogTest, AccessMixin):
def setUp(self):
super(TestModerateLogDetail, self).setUp()
self.url = self._url(0)
def _url(self, id):
return reverse('reviewers.apps.moderatelog.detail', args=[id])
def test_detail_page(self):
mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review)
e_id = ActivityLog.objects.editor_events()[0].id
r = self.client.get(self._url(e_id))
eq_(r.status_code, 200)
def test_undelete_selfmoderation(self):
e_id = mkt.log(
mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id
self.review.delete()
r = self.client.post(self._url(e_id), {'action': 'undelete'})
eq_(r.status_code, 302)
self.review = Review.objects.get(id=self.review.id)
assert not self.review.deleted, 'Review should be undeleted now.'
def test_undelete_admin(self):
e_id = mkt.log(
mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id
self.review.delete()
self.client.logout()
self.login(self.admin_user)
r = self.client.post(self._url(e_id), {'action': 'undelete'})
eq_(r.status_code, 302)
self.review = Review.objects.get(id=self.review.id)
assert not self.review.deleted, 'Review should be undeleted now.'
def test_undelete_unauthorized(self):
e_id = mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review,
user=self.admin_user).id
self.review.delete()
r = self.client.post(self._url(e_id), {'action': 'undelete'})
eq_(r.status_code, 403)
self.review = Review.with_deleted.get(id=self.review.id)
assert self.review.deleted, 'Review shouldn`t have been undeleted.'
| true | true |
f72bd40ce088f4120389f93fcd804f18f5fdfbf5 | 7,929 | py | Python | tests/rastrigin_accept_action.py | LiyrAstroph/CDNest | afb6b869ce1c4ebd76662b20310f1d9d3db4e26e | [
"MIT"
] | 6 | 2019-09-11T03:34:45.000Z | 2020-10-16T12:14:05.000Z | tests/rastrigin_accept_action.py | LiyrAstroph/DNest_C | afb6b869ce1c4ebd76662b20310f1d9d3db4e26e | [
"MIT"
] | 2 | 2020-05-14T10:04:48.000Z | 2021-01-06T02:04:19.000Z | tests/rastrigin_accept_action.py | LiyrAstroph/DNest_C | afb6b869ce1c4ebd76662b20310f1d9d3db4e26e | [
"MIT"
] | null | null | null | #
# sample from a Rastrigin test function
# this is to illustrate how to use accept_action in CDNest to avoid repeat calculations.
#
# A 2D Rastrigin function looks
#
# logL=-(10.0*2 + (coords[0]**2 - 10*np.cos(2.0*np.pi*coords[0])) + (coords[1]**2 - 10*np.cos(2.0*np.pi*coords[1])) )
#
# Every perturb, only one parameter is updated, so that the terms related to the rest parameters
# do not need to recalculate, just use the values in the previous step.
#
# In this example, we use an array to record values of the term "(coords[0]**2 - 10*np.cos(2.0*np.pi*coords[0]))"
# in every accepted perturb.
#
from mpi4py import MPI
import numpy as np
import cydnest
import matplotlib.pyplot as plt
from matplotlib import cm
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
def randh():
"""
generate from the heavy-tailed distribution.
"""
return 10.0**(1.5 - 3*np.abs(np.random.randn()/np.sqrt(-np.log(np.random.rand()))))*np.random.randn()
def wrap(x, a, b):
assert b > a
return (x - a)%(b - a) + a
class Model(object):
def __init__(self, num_params=1, num_particles=1):
"""
intialize the model
"""
# number of particles each core holds
self.num_particles = num_particles
# number of parameters
self.num_params = num_params
# parameter ranges, a list
self.param_range = [[-5.12, 5.12]]*num_params
# parameter prior type.
# three types: Uniform, Gaussian, Log
self.prior_type = ["Uniform"]*num_params
# parameter prior information. used when the prior is Gaussian
# indicate the mean and standard deviation of the Gaussian prior
self.prior_info = [[0.0, 1.0]]*num_params
# which parameter being perturbed
# which particle being perturbed
self.which_param_update = 0
self.which_particle_update = 0
# perturbed values and accepted values for all particles
self.value_perturb = [0.0]*self.num_particles
self.value_accept = [0.0]*self.num_particles
def accept_action(self):
"""
action taken when a perturb is accepted
record the accepted values from the perturbed values
"""
# note "which_particle_update" is updated and "which_param_update" is updated
if self.which_param_update < 1:
self.value_accept[self.which_particle_update] = self.value_perturb[self.which_particle_update]
def kill_action(self, i, i_copy):
"""
cdnest kill a particle when it is not updated for a long time.
action taken when a particle is killed: i particle is killed,
copy i_copy particle's values to i particle's values
this function is needed, since we record some accepted values
"""
self.value_accept[i] = self.value_accept[i_copy]
return
# users can define their own functions to generate
# the initial parameter values
# this is optinal. if not defined, cydnest will use the internal
# function.
def from_prior(self):
"""
generate initial values of model parameters from priors
"""
coords = np.zeros(self.num_params)
for i in range(self.num_params):
if self.prior_type[i] == "Uniform":
coords[i] = np.random.uniform(self.param_range[i][0], self.param_range[i][1])
elif self.prior_type[i] == "Gaussian":
coords[i] = np.random.randn() * self.prior_info[i][1] + self.prior_info[0]
wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
elif self.prior_type[i] == "Log": # LOG prior
coords[i] = np.random.uniform(np.log(self.param_range[i][0]), np.log(self.param_range[i][1]))
coords[i] = np.exp(coords[i])
return coords
# users can define their own functions to perturb
# parameter values for sampling
# this is optinal. if not defined, cydnest will use the internal
# function.
def perturb(self, coords):
"""
perturb the parameters
"""
i = np.random.randint(self.num_params)
# record which parameter is updated
self.which_param_update = i
LogH = 0.0 # prior ratio: ln(prior(new)/prior(old)) = ln(prior(new)) - ln(prior(old))
width = (self.param_range[i][1]-self.param_range[i][0])
if self.prior_type[i] == "Uniform":
coords[i] += width*randh()
coords[i] = wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
elif self.prior_type[i] == "Gaussian":
LogH -= ( -0.5* (coords[i] - self.prior_info[i][0])**2/self.prior_info[i][1]**2 ) # ln(Gaussian)
coords[i] += width*randh()
coords[i] = wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
LogH += ( -0.5* (coords[i] - self.prior_info[i][0])**2/self.prior_info[i][1]**2 )
elif self.prior_type[i] == "Log":
LogH -= ( -np.log(coords[i]) ) # ln(1/x) = -ln(x)
coords[i] += width*randh()
coords[i] = wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
LogH += ( -np.log(coords[i]) )
return LogH
def log_likelihood_initial(self, coords):
"""
calculate likelihood at initial start
"""
self.which_particle_update = cydnest.get_which_particle_update()
self.value_accept[self.which_particle_update] = coords[0]**2 - 10*np.cos(2.0*np.pi*coords[0])
value = self.value_accept[self.which_particle_update]
return -(10.0*2 + (value) + (coords[1]**2 - 10*np.cos(2.0*np.pi*coords[1])) )
def log_likelihood(self, coords):
"""
calculate likelihood
"""
# get which particle is being updated, and save it to self model
self.which_particle_update = cydnest.get_which_particle_update()
value = 0.0
if self.which_param_update < 1: # when 0-th parameter update, recalculate
self.value_perturb[self.which_particle_update] = coords[0]**2 - 10*np.cos(2.0*np.pi*coords[0])
value = self.value_perturb[self.which_particle_update]
else: # otherwise, use the accepted value
value = self.value_accept[self.which_particle_update]
return -(10.0*2 + (value) + (coords[1]**2 - 10*np.cos(2.0*np.pi*coords[1])) )
# create a model
model = Model(num_params=2, num_particles=2)
# create a dnest sampler
# max_num_save is the number of samples to generate
# max_num_levels is the number of levels
# ptol is the likelihood tolerance in loge()
sampler = cydnest.sampler(model, sample_dir="./", max_num_saves = 10000, ptol=0.1, num_particles=model.num_particles)
#
# The full argument lists look like:
# sampler = cydnest.sampler(model, sample_dir="./", max_num_saves = 10000, ptol=0.1,
# num_particles=1, thread_steps_factor = 10,
# max_num_levels = 0, lam = 10, beta = 100
# new_level_interval_factor = 2, save_interval_factor = 2)
#
# run sampler
logz = sampler.run()
comm.Barrier()
# ouput evidence
if rank == 0:
print("Evidence:", logz)
psample = np.loadtxt(sampler.get_sample_dir() +"/posterior_sample" + sampler.get_sample_tag() + ".txt")
psample_info = np.loadtxt(sampler.get_sample_dir() +"/posterior_sample_info" + sampler.get_sample_tag() + ".txt")
fig = plt.figure(figsize=(15, 12))
ax = fig.add_subplot(111, projection='3d')
X = np.arange(-1.5, 1.5, 0.01)
Y = np.arange(-1.5, 1.5, 0.01)
X, Y = np.meshgrid(X, Y)
Z = -(10.0*2 + (X**2 - 10*np.cos(2.0*np.pi*X)) + (Y**2 - 10*np.cos(2.0*np.pi*Y)) )
ax.plot_surface(X, Y, Z, cmap=cm.ocean, rstride=2, cstride=2, linewidth=0, antialiased=False, zorder=0)
idx = np.where((np.abs(psample[:, 0]) <1.4) & (np.abs(psample[:, 1]) <1.4))
ax.plot(psample[idx[0], 0], psample[idx[0], 1], psample_info[idx[0]], ls='none', marker='+', zorder=10)
ax.set_xlim(-1.5, 1.5)
ax.set_ylim(-1.5, 1.5)
ax.set_xlabel(r'$\theta_1$')
ax.set_ylabel(r'$\theta_2$')
ax.set_zlabel(r'$\log L$')
fig.savefig("fig_rastrigin.jpg", bbox_inches='tight')
plt.show()
# do postprocess, plot, show the properties of sampling
cydnest.postprocess(sampler.get_sample_dir(), sampler.get_sample_tag(), temperature=1.0, doplot=True) | 37.225352 | 118 | 0.66137 |
from mpi4py import MPI
import numpy as np
import cydnest
import matplotlib.pyplot as plt
from matplotlib import cm
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
def randh():
return 10.0**(1.5 - 3*np.abs(np.random.randn()/np.sqrt(-np.log(np.random.rand()))))*np.random.randn()
def wrap(x, a, b):
assert b > a
return (x - a)%(b - a) + a
class Model(object):
def __init__(self, num_params=1, num_particles=1):
self.num_particles = num_particles
self.num_params = num_params
self.param_range = [[-5.12, 5.12]]*num_params
self.prior_type = ["Uniform"]*num_params
self.prior_info = [[0.0, 1.0]]*num_params
self.which_param_update = 0
self.which_particle_update = 0
self.value_perturb = [0.0]*self.num_particles
self.value_accept = [0.0]*self.num_particles
def accept_action(self):
if self.which_param_update < 1:
self.value_accept[self.which_particle_update] = self.value_perturb[self.which_particle_update]
def kill_action(self, i, i_copy):
self.value_accept[i] = self.value_accept[i_copy]
return
def from_prior(self):
coords = np.zeros(self.num_params)
for i in range(self.num_params):
if self.prior_type[i] == "Uniform":
coords[i] = np.random.uniform(self.param_range[i][0], self.param_range[i][1])
elif self.prior_type[i] == "Gaussian":
coords[i] = np.random.randn() * self.prior_info[i][1] + self.prior_info[0]
wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
elif self.prior_type[i] == "Log":
coords[i] = np.random.uniform(np.log(self.param_range[i][0]), np.log(self.param_range[i][1]))
coords[i] = np.exp(coords[i])
return coords
def perturb(self, coords):
i = np.random.randint(self.num_params)
self.which_param_update = i
LogH = 0.0
width = (self.param_range[i][1]-self.param_range[i][0])
if self.prior_type[i] == "Uniform":
coords[i] += width*randh()
coords[i] = wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
elif self.prior_type[i] == "Gaussian":
LogH -= ( -0.5* (coords[i] - self.prior_info[i][0])**2/self.prior_info[i][1]**2 )
coords[i] += width*randh()
coords[i] = wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
LogH += ( -0.5* (coords[i] - self.prior_info[i][0])**2/self.prior_info[i][1]**2 )
elif self.prior_type[i] == "Log":
LogH -= ( -np.log(coords[i]) )
coords[i] += width*randh()
coords[i] = wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
LogH += ( -np.log(coords[i]) )
return LogH
def log_likelihood_initial(self, coords):
self.which_particle_update = cydnest.get_which_particle_update()
self.value_accept[self.which_particle_update] = coords[0]**2 - 10*np.cos(2.0*np.pi*coords[0])
value = self.value_accept[self.which_particle_update]
return -(10.0*2 + (value) + (coords[1]**2 - 10*np.cos(2.0*np.pi*coords[1])) )
def log_likelihood(self, coords):
self.which_particle_update = cydnest.get_which_particle_update()
value = 0.0
if self.which_param_update < 1:
self.value_perturb[self.which_particle_update] = coords[0]**2 - 10*np.cos(2.0*np.pi*coords[0])
value = self.value_perturb[self.which_particle_update]
else:
value = self.value_accept[self.which_particle_update]
return -(10.0*2 + (value) + (coords[1]**2 - 10*np.cos(2.0*np.pi*coords[1])) )
model = Model(num_params=2, num_particles=2)
sampler = cydnest.sampler(model, sample_dir="./", max_num_saves = 10000, ptol=0.1, num_particles=model.num_particles)
logz = sampler.run()
comm.Barrier()
if rank == 0:
print("Evidence:", logz)
psample = np.loadtxt(sampler.get_sample_dir() +"/posterior_sample" + sampler.get_sample_tag() + ".txt")
psample_info = np.loadtxt(sampler.get_sample_dir() +"/posterior_sample_info" + sampler.get_sample_tag() + ".txt")
fig = plt.figure(figsize=(15, 12))
ax = fig.add_subplot(111, projection='3d')
X = np.arange(-1.5, 1.5, 0.01)
Y = np.arange(-1.5, 1.5, 0.01)
X, Y = np.meshgrid(X, Y)
Z = -(10.0*2 + (X**2 - 10*np.cos(2.0*np.pi*X)) + (Y**2 - 10*np.cos(2.0*np.pi*Y)) )
ax.plot_surface(X, Y, Z, cmap=cm.ocean, rstride=2, cstride=2, linewidth=0, antialiased=False, zorder=0)
idx = np.where((np.abs(psample[:, 0]) <1.4) & (np.abs(psample[:, 1]) <1.4))
ax.plot(psample[idx[0], 0], psample[idx[0], 1], psample_info[idx[0]], ls='none', marker='+', zorder=10)
ax.set_xlim(-1.5, 1.5)
ax.set_ylim(-1.5, 1.5)
ax.set_xlabel(r'$\theta_1$')
ax.set_ylabel(r'$\theta_2$')
ax.set_zlabel(r'$\log L$')
fig.savefig("fig_rastrigin.jpg", bbox_inches='tight')
plt.show()
cydnest.postprocess(sampler.get_sample_dir(), sampler.get_sample_tag(), temperature=1.0, doplot=True) | true | true |
f72bd6613a23fb41a75040d39b206992b03cd8d2 | 27,303 | py | Python | catalyst/marketplace/marketplace.py | echo-ray/catalyst | 8b4274d17f0a42ee4d1d5e09d30fb0919aea2a51 | [
"Apache-2.0"
] | null | null | null | catalyst/marketplace/marketplace.py | echo-ray/catalyst | 8b4274d17f0a42ee4d1d5e09d30fb0919aea2a51 | [
"Apache-2.0"
] | null | null | null | catalyst/marketplace/marketplace.py | echo-ray/catalyst | 8b4274d17f0a42ee4d1d5e09d30fb0919aea2a51 | [
"Apache-2.0"
] | null | null | null | from __future__ import print_function
import glob
import json
import os
import re
import shutil
import sys
import time
import webbrowser
import bcolz
import logbook
import pandas as pd
import requests
from requests_toolbelt import MultipartDecoder
from requests_toolbelt.multipart.decoder import \
NonMultipartContentTypeException
from catalyst.constants import (
LOG_LEVEL, AUTH_SERVER, ETH_REMOTE_NODE, MARKETPLACE_CONTRACT,
MARKETPLACE_CONTRACT_ABI, ENIGMA_CONTRACT, ENIGMA_CONTRACT_ABI)
from catalyst.exchange.utils.stats_utils import set_print_settings
from catalyst.marketplace.marketplace_errors import (
MarketplacePubAddressEmpty, MarketplaceDatasetNotFound,
MarketplaceNoAddressMatch, MarketplaceHTTPRequest,
MarketplaceNoCSVFiles, MarketplaceRequiresPython3)
from catalyst.marketplace.utils.auth_utils import get_key_secret, \
get_signed_headers
from catalyst.marketplace.utils.bundle_utils import merge_bundles
from catalyst.marketplace.utils.eth_utils import bin_hex, from_grains, \
to_grains
from catalyst.marketplace.utils.path_utils import get_bundle_folder, \
get_data_source_folder, get_marketplace_folder, \
get_user_pubaddr, get_temp_bundles_folder, extract_bundle
from catalyst.utils.paths import ensure_directory
if sys.version_info.major < 3:
import urllib
else:
import urllib.request as urllib
log = logbook.Logger('Marketplace', level=LOG_LEVEL)
class Marketplace:
    def __init__(self):
        """
        Load the user's configured addresses and connect to the
        Marketplace and Enigma (ENG) token contracts on the remote
        Ethereum node.

        Raises
        ------
        MarketplaceRequiresPython3
            If the ``web3`` package cannot be imported.
        MarketplacePubAddressEmpty
            If no public address is configured in addresses.json.
        """
        # web3 is imported lazily and promoted to a module-level global so
        # the rest of the class can reference Web3 directly.
        global Web3
        try:
            from web3 import Web3, HTTPProvider
        except ImportError:
            raise MarketplaceRequiresPython3()
        self.addresses = get_user_pubaddr()
        if self.addresses[0]['pubAddr'] == '':
            raise MarketplacePubAddressEmpty(
                filename=os.path.join(
                    get_marketplace_folder(), 'addresses.json')
            )
        self.default_account = self.addresses[0]['pubAddr']
        self.web3 = Web3(HTTPProvider(ETH_REMOTE_NODE))
        # The contract address is published as the first line of a remote
        # file; the ABI is a remote JSON document.
        contract_url = urllib.urlopen(MARKETPLACE_CONTRACT)
        self.mkt_contract_address = Web3.toChecksumAddress(
            contract_url.readline().decode(
                contract_url.info().get_content_charset()).strip())
        abi_url = urllib.urlopen(MARKETPLACE_CONTRACT_ABI)
        abi = json.load(abi_url)
        self.mkt_contract = self.web3.eth.contract(
            self.mkt_contract_address,
            abi=abi,
        )
        # Same scheme for the ENG token contract.
        contract_url = urllib.urlopen(ENIGMA_CONTRACT)
        self.eng_contract_address = Web3.toChecksumAddress(
            contract_url.readline().decode(
                contract_url.info().get_content_charset()).strip())
        abi_url = urllib.urlopen(ENIGMA_CONTRACT_ABI)
        abi = json.load(abi_url)
        self.eng_contract = self.web3.eth.contract(
            self.eng_contract_address,
            abi=abi,
        )
# def get_data_sources_map(self):
# return [
# dict(
# name='Marketcap',
# desc='The marketcap value in USD.',
# start_date=pd.to_datetime('2017-01-01'),
# end_date=pd.to_datetime('2018-01-15'),
# data_frequencies=['daily'],
# ),
# dict(
# name='GitHub',
# desc='The rate of development activity on GitHub.',
# start_date=pd.to_datetime('2017-01-01'),
# end_date=pd.to_datetime('2018-01-15'),
# data_frequencies=['daily', 'hour'],
# ),
# dict(
# name='Influencers',
# desc='Tweets & related sentiments by selected influencers.',
# start_date=pd.to_datetime('2017-01-01'),
# end_date=pd.to_datetime('2018-01-15'),
# data_frequencies=['daily', 'hour', 'minute'],
# ),
# ]
def to_text(self, hex):
return Web3.toText(hex).rstrip('\0')
    def choose_pubaddr(self):
        """
        Select the public address to use for a transaction.

        With a single configured address it is used directly; otherwise
        the user is prompted to pick one.

        Returns
        -------
        (str, int)
            The chosen address and its index in ``self.addresses``.
        """
        if len(self.addresses) == 1:
            # NOTE(review): unlike the interactive branch below, this address
            # is returned as stored, without Web3.toChecksumAddress -- confirm
            # callers tolerate a non-checksummed address here.
            address = self.addresses[0]['pubAddr']
            address_i = 0
            print('Using {} for this transaction.'.format(address))
        else:
            while True:
                for i in range(0, len(self.addresses)):
                    print('{}\t{}\t{}'.format(
                        i,
                        self.addresses[i]['pubAddr'],
                        self.addresses[i]['desc'])
                    )
                # Empty input falls back to index 0; a non-numeric entry will
                # raise ValueError out of this method.
                address_i = int(input('Choose your address associated with '
                                      'this transaction: [default: 0] ') or 0)
                if not (0 <= address_i < len(self.addresses)):
                    print('Please choose a number between 0 and {}\n'.format(
                        len(self.addresses) - 1))
                else:
                    address = Web3.toChecksumAddress(
                        self.addresses[address_i]['pubAddr'])
                    break
        return address, address_i
    def sign_transaction(self, tx):
        """
        Ask the user to sign a transaction offline via MyEtherWallet.

        Opens the MyEtherWallet offline-transaction page, prints the
        fields to enter, and reads the resulting signed transaction back
        from stdin.

        Parameters
        ----------
        tx: dict
            Unsigned transaction fields (from/to/value/gas/nonce/data).

        Returns
        -------
        str
            The signed transaction hex string, without a '0x' prefix.
        """
        url = 'https://www.myetherwallet.com/#offline-transaction'
        print('\nVisit {url} and enter the following parameters:\n\n'
              'From Address:\t\t{_from}\n'
              '\n\tClick the "Generate Information" button\n\n'
              'To Address:\t\t{to}\n'
              'Value / Amount to Send:\t{value}\n'
              'Gas Limit:\t\t{gas}\n'
              'Gas Price:\t\t[Accept the default value]\n'
              'Nonce:\t\t\t{nonce}\n'
              'Data:\t\t\t{data}\n'.format(
                  url=url,
                  _from=tx['from'],
                  to=tx['to'],
                  value=tx['value'],
                  gas=tx['gas'],
                  nonce=tx['nonce'],
                  data=tx['data'], )
              )
        webbrowser.open_new(url)
        signed_tx = input('Copy and Paste the "Signed Transaction" '
                          'field here:\n')
        # sendRawTransaction expects the raw hex without the 0x prefix.
        if signed_tx.startswith('0x'):
            signed_tx = signed_tx[2:]
        return signed_tx
def check_transaction(self, tx_hash):
if 'ropsten' in ETH_REMOTE_NODE:
etherscan = 'https://ropsten.etherscan.io/tx/'
elif 'rinkeby' in ETH_REMOTE_NODE:
etherscan = 'https://rinkeby.etherscan.io/tx/'
else:
etherscan = 'https://etherscan.io/tx/'
etherscan = '{}{}'.format(etherscan, tx_hash)
print('\nYou can check the outcome of your transaction here:\n'
'{}\n\n'.format(etherscan))
def _list(self):
data_sources = self.mkt_contract.functions.getAllProviders().call()
data = []
for index, data_source in enumerate(data_sources):
if index > 0:
if 'test' not in Web3.toText(data_source).lower():
data.append(
dict(
dataset=self.to_text(data_source)
)
)
return pd.DataFrame(data)
def list(self):
df = self._list()
set_print_settings()
if df.empty:
print('There are no datasets available yet.')
else:
print(df)
    def subscribe(self, dataset=None):
        """
        Subscribe the chosen local address to a marketplace dataset.

        Checks registration, current subscription and ENG balance, then
        sends two Ethereum transactions: an ENG allowance approval for
        the marketplace contract, followed by the subscription itself.
        The user signs both transactions offline and is prompted at each
        step.

        Parameters
        ----------
        dataset: str, optional
            Dataset name; prompts interactively when omitted.
        """
        if dataset is None:
            df_sets = self._list()
            if df_sets.empty:
                print('There are no datasets available yet.')
                return
            set_print_settings()
            while True:
                print(df_sets)
                dataset_num = input('Choose the dataset you want to '
                                    'subscribe to [0..{}]: '.format(
                                        df_sets.size - 1))
                try:
                    dataset_num = int(dataset_num)
                except ValueError:
                    print('Enter a number between 0 and {}'.format(
                        df_sets.size - 1))
                else:
                    if dataset_num not in range(0, df_sets.size):
                        print('Enter a number between 0 and {}'.format(
                            df_sets.size - 1))
                    else:
                        dataset = df_sets.iloc[dataset_num]['dataset']
                        break
        dataset = dataset.lower()
        address = self.choose_pubaddr()[0]
        # provider_info fields (by index): 0 owner, 1 price in grains,
        # 4 registered flag -- inferred from usage here and below.
        provider_info = self.mkt_contract.functions.getDataProviderInfo(
            Web3.toHex(dataset)
        ).call()
        if not provider_info[4]:
            print('The requested "{}" dataset is not registered in '
                  'the Data Marketplace.'.format(dataset))
            return
        grains = provider_info[1]
        price = from_grains(grains)
        subscribed = self.mkt_contract.functions.checkAddressSubscription(
            address, Web3.toHex(dataset)
        ).call()
        # subscribed[5] is the "currently active" flag; [3]/[4] are the
        # start/end timestamps in epoch seconds.
        if subscribed[5]:
            print(
                '\nYou are already subscribed to the "{}" dataset.\n'
                'Your subscription started on {} UTC, and is valid until '
                '{} UTC.'.format(
                    dataset,
                    pd.to_datetime(subscribed[3], unit='s', utc=True),
                    pd.to_datetime(subscribed[4], unit='s', utc=True)
                )
            )
            return
        print('\nThe price for a monthly subscription to this dataset is'
              ' {} ENG'.format(price))
        print(
            'Checking that the ENG balance in {} is greater than {} '
            'ENG... '.format(address, price), end=''
        )
        wallet_address = address[2:]
        # Raw eth_call of ERC20 balanceOf(address): 0x70a08231 selector
        # followed by the 32-byte left-padded wallet address.
        balance = self.web3.eth.call({
            'from': address,
            'to': self.eng_contract_address,
            'data': '0x70a08231000000000000000000000000{}'.format(
                wallet_address
            )
        })
        try:
            balance = Web3.toInt(balance)  # web3 >= 4.0.0b7
        except TypeError:
            balance = Web3.toInt(hexstr=balance)  # web3 <= 4.0.0b6
        if balance > grains:
            print('OK.')
        else:
            print('FAIL.\n\nAddress {} balance is {} ENG,\nwhich is lower '
                  'than the price of the dataset that you are trying to\n'
                  'buy: {} ENG. Get enough ENG to cover the costs of the '
                  'monthly\nsubscription for what you are trying to buy, '
                  'and try again.'.format(
                      address, from_grains(balance), price))
            return
        while True:
            agree_pay = input('Please confirm that you agree to pay {} ENG '
                              'for a monthly subscription to the dataset "{}" '
                              'starting today. [default: Y] '.format(
                                  price, dataset)) or 'y'
            if agree_pay.lower() not in ('y', 'n'):
                print("Please answer Y or N.")
            else:
                if agree_pay.lower() == 'y':
                    break
                else:
                    return
        print('Ready to subscribe to dataset {}.\n'.format(dataset))
        print('In order to execute the subscription, you will need to sign '
              'two different transactions:\n'
              '1. First transaction is to authorize the Marketplace contract '
              'to spend {} ENG on your behalf.\n'
              '2. Second transaction is the actual subscription for the '
              'desired dataset'.format(price))
        # Transaction 1: ERC20 approve() so the marketplace contract can
        # pull the subscription fee.
        tx = self.eng_contract.functions.approve(
            self.mkt_contract_address,
            grains,
        ).buildTransaction(
            {'from': address,
             'nonce': self.web3.eth.getTransactionCount(address)}
        )
        signed_tx = self.sign_transaction(tx)
        try:
            tx_hash = '0x{}'.format(
                bin_hex(self.web3.eth.sendRawTransaction(signed_tx))
            )
            print(
                '\nThis is the TxHash for this transaction: {}'.format(tx_hash)
            )
        except Exception as e:
            print('Unable to subscribe to data source: {}'.format(e))
            return
        self.check_transaction(tx_hash)
        print('Waiting for the first transaction to succeed...')
        # Poll for the receipt; getTransactionReceipt returns None until
        # the tx is mined, hence the AttributeError catch.
        while True:
            try:
                if self.web3.eth.getTransactionReceipt(tx_hash).status:
                    break
                else:
                    print('\nTransaction failed. Aborting...')
                    return
            except AttributeError:
                pass
            for i in range(0, 10):
                print('.', end='', flush=True)
                time.sleep(1)
        print('\nFirst transaction successful!\n'
              'Now processing second transaction.')
        # Transaction 2: the actual on-chain subscription.
        tx = self.mkt_contract.functions.subscribe(
            Web3.toHex(dataset),
        ).buildTransaction({
            'from': address,
            'nonce': self.web3.eth.getTransactionCount(address)})
        signed_tx = self.sign_transaction(tx)
        try:
            tx_hash = '0x{}'.format(bin_hex(
                self.web3.eth.sendRawTransaction(signed_tx)))
            print('\nThis is the TxHash for this transaction: '
                  '{}'.format(tx_hash))
        except Exception as e:
            print('Unable to subscribe to data source: {}'.format(e))
            return
        self.check_transaction(tx_hash)
        print('Waiting for the second transaction to succeed...')
        while True:
            try:
                if self.web3.eth.getTransactionReceipt(tx_hash).status:
                    break
                else:
                    print('\nTransaction failed. Aborting...')
                    return
            except AttributeError:
                pass
            for i in range(0, 10):
                print('.', end='', flush=True)
                time.sleep(1)
        print('\nSecond transaction successful!\n'
              'You have successfully subscribed to dataset {} with'
              'address {}.\n'
              'You can now ingest this dataset anytime during the '
              'next month by running the following command:\n'
              'catalyst marketplace ingest --dataset={}'.format(
                  dataset, address, dataset))
def process_temp_bundle(self, ds_name, path):
"""
Merge the temp bundle into the main bundle for the specified
data source.
Parameters
----------
ds_name
path
Returns
-------
"""
tmp_bundle = extract_bundle(path)
bundle_folder = get_data_source_folder(ds_name)
ensure_directory(bundle_folder)
if os.listdir(bundle_folder):
zsource = bcolz.ctable(rootdir=tmp_bundle, mode='r')
ztarget = bcolz.ctable(rootdir=bundle_folder, mode='r')
merge_bundles(zsource, ztarget)
else:
os.rename(tmp_bundle, bundle_folder)
pass
    def ingest(self, ds_name=None, start=None, end=None, force_download=False):
        """
        Download the bundle(s) of a subscribed dataset from the auth
        server and merge them into the local data store.

        Parameters
        ----------
        ds_name: str, optional
            Dataset name; prompts interactively when omitted.
        start, end
            Currently unused by this method.
        force_download: bool
            Currently unused by this method.

        Raises
        ------
        MarketplaceHTTPRequest
            If the server refuses the download or returns an error.
        """
        if ds_name is None:
            df_sets = self._list()
            if df_sets.empty:
                print('There are no datasets available yet.')
                return
            set_print_settings()
            while True:
                print(df_sets)
                dataset_num = input('Choose the dataset you want to '
                                    'ingest [0..{}]: '.format(
                                        df_sets.size - 1))
                try:
                    dataset_num = int(dataset_num)
                except ValueError:
                    print('Enter a number between 0 and {}'.format(
                        df_sets.size - 1))
                else:
                    if dataset_num not in range(0, df_sets.size):
                        print('Enter a number between 0 and {}'.format(
                            df_sets.size - 1))
                    else:
                        ds_name = df_sets.iloc[dataset_num]['dataset']
                        break
        # ds_name = ds_name.lower()
        # TODO: catch error conditions
        provider_info = self.mkt_contract.functions.getDataProviderInfo(
            Web3.toHex(ds_name)
        ).call()
        if not provider_info[4]:
            print('The requested "{}" dataset is not registered in '
                  'the Data Marketplace.'.format(ds_name))
            return
        address, address_i = self.choose_pubaddr()
        fns = self.mkt_contract.functions
        check_sub = fns.checkAddressSubscription(
            address, Web3.toHex(ds_name)
        ).call()
        # check_sub: 0 subscriber address, 1 dataset name, 4 expiry
        # timestamp, 5 active flag -- inferred from usage here.
        if check_sub[0] != address or self.to_text(check_sub[1]) != ds_name:
            print('You are not subscribed to dataset "{}" with address {}. '
                  'Plese subscribe first.'.format(ds_name, address))
            return
        if not check_sub[5]:
            # NOTE(review): only warns on an expired subscription; the
            # download below is still attempted -- confirm intended.
            print('Your subscription to dataset "{}" expired on {} UTC.'
                  'Please renew your subscription by running:\n'
                  'catalyst marketplace subscribe --dataset={}'.format(
                      ds_name,
                      pd.to_datetime(check_sub[4], unit='s', utc=True),
                      ds_name)
                  )
        if 'key' in self.addresses[address_i]:
            key = self.addresses[address_i]['key']
            secret = self.addresses[address_i]['secret']
        else:
            key, secret = get_key_secret(address)
        headers = get_signed_headers(ds_name, key, secret)
        log.debug('Starting download of dataset for ingestion...')
        r = requests.post(
            '{}/marketplace/ingest'.format(AUTH_SERVER),
            headers=headers,
            stream=True,
        )
        if r.status_code == 200:
            target_path = get_temp_bundles_folder()
            try:
                # The server streams one multipart part per bundle file;
                # each part is saved to the temp folder, then merged.
                decoder = MultipartDecoder.from_response(r)
                for part in decoder.parts:
                    h = part.headers[b'Content-Disposition'].decode('utf-8')
                    # Extracting the filename from the header
                    name = re.search(r'filename="(.*)"', h).group(1)
                    filename = os.path.join(target_path, name)
                    with open(filename, 'wb') as f:
                        # for chunk in part.content.iter_content(
                        #         chunk_size=1024):
                        #     if chunk: # filter out keep-alive new chunks
                        #         f.write(chunk)
                        f.write(part.content)
                    self.process_temp_bundle(ds_name, filename)
            except NonMultipartContentTypeException:
                # A non-multipart body is a JSON error payload.
                response = r.json()
                raise MarketplaceHTTPRequest(
                    request='ingest dataset',
                    error=response,
                )
        else:
            raise MarketplaceHTTPRequest(
                request='ingest dataset',
                error=r.status_code,
            )
        log.info('{} ingested successfully'.format(ds_name))
def get_dataset(self, ds_name, start=None, end=None):
ds_name = ds_name.lower()
# TODO: filter ctable by start and end date
bundle_folder = get_data_source_folder(ds_name)
z = bcolz.ctable(rootdir=bundle_folder, mode='r')
df = z.todataframe() # type: pd.DataFrame
df.set_index(['date', 'symbol'], drop=True, inplace=True)
# TODO: implement the filter more carefully
# if start and end is None:
# df = df.xs(start, level=0)
return df
def clean(self, ds_name=None, data_frequency=None):
if ds_name is None:
mktplace_root = get_marketplace_folder()
folders = [os.path.basename(f.rstrip('/'))
for f in glob.glob('{}/*/'.format(mktplace_root))
if 'temp_bundles' not in f]
while True:
for idx, f in enumerate(folders):
print('{}\t{}'.format(idx, f))
dataset_num = input('Choose the dataset you want to '
'clean [0..{}]: '.format(
len(folders) - 1))
try:
dataset_num = int(dataset_num)
except ValueError:
print('Enter a number between 0 and {}'.format(
len(folders) - 1))
else:
if dataset_num not in range(0, len(folders)):
print('Enter a number between 0 and {}'.format(
len(folders) - 1))
else:
ds_name = folders[dataset_num]
break
ds_name = ds_name.lower()
if data_frequency is None:
folder = get_data_source_folder(ds_name)
else:
folder = get_bundle_folder(ds_name, data_frequency)
shutil.rmtree(folder)
pass
def create_metadata(self, key, secret, ds_name, data_frequency, desc,
has_history=True, has_live=True):
"""
Returns
-------
"""
headers = get_signed_headers(ds_name, key, secret)
r = requests.post(
'{}/marketplace/register'.format(AUTH_SERVER),
json=dict(
ds_name=ds_name,
desc=desc,
data_frequency=data_frequency,
has_history=has_history,
has_live=has_live,
),
headers=headers,
)
if r.status_code != 200:
raise MarketplaceHTTPRequest(
request='register', error=r.status_code
)
if 'error' in r.json():
raise MarketplaceHTTPRequest(
request='upload file', error=r.json()['error']
)
    def register(self):
        """
        Interactively register a new dataset in the marketplace.

        Collects the dataset name, price, frequency and history/live
        flags, sends the on-chain register transaction (signed offline
        by the user), then creates the metadata on the auth server.
        """
        while True:
            desc = input('Enter the name of the dataset to register: ')
            dataset = desc.lower()
            provider_info = self.mkt_contract.functions.getDataProviderInfo(
                Web3.toHex(dataset)
            ).call()
            # provider_info[4] is the "already registered" flag.
            if provider_info[4]:
                print('There is already a dataset registered under '
                      'the name "{}". Please choose a different '
                      'name.'.format(dataset))
            else:
                break
        price = int(
            input(
                'Enter the price for a monthly subscription to '
                'this dataset in ENG: '
            )
        )
        while True:
            freq = input('Enter the data frequency [daily, hourly, minute]: ')
            if freq.lower() not in ('daily', 'hourly', 'minute'):
                print('Not a valid frequency.')
            else:
                break
        while True:
            reg_pub = input(
                'Does it include historical data? [default: Y]: '
            ) or 'y'
            if reg_pub.lower() not in ('y', 'n'):
                print('Please answer Y or N.')
            else:
                if reg_pub.lower() == 'y':
                    has_history = True
                else:
                    has_history = False
                break
        while True:
            # NOTE(review): 'Doest' typo in this user-facing prompt --
            # fix separately (runtime string, not changed here).
            reg_pub = input(
                'Doest it include live data? [default: Y]: '
            ) or 'y'
            if reg_pub.lower() not in ('y', 'n'):
                print('Please answer Y or N.')
            else:
                if reg_pub.lower() == 'y':
                    has_live = True
                else:
                    has_live = False
                break
        address, address_i = self.choose_pubaddr()
        if 'key' in self.addresses[address_i]:
            key = self.addresses[address_i]['key']
            secret = self.addresses[address_i]['secret']
        else:
            key, secret = get_key_secret(address)
        # Convert the ENG price to the token's smallest unit.
        grains = to_grains(price)
        tx = self.mkt_contract.functions.register(
            Web3.toHex(dataset),
            grains,
            address,
        ).buildTransaction(
            {'from': address,
             'nonce': self.web3.eth.getTransactionCount(address)}
        )
        signed_tx = self.sign_transaction(tx)
        try:
            tx_hash = '0x{}'.format(
                bin_hex(self.web3.eth.sendRawTransaction(signed_tx))
            )
            print(
                '\nThis is the TxHash for this transaction: {}'.format(tx_hash)
            )
        except Exception as e:
            print('Unable to register the requested dataset: {}'.format(e))
            return
        self.check_transaction(tx_hash)
        print('Waiting for the transaction to succeed...')
        # Poll for the receipt; getTransactionReceipt returns None until
        # the tx is mined, hence the AttributeError catch.
        while True:
            try:
                if self.web3.eth.getTransactionReceipt(tx_hash).status:
                    break
                else:
                    print('\nTransaction failed. Aborting...')
                    return
            except AttributeError:
                pass
            for i in range(0, 10):
                print('.', end='', flush=True)
                time.sleep(1)
        print('\nWarming up the {} dataset'.format(dataset))
        self.create_metadata(
            key=key,
            secret=secret,
            ds_name=dataset,
            data_frequency=freq,
            desc=desc,
            has_history=has_history,
            has_live=has_live,
        )
        print('\n{} registered successfully'.format(dataset))
def publish(self, dataset, datadir, watch):
dataset = dataset.lower()
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(dataset)
).call()
if not provider_info[4]:
raise MarketplaceDatasetNotFound(dataset=dataset)
match = next(
(l for l in self.addresses if l['pubAddr'] == provider_info[0]),
None
)
if not match:
raise MarketplaceNoAddressMatch(
dataset=dataset,
address=provider_info[0])
print('Using address: {} to publish this dataset.'.format(
provider_info[0]))
if 'key' in match:
key = match['key']
secret = match['secret']
else:
key, secret = get_key_secret(provider_info[0])
headers = get_signed_headers(dataset, key, secret)
filenames = glob.glob(os.path.join(datadir, '*.csv'))
if not filenames:
raise MarketplaceNoCSVFiles(datadir=datadir)
files = []
for file in filenames:
files.append(('file', open(file, 'rb')))
r = requests.post('{}/marketplace/publish'.format(AUTH_SERVER),
files=files,
headers=headers)
if r.status_code != 200:
raise MarketplaceHTTPRequest(request='upload file',
error=r.status_code)
if 'error' in r.json():
raise MarketplaceHTTPRequest(request='upload file',
error=r.json()['error'])
print('Dataset {} uploaded successfully.'.format(dataset))
| 34.430013 | 79 | 0.517892 | from __future__ import print_function
import glob
import json
import os
import re
import shutil
import sys
import time
import webbrowser
import bcolz
import logbook
import pandas as pd
import requests
from requests_toolbelt import MultipartDecoder
from requests_toolbelt.multipart.decoder import \
NonMultipartContentTypeException
from catalyst.constants import (
LOG_LEVEL, AUTH_SERVER, ETH_REMOTE_NODE, MARKETPLACE_CONTRACT,
MARKETPLACE_CONTRACT_ABI, ENIGMA_CONTRACT, ENIGMA_CONTRACT_ABI)
from catalyst.exchange.utils.stats_utils import set_print_settings
from catalyst.marketplace.marketplace_errors import (
MarketplacePubAddressEmpty, MarketplaceDatasetNotFound,
MarketplaceNoAddressMatch, MarketplaceHTTPRequest,
MarketplaceNoCSVFiles, MarketplaceRequiresPython3)
from catalyst.marketplace.utils.auth_utils import get_key_secret, \
get_signed_headers
from catalyst.marketplace.utils.bundle_utils import merge_bundles
from catalyst.marketplace.utils.eth_utils import bin_hex, from_grains, \
to_grains
from catalyst.marketplace.utils.path_utils import get_bundle_folder, \
get_data_source_folder, get_marketplace_folder, \
get_user_pubaddr, get_temp_bundles_folder, extract_bundle
from catalyst.utils.paths import ensure_directory
if sys.version_info.major < 3:
import urllib
else:
import urllib.request as urllib
log = logbook.Logger('Marketplace', level=LOG_LEVEL)
class Marketplace:
def __init__(self):
global Web3
try:
from web3 import Web3, HTTPProvider
except ImportError:
raise MarketplaceRequiresPython3()
self.addresses = get_user_pubaddr()
if self.addresses[0]['pubAddr'] == '':
raise MarketplacePubAddressEmpty(
filename=os.path.join(
get_marketplace_folder(), 'addresses.json')
)
self.default_account = self.addresses[0]['pubAddr']
self.web3 = Web3(HTTPProvider(ETH_REMOTE_NODE))
contract_url = urllib.urlopen(MARKETPLACE_CONTRACT)
self.mkt_contract_address = Web3.toChecksumAddress(
contract_url.readline().decode(
contract_url.info().get_content_charset()).strip())
abi_url = urllib.urlopen(MARKETPLACE_CONTRACT_ABI)
abi = json.load(abi_url)
self.mkt_contract = self.web3.eth.contract(
self.mkt_contract_address,
abi=abi,
)
contract_url = urllib.urlopen(ENIGMA_CONTRACT)
self.eng_contract_address = Web3.toChecksumAddress(
contract_url.readline().decode(
contract_url.info().get_content_charset()).strip())
abi_url = urllib.urlopen(ENIGMA_CONTRACT_ABI)
abi = json.load(abi_url)
self.eng_contract = self.web3.eth.contract(
self.eng_contract_address,
abi=abi,
)
def to_text(self, hex):
return Web3.toText(hex).rstrip('\0')
def choose_pubaddr(self):
if len(self.addresses) == 1:
address = self.addresses[0]['pubAddr']
address_i = 0
print('Using {} for this transaction.'.format(address))
else:
while True:
for i in range(0, len(self.addresses)):
print('{}\t{}\t{}'.format(
i,
self.addresses[i]['pubAddr'],
self.addresses[i]['desc'])
)
address_i = int(input('Choose your address associated with '
'this transaction: [default: 0] ') or 0)
if not (0 <= address_i < len(self.addresses)):
print('Please choose a number between 0 and {}\n'.format(
len(self.addresses) - 1))
else:
address = Web3.toChecksumAddress(
self.addresses[address_i]['pubAddr'])
break
return address, address_i
def sign_transaction(self, tx):
url = 'https://www.myetherwallet.com/#offline-transaction'
print('\nVisit {url} and enter the following parameters:\n\n'
'From Address:\t\t{_from}\n'
'\n\tClick the "Generate Information" button\n\n'
'To Address:\t\t{to}\n'
'Value / Amount to Send:\t{value}\n'
'Gas Limit:\t\t{gas}\n'
'Gas Price:\t\t[Accept the default value]\n'
'Nonce:\t\t\t{nonce}\n'
'Data:\t\t\t{data}\n'.format(
url=url,
_from=tx['from'],
to=tx['to'],
value=tx['value'],
gas=tx['gas'],
nonce=tx['nonce'],
data=tx['data'], )
)
webbrowser.open_new(url)
signed_tx = input('Copy and Paste the "Signed Transaction" '
'field here:\n')
if signed_tx.startswith('0x'):
signed_tx = signed_tx[2:]
return signed_tx
def check_transaction(self, tx_hash):
if 'ropsten' in ETH_REMOTE_NODE:
etherscan = 'https://ropsten.etherscan.io/tx/'
elif 'rinkeby' in ETH_REMOTE_NODE:
etherscan = 'https://rinkeby.etherscan.io/tx/'
else:
etherscan = 'https://etherscan.io/tx/'
etherscan = '{}{}'.format(etherscan, tx_hash)
print('\nYou can check the outcome of your transaction here:\n'
'{}\n\n'.format(etherscan))
def _list(self):
data_sources = self.mkt_contract.functions.getAllProviders().call()
data = []
for index, data_source in enumerate(data_sources):
if index > 0:
if 'test' not in Web3.toText(data_source).lower():
data.append(
dict(
dataset=self.to_text(data_source)
)
)
return pd.DataFrame(data)
def list(self):
df = self._list()
set_print_settings()
if df.empty:
print('There are no datasets available yet.')
else:
print(df)
def subscribe(self, dataset=None):
if dataset is None:
df_sets = self._list()
if df_sets.empty:
print('There are no datasets available yet.')
return
set_print_settings()
while True:
print(df_sets)
dataset_num = input('Choose the dataset you want to '
'subscribe to [0..{}]: '.format(
df_sets.size - 1))
try:
dataset_num = int(dataset_num)
except ValueError:
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
if dataset_num not in range(0, df_sets.size):
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
dataset = df_sets.iloc[dataset_num]['dataset']
break
dataset = dataset.lower()
address = self.choose_pubaddr()[0]
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(dataset)
).call()
if not provider_info[4]:
print('The requested "{}" dataset is not registered in '
'the Data Marketplace.'.format(dataset))
return
grains = provider_info[1]
price = from_grains(grains)
subscribed = self.mkt_contract.functions.checkAddressSubscription(
address, Web3.toHex(dataset)
).call()
if subscribed[5]:
print(
'\nYou are already subscribed to the "{}" dataset.\n'
'Your subscription started on {} UTC, and is valid until '
'{} UTC.'.format(
dataset,
pd.to_datetime(subscribed[3], unit='s', utc=True),
pd.to_datetime(subscribed[4], unit='s', utc=True)
)
)
return
print('\nThe price for a monthly subscription to this dataset is'
' {} ENG'.format(price))
print(
'Checking that the ENG balance in {} is greater than {} '
'ENG... '.format(address, price), end=''
)
wallet_address = address[2:]
balance = self.web3.eth.call({
'from': address,
'to': self.eng_contract_address,
'data': '0x70a08231000000000000000000000000{}'.format(
wallet_address
)
})
try:
balance = Web3.toInt(balance)
except TypeError:
balance = Web3.toInt(hexstr=balance)
if balance > grains:
print('OK.')
else:
print('FAIL.\n\nAddress {} balance is {} ENG,\nwhich is lower '
'than the price of the dataset that you are trying to\n'
'buy: {} ENG. Get enough ENG to cover the costs of the '
'monthly\nsubscription for what you are trying to buy, '
'and try again.'.format(
address, from_grains(balance), price))
return
while True:
agree_pay = input('Please confirm that you agree to pay {} ENG '
'for a monthly subscription to the dataset "{}" '
'starting today. [default: Y] '.format(
price, dataset)) or 'y'
if agree_pay.lower() not in ('y', 'n'):
print("Please answer Y or N.")
else:
if agree_pay.lower() == 'y':
break
else:
return
print('Ready to subscribe to dataset {}.\n'.format(dataset))
print('In order to execute the subscription, you will need to sign '
'two different transactions:\n'
'1. First transaction is to authorize the Marketplace contract '
'to spend {} ENG on your behalf.\n'
'2. Second transaction is the actual subscription for the '
'desired dataset'.format(price))
tx = self.eng_contract.functions.approve(
self.mkt_contract_address,
grains,
).buildTransaction(
{'from': address,
'nonce': self.web3.eth.getTransactionCount(address)}
)
signed_tx = self.sign_transaction(tx)
try:
tx_hash = '0x{}'.format(
bin_hex(self.web3.eth.sendRawTransaction(signed_tx))
)
print(
'\nThis is the TxHash for this transaction: {}'.format(tx_hash)
)
except Exception as e:
print('Unable to subscribe to data source: {}'.format(e))
return
self.check_transaction(tx_hash)
print('Waiting for the first transaction to succeed...')
while True:
try:
if self.web3.eth.getTransactionReceipt(tx_hash).status:
break
else:
print('\nTransaction failed. Aborting...')
return
except AttributeError:
pass
for i in range(0, 10):
print('.', end='', flush=True)
time.sleep(1)
print('\nFirst transaction successful!\n'
'Now processing second transaction.')
tx = self.mkt_contract.functions.subscribe(
Web3.toHex(dataset),
).buildTransaction({
'from': address,
'nonce': self.web3.eth.getTransactionCount(address)})
signed_tx = self.sign_transaction(tx)
try:
tx_hash = '0x{}'.format(bin_hex(
self.web3.eth.sendRawTransaction(signed_tx)))
print('\nThis is the TxHash for this transaction: '
'{}'.format(tx_hash))
except Exception as e:
print('Unable to subscribe to data source: {}'.format(e))
return
self.check_transaction(tx_hash)
print('Waiting for the second transaction to succeed...')
while True:
try:
if self.web3.eth.getTransactionReceipt(tx_hash).status:
break
else:
print('\nTransaction failed. Aborting...')
return
except AttributeError:
pass
for i in range(0, 10):
print('.', end='', flush=True)
time.sleep(1)
print('\nSecond transaction successful!\n'
'You have successfully subscribed to dataset {} with'
'address {}.\n'
'You can now ingest this dataset anytime during the '
'next month by running the following command:\n'
'catalyst marketplace ingest --dataset={}'.format(
dataset, address, dataset))
def process_temp_bundle(self, ds_name, path):
tmp_bundle = extract_bundle(path)
bundle_folder = get_data_source_folder(ds_name)
ensure_directory(bundle_folder)
if os.listdir(bundle_folder):
zsource = bcolz.ctable(rootdir=tmp_bundle, mode='r')
ztarget = bcolz.ctable(rootdir=bundle_folder, mode='r')
merge_bundles(zsource, ztarget)
else:
os.rename(tmp_bundle, bundle_folder)
pass
def ingest(self, ds_name=None, start=None, end=None, force_download=False):
if ds_name is None:
df_sets = self._list()
if df_sets.empty:
print('There are no datasets available yet.')
return
set_print_settings()
while True:
print(df_sets)
dataset_num = input('Choose the dataset you want to '
'ingest [0..{}]: '.format(
df_sets.size - 1))
try:
dataset_num = int(dataset_num)
except ValueError:
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
if dataset_num not in range(0, df_sets.size):
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
ds_name = df_sets.iloc[dataset_num]['dataset']
break
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(ds_name)
).call()
if not provider_info[4]:
print('The requested "{}" dataset is not registered in '
'the Data Marketplace.'.format(ds_name))
return
address, address_i = self.choose_pubaddr()
fns = self.mkt_contract.functions
check_sub = fns.checkAddressSubscription(
address, Web3.toHex(ds_name)
).call()
if check_sub[0] != address or self.to_text(check_sub[1]) != ds_name:
print('You are not subscribed to dataset "{}" with address {}. '
'Plese subscribe first.'.format(ds_name, address))
return
if not check_sub[5]:
print('Your subscription to dataset "{}" expired on {} UTC.'
'Please renew your subscription by running:\n'
'catalyst marketplace subscribe --dataset={}'.format(
ds_name,
pd.to_datetime(check_sub[4], unit='s', utc=True),
ds_name)
)
if 'key' in self.addresses[address_i]:
key = self.addresses[address_i]['key']
secret = self.addresses[address_i]['secret']
else:
key, secret = get_key_secret(address)
headers = get_signed_headers(ds_name, key, secret)
log.debug('Starting download of dataset for ingestion...')
r = requests.post(
'{}/marketplace/ingest'.format(AUTH_SERVER),
headers=headers,
stream=True,
)
if r.status_code == 200:
target_path = get_temp_bundles_folder()
try:
decoder = MultipartDecoder.from_response(r)
for part in decoder.parts:
h = part.headers[b'Content-Disposition'].decode('utf-8')
name = re.search(r'filename="(.*)"', h).group(1)
filename = os.path.join(target_path, name)
with open(filename, 'wb') as f:
f.write(part.content)
self.process_temp_bundle(ds_name, filename)
except NonMultipartContentTypeException:
response = r.json()
raise MarketplaceHTTPRequest(
request='ingest dataset',
error=response,
)
else:
raise MarketplaceHTTPRequest(
request='ingest dataset',
error=r.status_code,
)
log.info('{} ingested successfully'.format(ds_name))
def get_dataset(self, ds_name, start=None, end=None):
ds_name = ds_name.lower()
bundle_folder = get_data_source_folder(ds_name)
z = bcolz.ctable(rootdir=bundle_folder, mode='r')
df = z.todataframe()
df.set_index(['date', 'symbol'], drop=True, inplace=True)
return df
def clean(self, ds_name=None, data_frequency=None):
if ds_name is None:
mktplace_root = get_marketplace_folder()
folders = [os.path.basename(f.rstrip('/'))
for f in glob.glob('{}/*/'.format(mktplace_root))
if 'temp_bundles' not in f]
while True:
for idx, f in enumerate(folders):
print('{}\t{}'.format(idx, f))
dataset_num = input('Choose the dataset you want to '
'clean [0..{}]: '.format(
len(folders) - 1))
try:
dataset_num = int(dataset_num)
except ValueError:
print('Enter a number between 0 and {}'.format(
len(folders) - 1))
else:
if dataset_num not in range(0, len(folders)):
print('Enter a number between 0 and {}'.format(
len(folders) - 1))
else:
ds_name = folders[dataset_num]
break
ds_name = ds_name.lower()
if data_frequency is None:
folder = get_data_source_folder(ds_name)
else:
folder = get_bundle_folder(ds_name, data_frequency)
shutil.rmtree(folder)
pass
def create_metadata(self, key, secret, ds_name, data_frequency, desc,
has_history=True, has_live=True):
headers = get_signed_headers(ds_name, key, secret)
r = requests.post(
'{}/marketplace/register'.format(AUTH_SERVER),
json=dict(
ds_name=ds_name,
desc=desc,
data_frequency=data_frequency,
has_history=has_history,
has_live=has_live,
),
headers=headers,
)
if r.status_code != 200:
raise MarketplaceHTTPRequest(
request='register', error=r.status_code
)
if 'error' in r.json():
raise MarketplaceHTTPRequest(
request='upload file', error=r.json()['error']
)
    def register(self):
        """Interactively register a new dataset on the marketplace.

        Prompts for a dataset name, monthly price, data frequency and
        history/live flags, sends a ``register`` transaction to the
        marketplace contract, polls for the transaction receipt, and
        finally uploads the dataset metadata via :meth:`create_metadata`.
        """
        # Loop until the user picks a name not already registered.
        while True:
            desc = input('Enter the name of the dataset to register: ')
            dataset = desc.lower()
            provider_info = self.mkt_contract.functions.getDataProviderInfo(
                Web3.toHex(dataset)
            ).call()
            # provider_info[4] — presumably an "already registered" flag;
            # TODO confirm against the contract ABI.
            if provider_info[4]:
                print('There is already a dataset registered under '
                      'the name "{}". Please choose a different '
                      'name.'.format(dataset))
            else:
                break
        price = int(
            input(
                'Enter the price for a monthly subscription to '
                'this dataset in ENG: '
            )
        )
        while True:
            freq = input('Enter the data frequency [daily, hourly, minute]: ')
            if freq.lower() not in ('daily', 'hourly', 'minute'):
                print('Not a valid frequency.')
            else:
                break
        # Empty answer defaults to 'y' via the `or 'y'` fallback.
        while True:
            reg_pub = input(
                'Does it include historical data? [default: Y]: '
            ) or 'y'
            if reg_pub.lower() not in ('y', 'n'):
                print('Please answer Y or N.')
            else:
                if reg_pub.lower() == 'y':
                    has_history = True
                else:
                    has_history = False
                break
        # NOTE(review): typo "Doest" in the prompt string below — fix in
        # a behavior-change pass (the text is user-visible output).
        while True:
            reg_pub = input(
                'Doest it include live data? [default: Y]: '
            ) or 'y'
            if reg_pub.lower() not in ('y', 'n'):
                print('Please answer Y or N.')
            else:
                if reg_pub.lower() == 'y':
                    has_live = True
                else:
                    has_live = False
                break
        address, address_i = self.choose_pubaddr()
        # Reuse stored API credentials when present; otherwise fetch them.
        if 'key' in self.addresses[address_i]:
            key = self.addresses[address_i]['key']
            secret = self.addresses[address_i]['secret']
        else:
            key, secret = get_key_secret(address)
        grains = to_grains(price)
        tx = self.mkt_contract.functions.register(
            Web3.toHex(dataset),
            grains,
            address,
        ).buildTransaction(
            {'from': address,
             'nonce': self.web3.eth.getTransactionCount(address)}
        )
        signed_tx = self.sign_transaction(tx)
        try:
            tx_hash = '0x{}'.format(
                bin_hex(self.web3.eth.sendRawTransaction(signed_tx))
            )
            print(
                '\nThis is the TxHash for this transaction: {}'.format(tx_hash)
            )
        except Exception as e:
            print('Unable to register the requested dataset: {}'.format(e))
            return
        self.check_transaction(tx_hash)
        print('Waiting for the transaction to succeed...')
        # Poll for the receipt.  While the transaction is pending,
        # getTransactionReceipt() returns None and `.status` raises
        # AttributeError, which the except below swallows to keep polling.
        while True:
            try:
                if self.web3.eth.getTransactionReceipt(tx_hash).status:
                    break
                else:
                    print('\nTransaction failed. Aborting...')
                    return
            except AttributeError:
                pass
            # Progress dots: ~10 seconds between receipt checks.
            for i in range(0, 10):
                print('.', end='', flush=True)
                time.sleep(1)
        print('\nWarming up the {} dataset'.format(dataset))
        self.create_metadata(
            key=key,
            secret=secret,
            ds_name=dataset,
            data_frequency=freq,
            desc=desc,
            has_history=has_history,
            has_live=has_live,
        )
        print('\n{} registered successfully'.format(dataset))
def publish(self, dataset, datadir, watch):
dataset = dataset.lower()
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(dataset)
).call()
if not provider_info[4]:
raise MarketplaceDatasetNotFound(dataset=dataset)
match = next(
(l for l in self.addresses if l['pubAddr'] == provider_info[0]),
None
)
if not match:
raise MarketplaceNoAddressMatch(
dataset=dataset,
address=provider_info[0])
print('Using address: {} to publish this dataset.'.format(
provider_info[0]))
if 'key' in match:
key = match['key']
secret = match['secret']
else:
key, secret = get_key_secret(provider_info[0])
headers = get_signed_headers(dataset, key, secret)
filenames = glob.glob(os.path.join(datadir, '*.csv'))
if not filenames:
raise MarketplaceNoCSVFiles(datadir=datadir)
files = []
for file in filenames:
files.append(('file', open(file, 'rb')))
r = requests.post('{}/marketplace/publish'.format(AUTH_SERVER),
files=files,
headers=headers)
if r.status_code != 200:
raise MarketplaceHTTPRequest(request='upload file',
error=r.status_code)
if 'error' in r.json():
raise MarketplaceHTTPRequest(request='upload file',
error=r.json()['error'])
print('Dataset {} uploaded successfully.'.format(dataset))
| true | true |
f72bd87c0f6308b1597913764a10b8ad4d63132c | 2,172 | py | Python | coevolution_transformer/model/msa_embeddings.py | microsoft/Protein-Folding | f534b2dd1e3f192fbcdadf234f25828c7f458a58 | [
"MIT"
] | 1 | 2021-11-12T03:00:28.000Z | 2021-11-12T03:00:28.000Z | coevolution_transformer/model/msa_embeddings.py | microsoft/Protein-Folding | f534b2dd1e3f192fbcdadf234f25828c7f458a58 | [
"MIT"
] | 1 | 2022-01-11T17:09:26.000Z | 2022-01-11T17:09:26.000Z | coevolution_transformer/model/msa_embeddings.py | microsoft/Protein-Folding | f534b2dd1e3f192fbcdadf234f25828c7f458a58 | [
"MIT"
] | 2 | 2021-11-10T11:42:11.000Z | 2021-12-09T05:37:09.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import math
import torch
from torch import nn
import torch.nn.functional as F
class PositionalEncoding(nn.Module):
    """Precomputed sinusoidal position embeddings.

    A ``(max_len, d_model)`` table is built once at construction: even
    channels hold ``sin(pos / 10000^(2i/d))`` and odd channels the
    matching cosine.  Lookup is plain tensor indexing, so position
    indices may have any shape.
    """

    def __init__(self, d_model, max_len=1 << 13):
        super(PositionalEncoding, self).__init__()
        self.ninp = d_model
        positions = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        inv_freq = torch.exp(
            torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model)
        )
        table = torch.zeros(max_len, d_model)
        table[:, 0::2] = torch.sin(positions * inv_freq)
        table[:, 1::2] = torch.cos(positions * inv_freq)  # (max_len, d_model)
        # A buffer moves with the module (.to()/.cuda()) but is not a
        # trainable parameter.
        self.register_buffer("pe", table)

    def forward(self, idx):
        """Return embeddings for integer positions: (B, L) -> (B, L, C)."""
        return self.pe[idx]
class MSAEmbeddings(nn.Module):
    """Embed an MSA window plus its query sequence for the transformer.

    Each alignment column is one-hot encoded over 24 symbols,
    concatenated with the one-hot query sequence and two boundary
    channels, linearly projected to ``embed_dim``, scaled, and combined
    with sinusoidal position embeddings.
    """

    def __init__(self, msa_gap, embed_dim, dropout):
        super(MSAEmbeddings, self).__init__()
        self.embed_dim = embed_dim
        # Frozen identity embedding == a one-hot lookup over 24 symbols.
        self.onehot = nn.Embedding(24, 24)
        self.onehot.weight.data = torch.eye(24)
        self.onehot.weight.requires_grad = False
        self.msa_embeddings = nn.Linear((msa_gap * 2 + 2) * 24 + 2, embed_dim)
        self.position_embeddings = PositionalEncoding(embed_dim)
        self.layer_norm = nn.LayerNorm(embed_dim)
        self.dropout = nn.Dropout(dropout)

    def forward(self, seq_ids, msa_ids, position_ids):
        """
        seq_ids: (B, L)
        msa_ids: (B, K, *, L)
        position_ids: (B, L)
        return: (B, K, L, C)
        """
        batch, depth, _, length = msa_ids.shape
        query = self.onehot(seq_ids)                       # (B, L, 24)
        columns = msa_ids.transpose(-2, -1)                # (B, K, L, *)
        flags = columns[..., -2:].float()                  # boundary channels
        encoded = self.onehot(columns[..., :-2]).reshape(batch, depth, length, -1)
        stacked = torch.cat(
            [query[:, None].repeat(1, encoded.shape[1], 1, 1), encoded, flags],
            dim=-1,
        )
        projected = self.msa_embeddings(stacked)
        positions = self.position_embeddings(position_ids)
        out = projected * math.sqrt(self.embed_dim) + positions[:, None]
        out = self.layer_norm(out)
        return self.dropout(out)
| 36.2 | 98 | 0.603131 |
import math
import torch
from torch import nn
import torch.nn.functional as F
class PositionalEncoding(nn.Module):
    """Sinusoidal position-embedding lookup table (no learned weights)."""
    def __init__(self, d_model, max_len=1 << 13):
        super(PositionalEncoding, self).__init__()
        self.ninp = d_model
        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        pe[:, 0::2] = torch.sin(position * div_term)  # even channels: sin
        pe[:, 1::2] = torch.cos(position * div_term)  # odd channels: cos
        # Buffer: moves with the module but is not a trainable parameter.
        self.register_buffer("pe", pe)
    def forward(self, idx):
        """Index the table with integer positions: (B, L) -> (B, L, C)."""
        return self.pe[idx]
class MSAEmbeddings(nn.Module):
    """One-hot encodes an MSA window plus query sequence, projects it to
    ``embed_dim`` and adds sinusoidal position embeddings."""
    def __init__(self, msa_gap, embed_dim, dropout):
        super(MSAEmbeddings, self).__init__()
        self.embed_dim = embed_dim
        # Frozen identity embedding == one-hot lookup over 24 symbols.
        self.onehot = nn.Embedding(24, 24)
        self.onehot.weight.data = torch.eye(24)
        self.onehot.weight.requires_grad = False
        self.msa_embeddings = nn.Linear((msa_gap * 2 + 2) * 24 + 2, embed_dim)
        self.position_embeddings = PositionalEncoding(embed_dim)
        self.layer_norm = nn.LayerNorm(embed_dim)
        self.dropout = nn.Dropout(dropout)
    def forward(self, seq_ids, msa_ids, position_ids):
        """
        seq_ids: (B, L)
        msa_ids: (B, K, *, L)
        position_ids: (B, L)
        return: (B, K, L, C)
        """
        B, K, _, L = msa_ids.shape
        seq = self.onehot(seq_ids)  # (B, L, 24)
        msa_ids = msa_ids.transpose(-2, -1)  # (B, K, L, *)
        boundary = msa_ids[..., -2:].float()  # last two channels are flags
        msa = self.onehot(msa_ids[..., :-2]).reshape(B, K, L, -1)
        msa = torch.cat([seq[:, None].repeat(1, msa.shape[1], 1, 1), msa, boundary], dim=-1)
        msa_emb = self.msa_embeddings(msa)
        pos_emb = self.position_embeddings(position_ids)
        embeddings = msa_emb * math.sqrt(self.embed_dim) + pos_emb[:, None]
        embeddings = self.layer_norm(embeddings)
        embeddings = self.dropout(embeddings)
        return embeddings
| true | true |
f72bd8ae65fb7e4bb2f7694fe3f07aad90af90c7 | 5,513 | py | Python | pywavefront/material.py | elgrandt/ShooterInc | 0552e563d9ea81b0e0a0c4be1648412aa52c56e6 | [
"MIT"
] | null | null | null | pywavefront/material.py | elgrandt/ShooterInc | 0552e563d9ea81b0e0a0c4be1648412aa52c56e6 | [
"MIT"
] | null | null | null | pywavefront/material.py | elgrandt/ShooterInc | 0552e563d9ea81b0e0a0c4be1648412aa52c56e6 | [
"MIT"
] | null | null | null | # ----------------------------------------------------------------------------
# PyWavefront
# Copyright (c) 2013 Kurt Yoder
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of PyWavefront nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
from OpenGL.GL import *
import pywavefront.parser as parser
import pywavefront.texture as texture
class Material(object):
    """Material state parsed from a Wavefront .mtl file.

    Holds the four RGBA lighting components, shininess, an optional
    texture, and the interleaved GL_T2F_N3F_V3F vertex array rendered
    by :meth:`draw`.
    """

    def __init__(self, name):
        self.name = name
        self.diffuse = [.8, .8, .8, 1.]
        self.ambient = [.2, .2, .2, 1.]
        self.specular = [0., 0., 0., 1.]
        self.emissive = [0., 0., 0., 1.]
        self.shininess = 0.
        self.texture = None
        # Interleaved array of floats in GL_T2F_N3F_V3F format
        self.vertices = []
        self.gl_floats = None

    def pad_light(self, values):
        """Return *values* as a list of floats, zero-padded to length 4.

        BUG FIX: the original appended to the caller's list in place,
        which (combined with the mutable ``values=[]`` defaults of the
        ``set_*`` methods) made the shared default grow across calls.
        This version never mutates its argument.
        """
        padded = list(map(float, values))
        while len(padded) < 4:
            padded.append(0.)
        return padded

    def set_alpha(self, alpha):
        """Set alpha/last value on all four lighting attributes."""
        alpha = float(alpha)
        self.diffuse[3] = alpha
        self.ambient[3] = alpha
        self.specular[3] = alpha
        self.emissive[3] = alpha

    # Immutable () defaults replace the shared-mutable [] defaults.
    def set_diffuse(self, values=()):
        self.diffuse = self.pad_light(values)

    def set_ambient(self, values=()):
        self.ambient = self.pad_light(values)

    def set_specular(self, values=()):
        self.specular = self.pad_light(values)

    def set_emissive(self, values=()):
        self.emissive = self.pad_light(values)

    def set_texture(self, path):
        self.texture = texture.Texture(path)

    def unset_texture(self):
        self.texture = None

    def gl_light(self, lighting):
        """Return a GLfloat array of length 4 from the 4 lighting values."""
        return (GLfloat * 4)(*(lighting))

    def draw(self, face=GL_FRONT_AND_BACK):
        """Render this material's vertex array with its lighting state."""
        glEnable(GL_TEXTURE_2D)
        glColor4f(1, 1, 1, 1)
        glMaterialfv(face, GL_DIFFUSE, self.gl_light(self.diffuse))
        glMaterialfv(face, GL_AMBIENT, self.gl_light(self.ambient))
        glMaterialfv(face, GL_SPECULAR, self.gl_light(self.specular))
        glMaterialfv(face, GL_EMISSION, self.gl_light(self.emissive))
        glMaterialf(face, GL_SHININESS, self.shininess)
        if self.texture:
            self.texture.draw()
        if self.gl_floats is None:
            self.gl_floats = (GLfloat * len(self.vertices))(*self.vertices)
            # 8 floats per vertex (2 tex + 3 normal + 3 position); this
            # is really the *vertex* count despite the attribute name,
            # which glDrawArrays expects.
            self.triangle_count = len(self.vertices) / 8
        glInterleavedArrays(GL_T2F_N3F_V3F, 0, self.gl_floats)
        glDrawArrays(GL_TRIANGLES, 0, int(self.triangle_count))
        glDisable(GL_TEXTURE_2D)
class MaterialParser(parser.Parser):
    """Parses the lines of a Wavefront .mtl materials definition file.

    Each ``parse_<keyword>`` method handles one .mtl statement; parsed
    Material objects are collected in ``self.materials`` by name.
    """

    def __init__(self, file_path, path):
        self.materials = {}
        self.path = path
        self.this_material = None
        self.read_file(file_path)

    def parse_newmtl(self, args):
        (name,) = args
        material = Material(name)
        self.this_material = material
        self.materials[material.name] = material

    def parse_Kd(self, args):
        self.this_material.set_diffuse(args)

    def parse_Ka(self, args):
        self.this_material.set_ambient(args)

    def parse_Ks(self, args):
        self.this_material.set_specular(args)

    def parse_Ke(self, args):
        self.this_material.set_emissive(args)

    def parse_Ns(self, args):
        (shininess,) = args
        self.this_material.shininess = float(shininess)

    def parse_d(self, args):
        (opacity,) = args
        self.this_material.set_alpha(opacity)

    def parse_map_Kd(self, args):
        (texture_file,) = args
        self.this_material.set_texture(self.path + "textures/" + texture_file)

    def parse_Ni(self, args):
        # Optical density (index of refraction): intentionally ignored.
        return

    def parse_illum(self, args):
        # Illumination model: intentionally ignored.
        return
| 35.339744 | 79 | 0.649737 |
from OpenGL.GL import *
import pywavefront.parser as parser
import pywavefront.texture as texture
class Material(object):
    """OpenGL material state parsed from a Wavefront .mtl file."""
    def __init__(self, name):
        self.name = name
        self.diffuse = [.8, .8, .8, 1.]
        self.ambient = [.2, .2, .2, 1.]
        self.specular = [0., 0., 0., 1.]
        self.emissive = [0., 0., 0., 1.]
        self.shininess = 0.
        self.texture = None
        # Interleaved floats in GL_T2F_N3F_V3F layout (8 floats/vertex).
        self.vertices = []
        self.gl_floats = None
    def pad_light(self, values):
        """Zero-pad *values* to 4 entries and return them as floats.

        WARNING: appends to the caller's list in place; combined with
        the mutable ``values=[]`` defaults of the ``set_*`` methods
        below, the shared default list can grow across calls.
        """
        while len(values) < 4:
            values.append(0.)
        return list(map(float, values))
    def set_alpha(self, alpha):
        """Set the alpha channel on all four lighting components."""
        alpha = float(alpha)
        self.diffuse[3] = alpha
        self.ambient[3] = alpha
        self.specular[3] = alpha
        self.emissive[3] = alpha
    def set_diffuse(self, values=[]):  # WARNING: mutable default argument
        self.diffuse = self.pad_light(values)
    def set_ambient(self, values=[]):  # WARNING: mutable default argument
        self.ambient = self.pad_light(values)
    def set_specular(self, values=[]):  # WARNING: mutable default argument
        self.specular = self.pad_light(values)
    def set_emissive(self, values=[]):  # WARNING: mutable default argument
        self.emissive = self.pad_light(values)
    def set_texture(self, path):
        self.texture = texture.Texture(path)
    def unset_texture(self):
        self.texture = None
    def gl_light(self, lighting):
        """Return a GLfloat array of length 4 from the lighting values."""
        return (GLfloat * 4)(*(lighting))
    def draw(self, face=GL_FRONT_AND_BACK):
        """Render this material's vertex array with its lighting state."""
        glEnable(GL_TEXTURE_2D)
        glColor4f(1,1,1,1)
        glMaterialfv(face, GL_DIFFUSE, self.gl_light(self.diffuse) )
        glMaterialfv(face, GL_AMBIENT, self.gl_light(self.ambient) )
        glMaterialfv(face, GL_SPECULAR, self.gl_light(self.specular) )
        glMaterialfv(face, GL_EMISSION, self.gl_light(self.emissive) )
        glMaterialf(face, GL_SHININESS, self.shininess)
        if self.texture:
            self.texture.draw()
        if self.gl_floats is None:
            self.gl_floats = (GLfloat * len(self.vertices))(*self.vertices)
            # len/8 is actually the vertex count (8 floats per vertex),
            # which is what glDrawArrays expects, despite the name.
            self.triangle_count = len(self.vertices) / 8
        glInterleavedArrays(GL_T2F_N3F_V3F, 0, self.gl_floats)
        glDrawArrays(GL_TRIANGLES, 0, int(self.triangle_count))
        glDisable(GL_TEXTURE_2D)
class MaterialParser(parser.Parser):
    """Parses the lines of a Wavefront .mtl materials definition file.

    Each ``parse_<keyword>`` method handles one .mtl statement; parsed
    Material objects are collected in ``self.materials`` by name.
    """
    def __init__(self, file_path, path):
        self.materials = {}
        self.path = path
        self.this_material = None
        self.read_file(file_path)
    def parse_newmtl(self, args):
        [newmtl] = args
        self.this_material = Material(newmtl)
        self.materials[self.this_material.name] = self.this_material
    def parse_Kd(self, args):
        self.this_material.set_diffuse(args)
    def parse_Ka(self, args):
        self.this_material.set_ambient(args)
    def parse_Ks(self, args):
        self.this_material.set_specular(args)
    def parse_Ke(self, args):
        self.this_material.set_emissive(args)
    def parse_Ns(self, args):
        [Ns] = args
        self.this_material.shininess = float(Ns)
    def parse_d(self, args):
        [d] = args
        self.this_material.set_alpha(d)
    def parse_map_Kd(self, args):
        [Kd] = args
        self.this_material.set_texture(self.path + "textures/" + Kd)
    def parse_Ni(self, args):
        # Optical density: intentionally unimplemented.
        return
    def parse_illum(self, args):
        # Illumination model: intentionally unimplemented.
        return
| true | true |
f72bd8c09290fd4650b93bed148f010025633ae2 | 5,361 | py | Python | hla_hed.py | sunhuaibo/HLA-HED | bb0672e62a20baad80f5f154c9220bf8e5b8b28c | [
"MIT"
] | null | null | null | hla_hed.py | sunhuaibo/HLA-HED | bb0672e62a20baad80f5f154c9220bf8e5b8b28c | [
"MIT"
] | null | null | null | hla_hed.py | sunhuaibo/HLA-HED | bb0672e62a20baad80f5f154c9220bf8e5b8b28c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding=utf-8 -*-
# =====================================
# Author: Huaibo Sun
# E-mail: huaibo_sun@foxmail.com
# date: 2022-03-31
# =====================================
import os
import pandas as pd
from Bio import SeqIO
from pathlib import Path
from itertools import combinations
from argparse import ArgumentParser, RawDescriptionHelpFormatter
def get_opt():
    """
    Input HLA file format
    Sample A1 A2 B1 B2 C1 C2
    p1 A*01:01 A*01:03 B*07:01 B*07:02 C*01:01 C*01:02
    p2 A*01:01 A*01:03 B*07:01 B*07:02 C*01:01 C*01:02
    If you use this tool, please cite the following three papers.
    Grantham R. Amino acid difference formula to help explain protein evolution. Science. 1974 Sep 6;185(4154):862-4. doi: 10.1126/science.185.4154.862. PMID: 4843792.
    Pierini F, Lenz TL. Divergent Allele Advantage at Human MHC Genes: Signatures of Past and Ongoing Selection. Mol Biol Evol. 2018 Sep 1;35(9):2145-2158. doi: 10.1093/molbev/msy116. PMID: 29893875; PMCID: PMC6106954.
    Chowell D, Krishna C, Pierini F, Makarov V, Rizvi NA, Kuo F, Morris LGT, Riaz N, Lenz TL, Chan TA. Evolutionary divergence of HLA class I genotype impacts efficacy of cancer immunotherapy. Nat Med. 2019 Nov;25(11):1715-1720. doi: 10.1038/s41591-019-0639-4. Epub 2019 Nov 7. PMID: 31700181; PMCID: PMC7938381.
    """
    # NOTE: the docstring above is user-visible — it is shown as the
    # --help epilog via `epilog=get_opt.__doc__` below, so keep its
    # wording and citations intact when editing.
    # Default database files are resolved relative to this script.
    script = os.path.dirname(os.path.abspath(__file__))
    parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter, epilog=get_opt.__doc__)
    parser.add_argument("-d", default=f"{script}/database/grantham_matrix.txt", help="Distance matrix for all amino acids, default: database/grantham_matrix.txt. (reference: DOI: 10.1126/science.185.4154.862)")
    parser.add_argument("-f", default=f"{script}/database/ABC_prot.fa", help="Amino acid sequences in fasta format, default: database/ABC_prot.fa.")
    parser.add_argument("-i", required=True, help="Input file of tab-delimited with individual HLA typing.")
    parser.add_argument("-p", action="store_true", help="Paired HED score.")
    parser.add_argument("-o", required=True, help="Output file name.")
    # Parsed namespace with attributes d, f, i, p, o.
    parse = parser.parse_args()
    return(parse)
def check_file(infile):
    """Raise if *infile* (a ``pathlib.Path``) does not exist.

    BUG FIX: the original tested ``infile.exists`` — the bound method
    object, which is always truthy — so the guard could never fire.
    """
    if not infile.exists():
        raise Exception(f"{str(infile)} file does not exist")


def read_fasta(infile):
    """Read an aligned FASTA file into ``{sequence_id: sequence}``.

    Raises if the sequences do not all share one length: HED is a
    position-wise distance, so the alignment must be consistent.
    """
    infile = Path(infile)
    check_file(infile)
    record = SeqIO.parse(infile, "fasta")
    seq_array = {seq.id: str(seq.seq) for seq in record}
    # More than one distinct length means a broken alignment.  (The
    # original `!= 1` also raised for an *empty* file, misleadingly.)
    if len({len(seq) for seq in seq_array.values()}) > 1:
        raise Exception("Input sequences are not all the same length")
    return seq_array


def read_aa(infile):
    """Load the tab-separated amino-acid distance matrix.

    Returns a nested dict so that ``matrix[aa1][aa2]`` is the Grantham
    distance between the two residues.
    """
    infile = Path(infile)
    check_file(infile)
    df = pd.read_csv(infile, header=0, sep="\t", index_col=0)
    return df.to_dict()


def calculate_distange(hla1, hla2, sequence, distance):
    """Mean pairwise distance (HED) between two aligned allele sequences.

    Returns the string ``"NA"`` when either allele is missing from
    *sequence* (kept for backward compatibility with the output format).
    """
    seq_hla1 = sequence.get(hla1, "")
    seq_hla2 = sequence.get(hla2, "")
    if not seq_hla1 or not seq_hla2:
        return "NA"
    total = sum(distance[a1][a2] for a1, a2 in zip(seq_hla1, seq_hla2))
    return total / len(seq_hla1)
def main():
    """Compute HED scores for the HLA typings listed in the input file.

    With ``-p`` every distinct pair of alleles gets one pairwise HED
    row; otherwise one row per sample is written with per-locus (A/B/C)
    HED and their mean.
    """
    opt = get_opt()
    seq_array = read_fasta(opt.f)
    aa_pairwise_dis = read_aa(opt.d)
    infile = Path(opt.i)
    outfile = Path(opt.o)
    check_file(infile)
    df = pd.read_csv(infile, header=0, sep="\t")
    if opt.p:
        # Pairwise mode: melt the six allele columns into one and score
        # every unordered pair of distinct alleles.
        df2 = pd.melt(df, id_vars=["Sample"], value_vars=["A1", "A2", "B1","B2", "C1","C2"])
        alleles = set(df2["value"].values.tolist())
        alleles_pair = combinations(alleles, 2)
        outheader = ["Allele1","Allele2","HED"]
        with open(outfile, "w") as fw:
            fw.write("\t".join(outheader) + "\n")
            for allele1, allele2 in alleles_pair:
                dis_hla_pair = calculate_distange(allele1, allele2, seq_array, aa_pairwise_dis)
                outline = [allele1, allele2, dis_hla_pair]
                outline = [str(x) for x in outline]
                fw.write("\t".join(outline) + "\n")
    else:
        # Per-sample mode: one HED per locus plus the mean of the three.
        outheader = ["Sample","HED_A","HED_B","HED_C","Mean_HE"]
        with open(outfile, "w") as fw:
            fw.write("\t".join(outheader) + "\n")
            for _, line in df.iterrows():
                hla_a1 = line["A1"]
                hla_a2 = line["A2"]
                dis_hla_a = calculate_distange(hla_a1, hla_a2, seq_array, aa_pairwise_dis)
                hla_b1 = line["B1"]
                hla_b2 = line["B2"]
                dis_hla_b = calculate_distange(hla_b1, hla_b2, seq_array, aa_pairwise_dis)
                hla_c1 = line["C1"]
                hla_c2 = line["C2"]
                dis_hla_c = calculate_distange(hla_c1, hla_c2, seq_array, aa_pairwise_dis)
                # Any missing locus makes the mean undefined.
                if dis_hla_a == "NA" or dis_hla_b == "NA" or dis_hla_c == "NA":
                    dis_mean = "NA"
                else:
                    dis_mean = (dis_hla_a + dis_hla_b + dis_hla_c) / 3
                outline = [line["Sample"], dis_hla_a, dis_hla_b, dis_hla_c, dis_mean]
                outline = [str(x) for x in outline]
                fw.write("\t".join(outline) + "\n")
if __name__ == "__main__":
main() | 39.711111 | 312 | 0.610707 |
import os
import pandas as pd
from Bio import SeqIO
from pathlib import Path
from itertools import combinations
from argparse import ArgumentParser, RawDescriptionHelpFormatter
def get_opt():
    # NOTE(review): `epilog=get_opt.__doc__` is None in this copy (the
    # docstring was stripped), so --help shows no epilog here.
    # Default database files are resolved relative to this script.
    script = os.path.dirname(os.path.abspath(__file__))
    parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter, epilog=get_opt.__doc__)
    parser.add_argument("-d", default=f"{script}/database/grantham_matrix.txt", help="Distance matrix for all amino acids, default: database/grantham_matrix.txt. (reference: DOI: 10.1126/science.185.4154.862)")
    parser.add_argument("-f", default=f"{script}/database/ABC_prot.fa", help="Amino acid sequences in fasta format, default: database/ABC_prot.fa.")
    parser.add_argument("-i", required=True, help="Input file of tab-delimited with individual HLA typing.")
    parser.add_argument("-p", action="store_true", help="Paired HED score.")
    parser.add_argument("-o", required=True, help="Output file name.")
    # Parsed namespace with attributes d, f, i, p, o.
    parse = parser.parse_args()
    return(parse)
def check_file(infile):
    """Raise if *infile* (a ``pathlib.Path``) does not exist.

    BUG FIX: ``infile.exists`` without the call is a bound method and
    always truthy, so the original check never fired.
    """
    if not infile.exists():
        raise Exception(f"{str(infile)} file is not exist")
def read_fasta(infile):
    """Read an aligned FASTA file into ``{sequence_id: sequence}``."""
    infile = Path(infile)
    check_file(infile)
    record = SeqIO.parse(infile, "fasta")
    seq_array = {seq.id: str(seq.seq) for seq in record}
    seq_len = [len(value) for value in seq_array.values()]
    # All sequences must share one length (HED is position-wise).
    # NOTE(review): this also raises for an empty file, and the message
    # grammar is off — both runtime strings, left untouched here.
    if len(set(seq_len)) != 1:
        raise Exception("Input sequences length is not equality")
    return(seq_array)
def read_aa(infile):
    """Load the tab-separated amino-acid distance matrix as a nested
    dict: ``matrix[aa1][aa2]`` is the Grantham distance."""
    infile = Path(infile)
    check_file(infile)
    df = pd.read_csv(infile, header=0, sep="\t", index_col=0)
    aa_pairwise_dis = df.to_dict()
    return(aa_pairwise_dis)
def calculate_distange(hla1, hla2, sequence, distance):
    """Mean pairwise distance (HED) between two aligned allele
    sequences; returns the string "NA" when either allele is missing."""
    seq_hla1 = sequence.get(hla1, False)
    seq_hla2 = sequence.get(hla2, False)
    if not seq_hla1 or not seq_hla2:
        return("NA")
    else:
        seq_len = len(seq_hla1)
        dis = 0
        # Sum the per-position distances, then average over the length.
        for i in range(seq_len):
            aa1 = seq_hla1[i]
            aa2 = seq_hla2[i]
            dis += distance[aa1][aa2]
        dis = dis / seq_len
        return(dis)
def main():
    """Compute HED scores: pairwise per allele with -p, else per sample
    with per-locus (A/B/C) HED and their mean."""
    opt = get_opt()
    seq_array = read_fasta(opt.f)
    aa_pairwise_dis = read_aa(opt.d)
    infile = Path(opt.i)
    outfile = Path(opt.o)
    check_file(infile)
    df = pd.read_csv(infile, header=0, sep="\t")
    if opt.p:
        # Pairwise mode over every distinct pair of alleles.
        df2 = pd.melt(df, id_vars=["Sample"], value_vars=["A1", "A2", "B1","B2", "C1","C2"])
        alleles = set(df2["value"].values.tolist())
        alleles_pair = combinations(alleles, 2)
        outheader = ["Allele1","Allele2","HED"]
        with open(outfile, "w") as fw:
            fw.write("\t".join(outheader) + "\n")
            for allele1, allele2 in alleles_pair:
                dis_hla_pair = calculate_distange(allele1, allele2, seq_array, aa_pairwise_dis)
                outline = [allele1, allele2, dis_hla_pair]
                outline = [str(x) for x in outline]
                fw.write("\t".join(outline) + "\n")
    else:
        # Per-sample mode: one row per input sample.
        outheader = ["Sample","HED_A","HED_B","HED_C","Mean_HE"]
        with open(outfile, "w") as fw:
            fw.write("\t".join(outheader) + "\n")
            for _, line in df.iterrows():
                hla_a1 = line["A1"]
                hla_a2 = line["A2"]
                dis_hla_a = calculate_distange(hla_a1, hla_a2, seq_array, aa_pairwise_dis)
                hla_b1 = line["B1"]
                hla_b2 = line["B2"]
                dis_hla_b = calculate_distange(hla_b1, hla_b2, seq_array, aa_pairwise_dis)
                hla_c1 = line["C1"]
                hla_c2 = line["C2"]
                dis_hla_c = calculate_distange(hla_c1, hla_c2, seq_array, aa_pairwise_dis)
                # Any missing locus makes the mean undefined.
                if dis_hla_a == "NA" or dis_hla_b == "NA" or dis_hla_c == "NA":
                    dis_mean = "NA"
                else:
                    dis_mean = (dis_hla_a + dis_hla_b + dis_hla_c) / 3
                outline = [line["Sample"], dis_hla_a, dis_hla_b, dis_hla_c, dis_mean]
                outline = [str(x) for x in outline]
                fw.write("\t".join(outline) + "\n")
if __name__ == "__main__":
main() | true | true |
f72bd93c4d17e05aa0d2db88c1b2ffb816c8ad18 | 2,936 | py | Python | sdk/monitor/azure-monitor-query/setup.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-03-09T08:59:13.000Z | 2022-03-09T08:59:13.000Z | sdk/monitor/azure-monitor-query/setup.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/monitor/azure-monitor-query/setup.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-monitor-query"
PACKAGE_PPRINT_NAME = "Azure Monitor Query"
# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')
# azure v0.x is not compatible with this package
# azure v0.x used to have a __version__ attribute (newer versions don't)
try:
    import azure
    try:
        ver = azure.__version__
        raise Exception(
            'This package is incompatible with azure=={}. '.format(ver) +
            'Uninstall it with "pip uninstall azure".'
        )
    except AttributeError:
        pass
except ImportError:
    pass
# Version extraction inspired from 'requests'
# Reads VERSION = "..." from version.py (or _version.py as fallback).
with open(os.path.join(package_folder_path, 'version.py')
          if os.path.exists(os.path.join(package_folder_path, 'version.py'))
          else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
                        fd.read(), re.MULTILINE).group(1)
if not version:
    raise RuntimeError('Cannot find version information')
# README + CHANGELOG become the PyPI long description.
with open('README.md', encoding='utf-8') as f:
    readme = f.read()
with open('CHANGELOG.md', encoding='utf-8') as f:
    changelog = f.read()
setup(
name=PACKAGE_NAME,
version=version,
description='Microsoft {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
long_description=readme + '\n\n' + changelog,
long_description_content_type='text/markdown',
license='MIT License',
author='Microsoft Corporation',
author_email='azpysdkhelp@microsoft.com',
url='https://github.com/Azure/azure-sdk-for-python',
classifiers=[
"Development Status :: 5 - Production/Stable",
'Programming Language :: Python',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'License :: OSI Approved :: MIT License',
],
python_requires=">=3.6",
zip_safe=False,
packages=find_packages(exclude=[
'tests',
'samples',
# Exclude packages that will be covered by PEP420 or nspkg
'azure',
'azure.monitor',
]),
install_requires=[
'msrest>=0.6.19',
'azure-core<2.0.0,>=1.12.0',
]
)
| 33.363636 | 85 | 0.609673 |
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Package identity; the folder path and namespace are derived from it.
PACKAGE_NAME = "azure-monitor-query"
PACKAGE_PPRINT_NAME = "Azure Monitor Query"
package_folder_path = PACKAGE_NAME.replace('-', '/')
namespace_name = PACKAGE_NAME.replace('-', '.')
# Legacy azure v0.x exposed __version__ and conflicts with this package.
try:
    import azure
    try:
        ver = azure.__version__
        raise Exception(
            'This package is incompatible with azure=={}. '.format(ver) +
            'Uninstall it with "pip uninstall azure".'
        )
    except AttributeError:
        pass
except ImportError:
    pass
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py')
          if os.path.exists(os.path.join(package_folder_path, 'version.py'))
          else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
                        fd.read(), re.MULTILINE).group(1)
if not version:
    raise RuntimeError('Cannot find version information')
# README + CHANGELOG become the PyPI long description.
with open('README.md', encoding='utf-8') as f:
    readme = f.read()
with open('CHANGELOG.md', encoding='utf-8') as f:
    changelog = f.read()
setup(
    name=PACKAGE_NAME,
    version=version,
    description='Microsoft {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
    long_description=readme + '\n\n' + changelog,
    long_description_content_type='text/markdown',
    license='MIT License',
    author='Microsoft Corporation',
    author_email='azpysdkhelp@microsoft.com',
    url='https://github.com/Azure/azure-sdk-for-python',
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'License :: OSI Approved :: MIT License',
    ],
    python_requires=">=3.6",
    zip_safe=False,
    packages=find_packages(exclude=[
        'tests',
        'samples',
        # Exclude packages that will be covered by PEP420 or nspkg
        'azure',
        'azure.monitor',
    ]),
    install_requires=[
        'msrest>=0.6.19',
        'azure-core<2.0.0,>=1.12.0',
    ]
)
| true | true |
f72bd93f9a0200f463455bbe0367f69059d57081 | 1,045 | py | Python | src/serialize_tree.py | kemingy/daily-coding-problem | 0839311ec0848f8f0b4a9edba817ecceb8f944a0 | [
"Unlicense"
] | 3 | 2019-03-06T03:14:56.000Z | 2020-01-07T16:00:48.000Z | src/serialize_tree.py | kemingy/daily-coding-problem | 0839311ec0848f8f0b4a9edba817ecceb8f944a0 | [
"Unlicense"
] | null | null | null | src/serialize_tree.py | kemingy/daily-coding-problem | 0839311ec0848f8f0b4a9edba817ecceb8f944a0 | [
"Unlicense"
] | null | null | null | # Given the root to a binary tree, implement serialize(root), which serializes
# the tree into a string, and deserialize(s), which deserializes the string back
# into the tree.
class Node:
    """Binary-tree node with a value and optional left/right children."""

    def __init__(self, val, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


def serialize(node):
    """Serialize a binary tree into a heap-ordered (level-order) list.

    Every level is emitted in full, with ``None`` standing in for absent
    nodes, so the children of index ``i`` are always at ``2*i + 1`` and
    ``2*i + 2``.  BUG FIX: the original skipped the children of ``None``
    entries, which broke that invariant — and therefore ``deserialize``
    — for any tree that is not left-packed.
    """
    ans = []
    level = [node]
    # Keep expanding while the current level contains any real node.
    while any(n is not None for n in level):
        children = []
        for n in level:
            if n is not None:
                ans.append(n.val)
                children.append(n.left)
                children.append(n.right)
            else:
                ans.append(None)
                # Pad both child slots so heap indexing stays aligned.
                children.append(None)
                children.append(None)
        level = children
    return ans


def deserialize(node, index=0):
    """Rebuild a tree from a heap-ordered list produced by serialize().

    BUG FIX: ``None`` markers now come back as ``None`` children instead
    of being wrapped in ``Node(None)`` as the original did.
    """
    if index >= len(node) or node[index] is None:
        return None
    return Node(node[index],
                deserialize(node, index * 2 + 1),
                deserialize(node, index * 2 + 2))
if __name__ == '__main__':
    # Demo / smoke test: round-trip a small tree.
    node = Node('root', Node('left', Node('left.left')), Node('right'))
    print(serialize(node))
    assert deserialize(serialize(node)).left.left.val == 'left.left'
| 25.487805 | 89 | 0.579904 |
class Node:
    """Binary-tree node with a value and optional left/right children."""
    def __init__(self, val, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
def serialize(node):
    """Serialize a binary tree into a heap-ordered (level-order) list.

    BUG FIX: the original did not pad the children of ``None`` entries,
    so its output was not heap-ordered and ``deserialize``'s ``2*i + 1``
    indexing reconstructed the wrong tree for non-left-packed inputs.
    """
    ans = []
    level = [node]
    while any(n is not None for n in level):
        children = []
        for n in level:
            if n is not None:
                ans.append(n.val)
                children.append(n.left)
                children.append(n.right)
            else:
                # Pad both child slots to keep heap indexing aligned.
                ans.append(None)
                children.append(None)
                children.append(None)
        level = children
    return ans
def deserialize(node, index=0):
    """Rebuild a tree from a heap-ordered list produced by serialize().

    BUG FIX: ``None`` markers come back as ``None`` instead of
    ``Node(None)``.
    """
    if index >= len(node) or node[index] is None:
        return None
    return Node(node[index],
                deserialize(node, index * 2 + 1),
                deserialize(node, index * 2 + 2))
if __name__ == '__main__':
    # Demo / smoke test: round-trip a small tree.
    node = Node('root', Node('left', Node('left.left')), Node('right'))
    print(serialize(node))
    assert deserialize(serialize(node)).left.left.val == 'left.left'
| true | true |
f72bd97572c539099427f5e9007176593b7fce2c | 470 | py | Python | test_project/views/response_types/http_redirect.py | korantu/lona | 5039fa59f37cc32b9c789753af2ed8a8670ab611 | [
"MIT"
] | 230 | 2021-08-15T20:46:24.000Z | 2022-03-30T10:17:43.000Z | test_project/views/response_types/http_redirect.py | korantu/lona | 5039fa59f37cc32b9c789753af2ed8a8670ab611 | [
"MIT"
] | 176 | 2021-08-18T08:19:37.000Z | 2022-03-29T16:45:06.000Z | test_project/views/response_types/http_redirect.py | korantu/lona | 5039fa59f37cc32b9c789753af2ed8a8670ab611 | [
"MIT"
] | 13 | 2021-08-20T10:35:04.000Z | 2022-01-17T15:49:40.000Z | from lona.html import Strong, Div, H2, P
from lona.view import LonaView
class HTTPRedirectView(LonaView):
def handle_request(self, request):
s = Strong()
html = Div(
H2('Redirect'),
P('You will be HTTP redirected in ', s, ' seconds'),
)
for i in [3, 2, 1]:
s.set_text(str(i))
self.show(html)
self.sleep(1)
return {
'http_redirect': '/',
}
| 19.583333 | 64 | 0.497872 | from lona.html import Strong, Div, H2, P
from lona.view import LonaView
class HTTPRedirectView(LonaView):
def handle_request(self, request):
s = Strong()
html = Div(
H2('Redirect'),
P('You will be HTTP redirected in ', s, ' seconds'),
)
for i in [3, 2, 1]:
s.set_text(str(i))
self.show(html)
self.sleep(1)
return {
'http_redirect': '/',
}
| true | true |
f72bd9dea41c09da28f2a59df1bc45565df5f22d | 1,064 | py | Python | products/migrations/0003_reviews.py | ankit-ak/django-ecommerce-1 | 248127526c03c7c0f25a2df84365a0d0199b9693 | [
"MIT"
] | 4 | 2021-04-06T16:50:57.000Z | 2022-03-02T00:50:44.000Z | products/migrations/0003_reviews.py | ankit-ak/django-ecommerce-1 | 248127526c03c7c0f25a2df84365a0d0199b9693 | [
"MIT"
] | null | null | null | products/migrations/0003_reviews.py | ankit-ak/django-ecommerce-1 | 248127526c03c7c0f25a2df84365a0d0199b9693 | [
"MIT"
] | 7 | 2021-02-22T08:07:20.000Z | 2022-03-06T10:17:28.000Z | # Generated by Django 2.2.11 on 2020-04-01 18:44
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('products', '0002_auto_20200314_0741'),
]
operations = [
migrations.CreateModel(
name='Reviews',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rating', models.IntegerField(choices=[(1, 'Poor'), (2, 'Below Average'), (3, 'Average'), (4, 'Better than Average'), (5, 'Excellent')], default=3)),
('review', models.TextField()),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reviews', to='products.Product')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| 39.407407 | 166 | 0.631579 |
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('products', '0002_auto_20200314_0741'),
]
operations = [
migrations.CreateModel(
name='Reviews',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rating', models.IntegerField(choices=[(1, 'Poor'), (2, 'Below Average'), (3, 'Average'), (4, 'Better than Average'), (5, 'Excellent')], default=3)),
('review', models.TextField()),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reviews', to='products.Product')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| true | true |
f72bda1f9861b327f93273a74d2beaf0c1884dd9 | 1,026 | py | Python | INBa/2015/Sarocvashin_M/task_4_23.py | YukkaSarasti/pythonintask | eadf4245abb65f4400a3bae30a4256b4658e009c | [
"Apache-2.0"
] | null | null | null | INBa/2015/Sarocvashin_M/task_4_23.py | YukkaSarasti/pythonintask | eadf4245abb65f4400a3bae30a4256b4658e009c | [
"Apache-2.0"
] | null | null | null | INBa/2015/Sarocvashin_M/task_4_23.py | YukkaSarasti/pythonintask | eadf4245abb65f4400a3bae30a4256b4658e009c | [
"Apache-2.0"
] | null | null | null | # Задача 4. Вариант 23
# Напишите программу, которая выводит имя, под которым скрывается Илья Арнольдович Файзильберг. Дополнительно необходимо вывести область интересов указанной личности, место рождения, годы рождения и смерти (если человек умер), вычислить возраст на данный момент (или момент смерти). Для хранения всех необходимых данных требуется использовать переменные. После вывода информации программа должна дожидаться пока пользователь нажмет Enter для выхода.
# Сароквашин Максим
# 25.02.2016
name = "Илья Арнольдович Файзильберг"
birthplace = "Одесса,СССР"
birthyear = int (1897)
deathyear = int (1937)
age = int (deathyear - birthyear)
interest = "Писатель"
print(name+" наиболее известен как Илья Ильф - русский советский писатель, журналист и сценарист. Соавтор Евгения Петрова")
print("Место рождения: "+birthplace)
print("Год рождения: "+str(birthyear))
print("Год смерти: "+str(deathyear))
print("Возраст смерти: "+str(age))
print("Область интересов: "+interest)
input("\nДля выхода нажмите Enter")
| 46.636364 | 449 | 0.783626 |
name = "Илья Арнольдович Файзильберг"
birthplace = "Одесса,СССР"
birthyear = int (1897)
deathyear = int (1937)
age = int (deathyear - birthyear)
interest = "Писатель"
print(name+" наиболее известен как Илья Ильф - русский советский писатель, журналист и сценарист. Соавтор Евгения Петрова")
print("Место рождения: "+birthplace)
print("Год рождения: "+str(birthyear))
print("Год смерти: "+str(deathyear))
print("Возраст смерти: "+str(age))
print("Область интересов: "+interest)
input("\nДля выхода нажмите Enter")
| true | true |
f72bdb03716d1c420cd5c2e0dab725ff70c1358a | 8,629 | py | Python | src/python3/sdp/math/interpolation.py | LeiShi/Synthetic-Diagnostics-Platform | 870120d3fd14b2a3c89c6e6e85625d1e9109a2de | [
"BSD-3-Clause"
] | 5 | 2019-08-16T22:08:19.000Z | 2021-02-24T02:47:05.000Z | src/python3/sdp/math/interpolation.py | justthepython/Synthetic-Diagnostics-Platform | 5f1cb5c29d182490acbd4f3c167f0e09ec211236 | [
"BSD-3-Clause"
] | 1 | 2016-05-11T12:58:00.000Z | 2016-05-11T17:18:36.000Z | src/python3/sdp/math/interpolation.py | justthepython/Synthetic-Diagnostics-Platform | 5f1cb5c29d182490acbd4f3c167f0e09ec211236 | [
"BSD-3-Clause"
] | 5 | 2018-04-29T12:35:59.000Z | 2020-01-10T03:38:30.000Z | """This module contains some useful interpolation methods
"""
import numpy as np
from scipy.interpolate import BarycentricInterpolator
class InterpolationError(Exception):
def __init__(self,value):
self.value = value
def __str__(self):
return repr(self.value)
class OutofBoundError(InterpolationError, ValueError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def linear_3d_3point(X,Y,Z,x,y,tol = 1e-8):
"""3D interpolation method
Linearly interpolate the value of z for given x,y.
By using 3 points data, the unknown value of z is assumed on the same plane.
The method used here is the cross product method. From P(x1,y1,z1),Q(x2,y2,z2),and R(x3,y3,z3), construct 2 vectors on the plane, PQ(x2-x1,y2-y1,z2-z1) and PR(x3-x1,y3-y1,z3-z1). Then do the cross product, PQ*PR = N. This gives the normal vector of the plane. The plane's equation is then 'N dot X = d', where X is an arbitary point and d to be determined. d can be easily gotten from any one of the given points, say P. d = N dot P. Then the equation of the plane is found. The equation can be written as 'ax+by+cz = d', then z can be solved for given x and y.
Arguments:
x1,y1,z1: coordinates of the first point
x2,y2,z2: the second point
x3,y3,z3: the third point
x,y: the x,y coordinates for the wanted
return value:
interpolated z value on given (x,y)
"""
x1,x2,x3 = X[0],X[1],X[2]
y1,y2,y3 = Y[0],Y[1],Y[2]
z0 = np.max(Z)
z1,z2,z3 = Z[0]/z0,Z[1]/z0,Z[2]/z0
Nx = (y2-y1)*(z3-z1)-(y3-y1)*(z2-z1)
Ny = (x3-x1)*(z2-z1)-(x2-x1)*(z3-z1)
Nz = (x2-x1)*(y3-y1)-(x3-x1)*(y2-y1)
z_base = (x2-x1)*(y3-y1)
print(Nx,Ny,Nz,z_base)
if(np.absolute(Nz/z_base) <= tol ):
raise InterpolationError('3 points interpolation failed: given points are on a plane vertical to XY plane, no z value being able to interpolated.')
d = Nx*x1 + Ny*y1 + Nz*z1
print(d, d-Nx*x-Ny*y)
return (d - Nx*x - Ny*y)/float(Nz)*z0
def trilinear_interp(X,Y,Z,F,x, fill_value=0.0):
""" Trilinear interpolation (3D) for 1 point on a cubic mesh
See Wikipedia for a better description than the following:
First choose a direction and interpolate all the corners along this
direction (so 8pts -> 4pts) at the value of the wanted point.
Choose a second direction and interpolate the 4pts at the wanted point
(4pts -> 2pts).
Finish with the interpolation along the last line
Arguments:
X -- 1D array containing the X coordinate of F
Y -- 1D array containing the Y coordinate of F
Z -- 1D array containing the Z coordinate of F
F -- 3D array containing the data
x -- position (3D) where the interpolation is wanted
return value:
interpolated z value on given (x,y)
"""
raise NameError('Does not work, should use RegularGridInterpolator')
if len(x.shape) == 1:
# if outside the box, put the value to fill_value
if x[0] < X[0] or x[1] < Y[0] or x[2] < Z[0]\
or x[0] > X[-1] or x[1] > Y[-1] or x[2] > Z[-1]:
return fill_value
else:
# First find the x,y,z coordinate of the corner of the cube
indx = np.where(X < x[0])[0].max()
indy = np.where(Y < x[1])[0].max()
indz = np.where(Z < x[2])[0].max()
# relative coordinates
rx = (x[0]-X[indx])/(X[indx+1]-X[indx])
ry = (x[1]-Y[indy])/(Y[indy+1]-Y[indy])
rz = (x[2]-Z[indz])/(Z[indz+1]-Z[indz])
# compute the first linear interpolation
temp = 1-rx
c00 = F[indx,indy,indz]*temp + F[indx+1,indy,indz]*rx
c10 = F[indx,indy+1,indz]*temp + F[indx+1,indy+1,indz]*rx
c01 = F[indx,indy,indz+1]*temp + F[indx+1,indy,indz+1]*rx
c11 = F[indx,indy+1,indz+1]*temp + F[indx+1,indy+1,indz+1]*rx
# compute the second linear interpolation
temp = 1-ry
c0 = c00*temp + c10*ry
c1 = c01*temp + c11*ry
# compute the last linear interpolation
return c0*(1-rz) + c1*rz
elif len(x.shape) == 2:
"""this part is the same that before but with a mesh (not only one point).
the comments will be only for trick due to the shape of the positions
abd not on the method (look the first part for them)
"""
G = np.zeros(len(x[:,0]))
# First find the x,y,z coordinate of the corner of the cube
ind = ~((x[:,0] < X[0]) | (x[:,1] < Y[0]) | (x[:,2] < Z[0]) |
(x[:,0] > X[-1]) | (x[:,1] > Y[-1]) | (x[:,2] > Z[-1]))
G[~ind] = fill_value
indx = np.where(X <= x[ind,0])[0].max()
indy = np.where(Y <= x[ind,1])[0].max()
indz = np.where(Z <= x[ind,2])[0].max()
# relative coordinates
rx = (x[ind,0]-X[indx])/(X[indx+1]-X[indx])
ry = (x[ind,1]-Y[indy])/(Y[indy+1]-Y[indy])
rz = (x[ind,2]-Z[indz])/(Z[indz+1]-Z[indz])
# compute the first linear interpolation
temp = 1-rx
c00 = F[indx,indy,indz]*temp + F[indx+1,indy,indz]*rx
c10 = F[indx,indy+1,indz]*temp + F[indx+1,indy+1,indz]*rx
c01 = F[indx,indy,indz+1]*temp + F[indx+1,indy,indz+1]*rx
c11 = F[indx,indy+1,indz+1]*temp + F[indx+1,indy+1,indz+1]*rx
# compute the second linear interpolation
temp = 1-ry
c0 = c00*temp + c10*ry
c1 = c01*temp + c11*ry
# compute the last linear interpolation
G[ind] = c0*(1-rz) + c1*rz
return G
else:
raise NameError('Error: wrong shape of the position to interpolate')
# BarycentricInterpolator with boundary check
class BoundaryWarnBarycentricInterpolator(BarycentricInterpolator):
"""Barycentric Interpolator with Boundary Check. Based on
:py:class:`scipy.interpolate.BarycentricInterpolator`.
The boundary is set as minimun x and maximum x. If called with x outside
the available range, a OutofBoundError will be raised.
__init__(xi, yi=None, axis=0, bound_error=True, fill_value=0)
:param xi: x coordinates for interpolation
:type xi: array of float
:param yi: Optional, y values on each xi location. If not given, need to be
provided later using :py:method`set_yi` method.
:type yi: array of float
:param int axis: the axis of yi along which the interpolator will be
created.
:param bool bound_error: If True, out of bound interpolation will result a
OutofBoundError. Otherwise fill_value will be used
. Default to be True
:param float fill_value: If bound_error is False, out of bound values will
be automatically filled with fill_value.
see :py:class:`scipy.interpolate.BarycentricInterpolator` for further
information.
"""
def __init__(self, xi, yi=None, axis=0, bound_error=True, fill_value=0):
self._xmin = np.min(xi)
self._xmax = np.max(xi)
self._bound_error = bound_error
self._fill_value = fill_value
super(BoundaryWarnBarycentricInterpolator, self).__init__(xi, yi, axis)
def __call__(self, x):
if (self._bound_error):
if np.any(x < self._xmin) or np.any(x > self._xmax):
raise OutofBoundError('x out of bound! xmin: {}, xmax: {}'.\
format(self._xmin, self._xmax))
return super(BoundaryWarnBarycentricInterpolator, self).__call__(x)
else:
outbound_idx = np.logical_or(x < self._xmin, x > self._xmax)
result = np.empty_like(x)
result[~outbound_idx] = super(BoundaryWarnBarycentricInterpolator,
self).__call__(x[~outbound_idx])
result[outbound_idx] = self._fill_value
return result
def add_xi(self, xi, yi=None):
super(BoundaryWarnBarycentricInterpolator, self).add_xi(xi, yi)
self._xmin = np.min( [np.min(xi), self._xmin] )
self._xmax = np.max( [np.max(xi), self._xmax] )
def set_yi(self, yi, axis=None):
yi = np.array(yi)
if not self._bound_error:
assert yi.ndim == 1
super(BoundaryWarnBarycentricInterpolator, self).set_yi(yi, axis)
| 40.70283 | 565 | 0.585004 |
import numpy as np
from scipy.interpolate import BarycentricInterpolator
class InterpolationError(Exception):
def __init__(self,value):
self.value = value
def __str__(self):
return repr(self.value)
class OutofBoundError(InterpolationError, ValueError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def linear_3d_3point(X,Y,Z,x,y,tol = 1e-8):
x1,x2,x3 = X[0],X[1],X[2]
y1,y2,y3 = Y[0],Y[1],Y[2]
z0 = np.max(Z)
z1,z2,z3 = Z[0]/z0,Z[1]/z0,Z[2]/z0
Nx = (y2-y1)*(z3-z1)-(y3-y1)*(z2-z1)
Ny = (x3-x1)*(z2-z1)-(x2-x1)*(z3-z1)
Nz = (x2-x1)*(y3-y1)-(x3-x1)*(y2-y1)
z_base = (x2-x1)*(y3-y1)
print(Nx,Ny,Nz,z_base)
if(np.absolute(Nz/z_base) <= tol ):
raise InterpolationError('3 points interpolation failed: given points are on a plane vertical to XY plane, no z value being able to interpolated.')
d = Nx*x1 + Ny*y1 + Nz*z1
print(d, d-Nx*x-Ny*y)
return (d - Nx*x - Ny*y)/float(Nz)*z0
def trilinear_interp(X,Y,Z,F,x, fill_value=0.0):
raise NameError('Does not work, should use RegularGridInterpolator')
if len(x.shape) == 1:
if x[0] < X[0] or x[1] < Y[0] or x[2] < Z[0]\
or x[0] > X[-1] or x[1] > Y[-1] or x[2] > Z[-1]:
return fill_value
else:
indx = np.where(X < x[0])[0].max()
indy = np.where(Y < x[1])[0].max()
indz = np.where(Z < x[2])[0].max()
rx = (x[0]-X[indx])/(X[indx+1]-X[indx])
ry = (x[1]-Y[indy])/(Y[indy+1]-Y[indy])
rz = (x[2]-Z[indz])/(Z[indz+1]-Z[indz])
temp = 1-rx
c00 = F[indx,indy,indz]*temp + F[indx+1,indy,indz]*rx
c10 = F[indx,indy+1,indz]*temp + F[indx+1,indy+1,indz]*rx
c01 = F[indx,indy,indz+1]*temp + F[indx+1,indy,indz+1]*rx
c11 = F[indx,indy+1,indz+1]*temp + F[indx+1,indy+1,indz+1]*rx
temp = 1-ry
c0 = c00*temp + c10*ry
c1 = c01*temp + c11*ry
return c0*(1-rz) + c1*rz
elif len(x.shape) == 2:
"""this part is the same that before but with a mesh (not only one point).
the comments will be only for trick due to the shape of the positions
abd not on the method (look the first part for them)
"""
G = np.zeros(len(x[:,0]))
ind = ~((x[:,0] < X[0]) | (x[:,1] < Y[0]) | (x[:,2] < Z[0]) |
(x[:,0] > X[-1]) | (x[:,1] > Y[-1]) | (x[:,2] > Z[-1]))
G[~ind] = fill_value
indx = np.where(X <= x[ind,0])[0].max()
indy = np.where(Y <= x[ind,1])[0].max()
indz = np.where(Z <= x[ind,2])[0].max()
rx = (x[ind,0]-X[indx])/(X[indx+1]-X[indx])
ry = (x[ind,1]-Y[indy])/(Y[indy+1]-Y[indy])
rz = (x[ind,2]-Z[indz])/(Z[indz+1]-Z[indz])
temp = 1-rx
c00 = F[indx,indy,indz]*temp + F[indx+1,indy,indz]*rx
c10 = F[indx,indy+1,indz]*temp + F[indx+1,indy+1,indz]*rx
c01 = F[indx,indy,indz+1]*temp + F[indx+1,indy,indz+1]*rx
c11 = F[indx,indy+1,indz+1]*temp + F[indx+1,indy+1,indz+1]*rx
temp = 1-ry
c0 = c00*temp + c10*ry
c1 = c01*temp + c11*ry
G[ind] = c0*(1-rz) + c1*rz
return G
else:
raise NameError('Error: wrong shape of the position to interpolate')
class BoundaryWarnBarycentricInterpolator(BarycentricInterpolator):
def __init__(self, xi, yi=None, axis=0, bound_error=True, fill_value=0):
self._xmin = np.min(xi)
self._xmax = np.max(xi)
self._bound_error = bound_error
self._fill_value = fill_value
super(BoundaryWarnBarycentricInterpolator, self).__init__(xi, yi, axis)
def __call__(self, x):
if (self._bound_error):
if np.any(x < self._xmin) or np.any(x > self._xmax):
raise OutofBoundError('x out of bound! xmin: {}, xmax: {}'.\
format(self._xmin, self._xmax))
return super(BoundaryWarnBarycentricInterpolator, self).__call__(x)
else:
outbound_idx = np.logical_or(x < self._xmin, x > self._xmax)
result = np.empty_like(x)
result[~outbound_idx] = super(BoundaryWarnBarycentricInterpolator,
self).__call__(x[~outbound_idx])
result[outbound_idx] = self._fill_value
return result
def add_xi(self, xi, yi=None):
super(BoundaryWarnBarycentricInterpolator, self).add_xi(xi, yi)
self._xmin = np.min( [np.min(xi), self._xmin] )
self._xmax = np.max( [np.max(xi), self._xmax] )
def set_yi(self, yi, axis=None):
yi = np.array(yi)
if not self._bound_error:
assert yi.ndim == 1
super(BoundaryWarnBarycentricInterpolator, self).set_yi(yi, axis)
| true | true |
f72bdb07358bd565efec3e28ef5e0ed2041de357 | 7,081 | py | Python | cla-backend/cla/tests/unit/test_user_event.py | tejasadg/easycla | 12284c957ab543d0d09c63aa8e82d70ecf09ccaf | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | null | null | null | cla-backend/cla/tests/unit/test_user_event.py | tejasadg/easycla | 12284c957ab543d0d09c63aa8e82d70ecf09ccaf | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | null | null | null | cla-backend/cla/tests/unit/test_user_event.py | tejasadg/easycla | 12284c957ab543d0d09c63aa8e82d70ecf09ccaf | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | null | null | null | # Copyright The Linux Foundation and each contributor to CommunityBridge.
# SPDX-License-Identifier: MIT
from unittest.mock import patch, Mock
import unittest
import pytest
from cla.models.dynamo_models import User, Project, Company, CCLAWhitelistRequest
from cla.models.event_types import EventType
from cla.controllers import user as user_controller
from cla.auth import AuthUser
@pytest.fixture
def create_event_user():
user_controller.create_event = Mock()
class TestRequestCompanyWhitelist:
def setup(self) -> None:
self.old_load = User.load
self.old_get_user_name = User.get_user_name
self.get_user_emails = User.get_user_emails
self.get_user_email = User.get_user_email
self.company_load = Company.load
self.get_company_name = Company.get_company_name
self.project_load = Project.load
self.get_project_name = Project.get_project_name
def teardown(self) -> None:
User.load = self.old_load
User.get_user_name = self.old_get_user_name
User.get_user_emails = self.get_user_emails
User.get_user_email = self.get_user_email
Company.load = self.company_load
Company.get_company_name = self.get_company_name
Project.load = self.project_load
Project.get_project_name = self.get_project_name
def test_request_company_whitelist(self, create_event_user, project, company, user):
""" Test user requesting to be added to the Approved List event """
with patch('cla.controllers.user.Event.create_event') as mock_event:
event_type = EventType.RequestCompanyWL
User.load = Mock()
User.get_user_name = Mock(return_value=user.get_user_name())
User.get_user_emails = Mock(return_value=[user.get_user_email()])
User.get_user_email = Mock(return_value=user.get_user_email())
Company.load = Mock()
Company.get_company_name = Mock(return_value=company.get_company_name())
Project.load = Mock()
Project.get_project_name = Mock(return_value=project.get_project_name())
user_controller.get_email_service = Mock()
user_controller.send = Mock()
user_controller.request_company_whitelist(
user.get_user_id(),
company.get_company_id(),
user.get_user_name(),
user.get_user_email(),
project.get_project_id(),
message="Please add",
recipient_name="Recipient Name",
recipient_email="Recipient Email",
)
event_data = (f'CLA: contributor {user.get_user_name()} requests to be Approved for the '
f'project: {project.get_project_name()} '
f'organization: {company.get_company_name()} '
f'as {user.get_user_name()} <{user.get_user_email()}>')
mock_event.assert_called_once_with(
event_user_id=user.get_user_id(),
event_project_id=project.get_project_id(),
event_company_id=company.get_company_id(),
event_type=event_type,
event_data=event_data,
event_summary=event_data,
contains_pii=True,
)
class TestInviteClaManager:
def setup(self):
self.user_load = User.load
self.load_project_by_name = Project.load_project_by_name
self.save = CCLAWhitelistRequest.save
def teardown(self):
User.load = self.user_load
Project.load_project_by_name = self.load_project_by_name
CCLAWhitelistRequest.save = self.save
@patch('cla.controllers.user.Event.create_event')
def test_invite_cla_manager(self, mock_event, create_event_user, user):
""" Test send email to CLA manager event """
User.load = Mock()
Project.load_project_by_name = Mock()
CCLAWhitelistRequest.save = Mock()
user_controller.send_email_to_cla_manager = Mock()
contributor_id = user.get_user_id()
contributor_name = user.get_user_name()
contributor_email = user.get_user_email()
cla_manager_name = "admin"
cla_manager_email = "foo@admin.com"
project_name = "foo_project"
company_name = "Test Company"
event_data = (f'sent email to CLA Manager: {cla_manager_name} with email {cla_manager_email} '
f'for project {project_name} and company {company_name} '
f'to user {contributor_name} with email {contributor_email}')
# TODO FIX Unit test - need to mock Project load_project_by_name() function
user_controller.invite_cla_manager(contributor_id, contributor_name, contributor_email,
cla_manager_name, cla_manager_email,
project_name, company_name)
mock_event.assert_called_once_with(
event_user_id=contributor_id,
event_project_name=project_name,
event_data=event_data,
event_type=EventType.InviteAdmin,
event_summary=event_data,
contains_pii=True,
)
class TestRequestCompanyCCLA:
def setup(self):
self.user_load = User.load
self.get_user_name = User.get_user_name
self.company_load = Company.load
self.project_load = Project.load
self.get_project_name = Project.get_project_name
self.get_managers = Company.get_managers
def teardown(self):
User.load = self.user_load
User.get_user_name = self.get_user_name
Company.load = self.company_load
Project.load = self.project_load
Project.get_project_name = self.get_project_name
Company.get_managers = self.get_managers
@patch('cla.controllers.user.Event.create_event')
def test_request_company_ccla(self, mock_event, create_event_user, user, project, company):
""" Test request company ccla event """
User.load = Mock()
User.get_user_name = Mock(return_value=user.get_user_name())
email = user.get_user_email()
Company.load = Mock()
Project.load = Mock()
Project.get_project_name = Mock(return_value=project.get_project_name())
manager = User(lf_username="harold", user_email="foo@gmail.com")
Company.get_managers = Mock(return_value=[manager, ])
event_data = f"Sent email to sign ccla for {project.get_project_name()}"
CCLAWhitelistRequest.save = Mock(return_value=None)
user_controller.request_company_ccla(
user.get_user_id(), email, company.get_company_id(), project.get_project_id()
)
mock_event.assert_called_once_with(
event_data=event_data,
event_summary=event_data,
event_type=EventType.RequestCCLA,
event_user_id=user.get_user_id(),
event_company_id=company.get_company_id(),
contains_pii=False,
)
| 41.168605 | 102 | 0.656263 |
from unittest.mock import patch, Mock
import unittest
import pytest
from cla.models.dynamo_models import User, Project, Company, CCLAWhitelistRequest
from cla.models.event_types import EventType
from cla.controllers import user as user_controller
from cla.auth import AuthUser
@pytest.fixture
def create_event_user():
user_controller.create_event = Mock()
class TestRequestCompanyWhitelist:
def setup(self) -> None:
self.old_load = User.load
self.old_get_user_name = User.get_user_name
self.get_user_emails = User.get_user_emails
self.get_user_email = User.get_user_email
self.company_load = Company.load
self.get_company_name = Company.get_company_name
self.project_load = Project.load
self.get_project_name = Project.get_project_name
def teardown(self) -> None:
User.load = self.old_load
User.get_user_name = self.old_get_user_name
User.get_user_emails = self.get_user_emails
User.get_user_email = self.get_user_email
Company.load = self.company_load
Company.get_company_name = self.get_company_name
Project.load = self.project_load
Project.get_project_name = self.get_project_name
def test_request_company_whitelist(self, create_event_user, project, company, user):
with patch('cla.controllers.user.Event.create_event') as mock_event:
event_type = EventType.RequestCompanyWL
User.load = Mock()
User.get_user_name = Mock(return_value=user.get_user_name())
User.get_user_emails = Mock(return_value=[user.get_user_email()])
User.get_user_email = Mock(return_value=user.get_user_email())
Company.load = Mock()
Company.get_company_name = Mock(return_value=company.get_company_name())
Project.load = Mock()
Project.get_project_name = Mock(return_value=project.get_project_name())
user_controller.get_email_service = Mock()
user_controller.send = Mock()
user_controller.request_company_whitelist(
user.get_user_id(),
company.get_company_id(),
user.get_user_name(),
user.get_user_email(),
project.get_project_id(),
message="Please add",
recipient_name="Recipient Name",
recipient_email="Recipient Email",
)
event_data = (f'CLA: contributor {user.get_user_name()} requests to be Approved for the '
f'project: {project.get_project_name()} '
f'organization: {company.get_company_name()} '
f'as {user.get_user_name()} <{user.get_user_email()}>')
mock_event.assert_called_once_with(
event_user_id=user.get_user_id(),
event_project_id=project.get_project_id(),
event_company_id=company.get_company_id(),
event_type=event_type,
event_data=event_data,
event_summary=event_data,
contains_pii=True,
)
class TestInviteClaManager:
def setup(self):
self.user_load = User.load
self.load_project_by_name = Project.load_project_by_name
self.save = CCLAWhitelistRequest.save
def teardown(self):
User.load = self.user_load
Project.load_project_by_name = self.load_project_by_name
CCLAWhitelistRequest.save = self.save
@patch('cla.controllers.user.Event.create_event')
def test_invite_cla_manager(self, mock_event, create_event_user, user):
User.load = Mock()
Project.load_project_by_name = Mock()
CCLAWhitelistRequest.save = Mock()
user_controller.send_email_to_cla_manager = Mock()
contributor_id = user.get_user_id()
contributor_name = user.get_user_name()
contributor_email = user.get_user_email()
cla_manager_name = "admin"
cla_manager_email = "foo@admin.com"
project_name = "foo_project"
company_name = "Test Company"
event_data = (f'sent email to CLA Manager: {cla_manager_name} with email {cla_manager_email} '
f'for project {project_name} and company {company_name} '
f'to user {contributor_name} with email {contributor_email}')
user_controller.invite_cla_manager(contributor_id, contributor_name, contributor_email,
cla_manager_name, cla_manager_email,
project_name, company_name)
mock_event.assert_called_once_with(
event_user_id=contributor_id,
event_project_name=project_name,
event_data=event_data,
event_type=EventType.InviteAdmin,
event_summary=event_data,
contains_pii=True,
)
class TestRequestCompanyCCLA:
def setup(self):
self.user_load = User.load
self.get_user_name = User.get_user_name
self.company_load = Company.load
self.project_load = Project.load
self.get_project_name = Project.get_project_name
self.get_managers = Company.get_managers
def teardown(self):
User.load = self.user_load
User.get_user_name = self.get_user_name
Company.load = self.company_load
Project.load = self.project_load
Project.get_project_name = self.get_project_name
Company.get_managers = self.get_managers
@patch('cla.controllers.user.Event.create_event')
def test_request_company_ccla(self, mock_event, create_event_user, user, project, company):
User.load = Mock()
User.get_user_name = Mock(return_value=user.get_user_name())
email = user.get_user_email()
Company.load = Mock()
Project.load = Mock()
Project.get_project_name = Mock(return_value=project.get_project_name())
manager = User(lf_username="harold", user_email="foo@gmail.com")
Company.get_managers = Mock(return_value=[manager, ])
event_data = f"Sent email to sign ccla for {project.get_project_name()}"
CCLAWhitelistRequest.save = Mock(return_value=None)
user_controller.request_company_ccla(
user.get_user_id(), email, company.get_company_id(), project.get_project_id()
)
mock_event.assert_called_once_with(
event_data=event_data,
event_summary=event_data,
event_type=EventType.RequestCCLA,
event_user_id=user.get_user_id(),
event_company_id=company.get_company_id(),
contains_pii=False,
)
| true | true |
f72bdc7b9355d16ea5ca646e0db1ca2a4c402827 | 5,135 | py | Python | win_unc/unc_directory.py | zo-edv/py_win_unc | 610b7c9ce4ea17554d04342126169b488c8ccfae | [
"MIT"
] | 10 | 2015-08-14T06:34:28.000Z | 2020-10-03T17:48:09.000Z | win_unc/unc_directory.py | zo-edv/py_win_unc | 610b7c9ce4ea17554d04342126169b488c8ccfae | [
"MIT"
] | 11 | 2017-01-12T23:43:56.000Z | 2020-06-19T18:32:56.000Z | win_unc/unc_directory.py | zo-edv/py_win_unc | 610b7c9ce4ea17554d04342126169b488c8ccfae | [
"MIT"
] | 8 | 2015-09-25T20:44:33.000Z | 2018-10-04T03:19:42.000Z | from win_unc.errors import InvalidUncPathError
from win_unc.cleaners import clean_unc_path
from win_unc.unc_credentials import get_creds_from_string
from win_unc.validators import is_valid_unc_path
class UncDirectory(object):
"""
Represents a UNC directory on Windows. A UNC directory is a path and optionally a set of
credentials that are required to connect to the UNC path.
"""
def __init__(self, path, creds=None):
"""
Returns a new `UncDirectory` class.
`path` must be a UNC directory path. If `path` cannot be construed as a valid UNC path,
this will raise an `InvalidUncPathError`.
`creds` may be `None` or a `UncCrednetials` object. If `None`, then the UNC directory
must not require authentication to be connected. Otherwise, `creds` will be used
for authentication.
If only the first positional argument is provided and it is already an instance of the
`UncDirectory` class (either directly or by inheritance), this constructor will clone
it and create a new `UncDirectory` object with the same properties. Note that the clone
is a "shallow" clone. Both the original `UncDirectory` object and its clone will use the
same `UncCredentials` object if it was provided.
"""
if creds is None and isinstance(path, UncDirectory):
new_path = path._path
new_creds = path._creds
else:
new_path = path
new_creds = creds
cleaned_path = clean_unc_path(new_path)
if is_valid_unc_path(cleaned_path):
self._path = cleaned_path
self._creds = new_creds if new_creds and not new_creds.is_empty() else None
else:
raise InvalidUncPathError(new_path)
def get_normalized_path(self):
"""
Returns the normalized path for this `UncDirectory`. Differing UNC paths that all point to
the same network location will have the same normalized path.
"""
path = self._path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def get_path(self):
"""
Returns the UNC path for this `UncDirectory`.
"""
return self._path
def get_username(self):
"""
Returns the username associated with the credentials of this `UncDirectory` or `None`
if no username was provided.
"""
return self._creds.get_username() if self._creds else None
def get_password(self):
"""
Returns the password associated with the credentials of this `UncDirectory` or `None`
if no password was provided.
"""
return self._creds.get_password() if self._creds else None
def get_auth_string(self):
"""
Returns the authorization string associated with the credentials of this `UncDirectory`.
"""
return self._creds.get_auth_string() if self._creds else ''
def get_auth_path(self):
"""
Returns the path of this `UncDirectory` with the authorization string prepended. If this
`UncDirectory` has no associated credentials, the returned path will be the
`UncDirectory`'s path unmodified. Otherwise, the returned path will resemble the HTTP
Basic Authentication scheme in its format.
"""
creds = self.get_auth_string()
return '{creds}{at}{path}'.format(
creds=creds,
at='@' if creds else '',
path=self._path)
def __eq__(self, other):
if isinstance(other, UncDirectory):
return (self.get_normalized_path() == other.get_normalized_path()
and self._creds == other._creds)
else:
return False
    def __ne__(self, other):
        # Explicit inverse of __eq__ (kept for Python 2-style compatibility).
        return not self.__eq__(other)
def __hash__(self):
return hash(str(self))
    def __str__(self):
        # String form is the auth path: credentials prepended when present.
        return self.get_auth_path()
def __repr__(self):
return '<{cls}: "{str}">'.format(cls=self.__class__.__name__, str=self.get_auth_path())
def is_unc_directory_string(string):
    """
    Return `True` when `string` looks like the auth-path form produced by
    `UncDirectory.get_auth_path`: either a bare, valid UNC path, or some
    credentials followed by `@\\\\` and a non-empty remainder.
    """
    cleaned = clean_unc_path(string)
    if is_valid_unc_path(cleaned):
        return True
    _, separator, remainder = cleaned.partition('@\\\\')
    return bool(separator) and len(remainder) > 0
def get_unc_directory_from_string(string):
    """
    Parse the auth-path form produced by `UncDirectory.get_auth_path` into a
    new `UncDirectory`.  May raise whatever `UncDirectory`'s constructor
    raises (e.g. `InvalidUncPathError`).
    """
    if '@\\\\' not in string:
        return UncDirectory(string, None)
    # Split on the LAST `@\\` in case the password itself contains one.
    creds_part, _, path_part = string.rpartition('@\\\\')
    return UncDirectory('\\\\' + path_part, get_creds_from_string(creds_part))
| 37.481752 | 98 | 0.637196 | from win_unc.errors import InvalidUncPathError
from win_unc.cleaners import clean_unc_path
from win_unc.unc_credentials import get_creds_from_string
from win_unc.validators import is_valid_unc_path
class UncDirectory(object):
def __init__(self, path, creds=None):
if creds is None and isinstance(path, UncDirectory):
new_path = path._path
new_creds = path._creds
else:
new_path = path
new_creds = creds
cleaned_path = clean_unc_path(new_path)
if is_valid_unc_path(cleaned_path):
self._path = cleaned_path
self._creds = new_creds if new_creds and not new_creds.is_empty() else None
else:
raise InvalidUncPathError(new_path)
def get_normalized_path(self):
path = self._path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def get_path(self):
return self._path
def get_username(self):
return self._creds.get_username() if self._creds else None
def get_password(self):
return self._creds.get_password() if self._creds else None
def get_auth_string(self):
return self._creds.get_auth_string() if self._creds else ''
def get_auth_path(self):
creds = self.get_auth_string()
return '{creds}{at}{path}'.format(
creds=creds,
at='@' if creds else '',
path=self._path)
def __eq__(self, other):
if isinstance(other, UncDirectory):
return (self.get_normalized_path() == other.get_normalized_path()
and self._creds == other._creds)
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(str(self))
def __str__(self):
return self.get_auth_path()
def __repr__(self):
return '<{cls}: "{str}">'.format(cls=self.__class__.__name__, str=self.get_auth_path())
def is_unc_directory_string(string):
cleaned_string = clean_unc_path(string)
return (is_valid_unc_path(cleaned_string)
or ('@\\\\' in cleaned_string
and len(cleaned_string.partition('@\\\\')[2]) > 0))
def get_unc_directory_from_string(string):
creds = None
path = string
if '@\\\\' in string:
creds_part, path_part = string.rsplit(r'@\\', 1)
path = r'\\' + path_part
creds = get_creds_from_string(creds_part)
return UncDirectory(path, creds)
| true | true |
f72bde3b469c3f1a0eb70bdea36a1aef13bbb4ee | 1,302 | py | Python | donkeycar/parts/terminator.py | Stippos/donkeycar | 4d8bc923f3c188df4b72057c4fdf4315f5d1ad67 | [
"MIT"
] | 1 | 2020-12-08T07:43:07.000Z | 2020-12-08T07:43:07.000Z | donkeycar/parts/terminator.py | ari-viitala/donkeycar | 4d8bc923f3c188df4b72057c4fdf4315f5d1ad67 | [
"MIT"
] | null | null | null | donkeycar/parts/terminator.py | ari-viitala/donkeycar | 4d8bc923f3c188df4b72057c4fdf4315f5d1ad67 | [
"MIT"
] | null | null | null | class Terminator:
def __init__(self):
self.dead = False
self.training = False
self.image = None
self.steering = 0
self.throttle = 0
def poll():
self.dead = self.is_dead(self.image)
self.steering *= self.dead
self.throttle *= self.dead
def update():
while True:
self.poll()
def run_threaded(self, image, steering, throttle, training):
def run(self, image, steering, throttle, training):
return self.run_threaded()
def is_dead(self, img):
"""
Counts the black pixels from the ground and compares the amount to a threshold value.
If there are not enough black pixels the car is assumed to be off the track.
"""
crop_height = 20
crop_width = 20
threshold = 70
pixels_percentage = 0.10
pixels_required = (img.shape[1] - 2 * crop_width) * crop_height * pixels_percentage
crop = img[-crop_height:, crop_width:-crop_width]
r = crop[:,:,0] < threshold
g = crop[:,:,1] < threshold
b = crop[:,:,2] < threshold
pixels = (r & g & b).sum()
print("Pixels: {}, Required: {}".format(pixels, pixels_required))
return pixels < pixels_required | 27.125 | 93 | 0.570661 | class Terminator:
def __init__(self):
self.dead = False
self.training = False
self.image = None
self.steering = 0
self.throttle = 0
def poll():
self.dead = self.is_dead(self.image)
self.steering *= self.dead
self.throttle *= self.dead
def update():
while True:
self.poll()
def run_threaded(self, image, steering, throttle, training):
def run(self, image, steering, throttle, training):
return self.run_threaded()
def is_dead(self, img):
"""
Counts the black pixels from the ground and compares the amount to a threshold value.
If there are not enough black pixels the car is assumed to be off the track.
"""
crop_height = 20
crop_width = 20
threshold = 70
pixels_percentage = 0.10
pixels_required = (img.shape[1] - 2 * crop_width) * crop_height * pixels_percentage
crop = img[-crop_height:, crop_width:-crop_width]
r = crop[:,:,0] < threshold
g = crop[:,:,1] < threshold
b = crop[:,:,2] < threshold
pixels = (r & g & b).sum()
print("Pixels: {}, Required: {}".format(pixels, pixels_required))
return pixels < pixels_required | false | true |
f72bdee1d28143d175cb1971a290684cad30cafc | 479 | py | Python | Code/sentiment_analysis.py | mayureeb/fakenews | c47a72c8bbe4d413b309da0c662da784c002fe3f | [
"Unlicense"
] | 12 | 2017-06-25T23:06:31.000Z | 2021-02-27T23:06:57.000Z | Code/sentiment_analysis.py | mayureeb/fakenews | c47a72c8bbe4d413b309da0c662da784c002fe3f | [
"Unlicense"
] | 6 | 2021-03-31T20:03:11.000Z | 2022-03-12T00:49:42.000Z | Code/sentiment_analysis.py | mayureeb/fakenews | c47a72c8bbe4d413b309da0c662da784c002fe3f | [
"Unlicense"
] | 5 | 2017-06-25T23:05:41.000Z | 2018-11-22T16:10:58.000Z | import pandas as pd
from textblob import TextBlob

# Score every row of INPUT.csv's `text` column with TextBlob and write the
# frame (plus `polarity` / `subjectivity` columns) to OUTPUT.csv.
#
# Bug fix: the original wrote through chained indexing (df['col'][i] = ...)
# with SettingWithCopyWarning suppressed, which can silently update a
# temporary copy instead of the frame.  Writes now go through df.loc.
pd.options.mode.chained_assignment = None  # kept for compatibility
df = pd.read_csv('INPUT.csv', encoding = 'utf8')
df['polarity'] = 0.0
df['subjectivity'] = 0.0
for i in range(0, len(df.index)):
    print(i)
    blob = TextBlob(str(df['text'][i]))
    # read_csv yields a RangeIndex, so label i == position i here
    df.loc[i, 'subjectivity'] = blob.sentiment.subjectivity
    df.loc[i, 'polarity'] = blob.sentiment.polarity
print(df.head())
df.to_csv('OUTPUT.csv', encoding = 'utf8')
| 29.9375 | 80 | 0.691023 | import pandas as pd
from textblob import TextBlob
pd.options.mode.chained_assignment = None
df = pd.read_csv('INPUT.csv', encoding = 'utf8')
df['polarity'] = 0.0
df['subjectivity'] = 0.0
for i in range(0, len(df.index)):
print(i)
blob = TextBlob(str(df['text'][i]))
df['subjectivity'][i] = blob.sentiment.subjectivity
df['polarity'][i] = blob.sentiment.polarity
print(df.head())
df.to_csv('OUTPUT.csv', encoding = 'utf8')
| true | true |
f72bdf31e9017fd309d269003029962f0606f6e1 | 22,621 | py | Python | client.py | iegorman/netspeed | 6eec201791b89be69ed9d6a6563d90cf324f2f14 | [
"BSD-2-Clause"
] | null | null | null | client.py | iegorman/netspeed | 6eec201791b89be69ed9d6a6563d90cf324f2f14 | [
"BSD-2-Clause"
] | null | null | null | client.py | iegorman/netspeed | 6eec201791b89be69ed9d6a6563d90cf324f2f14 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/python3
# Command line client for repeated internet speed tests.
import os
import sys
import collections
import gc
import getopt
import json
import math
import time
import traceback
import urllib.error
import urllib.request
import re
class Client(object):
    """
    Python class and command line client for repeated internet speed tests.
    Reports results to the local file system and to a remote server.
    The client and server exchange JSON strings that represent Python
    dictionaries. The entries of each object will be a subset of the entries
    shown as javascript object attributes in comments at the top of file
    './server.js'.
    """
    # defaults for adjustment of transmit length -- treat as class constants
    defaultInterval = 3_600 # seconds between test cycles
    initialDownloadLength = 20_000_000 # bytes
    initialUploadLength = 2_000_000 # bytes
    minLength = 200_000 # at least this length after adjustment
    maxLength = 100_000_000 # at most this length after adjustment
    desiredRuntime = 10 # ideal test run time, seconds
    maxRatio = 1.5 # minimum time deviation to cause change in length
    maxUploadLength = 125_000_000 # upload will fail if upload is too large
    # download limit is unknown, seems to be more than 1_000_000_000
    # default output destinations
    defaultLog = sys.stdout # message log (one JSON record per exchange)
    defaultReport = sys.stderr # summary reports and errors
    # count only the bits in actual data, ignore protocol bits
    # protocol bits are a small proportion of a message except in small packets
    bitsPerDataByte = 8
@classmethod
def js_time(cls):
'''
JavaScript time -- milliseconds from Unix epoch.
This is the millisecond offset from 1970-01-01 00:00:00 UTC.
'''
return math.floor(1000 * time.time()) # integer from float
@classmethod
def js_clock(cls, milliseconds=None):
"""
Format javascript time from Unix epoch to local 'YYYY-MM-DD hh:mm:ss'.
This is the time in the local time zone. If no Javascript time is
given, it will the the current time.
"""
seconds = milliseconds / 1000 if milliseconds else None
return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(seconds))
def __init__(self, serverURL,
log=defaultLog, # JSON log of transactions
report=defaultReport, # human-readable report
interval=defaultInterval,
downloadLength=initialDownloadLength,
uploadLength=initialUploadLength,
testID = None # default: will be set by the server
):
"""
Create an instance for download and upload tests.
serverURL may be specified with or without trailing slash. A
slash will be appended if trailing slash is omitted.
report and log are the names of output destinations of destinations ins
the local filesystem.
"""
super()
# Accept server URL with or without trailing slash, strip the slash
self._serverURL = (serverURL.rstrip('/')
if serverURL.endswith('/')
else serverURL)
# Paths relative to server
self._rootPath = '/'
self._setupPath = '/begin'
self._downloadPath = '/download'
self._downreportPath = '/downreport'
self._uploadPath = '/upload'
self._upreportPath = '/upreport'
self._pingPath = '/echo'
# output to file system
self._report = report
self._log = log
# Initial settings
self._interval = ( interval if interval
else self.__class__.defaultInterval)
self._downloadLength = ( downloadLength if downloadLength
else self.__class__.initialDownloadLength)
self._uploadLength = ( uploadLength if uploadLength
else self.__class__.initialUploadLength)
self._testID = testID
self._testNumber = 0 # Incremented on each test cycle
self._externalIP = None # client IP seen by server at each contact
self._testBegin = None # date-time of first contact with server
# prevent upload failure caused by large uploads
self._uploadLength = min(self.maxUploadLength, self._uploadLength)
def recalculateLength(self, previousLength, previousRuntime):
"""
Choose a transmit length that gives a test time close to desired time.
previousLength: amount of data in last transmission, bytes
previousRuntime: time to complete last tranmission, seconds
When the previous time falls too far below or too far above the
desired time the previous transmit length will be changed in an
attempt to come closer to the desired time on the next test.
There is an upper and a lower limt to the changed length.
"""
targetRuntime = self.__class__.desiredRuntime
minLength = self.__class__.minLength
maxLength = self.__class__.maxLength
# don't crash on a zero time, replace by a very short time
lastRuntime = max(previousRuntime, targetRuntime/100)
if ( lastRuntime > targetRuntime / self.maxRatio
and lastRunime < targetRuntime * self.maxRatio ):
return previousLength
# round to nearest thousand and not too small or large
transmitLength = previousLength * targetRuntime / lastRuntime
return max(minLength, min(maxLength, round(transmitLength, -3)))
def bytesource(self, count):
"""
Iterate a sequence of blocks of bytes.
count is the total number of bytes.
Last block may be shorter than the others.
"""
byt = ((b'0123456789' * 7) + b'012345678\n') * 50
n = count
blen = len(byt)
while n > blen:
yield byt
n -= blen
yield byt[0:n] # may have zero length
    def begin(self):
        '''
        Make initial contact with server.
        The server will check configuration and provide some test information.
        The server may replace out-of-range values by default values.
        Side effects: updates self._testID, self._interval,
        self._downloadLength, self._uploadLength and self._testBegin from
        the server's reply, appends the reply to the JSON log, and prints a
        human-readable summary to the report stream.
        Raises RuntimeError (chained to the original error) on any failure.
        '''
        timestamp = self.js_time()
        params = collections.OrderedDict((
            ('externalIP', self._externalIP),
            ('testID', self._testID),
            ('testBegin', self.js_time()), # server may revise this time
            ('pathname', self._setupPath),
            # NOTE(review): capital 'S' differs from the 'clientTimestamp'
            # key used by every other method -- confirm the server expects it.
            ('clientTimeStamp', timestamp),
            ('interval', self._interval),
            ('downloadLength', self._downloadLength),
            ('uploadLength', self._uploadLength),
            ))
        content = bytes(json.dumps(params), 'utf-8')
        try:
            url = self._serverURL + self._setupPath
            request = urllib.request.Request(
                url,
                headers = {
                    'Content-Type': 'application/json',
                    # Content-Length is automatically calculated
                    'Accept': 'application/json',
                },
                data=content,
                method='POST'
                )
            with urllib.request.urlopen(request) as f:
                # failure of the next assignments would be a system failure
                info = json.loads(f.read())
                self._testID = info["testID"]
                self._interval = info["interval"]
                self._downloadLength = info["downloadLength"]
                self._uploadLength = info["uploadLength"]
                self._testBegin = info['testBegin']
                print(json.dumps(info), file=self._log)
                self._log.flush()
                print( 'Begin:\n    Test ID = ' + info['testID']
                        + '\n    External IP = ' + info['externalIP']
                        + '\n    Test Begin Time = '
                        + self.js_clock(info['testBegin'])
                        + '\n', file=self._report)
                self._report.flush()
        except Exception as e:
            raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
                    'Failed to begin communication with server at',
                    self._serverURL])) from e
        return
    def reportToServer(self, params, reportPath):
        """
        Report the result of a download or upload test to the server.
        This is the second stage of a download or upload test and is
        invoked by downloadTest() and uploadTest().
        Takes a dictionary of information, POSTs it as JSON to `reportPath`,
        and returns the server's JSON reply as a similar dictionary.
        Raises RuntimeError (chained to the original error) on any failure.
        """
        timestamp = self.js_time()
        try:
            params['clientTimestamp'] = timestamp
            params['pathname'] = reportPath
            # prepare the request
            content = bytes(json.dumps(params), 'utf-8')
            url = self._serverURL + reportPath
            request = urllib.request.Request(
                url,
                headers = {
                    'Content-Type': 'application/json',
                    # Content-Length is automatically calculated
                    'Accept': 'application/json',
                },
                data=content,
                method='POST'
                )
            with urllib.request.urlopen(request) as f:
                # reply is expected to be small JSON text
                data = f.read(4096).decode(encoding='iso-8859-1',
                                            errors='replace')
        except Exception as e:
            # NOTE(review): if the failure happens before `url` is bound
            # (e.g. json.dumps raising), this handler itself raises NameError.
            raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
                    'Failed to report result to', url])) from e
        # data should be JSON text in canonical form
        return json.loads(data)
    def download(self, params):
        """
        Run a download test with data received from the server.
        This is the first stage of a download test and is invoked by
        downloadTest().
        Takes a dictionary of information, POSTs it as JSON, drains the
        server's bulk response while timestamping, and returns the
        dictionary extended with the client-side measurements.
        Raises RuntimeError (chained to the original error) on any failure.
        """
        timestamp = self.js_time()
        # millisecond markers and byte counter for the measurement
        clientRequestBegin = 0
        clientRequestEnd = 0
        clientResponseBegin = 0
        clientResponseEnd = 0
        clientReceiveLength = 0
        try:
            # prepare the request
            content = bytes(json.dumps(params), 'utf-8')
            url = self._serverURL + self._downloadPath
            request = urllib.request.Request(
                url,
                headers = {
                    'Content-Type': 'application/json',
                    # Content-Length is automatically calculated
                    # NOTE(review): 'application/octet' is not a registered
                    # media type ('application/octet-stream') -- confirm the
                    # server matches it.
                    'Accept': 'text/plain, application/octet',
                },
                data=content,
                method='POST'
                )
            # send the request, mark the times
            clientRequestBegin = self.js_time()
            with urllib.request.urlopen(request) as f:
                clientRequestEnd = self.js_time()
                # get the response, mark the times
                # we only need the total length of downloaded data
                clientResponseBegin = self.js_time()
                size = len(f.read(1024))
                while size > 0:
                    clientReceiveLength += size
                    size = len(f.read(16_384))
                clientResponseEnd = self.js_time()
            # update the information and return it
            # (setdefault: never overwrite values already supplied)
            params.setdefault('clientReceiveLength', clientReceiveLength)
            params.setdefault('downloadReceiveLength', clientReceiveLength)
            params.setdefault('clientRequestBegin', clientRequestBegin)
            params.setdefault('clientRequestEnd', clientRequestEnd)
            params.setdefault('clientResponseBegin', clientResponseBegin)
            params.setdefault('clientResponseEnd', clientResponseEnd)
        except Exception as e:
            raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
                    'Failed to download data from server at',
                    self._serverURL])) from e
        return params
    def downloadTest(self):
        """
        Run a download test and report result to server.
        There are two exchanges. The first exchange does the download and
        reports partial information to the server. The second exchange
        includes information that becomes available after completion of the
        first exchange, and reports full information to the server.
        Side effects: writes JSON records to the log, a human-readable
        summary to the report stream, and adjusts self._downloadLength for
        the next run.
        """
        gc.collect()    # try to avoid garbage collection during test
        timestamp = self.js_time()
        # allocation of data to make the request
        params = collections.OrderedDict((
            ('externalIP', self._externalIP),
            ('testID', self._testID),
            ('testBegin', self._testBegin),
            ('testNumber', self._testNumber),
            ('pathname', self._downloadPath),
            ('clientTimestamp', timestamp),
            ('interval', self._interval),
            ('downloadLength', self._downloadLength),
            ))
        params = self.download(params)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        # human-readable report
        megabytes = math.floor(params['clientReceiveLength'] / 1_000) / 1_000
        seconds = (params['clientResponseEnd']
                    - params['clientResponseBegin']) / 1_000
        print( 'Download\n    Time: '
                + self.js_clock(params['clientTimestamp'])
                + '\n    Megabytes: ' + str(megabytes)
                + '\n    Seconds: ' + str(seconds)
                + '\n    Megabits / Second: ' + str(round(
                    (self.bitsPerDataByte * megabytes / seconds), 3))
                + '\n', file=self._report)
        self._report.flush()
        # revise the download size for the next run, to get approximately the
        # desired length of time on each test run.
        self._downloadLength = self.recalculateLength(
                params['downloadReceiveLength'], seconds)
        params = self.reportToServer(params, self._downreportPath)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        return
    def upload(self, params):
        """
        Run an upload test with data sent to the server.
        This is the first stage of an upload test and is invoked by
        uploadTest().
        Streams self._uploadLength generated bytes to the server while
        timestamping, then merges the server's JSON reply into `params`.
        Returns the modified dictionary.
        Raises RuntimeError (chained to the original error) on any failure.
        """
        timestamp = self.js_time()
        # millisecond markers and byte counter for the measurement
        clientRequestBegin = 0
        clientRequestEnd = 0
        clientResponseBegin = 0
        clientResponseEnd = 0
        clientReceiveLength = 0
        try:
            # prepare the request
            url = self._serverURL + self._uploadPath
            request = urllib.request.Request(
                url,
                headers = {
                    'Content-Type': 'application/octet',
                    'Content-Length': self._uploadLength,
                    'Accept': 'application/json',
                },
                data=self.bytesource(self._uploadLength),
                method='POST'
                )
            # send the request, mark the times
            clientRequestBegin = self.js_time()
            with urllib.request.urlopen(request) as f:
                clientRequestEnd = self.js_time()
                # get the response, mark the times, save the info
                clientResponseBegin = self.js_time()
                text = f.read(4096)    # should be JSON text, < 1K
                clientResponseEnd = self.js_time()
                size = len(text)
                while size > 0:
                    # should be no remaining text
                    clientReceiveLength += size
                    size = len(f.read(4096))
                clientResponseEnd = self.js_time()
            # NOTE(review): only the first 4096 bytes are parsed as JSON;
            # a larger reply would be truncated (the comment above assumes
            # replies stay under 1K).
            info = json.loads(text)
            # update data report for printing as JSON to the log
            # (setdefault: never overwrite values already supplied)
            params.setdefault('clientReceiveLength', clientReceiveLength)
            params.setdefault('clientRequestBegin', clientRequestBegin)
            params.setdefault('clientRequestEnd', clientRequestEnd)
            params.setdefault('clientResponseBegin', clientResponseBegin)
            params.setdefault('clientResponseEnd', clientResponseEnd)
            params.setdefault('serverReceiveLength',
                                info['serverReceiveLength'])
            params.setdefault('serverRequestBegin', info['serverRequestBegin'])
            params.setdefault('serverRequestEnd', info['serverRequestEnd'])
            params.setdefault('serverResponseBegin',
                                info['serverResponseBegin'])
            params.setdefault('uploadReceiveLength',
                                info['uploadReceiveLength'])
        except Exception as e:
            raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
                    'Failed to upload data from server at',
                    self._serverURL])) from e
        return params
    def uploadTest(self):
        """
        Run upload test and report result to server.
        There are two exchanges. The first exchange does the upload and
        reports partial information to the server. The second exchange
        includes information that becomes available after completion of the
        first exchange, and reports full information to the server.
        Side effects: writes JSON records to the log, a human-readable
        summary to the report stream, and adjusts self._uploadLength for the
        next run (capped at maxUploadLength).
        """
        gc.collect()    # try to avoid garbage collection during test
        timestamp = self.js_time()
        # allocation of data to make the request
        params = collections.OrderedDict((
            ('externalIP', self._externalIP),
            ('testID', self._testID),
            ('testBegin', self._testBegin),
            ('testNumber', self._testNumber),
            ('pathname', self._uploadPath),
            ('clientTimestamp', timestamp),
            ('interval', self._interval),
            ('uploadLength', self._uploadLength),
            ))
        params = self.upload(params)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        # human-readable report
        megabytes = math.floor(params['uploadLength'] / 1_000) / 1_000
        seconds = (params['clientResponseEnd']
                    - params['clientRequestBegin']) / 1_000
        print( 'Upload\n    Time: '
                + self.js_clock(params['clientTimestamp'])
                + '\n    Megabytes: ' + str(megabytes)
                + '\n    Seconds: ' + str(seconds)
                + '\n    Megabits / Second: ' + str(round(
                    (self.bitsPerDataByte * megabytes / seconds), 3))
                + '\n', file=self._report)
        self._report.flush()
        # revise the upload size for the next run, to get approximately the
        # desired length of time on each test run.
        self._uploadLength = min(self.maxUploadLength,
                self.recalculateLength(params['uploadReceiveLength'], seconds))
        params = self.reportToServer(params, self._upreportPath)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        return
def run_test_cycle(self):
"""
Run a single set of upload and upload tests.
"""
self.downloadTest()
self.uploadTest()
    def run(self):
        """
        Invoke startup and ongoing test runs.
        Contacts the server once via begin(), then loops forever running one
        download/upload cycle and sleeping self._interval seconds between
        cycles.  Never returns; terminate with KeyboardInterrupt.
        """
        self.begin()
        while True:
            self.run_test_cycle()
            time.sleep(self._interval)
if __name__ == "__main__":
    # Command-line entry point: parse options, print usage on request or on
    # a missing host argument, then run tests until interrupted.
    # NOTE(review): getopt.GetoptError from an unknown option is not caught
    # and will surface as a traceback; behavior kept unchanged.
    shortopts = "h"
    longopts = ["help", "testid=", "interval=", "download=", "upload="]
    cmdline = getopt.getopt(sys.argv[1:], shortopts, longopts=longopts)
    argv = cmdline[1]           # positional arguments: host[:port]
    opt = dict(cmdline[0])      # option name -> value
    def printerr(s):
        """Print one line to stderr and flush immediately."""
        print(s, file=sys.stderr)
        sys.stderr.flush()
    if len(argv) < 1 or '-h' in opt or '--help' in opt:
        printerr("Usage: " + sys.argv[0] + " [options] host[:port]")
        printerr("    Client to estimate download and upload times")
        printerr("    host (required): domain name or IP address of server")
        printerr("    port (optional, default = 80): "
                + "destination port on server")
        printerr("    options:")
        printerr("        -h|--help    print this message")
        printerr("        --interval=n    time (seconds) between runs"
                + " (default = " + str(Client.defaultInterval) +")")
        printerr("        --download=n    number of bytes to download"
                + " (default = " + str(Client.initialDownloadLength) + ")")
        printerr("        --upload=n    number of bytes to upload"
                + " (default = " + str(Client.initialUploadLength) + ")")
        printerr("        --testid=ID    test ID"
                + " (default = test ID will be set by server)")
        printerr("    JSON log goes to stdout")
        printerr("    Human-readable report goes to stderr")
        printerr("    See script for details")
        exit(2)
    testID = opt["--testid"] if "--testid" in opt else None
    interval = (int(opt["--interval"]) if "--interval" in opt
                else Client.defaultInterval)
    download = (int(opt["--download"]) if "--download" in opt
                else Client.initialDownloadLength)
    upload = (int(opt["--upload"]) if "--upload" in opt
                else Client.initialUploadLength)
    try:
        Client(argv[0], interval=interval,
                downloadLength=download,
                uploadLength=upload,
                testID=testID).run()
    except KeyboardInterrupt as e:
        # Bug fix: message previously read "Teiminated".
        printerr("Terminated by Keyboard Interrupt\n")
        exit(1)
| 41.582721 | 79 | 0.55895 |
import os
import sys
import collections
import gc
import getopt
import json
import math
import time
import traceback
import urllib.error
import urllib.request
import re
class Client(object):
defaultInterval = 3_600
initialDownloadLength = 20_000_000
initialUploadLength = 2_000_000
minLength = 200_000
maxLength = 100_000_000
desiredRuntime = 10
maxRatio = 1.5
maxUploadLength = 125_000_000
defaultLog = sys.stdout
defaultReport = sys.stderr
bitsPerDataByte = 8
@classmethod
def js_time(cls):
return math.floor(1000 * time.time())
@classmethod
def js_clock(cls, milliseconds=None):
seconds = milliseconds / 1000 if milliseconds else None
return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(seconds))
def __init__(self, serverURL,
log=defaultLog,
report=defaultReport,
interval=defaultInterval,
downloadLength=initialDownloadLength,
uploadLength=initialUploadLength,
testID = None
):
super()
self._serverURL = (serverURL.rstrip('/')
if serverURL.endswith('/')
else serverURL)
self._rootPath = '/'
self._setupPath = '/begin'
self._downloadPath = '/download'
self._downreportPath = '/downreport'
self._uploadPath = '/upload'
self._upreportPath = '/upreport'
self._pingPath = '/echo'
self._report = report
self._log = log
self._interval = ( interval if interval
else self.__class__.defaultInterval)
self._downloadLength = ( downloadLength if downloadLength
else self.__class__.initialDownloadLength)
self._uploadLength = ( uploadLength if uploadLength
else self.__class__.initialUploadLength)
self._testID = testID
self._testNumber = 0
self._externalIP = None
self._testBegin = None
self._uploadLength = min(self.maxUploadLength, self._uploadLength)
def recalculateLength(self, previousLength, previousRuntime):
targetRuntime = self.__class__.desiredRuntime
minLength = self.__class__.minLength
maxLength = self.__class__.maxLength
lastRuntime = max(previousRuntime, targetRuntime/100)
if ( lastRuntime > targetRuntime / self.maxRatio
and lastRunime < targetRuntime * self.maxRatio ):
return previousLength
# round to nearest thousand and not too small or large
transmitLength = previousLength * targetRuntime / lastRuntime
return max(minLength, min(maxLength, round(transmitLength, -3)))
def bytesource(self, count):
byt = ((b'0123456789' * 7) + b'012345678\n') * 50
n = count
blen = len(byt)
while n > blen:
yield byt
n -= blen
yield byt[0:n] # may have zero length
def begin(self):
timestamp = self.js_time()
params = collections.OrderedDict((
('externalIP', self._externalIP),
('testID', self._testID),
('testBegin', self.js_time()), # server may revise this time
('pathname', self._setupPath),
('clientTimeStamp', timestamp),
('interval', self._interval),
('downloadLength', self._downloadLength),
('uploadLength', self._uploadLength),
))
content = bytes(json.dumps(params), 'utf-8')
try:
url = self._serverURL + self._setupPath
request = urllib.request.Request(
url,
headers = {
'Content-Type': 'application/json',
# Content-Length is automatically calculated
'Accept': 'application/json',
},
data=content,
method='POST'
)
with urllib.request.urlopen(request) as f:
# failure of the next assignments would be a system failure
info = json.loads(f.read())
self._testID = info["testID"]
self._interval = info["interval"]
self._downloadLength = info["downloadLength"]
self._uploadLength = info["uploadLength"]
self._testBegin = info['testBegin']
print(json.dumps(info), file=self._log)
self._log.flush()
print( 'Begin:\n Test ID = ' + info['testID']
+ '\n External IP = ' + info['externalIP']
+ '\n Test Begin Time = '
+ self.js_clock(info['testBegin'])
+ '\n', file=self._report)
self._report.flush()
except Exception as e:
raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
'Failed to begin communication with server at',
self._serverURL])) from e
return
def reportToServer(self, params, reportPath):
timestamp = self.js_time()
try:
params['clientTimestamp'] = timestamp
params['pathname'] = reportPath
# prepare the request
content = bytes(json.dumps(params), 'utf-8')
url = self._serverURL + reportPath
request = urllib.request.Request(
url,
headers = {
'Content-Type': 'application/json',
# Content-Length is automatically calculated
'Accept': 'application/json',
},
data=content,
method='POST'
)
with urllib.request.urlopen(request) as f:
data = f.read(4096).decode(encoding='iso-8859-1',
errors='replace')
except Exception as e:
raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
'Failed to report result to', url])) from e
# data should be JSON text in canonical form
return json.loads(data)
def download(self, params):
timestamp = self.js_time()
clientRequestBegin = 0
clientRequestEnd = 0
clientResponseBegin = 0
clientResponseEnd = 0
clientReceiveLength = 0
try:
# prepare the request
content = bytes(json.dumps(params), 'utf-8')
url = self._serverURL + self._downloadPath
request = urllib.request.Request(
url,
headers = {
'Content-Type': 'application/json',
# Content-Length is automatically calculated
'Accept': 'text/plain, application/octet',
},
data=content,
method='POST'
)
# send the request, mark the times
clientRequestBegin = self.js_time()
with urllib.request.urlopen(request) as f:
clientRequestEnd = self.js_time()
# get the response, mark the times
# we only need the total length of downloaded data
clientResponseBegin = self.js_time()
size = len(f.read(1024))
while size > 0:
clientReceiveLength += size
size = len(f.read(16_384))
clientResponseEnd = self.js_time()
# update the information and return it
params.setdefault('clientReceiveLength', clientReceiveLength)
params.setdefault('downloadReceiveLength', clientReceiveLength)
params.setdefault('clientRequestBegin', clientRequestBegin)
params.setdefault('clientRequestEnd', clientRequestEnd)
params.setdefault('clientResponseBegin', clientResponseBegin)
params.setdefault('clientResponseEnd', clientResponseEnd)
except Exception as e:
raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
'Failed to download data from server at',
self._serverURL])) from e
return params
def downloadTest(self):
gc.collect() # try to avoid garbage collection during test
timestamp = self.js_time()
# allocation of data to make the request
params = collections.OrderedDict((
('externalIP', self._externalIP),
('testID', self._testID),
('testBegin', self._testBegin),
('testNumber', self._testNumber),
('pathname', self._downloadPath),
('clientTimestamp', timestamp),
('interval', self._interval),
('downloadLength', self._downloadLength),
))
params = self.download(params)
# computer-readable JSON report
print(json.dumps(params), file=self._log)
self._log.flush()
# human-readable repot
megabytes = math.floor(params['clientReceiveLength'] / 1_000) / 1_000
seconds = (params['clientResponseEnd']
- params['clientResponseBegin']) / 1_000
print( 'Download\n Time: '
+ self.js_clock(params['clientTimestamp'])
+ '\n Megabytes: ' + str(megabytes)
+ '\n Seconds: ' + str(seconds)
+ '\n Megabits / Second: ' + str(round(
(self.bitsPerDataByte * megabytes / seconds), 3))
+ '\n', file=self._report)
self._report.flush()
# revise the download size for the next run, to get approximately the
# desired length of time on each test run.
self._downloadLength = self.recalculateLength(
params['downloadReceiveLength'], seconds)
params = self.reportToServer(params, self._downreportPath)
# computer-readable JSON report
print(json.dumps(params), file=self._log)
self._log.flush()
return
    def upload(self, params):
        """POST self._uploadLength random bytes to the server's upload path.

        Millisecond timestamps (js_time) are captured around the request
        and the response; the server's JSON reply is merged into *params*
        via setdefault, and *params* is returned.

        Raises RuntimeError (chained from the original error) on failure.
        """
        timestamp = self.js_time()
        # Pre-initialize so the except clause can't hit unbound locals.
        clientRequestBegin = 0
        clientRequestEnd = 0
        clientResponseBegin = 0
        clientResponseEnd = 0
        clientReceiveLength = 0
        try:
            # prepare the request
            url = self._serverURL + self._uploadPath
            request = urllib.request.Request(
                url,
                headers = {
                    # NOTE(review): 'application/octet' is not a registered
                    # MIME type -- presumably 'application/octet-stream' was
                    # intended; confirm against the server before changing.
                    'Content-Type': 'application/octet',
                    'Content-Length': self._uploadLength,
                    'Accept': 'application/json',
                },
                data=self.bytesource(self._uploadLength),
                method='POST'
            )
            # send the request, mark the times
            clientRequestBegin = self.js_time()
            with urllib.request.urlopen(request) as f:
                clientRequestEnd = self.js_time()
                # get the response, mark the times, save the info
                clientResponseBegin = self.js_time()
                text = f.read(4096) # should be JSON text, < 1K
                clientResponseEnd = self.js_time()
                size = len(text)
                while size > 0:
                    # should be no remaining text
                    clientReceiveLength += size
                    size = len(f.read(4096))
                    clientResponseEnd = self.js_time()
            # NOTE(review): only the first 4096 bytes are parsed as JSON;
            # a longer reply would be truncated -- verify replies stay small.
            info = json.loads(text)
            # update data report for printing as JSON to the log
            params.setdefault('clientReceiveLength', clientReceiveLength)
            params.setdefault('clientRequestBegin', clientRequestBegin)
            params.setdefault('clientRequestEnd', clientRequestEnd)
            params.setdefault('clientResponseBegin', clientResponseBegin)
            params.setdefault('clientResponseEnd', clientResponseEnd)
            params.setdefault('serverReceiveLength',
                info['serverReceiveLength'])
            params.setdefault('serverRequestBegin', info['serverRequestBegin'])
            params.setdefault('serverRequestEnd', info['serverRequestEnd'])
            params.setdefault('serverResponseBegin',
                info['serverResponseBegin'])
            params.setdefault('uploadReceiveLength',
                info['uploadReceiveLength'])
        except Exception as e:
            raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
                'Failed to upload data from server at',
                self._serverURL])) from e
        return params
    def uploadTest(self):
        """Run one upload measurement.

        Upload self._uploadLength bytes via upload(), write a JSON record
        to the log, print a human-readable summary, adapt the next upload
        size (capped at maxUploadLength), and report back to the server.
        """
        gc.collect() # try to avoid garbage collection during test
        timestamp = self.js_time()
        # allocation of data to make the request
        params = collections.OrderedDict((
            ('externalIP', self._externalIP),
            ('testID', self._testID),
            ('testBegin', self._testBegin),
            ('testNumber', self._testNumber),
            ('pathname', self._uploadPath),
            ('clientTimestamp', timestamp),
            ('interval', self._interval),
            ('uploadLength', self._uploadLength),
        ))
        params = self.upload(params)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        # human-readable report (based on the intended upload size, unlike
        # downloadTest which uses the received byte count)
        megabytes = math.floor(params['uploadLength'] / 1_000) / 1_000
        seconds = (params['clientResponseEnd']
            - params['clientRequestBegin']) / 1_000
        print( 'Upload\n Time: '
            + self.js_clock(params['clientTimestamp'])
            + '\n Megabytes: ' + str(megabytes)
            + '\n Seconds: ' + str(seconds)
            + '\n Megabits / Second: ' + str(round(
                (self.bitsPerDataByte * megabytes / seconds), 3))
            + '\n', file=self._report)
        self._report.flush()
        # revise the upload size for the next run, to get approximately the
        # desired length of time on each test run.
        self._uploadLength = min(self.maxUploadLength,
            self.recalculateLength(params['uploadReceiveLength'], seconds))
        params = self.reportToServer(params, self._upreportPath)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        return
    def run_test_cycle(self):
        """One measurement cycle: a download test followed by an upload test."""
        self.downloadTest()
        self.uploadTest()
    def run(self):
        """Run test cycles forever, pausing self._interval seconds between
        cycles.  Intended to be stopped with Ctrl-C (KeyboardInterrupt)."""
        self.begin()
        while True:
            self.run_test_cycle()
            time.sleep(self._interval)
if __name__ == "__main__":
    # Command line: -h/--help plus long options that all take a value.
    shortopts = "h"
    longopts = ["help", "testid=", "interval=", "download=", "upload="]
    # getopt returns (parsed_option_pairs, remaining_positional_args).
    cmdline = getopt.getopt(sys.argv[1:], shortopts, longopts=longopts)
    argv = cmdline[1]
    opt = dict(cmdline[0])

    def printerr(s):
        # Usage/diagnostics go to stderr; stdout is reserved for the JSON log.
        print(s, file=sys.stderr)
        sys.stderr.flush()

    if len(argv) < 1 or '-h' in opt or '--help' in opt:
        printerr("Usage: " + sys.argv[0] + " [options] host[:port]")
        printerr(" Client to estimate download and upload times")
        printerr(" host (required): domain name or IP address of server")
        printerr(" port (optional, default = 80): "
            + "destination port on server")
        printerr(" options:")
        printerr(" -h|--help print this message")
        printerr(" --interval=n time (seconds) between runs"
            + " (default = " + str(Client.defaultInterval) + ")")
        printerr(" --download=n number of bytes to download"
            + " (default = " + str(Client.initialDownloadLength) + ")")
        printerr(" --upload=n number of bytes to upload"
            + " (default = " + str(Client.initialUploadLength) + ")")
        printerr(" --testid=ID test ID"
            + " (default = test ID will be set by server)")
        printerr(" JSON log goes to stdout")
        printerr(" Human-readable report goes to stderr")
        printerr(" See script for details")
        sys.exit(2)  # sys.exit: works even when site's exit() is absent

    # Option values fall back to the Client class defaults.
    testID = opt.get("--testid")
    interval = (int(opt["--interval"]) if "--interval" in opt
        else Client.defaultInterval)
    download = (int(opt["--download"]) if "--download" in opt
        else Client.initialDownloadLength)
    upload = (int(opt["--upload"]) if "--upload" in opt
        else Client.initialUploadLength)
    try:
        Client(argv[0], interval=interval,
            downloadLength=download,
            uploadLength=upload,
            testID=testID).run()
    except KeyboardInterrupt:
        # Fixed typo in the user-facing message ("Teiminated").
        printerr("Terminated by Keyboard Interrupt\n")
        sys.exit(1)
| true | true |
f72be1dc6f4fb1ba0ced9f2acbb93db31ef858b3 | 3,694 | py | Python | bin/specsim3d/spectralsim.py | LutzGross/fingal | 4b6fcc02871e7ba1a98f37ffd18f1a16a5fe6a48 | [
"Apache-2.0"
] | null | null | null | bin/specsim3d/spectralsim.py | LutzGross/fingal | 4b6fcc02871e7ba1a98f37ffd18f1a16a5fe6a48 | [
"Apache-2.0"
] | null | null | null | bin/specsim3d/spectralsim.py | LutzGross/fingal | 4b6fcc02871e7ba1a98f37ffd18f1a16a5fe6a48 | [
"Apache-2.0"
] | null | null | null | #-------------------------------------------------------------------------------
# Name: Spectralsim
# Purpose: Simulation of standard normal random fields
#
# Author: Dr.-Ing. S. Hoerning
#
# Created: 02.05.2018, Centre for Natural Gas, EAIT,
# The University of Queensland, Brisbane, QLD, Australia
#-------------------------------------------------------------------------------
import numpy as np
from . import covariancefunction as covfun
class spectral_random_field(object):
    """Simulate standard normal random fields via FFT of the covariance
    (spectral method).  Call simnew() for additional realizations."""

    def __init__(self,
                 domainsize=(100, 100),
                 covmod='1.0 Exp(2.)',
                 periodic=False,
                 ):
        self.counter = 0
        self.periodic = periodic
        # create self.xyz for plotting 3d
        if len(domainsize) == 3:
            self.xyz = np.mgrid[[slice(0, n, 1) for n in domainsize]].reshape(3, -1).T
        # adjust domainsize by cutoff for non-periodic output
        self.cutoff = 0
        if not self.periodic:
            cutoff = covfun.find_maximum_range(covmod)
            cutoffs = []
            for dim in domainsize:
                tsize = dim + cutoff
                # find closest multiple of 8 that is larger than tsize
                # (plain int(): np.int was deprecated in NumPy 1.20 and
                # removed in NumPy 1.24)
                m8 = int(np.ceil(tsize / 8.) * 8.)
                cutoffs.append(m8 - dim)
            self.cutoff = np.array(cutoffs)

        self.domainsize = np.array(domainsize) + self.cutoff
        self.covmod = covmod
        self.ndim = len(self.domainsize)
        self.npoints = np.prod(self.domainsize)
        self.grid = np.mgrid[[slice(0, n, 1) for n in self.domainsize]]

        # ensure periodicity of domain
        for i in range(self.ndim):
            self.domainsize = self.domainsize[:, np.newaxis]
        self.grid = np.min((self.grid, np.array(self.domainsize) - self.grid), axis=0)

        # compute distances from origin (--> wavenumbers in fourier space)
        self.h = ((self.grid ** 2).sum(axis=0)) ** 0.5
        # covariances (in fourier space!!!)
        self.Q = covfun.Covariogram(self.h, self.covmod)
        # FFT of covariances
        self.FFTQ = np.abs(np.fft.fftn(self.Q))
        # eigenvalues of decomposition
        self.sqrtFFTQ = np.sqrt(self.FFTQ / self.npoints)

        self.Y = self.simnew()

    def simnew(self):
        """Draw a new realization of the field; also stored on self.Y."""
        self.counter += 1
        # compute random field via inverse fourier transform
        real = np.random.standard_normal(size=self.sqrtFFTQ.shape)
        imag = np.random.standard_normal(size=self.sqrtFFTQ.shape)
        epsilon = real + 1j * imag
        rand = epsilon * self.sqrtFFTQ
        self.Y = np.real(np.fft.ifftn(rand)) * self.npoints

        if not self.periodic:
            # readjust domainsize to correct size (--> no boundary effects...)
            gridslice = [slice(0, (self.domainsize.squeeze() - self.cutoff)[i], 1)
                         for i in range(self.ndim)]
            self.Y = self.Y[tuple(gridslice)]
            self.Y = self.Y.reshape(self.domainsize.squeeze() - self.cutoff)
        return self.Y
# TEST CASE
if __name__ == "__main__":
    from mpl_toolkits.mplot3d import Axes3D
    import matplotlib.pyplot as plt

    # Simulate one exponential-covariance field on a 30x30x30 grid and
    # display it as a colour-coded 3-D scatter plot.
    domain = (30, 30, 30)
    covmod = '1.0 Exp(4.)'
    spec = spectral_random_field(domainsize = domain, covmod = covmod)
    field3d = spec.simnew()

    xyz = np.mgrid[[slice(0 , n, 1) for n in domain]].reshape(3,-1).T
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(xyz[:,0], xyz[:,1], xyz[:,2], c=field3d.flatten())
    plt.show()
| 35.864078 | 84 | 0.538711 |
import numpy as np
from . import covariancefunction as covfun
class spectral_random_field(object):
    """Simulate standard normal random fields via FFT of the covariance
    (spectral method).  Call simnew() for additional realizations."""

    def __init__(self,
                 domainsize=(100, 100),
                 covmod='1.0 Exp(2.)',
                 periodic=False,
                 ):
        self.counter = 0
        self.periodic = periodic
        # 3-D coordinate list, used for plotting.
        if len(domainsize) == 3:
            self.xyz = np.mgrid[[slice(0, n, 1) for n in domainsize]].reshape(3, -1).T
        # For non-periodic output, pad the domain by the covariance range.
        self.cutoff = 0
        if not self.periodic:
            cutoff = covfun.find_maximum_range(covmod)
            cutoffs = []
            for dim in domainsize:
                tsize = dim + cutoff
                # Round up to a multiple of 8.  Plain int(): np.int was
                # deprecated in NumPy 1.20 and removed in NumPy 1.24.
                m8 = int(np.ceil(tsize / 8.) * 8.)
                cutoffs.append(m8 - dim)
            self.cutoff = np.array(cutoffs)
        self.domainsize = np.array(domainsize) + self.cutoff
        self.covmod = covmod
        self.ndim = len(self.domainsize)
        self.npoints = np.prod(self.domainsize)
        self.grid = np.mgrid[[slice(0, n, 1) for n in self.domainsize]]
        # Fold distances to enforce periodicity of the padded domain.
        for i in range(self.ndim):
            self.domainsize = self.domainsize[:, np.newaxis]
        self.grid = np.min((self.grid, np.array(self.domainsize) - self.grid), axis=0)
        # Distances from the origin (wavenumbers in Fourier space).
        self.h = ((self.grid ** 2).sum(axis=0)) ** 0.5
        # Covariance evaluated at those lags, then its FFT spectrum.
        self.Q = covfun.Covariogram(self.h, self.covmod)
        self.FFTQ = np.abs(np.fft.fftn(self.Q))
        self.sqrtFFTQ = np.sqrt(self.FFTQ / self.npoints)
        self.Y = self.simnew()

    def simnew(self):
        """Draw a new realization of the field; also stored on self.Y."""
        self.counter += 1
        # Random field via inverse FFT of white noise shaped by the spectrum.
        real = np.random.standard_normal(size=self.sqrtFFTQ.shape)
        imag = np.random.standard_normal(size=self.sqrtFFTQ.shape)
        epsilon = real + 1j * imag
        rand = epsilon * self.sqrtFFTQ
        self.Y = np.real(np.fft.ifftn(rand)) * self.npoints
        if not self.periodic:
            # Crop the padding so boundary effects are discarded.
            gridslice = [slice(0, (self.domainsize.squeeze() - self.cutoff)[i], 1)
                         for i in range(self.ndim)]
            self.Y = self.Y[tuple(gridslice)]
            self.Y = self.Y.reshape(self.domainsize.squeeze() - self.cutoff)
        return self.Y
if __name__ == "__main__":
    from mpl_toolkits.mplot3d import Axes3D
    import matplotlib.pyplot as plt

    # Demo: one exponential-covariance field on a 30x30x30 grid,
    # shown as a colour-coded 3-D scatter plot.
    domain = (30, 30, 30)
    covmod = '1.0 Exp(4.)'
    spec = spectral_random_field(domainsize = domain, covmod = covmod)
    field3d = spec.simnew()
    xyz = np.mgrid[[slice(0 , n, 1) for n in domain]].reshape(3,-1).T
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(xyz[:,0], xyz[:,1], xyz[:,2], c=field3d.flatten())
    plt.show()
| true | true |
f72be352896b0f61ced55b6764db278e4593e15a | 387 | py | Python | sinoera/tst/solarterm/test_grainrain.py | sinotradition/sinoera | 1e93482c0a56a8917bc7ceebeef5b63b24ca3651 | [
"Apache-2.0"
] | 1 | 2015-12-14T15:14:35.000Z | 2015-12-14T15:14:35.000Z | sinoera/tst/solarterm/test_grainrain.py | sinotradition/sinoera | 1e93482c0a56a8917bc7ceebeef5b63b24ca3651 | [
"Apache-2.0"
] | null | null | null | sinoera/tst/solarterm/test_grainrain.py | sinotradition/sinoera | 1e93482c0a56a8917bc7ceebeef5b63b24ca3651 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#coding=utf-8
'''This is test module
@author: sheng
@contact: sinotradition@gmail.com
@copyright: License according to the project license.
'''
import unittest
from sinoera.solarterm import grainrain
class TestGrainrainFunctions(unittest.TestCase):
    """Test skeleton for sinoera.solarterm.grainrain.

    Bug fix: the ``class`` keyword was missing
    ("TestGrainrainFunctions(unittest.TestCase):"), which made this
    module a SyntaxError and the test uncollectable.
    """

    def setUp(self):
        pass

    def test_XXX(self):
        pass


if __name__ == "__main__":
    unittest.main()
| 16.125 | 53 | 0.705426 |
'''This is test module
@author: sheng
@contact: sinotradition@gmail.com
@copyright: License according to the project license.
'''
import unittest
from sinoera.solarterm import grainrain
class TestGrainrainFunctions(unittest.TestCase):
    """Test skeleton for sinoera.solarterm.grainrain.

    Bug fix: the ``class`` keyword was missing, which made this module a
    SyntaxError and the test uncollectable.
    """

    def setUp(self):
        pass

    def test_XXX(self):
        pass


if __name__ == "__main__":
    unittest.main()
| false | true |
f72be3f57c7c5d2eb4ce80ae2f2640c917c82222 | 1,598 | py | Python | salt/states/aptpkg.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | 3 | 2015-04-16T18:42:35.000Z | 2017-10-30T16:57:49.000Z | salt/states/aptpkg.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | 16 | 2015-11-18T00:44:03.000Z | 2018-10-29T20:48:27.000Z | salt/states/aptpkg.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | 4 | 2020-11-04T06:28:05.000Z | 2022-02-09T10:54:49.000Z | # -*- coding: utf-8 -*-
'''
Package management operations specific to APT- and DEB-based systems
====================================================================
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import salt libs
import salt.utils
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'apt'
def __virtual__():
    '''
    Only work on apt-based platforms with pkg.get_selections
    '''
    # Load under the 'apt' virtual name only when the pkg module exposes
    # get_selections; returning False tells salt to skip this module.
    if __salt__.get('pkg.get_selections', False):
        return __virtualname__
    return False
def held(name):
    '''
    Set package in 'hold' state, meaning it will not be upgraded.

    name
        The name of the package, e.g., 'tmux'
    '''
    # Standard salt state return structure.
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
    # Current dpkg selection state for this package.
    state = __salt__['pkg.get_selections'](
        pattern=name,
    )
    if not state:
        # Unknown package: leave result=False so the state fails.
        ret.update(comment='Package {0} does not have a state'.format(name))
    elif not salt.utils.is_true(state.get('hold', False)):
        if not __opts__['test']:
            # Apply the hold for real and record the change.
            result = __salt__['pkg.set_selections'](
                selection={'hold': [name]}
            )
            ret.update(changes=result[name],
                       result=True,
                       comment='Package {0} is now being held'.format(name))
        else:
            # Dry run (test=True): result=None signals "would change".
            ret.update(result=None,
                       comment='Package {0} is set to be held'.format(name))
    else:
        # Already held; nothing to do.
        ret.update(result=True,
                   comment='Package {0} is already held'.format(name))
    return ret
| 27.084746 | 76 | 0.560701 |
from __future__ import absolute_import
import logging
import salt.utils
log = logging.getLogger(__name__)
__virtualname__ = 'apt'
def __virtual__():
    # Load this state module only where pkg.get_selections exists
    # (i.e. apt/dpkg-based platforms); False tells salt to skip it.
    return (__virtualname__
            if __salt__.get('pkg.get_selections', False)
            else False)
def held(name):
    '''Set package *name* in dpkg 'hold' state so it will not be upgraded.'''
    # Standard salt state return structure.
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
    # Current dpkg selection state for this package.
    state = __salt__['pkg.get_selections'](
        pattern=name,
    )
    if not state:
        # Unknown package: result stays False so the state fails.
        ret.update(comment='Package {0} does not have a state'.format(name))
    elif not salt.utils.is_true(state.get('hold', False)):
        if not __opts__['test']:
            # Apply the hold for real and record the change.
            result = __salt__['pkg.set_selections'](
                selection={'hold': [name]}
            )
            ret.update(changes=result[name],
                       result=True,
                       comment='Package {0} is now being held'.format(name))
        else:
            # Dry run (test=True): result=None signals "would change".
            ret.update(result=None,
                       comment='Package {0} is set to be held'.format(name))
    else:
        # Already held; nothing to do.
        ret.update(result=True,
                   comment='Package {0} is already held'.format(name))
    return ret
| true | true |
f72be4c5cc1eeab06b7191f01eab65ac00eda171 | 605 | py | Python | lms_aaditya/ContactsModules/update_contacts.py | hcmuleva/personal-profile | 051b5a2f36b927951691f48abe584beb8bc25440 | [
"MIT"
] | null | null | null | lms_aaditya/ContactsModules/update_contacts.py | hcmuleva/personal-profile | 051b5a2f36b927951691f48abe584beb8bc25440 | [
"MIT"
] | 3 | 2020-07-13T17:46:32.000Z | 2020-07-26T10:30:59.000Z | lms_aaditya/ContactsModules/update_contacts.py | hcmuleva/personal-profile | 051b5a2f36b927951691f48abe584beb8bc25440 | [
"MIT"
] | null | null | null | from MongoConnect import ConnectModule
my_con = ConnectModule.connect()
collection = my_con.db["Contacts"]
class UpdateContact:
    """Holds one contact's fields and pushes them to the module-level
    Mongo "Contacts" collection."""

    def __init__(self, reg_id, uname, uemail, uphone):
        self.reg_id = reg_id
        self.uname = uname
        self.uemail = uemail
        self.uphone = uphone

    def update(self):
        """Apply a $set update to the document with this registration id."""
        # Field names deliberately keep their trailing spaces -- they must
        # match the keys already stored in the collection.
        fields = {
            "Registration Id": self.reg_id,
            "Name ": self.uname,
            "Email ID ": self.uemail,
            "Phone number ": self.uphone,
        }
        return collection.update({"Registration Id": self.reg_id},
                                 {"$set": fields})
| 27.5 | 75 | 0.58843 | from MongoConnect import ConnectModule
my_con = ConnectModule.connect()
collection = my_con.db["Contacts"]
class UpdateContact:
    # Holds one contact's fields; update() writes them back to the
    # module-level Mongo "Contacts" collection.
    def __init__(self, reg_id, uname, uemail, uphone):
        self.uname = uname
        self.uemail = uemail
        self.uphone = uphone
        self.reg_id = reg_id

    def update(self):
        # $set document -- key names (including trailing spaces) must match
        # the keys already stored in the collection.
        newdata = {"$set": {
            "Registration Id": self.reg_id,
            "Name ": self.uname,
            "Email ID ": self.uemail,
            "Phone number ": self.uphone
            }
        }
        # NOTE(review): Collection.update() is deprecated in PyMongo 3 and
        # removed in PyMongo 4 -- consider update_one(); check callers first.
        return collection.update({"Registration Id": self.reg_id}, newdata)
| true | true |
f72be4cb8b14bc8a51522aeae3113d30c873f8c5 | 1,485 | py | Python | v1functions/timer-trigger-azuresearch-index-monitoring/function/run.py | anuragc617/azure-functions-python-samples | d39e3154a960bd10cd1be80eecf0c4a1d5ab2244 | [
"MIT"
] | 323 | 2017-01-31T03:25:33.000Z | 2022-03-30T06:06:31.000Z | v1functions/timer-trigger-azuresearch-index-monitoring/function/run.py | anuragc617/azure-functions-python-samples | d39e3154a960bd10cd1be80eecf0c4a1d5ab2244 | [
"MIT"
] | 41 | 2017-10-13T10:43:35.000Z | 2022-01-13T01:10:24.000Z | v1functions/timer-trigger-azuresearch-index-monitoring/function/run.py | anuragc617/azure-functions-python-samples | d39e3154a960bd10cd1be80eecf0c4a1d5ab2244 | [
"MIT"
] | 167 | 2017-01-31T03:25:44.000Z | 2022-03-15T16:54:54.000Z | # -*- coding: utf-8 -*-
"""
Azure Functions Timer Trigger Python Sample
- Get Azure Search Index Statistics and store them into DocumentDB
DocumentDB binding reference:
https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-documentdb
"""
import sys, os, datetime, json
import httplib, urllib
# Azure Search connection settings (placeholders -- fill in before deploying).
AZURE_SEARCH_SERVICE_NAME='<azure search service name>'
AZURE_SEARCH_API_VER='<azure search api version: ex. 2015-02-28-Preview>'
AZURE_SEARCH_ADMIN_KEY='<azure search API admin key>'
AZURE_SEARCH_INDEX_NAME='<azure search index name>'
CONTENT_TYPE='application/json'

headers = {
    'api-key': AZURE_SEARCH_ADMIN_KEY,
    'content-type': "application/json"
}

r_data = ''
try:
    # GET the index statistics from the Azure Search REST API.
    conn = httplib.HTTPSConnection('{}.search.windows.net'.format(AZURE_SEARCH_SERVICE_NAME))
    conn.request("GET",
        "/indexes/{0}/stats?api-version={1}".format(AZURE_SEARCH_INDEX_NAME, AZURE_SEARCH_API_VER),
        '', headers)
    response = conn.getresponse()
    r_data = response.read()
    conn.close()
except Exception as e:
    # Bug fix: a generic Exception has no errno/strerror attributes, so the
    # old "[Errno {0}] {1}".format(e.errno, e.strerror) raised
    # AttributeError inside the handler.  Print the exception itself.
    print("Request failed: {0}".format(e))

if r_data:
    r_jsonobject = json.loads(r_data)
    # Index statistics document to persist (UTC timestamp for consistency).
    outdoc = {
        "doccount": r_jsonobject['documentCount'],
        "storagesize": r_jsonobject['storageSize'],
        "timestamp": str(datetime.datetime.utcnow())
    }
    print(outdoc)
    # Writing to DocumentDB (Document parameter name: outputDocument)
    with open(os.environ['outputDocument'], 'wb') as f:
        json.dump(outdoc, f)
| 30.306122 | 99 | 0.711785 |
"""
Azure Functions Timer Trigger Python Sample
- Get Azure Search Index Statistics and store them into DocumentDB
DocumentDB binding reference:
https://docs.microsoft.com/en-us/azure/azure-functions/functions-bindings-documentdb
"""
import sys, os, datetime, json
import httplib, urllib
# Azure Search connection settings (placeholders -- fill in before deploying).
AZURE_SEARCH_SERVICE_NAME='<azure search service name>'
AZURE_SEARCH_API_VER='<azure search api version: ex. 2015-02-28-Preview>'
AZURE_SEARCH_ADMIN_KEY='<azure search API admin key>'
AZURE_SEARCH_INDEX_NAME='<azure search index name>'
CONTENT_TYPE='application/json'
headers = {
    'api-key': AZURE_SEARCH_ADMIN_KEY,
    'content-type': "application/json"
}
r_data = ''
try:
    # GET the index statistics from the Azure Search REST API.
    conn = httplib.HTTPSConnection('{}.search.windows.net'.format(AZURE_SEARCH_SERVICE_NAME))
    conn.request("GET",
        "/indexes/{0}/stats?api-version={1}".format(AZURE_SEARCH_INDEX_NAME, AZURE_SEARCH_API_VER),
        '', headers)
    response = conn.getresponse()
    r_data = response.read()
    conn.close()
except Exception as e:
    # NOTE(review): a generic Exception has no errno/strerror attributes,
    # so this handler itself raises AttributeError for non-OS errors.
    print("[Errno {0}] {1}".format(e.errno, e.strerror))
if r_data:
    r_jsonobject=json.loads(r_data)
    # Index statistics document to persist (UTC timestamp).
    outdoc= {
        "doccount": r_jsonobject['documentCount'],
        "storagesize": r_jsonobject['storageSize'],
        "timestamp": str(datetime.datetime.utcnow())
    }
    print outdoc
    # Written to DocumentDB via the function's outputDocument binding.
    with open(os.environ['outputDocument'], 'wb') as f:
        json.dump(outdoc,f)
| false | true |
f72be53c78ed7ddbde3083e7ebead947d18b09a5 | 2,494 | py | Python | dayong/components/event_component.py | ooliver1/Dayong | 0923e0ff2a03157a51f7fae9c6056afd812c314c | [
"MIT"
] | 1 | 2021-11-04T13:20:31.000Z | 2021-11-04T13:20:31.000Z | dayong/components/event_component.py | ooliver1/Dayong | 0923e0ff2a03157a51f7fae9c6056afd812c314c | [
"MIT"
] | null | null | null | dayong/components/event_component.py | ooliver1/Dayong | 0923e0ff2a03157a51f7fae9c6056afd812c314c | [
"MIT"
] | null | null | null | """
dayong.components.event_component
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Organization of events and event listeners.
"""
from typing import Optional
import hikari
import tanjun
from dayong.configs import DayongConfig
component = tanjun.Component()
@component.with_listener(hikari.MemberCreateEvent)
async def greet_new_member(
event: hikari.MemberCreateEvent,
config: DayongConfig = tanjun.injected(type=DayongConfig),
) -> None:
"""Welcome new guild members.
This will dynamically search for welcome channels, sort the channels by name length
and send a greeting to the channel with the shortest name.
Args:
event (hikari.MemberCreateEvent): Instance of `hikari.MemberCreateEvent`. This
is a registered type dependency and is injected by the client.
"""
embeddings = config.embeddings["new_member_greetings"]
wc_channels: list[str] = []
wc_channel: Optional[hikari.TextableChannel] = None
channels = await event.app.rest.fetch_guild_channels(event.guild_id)
# Collect welcome channels.
for channel in channels:
if channel.name is not None and "welcome" in channel.name:
wc_channels.append(channel.name)
if wc_channels:
wc_channels.sort(key=len)
for channel in channels:
if wc_channels[0] == channel.name:
wc_channel = (
wch
if isinstance(
(wch := await event.app.rest.fetch_channel(channel.id)),
hikari.TextableChannel,
)
else None
)
if wc_channel is not None and isinstance(embeddings, dict):
embed = hikari.Embed(
description=embeddings["description"].format(
hikari.OwnGuild.name,
event.member.id,
embeddings["readme_channel_id"],
),
color=embeddings["color"],
)
for info in range(len(embeddings["greetings_field"])):
inner_dict = embeddings["greetings_field"][info]
embed.add_field(
name=inner_dict["name"],
value=inner_dict["value"],
inline=True,
)
await wc_channel.send(embed)
@tanjun.as_loader
def load_examples(client: tanjun.Client) -> None:
"""The loader for this component.
Args:
client (tanjun.Client): The client instance that will load this module.
"""
client.add_component(component.copy())
| 30.048193 | 87 | 0.623897 | from typing import Optional
import hikari
import tanjun
from dayong.configs import DayongConfig
component = tanjun.Component()
@component.with_listener(hikari.MemberCreateEvent)
async def greet_new_member(
event: hikari.MemberCreateEvent,
config: DayongConfig = tanjun.injected(type=DayongConfig),
) -> None:
embeddings = config.embeddings["new_member_greetings"]
wc_channels: list[str] = []
wc_channel: Optional[hikari.TextableChannel] = None
channels = await event.app.rest.fetch_guild_channels(event.guild_id)
for channel in channels:
if channel.name is not None and "welcome" in channel.name:
wc_channels.append(channel.name)
if wc_channels:
wc_channels.sort(key=len)
for channel in channels:
if wc_channels[0] == channel.name:
wc_channel = (
wch
if isinstance(
(wch := await event.app.rest.fetch_channel(channel.id)),
hikari.TextableChannel,
)
else None
)
if wc_channel is not None and isinstance(embeddings, dict):
embed = hikari.Embed(
description=embeddings["description"].format(
hikari.OwnGuild.name,
event.member.id,
embeddings["readme_channel_id"],
),
color=embeddings["color"],
)
for info in range(len(embeddings["greetings_field"])):
inner_dict = embeddings["greetings_field"][info]
embed.add_field(
name=inner_dict["name"],
value=inner_dict["value"],
inline=True,
)
await wc_channel.send(embed)
@tanjun.as_loader
def load_examples(client: tanjun.Client) -> None:
client.add_component(component.copy())
| true | true |
f72be60e09156efc269b07d9431ebb692d7f5e1c | 683 | py | Python | src/test_SimulationState.py | pawel00100/Disease-Spread-Simulation | eff8b824a9205f61a4d70cd6c7613a9a786bd1eb | [
"MIT"
] | null | null | null | src/test_SimulationState.py | pawel00100/Disease-Spread-Simulation | eff8b824a9205f61a4d70cd6c7613a9a786bd1eb | [
"MIT"
] | null | null | null | src/test_SimulationState.py | pawel00100/Disease-Spread-Simulation | eff8b824a9205f61a4d70cd6c7613a9a786bd1eb | [
"MIT"
] | null | null | null | from unittest import TestCase
from Person import Person
from SimulationState import SimulationState, MapPosition
from src.Map import Map
class TestSimulationState(TestCase):
    """Tests for SimulationState.find_neighbors."""

    def test_find_neighbors(self):
        # Two people at adjacent cells and one far away on a 200x200 map.
        map = Map(200, 200)
        p0 = Person(MapPosition(0, 0, map), map)
        p1 = Person(MapPosition(1, 0, map), map)
        p2 = Person(MapPosition(0, 100, map), map)
        people = {p0: MapPosition(0, 0, map), p1: MapPosition(1, 0, map), p2: MapPosition(0, 100, map)}
        simulation_state = SimulationState(people)

        neighbors = simulation_state.find_neighbors(p0)

        # assertIn/assertNotIn replace the deprecated `assert_` alias
        # (deprecated since Python 2.7, removed in Python 3.12) and give
        # clearer failure messages.
        self.assertIn(p1, neighbors)
        self.assertNotIn(p2, neighbors)
| 31.045455 | 103 | 0.676428 | from unittest import TestCase
from Person import Person
from SimulationState import SimulationState, MapPosition
from src.Map import Map
class TestSimulationState(TestCase):
    """Tests for SimulationState.find_neighbors."""

    def test_find_neighbors(self):
        # Two people at adjacent cells and one far away on a 200x200 map.
        map = Map(200, 200)
        p0 = Person(MapPosition(0, 0, map), map)
        p1 = Person(MapPosition(1, 0, map), map)
        p2 = Person(MapPosition(0, 100, map), map)
        people = {p0: MapPosition(0, 0, map), p1: MapPosition(1, 0, map), p2: MapPosition(0, 100, map)}
        simulation_state = SimulationState(people)

        neighbors = simulation_state.find_neighbors(p0)

        # assertIn/assertNotIn replace the deprecated `assert_` alias
        # (removed in Python 3.12).
        self.assertIn(p1, neighbors)
        self.assertNotIn(p2, neighbors)
| true | true |
f72be711d81d043cbb81c0c2790bd4ca458ffcbf | 3,130 | py | Python | tests/test_types.py | acarrasco/dacite | ece070cc3c25e86634086db8ee4f2e45bdfe6fe5 | [
"MIT"
] | null | null | null | tests/test_types.py | acarrasco/dacite | ece070cc3c25e86634086db8ee4f2e45bdfe6fe5 | [
"MIT"
] | 1 | 2019-03-20T17:30:34.000Z | 2019-03-20T17:30:34.000Z | tests/test_types.py | acarrasco/dacite | ece070cc3c25e86634086db8ee4f2e45bdfe6fe5 | [
"MIT"
] | null | null | null | from typing import Optional, Union, List, Any, Dict, NewType, TypeVar, Generic
import pytest
from dacite.types import (
is_optional,
extract_optional,
is_generic,
is_union,
is_generic_collection,
extract_origin_collection,
is_instance,
cast_value,
extract_generic,
is_new_type,
)
# --- is_union / is_optional / extract_optional ---

def test_is_union_with_union():
    assert is_union(Union[int, float])

def test_is_union_with_non_union():
    assert not is_union(int)

def test_is_optional_with_optional():
    assert is_optional(Optional[int])

def test_is_optional_with_non_optional():
    assert not is_optional(int)

def test_is_optional_with_optional_of_union():
    assert is_optional(Optional[Union[int, float]])

def test_extract_optional():
    assert extract_optional(Optional[int]) == int

def test_extract_optional_with_wrong_type():
    with pytest.raises(ValueError):
        extract_optional(List[None])

# --- is_generic / generic collections / NewType ---

def test_is_generic_with_generic():
    assert is_generic(Optional[int])

def test_is_generic_with_non_generic():
    assert not is_generic(int)

def test_is_generic_collection_with_generic_collection():
    assert is_generic_collection(List[int])

def test_is_generic_collection_with_non_generic_collection():
    assert not is_generic_collection(list)

def test_extract_generic_collection():
    assert extract_origin_collection(List[int]) == list

def test_is_new_type_with_new_type():
    assert is_new_type(NewType("NewType", int))

def test_is_new_type_with_non_new_type():
    assert not is_new_type(int)

# --- is_instance across built-ins, unions, collections, Any, NewType ---

def test_is_instance_with_built_in_type_and_matching_value_type():
    assert is_instance(1, int)

def test_is_instance_with_built_in_type_and_not_matching_value_type():
    assert not is_instance("test", int)

def test_is_instance_with_union_and_matching_value_type():
    assert is_instance(1, Union[int, float])

def test_is_instance_with_union_and_not_matching_value_type():
    assert not is_instance("test", Union[int, float])

def test_is_instance_with_generic_collection_and_matching_value_type():
    assert is_instance([1], List[int])

def test_is_instance_with_generic_collection_and_not_matching_value_type():
    assert not is_instance({1}, List[int])

def test_is_instance_with_any_type():
    assert is_instance(1, Any)

def test_is_instance_with_new_type_and_matching_value_type():
    assert is_instance("test", NewType("MyStr", str))

def test_is_instance_with_new_type_and_not_matching_value_type():
    assert not is_instance(1, NewType("MyStr", str))

def test_is_instance_with_not_supported_generic_types():
    # User-defined Generic subclasses are not supported by is_instance.
    T = TypeVar("T")

    class X(Generic[T]):
        pass

    assert not is_instance(X[str](), X[str])

# --- cast_value / extract_generic ---

def test_cast_value_with_built_in_type():
    assert cast_value(int, "1") == 1

def test_cast_value_with_optional():
    assert cast_value(Optional[int], "1") == 1

def test_cast_value_with_generic_sequence():
    assert cast_value(List[int], ["1"]) == [1]

def test_cast_value_with_generic_mapping():
    assert cast_value(Dict[str, int], {1: "1"}) == {"1": 1}

def test_extract_generic():
    assert extract_generic(List[int]) == (int,)
| 22.517986 | 78 | 0.763259 | from typing import Optional, Union, List, Any, Dict, NewType, TypeVar, Generic
import pytest
from dacite.types import (
is_optional,
extract_optional,
is_generic,
is_union,
is_generic_collection,
extract_origin_collection,
is_instance,
cast_value,
extract_generic,
is_new_type,
)
# Unit tests for dacite.types helpers (union/optional/generic detection,
# is_instance, cast_value, extract_generic).

def test_is_union_with_union():
    assert is_union(Union[int, float])

def test_is_union_with_non_union():
    assert not is_union(int)

def test_is_optional_with_optional():
    assert is_optional(Optional[int])

def test_is_optional_with_non_optional():
    assert not is_optional(int)

def test_is_optional_with_optional_of_union():
    assert is_optional(Optional[Union[int, float]])

def test_extract_optional():
    assert extract_optional(Optional[int]) == int

def test_extract_optional_with_wrong_type():
    with pytest.raises(ValueError):
        extract_optional(List[None])

def test_is_generic_with_generic():
    assert is_generic(Optional[int])

def test_is_generic_with_non_generic():
    assert not is_generic(int)

def test_is_generic_collection_with_generic_collection():
    assert is_generic_collection(List[int])

def test_is_generic_collection_with_non_generic_collection():
    assert not is_generic_collection(list)

def test_extract_generic_collection():
    assert extract_origin_collection(List[int]) == list

def test_is_new_type_with_new_type():
    assert is_new_type(NewType("NewType", int))

def test_is_new_type_with_non_new_type():
    assert not is_new_type(int)

def test_is_instance_with_built_in_type_and_matching_value_type():
    assert is_instance(1, int)

def test_is_instance_with_built_in_type_and_not_matching_value_type():
    assert not is_instance("test", int)

def test_is_instance_with_union_and_matching_value_type():
    assert is_instance(1, Union[int, float])

def test_is_instance_with_union_and_not_matching_value_type():
    assert not is_instance("test", Union[int, float])

def test_is_instance_with_generic_collection_and_matching_value_type():
    assert is_instance([1], List[int])

def test_is_instance_with_generic_collection_and_not_matching_value_type():
    assert not is_instance({1}, List[int])

def test_is_instance_with_any_type():
    assert is_instance(1, Any)

def test_is_instance_with_new_type_and_matching_value_type():
    assert is_instance("test", NewType("MyStr", str))

def test_is_instance_with_new_type_and_not_matching_value_type():
    assert not is_instance(1, NewType("MyStr", str))

def test_is_instance_with_not_supported_generic_types():
    T = TypeVar("T")

    class X(Generic[T]):
        pass

    assert not is_instance(X[str](), X[str])

def test_cast_value_with_built_in_type():
    assert cast_value(int, "1") == 1

def test_cast_value_with_optional():
    assert cast_value(Optional[int], "1") == 1

def test_cast_value_with_generic_sequence():
    assert cast_value(List[int], ["1"]) == [1]

def test_cast_value_with_generic_mapping():
    assert cast_value(Dict[str, int], {1: "1"}) == {"1": 1}

def test_extract_generic():
    assert extract_generic(List[int]) == (int,)
| true | true |
f72be79039a8d75fd79fb65024ecf80bebc4577e | 3,508 | py | Python | p40-49/p41.py | kbrose/project_euler | f582ef1887f44628997e05d88253adad0822d6b9 | [
"Unlicense"
] | 1 | 2015-10-11T15:53:00.000Z | 2015-10-11T15:53:00.000Z | p40-49/p41.py | kbrose/project_euler | f582ef1887f44628997e05d88253adad0822d6b9 | [
"Unlicense"
] | null | null | null | p40-49/p41.py | kbrose/project_euler | f582ef1887f44628997e05d88253adad0822d6b9 | [
"Unlicense"
] | null | null | null | from array import *
import itertools
def isprime(n):
    # Trial-division primality test.
    # Returns 1 if n is prime, 0 otherwise (kept as 0/1 ints to match the
    # original truthy-int interface used by the callers below).
    if n < 2:
        # BUGFIX: the original reported 0, 1 and negative numbers as prime
        # (the loop body never ran, falling through to `return 1`).
        return 0
    # Only need to test divisors up to floor(sqrt(n)).
    # range() works on both Python 2 and 3 (the original xrange is py2-only).
    for i in range(2, int(n ** .5) + 1):
        if n % i == 0:
            return 0
    return 1
digits = {0:'0',1:'1',2:'2',3:'3',4:'4',5:'5',6:'6',7:'7',8:'8',9:'9'}
# if the sum of the digits of a number n is divisible by three, then so is n
# due to this, only 1, 4, and 7 digit pandigitals can be prime
def FindPrimePan():
for a in xrange(7,0,-1):
num1 = digits[a]
digits[a] = 0
for b in xrange(7,0,-1):
if not digits[b]:
continue
num2 = num1 + digits[b]
digits[b] = 0
for c in xrange(7,0,-1):
if not digits[c]:
continue
num3 = num2 + digits[c]
digits[c] = 0
for d in xrange(7,0,-1):
if not digits[d]:
continue
num4 = num3 + digits[d]
digits[d] = 0
for e in xrange(7,0,-1):
if not digits[e]:
continue
num5 = num4 + digits[e]
digits[e] = 0
for f in xrange(7,0,-1):
if not digits[f]:
continue
num6 = num5 + digits[f]
digits[f] = 0
for i in xrange(7,0,-1):
if not digits[i]:
continue
num7 = num6 + digits[i]
#print num7
if isprime(int(num7)):
print "PRIME:",num7
return
if not f == 1:
digits[f] = str(f)
if f == 1:
digits[f] = str(f)
if not e == 1:
digits[e] = str(e)
if not e == 1:
digits[e] = str(e)
if e == 1:
digits[e] = str(e)
if not d == 1:
digits[d] = str(d)
if not d == 1:
digits[d] = str(d)
if d == 1:
digits[d] = str(d)
if not c == 1:
digits[c] = str(c)
if not c == 1:
digits[c] = str(c)
if c == 1:
digits[c] = str(c)
if not b == 1:
digits[b] = str(b)
if not b == 1:
digits[b] = str(b)
if b == 1:
digits[b] = str(b)
if not a == 1:
digits[a] = str(a)
if not a == 1:
digits[a] = str(a)
FindPrimePan()
#print list(itertools.permutations([7,6,5,4,3,2,1], 7))
def NicerVersion():
    # Same search as FindPrimePan, written with itertools.permutations.
    # Permutations of [7..1] come out in descending numeric order, so the
    # first prime encountered is the largest pandigital prime.
    pandigits7 = list(itertools.permutations([7,6,5,4,3,2,1],7))
    for i in xrange(0,len(pandigits7)):
        num = 0
        # Assemble the 7-digit number from its digit tuple: digit at
        # position 6-j is the coefficient of 10**j.
        for j in xrange(0,7):
            num = num + pandigits7[i][6-j]*(10**(j))
        if isprime(num):
            print "PRIME:",num
            break
NicerVersion()
| 34.732673 | 76 | 0.322406 | from array import *
import itertools
def isprime(n):
for i in xrange(2,int(n**.5)+1):
if not (n % i):
return 0
return 1
digits = {0:'0',1:'1',2:'2',3:'3',4:'4',5:'5',6:'6',7:'7',8:'8',9:'9'}
def FindPrimePan():
for a in xrange(7,0,-1):
num1 = digits[a]
digits[a] = 0
for b in xrange(7,0,-1):
if not digits[b]:
continue
num2 = num1 + digits[b]
digits[b] = 0
for c in xrange(7,0,-1):
if not digits[c]:
continue
num3 = num2 + digits[c]
digits[c] = 0
for d in xrange(7,0,-1):
if not digits[d]:
continue
num4 = num3 + digits[d]
digits[d] = 0
for e in xrange(7,0,-1):
if not digits[e]:
continue
num5 = num4 + digits[e]
digits[e] = 0
for f in xrange(7,0,-1):
if not digits[f]:
continue
num6 = num5 + digits[f]
digits[f] = 0
for i in xrange(7,0,-1):
if not digits[i]:
continue
num7 = num6 + digits[i]
if isprime(int(num7)):
print "PRIME:",num7
return
if not f == 1:
digits[f] = str(f)
if f == 1:
digits[f] = str(f)
if not e == 1:
digits[e] = str(e)
if not e == 1:
digits[e] = str(e)
if e == 1:
digits[e] = str(e)
if not d == 1:
digits[d] = str(d)
if not d == 1:
digits[d] = str(d)
if d == 1:
digits[d] = str(d)
if not c == 1:
digits[c] = str(c)
if not c == 1:
digits[c] = str(c)
if c == 1:
digits[c] = str(c)
if not b == 1:
digits[b] = str(b)
if not b == 1:
digits[b] = str(b)
if b == 1:
digits[b] = str(b)
if not a == 1:
digits[a] = str(a)
if not a == 1:
digits[a] = str(a)
FindPrimePan()
def NicerVersion():
pandigits7 = list(itertools.permutations([7,6,5,4,3,2,1],7))
for i in xrange(0,len(pandigits7)):
num = 0
for j in xrange(0,7):
num = num + pandigits7[i][6-j]*(10**(j))
if isprime(num):
print "PRIME:",num
break
NicerVersion()
| false | true |
f72be79b665283eb1a3552b5a050e1fc7b15727e | 2,877 | py | Python | mms/context.py | andrewfayres/mxnet-model-server | ef4edfef4cfe5234887bf834ec7b82676a36ba02 | [
"Apache-2.0"
] | 1 | 2019-01-30T02:57:31.000Z | 2019-01-30T02:57:31.000Z | mms/context.py | DrSnowbird/mxnet-model-server | a0bfd712350545dceb21c8e0b0b21dfa0c9918a7 | [
"Apache-2.0"
] | null | null | null | mms/context.py | DrSnowbird/mxnet-model-server | a0bfd712350545dceb21c8e0b0b21dfa0c9918a7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
# http://www.apache.org/licenses/LICENSE-2.0
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""
Context object of incoming request
"""
class Context(object):
    """Worker-side state for a loaded model.

    System properties (model dir, gpu id, batch size, server info) are
    fixed when the model is loaded; request ids, the request processor
    and metrics vary per inference request.
    """
    def __init__(self, model_name, model_dir, manifest, batch_size, gpu, mms_version):
        self.model_name = model_name
        self.manifest = manifest
        # Load-time properties; exposed read-only via `system_properties`.
        self._system_properties = dict(
            model_dir=model_dir,
            gpu_id=gpu,
            batch_size=batch_size,
            server_name="MMS",
            server_version=mms_version,
        )
        self.request_ids = None
        self.request_processor = RequestProcessor(dict())
        self._metrics = None

    @property
    def system_properties(self):
        """Dict of properties fixed at model-load time."""
        return self._system_properties

    @property
    def request_processor(self):
        """The RequestProcessor tracking per-request response state."""
        return self._request_processor

    @request_processor.setter
    def request_processor(self, request_processor):
        self._request_processor = request_processor

    @property
    def metrics(self):
        return self._metrics

    @metrics.setter
    def metrics(self, metrics):
        self._metrics = metrics

    def set_response_content_type(self, request_id, value):
        """Record the content-type response header for *request_id*."""
        self.request_processor.add_response_property(
            request_id, {'content-type': value})

    def get_response_content_type(self, request_id):
        """Return the recorded content-type for *request_id*, or None."""
        headers = self.request_processor.get_response_header().get(request_id)
        return headers.get('content-type') if headers is not None else None

    def __eq__(self, other):
        if not isinstance(other, Context):
            return False
        return self.__dict__ == other.__dict__
class RequestProcessor(object):
    """Tracks the incoming request headers and accumulates the response
    status and headers for one request."""
    def __init__(self, request_header):
        self._request_header = request_header
        self._response_header = {}
        # Default to a successful status until report_status says otherwise.
        self._status_code = 200
        self._reason_phrase = None

    def get_request_property(self, key):
        """Return the request-header value for *key*, or None if absent."""
        return self._request_header.get(key)

    def report_status(self, code, reason_phrase=None):
        """Record the status code (and optional reason) for the response."""
        self._status_code = code
        self._reason_phrase = reason_phrase

    def add_response_property(self, key, value):
        """Set a single response header entry."""
        self._response_header[key] = value

    def get_response_header(self):
        """Return the dict of accumulated response headers."""
        return self._response_header
| 31.271739 | 90 | 0.686827 |
class Context(object):
def __init__(self, model_name, model_dir, manifest, batch_size, gpu, mms_version):
self.model_name = model_name
self.manifest = manifest
self._system_properties = {
"model_dir": model_dir,
"gpu_id": gpu,
"batch_size": batch_size,
"server_name": "MMS",
"server_version": mms_version
}
self.request_ids = None
self.request_processor = RequestProcessor(dict())
self._metrics = None
@property
def system_properties(self):
return self._system_properties
@property
def request_processor(self):
return self._request_processor
@request_processor.setter
def request_processor(self, request_processor):
self._request_processor = request_processor
@property
def metrics(self):
return self._metrics
@metrics.setter
def metrics(self, metrics):
self._metrics = metrics
def set_response_content_type(self, request_id, value):
self._request_processor.add_response_property(request_id, {'content-type': value})
def get_response_content_type(self, request_id):
response_headers = self._request_processor.get_response_header().get(request_id)
if response_headers is not None:
return response_headers.get('content-type')
return None
def __eq__(self, other):
return isinstance(other, Context) and self.__dict__ == other.__dict__
class RequestProcessor(object):
def __init__(self, request_header):
self._status_code = 200
self._reason_phrase = None
self._response_header = {}
self._request_header = request_header
def get_request_property(self, key):
return self._request_header.get(key)
def report_status(self, code, reason_phrase=None):
self._status_code = code
self._reason_phrase = reason_phrase
def add_response_property(self, key, value):
self._response_header[key] = value
def get_response_header(self):
return self._response_header
| true | true |
f72be81e9dc0fedf810a47adb40ea90cbd01bd08 | 7,766 | py | Python | xos/core/dashboard/views/home.py | pan2za/xos | c2a4da2ccaa12360b2718be303b247866aefdfe6 | [
"Apache-2.0"
] | null | null | null | xos/core/dashboard/views/home.py | pan2za/xos | c2a4da2ccaa12360b2718be303b247866aefdfe6 | [
"Apache-2.0"
] | 5 | 2020-06-05T17:47:15.000Z | 2021-09-23T23:21:27.000Z | xos/core/dashboard/views/home.py | pan2za/xos | c2a4da2ccaa12360b2718be303b247866aefdfe6 | [
"Apache-2.0"
] | null | null | null | from view_common import *
from django.http import HttpResponseRedirect
import sys
def isInt(s):
    """Return True if *s* parses as a base-10 integer, else False."""
    try:
        int(s)
    except ValueError:
        return False
    return True
class LoggedInView(TemplateView):
    """Post-login landing view.

    Redirects the user to their configured ``login_page`` if they have one,
    otherwise to the Django admin.
    """
    def get(self, request, name="root", *args, **kwargs):
        # NOTE(review): `name` is unused here; presumably kept for URLconf
        # signature compatibility -- confirm before removing.
        if request.user.login_page:
            return HttpResponseRedirect(request.user.login_page)
        else:
            return HttpResponseRedirect("/admin/")
class DashboardDynamicView(TemplateView):
    """Render XOS dashboards for the current user.

    A dashboard is either a DashboardView model row (referenced by numeric
    id) or an on-disk template (referenced by name).  Depending on the URL
    it is rendered inside the admin chrome (singleDashboardView), as a
    whole standalone page (singleFullView), or -- currently not routed to --
    as a tabbed collection of all the user's dashboards (multiDashboardView).
    """
    # Boilerplate wrapped around dashboard content rendered in the admin.
    head_template = r"""{% extends "admin/dashboard/dashboard_base.html" %}
    {% load admin_static %}
    {% block content %}
    """

    # Boilerplate for dashboards rendered as a full standalone page.
    head_wholePage_template = r"""{% extends "admin/wholePage.html" %}
    {% load admin_static %}
    {% block content %}
    """

    tail_template = r"{% endblock %}"

    def get(self, request, name="root", *args, **kwargs):
        """Dispatch to the appropriate rendering strategy based on *name*."""
        context = self.get_context_data(**kwargs)
        context = getDashboardContext(request.user, context)

        if name == "root":
            # maybe it is a bit hacky, didn't want to mess up everything @teone
            user_dashboards = request.user.get_dashboards()
            first_dasboard_name = user_dashboards[0].id
            return self.singleDashboardView(request, first_dasboard_name, context)
            # return self.multiDashboardView(request, context)
        elif kwargs.get("wholePage", None):
            return self.singleFullView(request, name, context)
        else:
            return self.singleDashboardView(request, name, context)

    def readTemplate(self, fn):
        """Return the contents of dashboard template *fn* (no extension).

        Searches the known template directories in order; returns an error
        message string if the template cannot be found.
        """
        TEMPLATE_DIRS = [XOS_DIR + "/templates/admin/dashboard/",
                         XOS_DIR + "/core/xoslib/dashboards/"]

        for template_dir in TEMPLATE_DIRS:
            pathname = os.path.join(template_dir, fn) + ".html"
            if os.path.exists(pathname):
                break
        else:
            return "failed to find %s in %s" % (fn, TEMPLATE_DIRS)

        # BUGFIX: use a context manager so the file handle is closed
        # deterministically (the original open(...).read() leaked the
        # handle until garbage collection).
        with open(pathname, "r") as template_file:
            template = template_file.read()

        if (fn == "tenant"):
            # fix for tenant view - it writes html to a div called tabs-5
            template = '<div id="tabs-5"></div>' + template
        return template

    def embedDashboardUrl(self, url):
        """Turn a dashboard *url* spec into embeddable HTML.

        ``template:<name>`` loads a template from disk; an ``http...`` URL
        is embedded in an iframe; anything else yields an error string.
        """
        if url.startswith("template:"):
            fn = url[9:]
            return self.readTemplate(fn)
        elif url.startswith("http"):
            return '<iframe src="%s" width="100%%" height="100%%" style="min-height: 1024px;" frameBorder="0"></iframe>' % url
        else:
            return "don't know how to load dashboard %s" % url

    def embedDashboardView(self, view, i=0):
        """Return the HTML body for one DashboardView.

        If the view is bound to controllers, emit a controller <select>,
        one inline <script type="text/template"> per controller, and a
        small script that swaps the chosen template into the content div.
        *i* disambiguates DOM ids when several views share one page.
        """
        body = ""
        url = view.url

        if (view.controllers.all().count() > 0):
            body = body + 'Controller: <select id="dashselect-%d">' % i
            body = body + '<option value="None">(select a controller)</option>'
            for j, controllerdashboard in enumerate(view.controllerdashboardviews.all()):
                body = body + '<option value="%d">%s</option>' % (j, controllerdashboard.controller.name)
            body = body + '</select><hr>'
            for j, controllerdashboard in enumerate(view.controllerdashboardviews.all()):
                body = body + '<script type="text/template" id="dashtemplate-%d-%d">\n%s\n</script>\n' % (i, j, self.embedDashboardUrl(controllerdashboard.url));
            body = body + '<div id="dashcontent-%d" class="dashcontent"></div>\n' % i
            body = body + """<script>
            $("#dashselect-%d").change(function() {
              v=$("#dashselect-%d").val();
              if (v=="None") {
                  $("#dashcontent-%d").html("");
                  return;
              }
              $("#dashcontent-%d").html( $("#dashtemplate-%d-" + v).html() );
              });
            //$("#dashcontent-%d").html( $("#dashtemplate-%d-0").html() );
            </script>
            """ % (i, i, i, i, i, i, i)
        else:
            body = body + self.embedDashboardUrl(url)
        return body

    def multiDashboardView(self, request, context):
        """Render every dashboard the user may see as one tabbed page.

        Currently unreferenced (get() always uses singleDashboardView) but
        retained for compatibility.
        """
        head_template = self.head_template

        dashboards = request.user.get_dashboards()

        if not request.user.is_appuser:
            # customize is a special dashboard they always get
            customize = DashboardView.objects.filter(name="Customize")
            if customize:
                dashboards.append(customize[0])

        tabs = []
        bodies = []

        i = 0
        for view in dashboards:
            # don't display disabled dashboards
            if (not view.enabled):
                continue
            tabs.append('<li><a href="#dashtab-%d">%s</a></li>\n' % (i, view.name))
            body = '<div id="dashtab-%d">%s</div>\n' % (i, self.embedDashboardView(view, i))
            bodies.append(body)
            i = i + 1

        # embed content provider dashboards
        for cp in ContentProvider.objects.all():
            if request.user in cp.users.all():
                tabs.append('<li><a href="#dashtab-%d">%s</a></li>\n' % (i, cp.name))
                body = ""
                body = body + '<div id="dashtab-%d">\n' % i
                body = body + self.embedDashboardUrl("http:/admin/hpc/contentprovider/%s/%s/embeddedfilteredchange" % (cp.serviceProvider.hpcService.id, cp.id))
                body = body + '</div>\n'
                bodies.append(body)
                i = i + 1

        if (len(tabs) == 1) and (len(bodies) == 1):
            # there is only one dashboard, so optimize out the tabbing
            contents = bodies[0]
        else:
            contents = """
            <div id="hometabs" >
            <ul id="suit_form_tabs" class="nav nav-tabs nav-tabs-suit" data-tab-prefix="suit-tab">
            %s
            </ul>
            %s
            </div>
            """ % ("\n".join(tabs), "\n".join(bodies))

        t = template.Template(head_template + contents + self.tail_template)

        response_kwargs = {}
        response_kwargs.setdefault('content_type', self.content_type)
        return self.response_class(
            request=request,
            template=t,
            context=context,
            **response_kwargs)

    def singleDashboardView(self, request, id, context):
        """Render one dashboard inside the admin chrome.

        *id* is either a DashboardView pk (numeric) or a template name.
        """
        head_template = self.head_template

        # if id is a number, load by datamodel,
        # else look directly for the template
        if(isInt(id)):
            view = DashboardView.objects.get(id=id)
            t = template.Template(head_template + self.embedDashboardView(view) + self.tail_template)
        else:
            t = template.Template(head_template + self.readTemplate("xos" + id) + self.tail_template)

        response_kwargs = {}
        response_kwargs.setdefault('content_type', self.content_type)
        return self.response_class(
            request=request,
            template=t,
            context=context,
            **response_kwargs)

    def singleFullView(self, request, id, context):
        """Render one dashboard as a standalone page (no admin chrome)."""
        head_template = self.head_wholePage_template

        view = DashboardView.objects.get(id=id)
        t = template.Template(head_template + self.embedDashboardView(view) + self.tail_template)

        response_kwargs = {}
        response_kwargs.setdefault('content_type', self.content_type)
        return self.response_class(
            request=request,
            template=t,
            context=context,
            **response_kwargs)
| 37.882927 | 160 | 0.550992 | from view_common import *
from django.http import HttpResponseRedirect
import sys
def isInt(s):
try:
int(s)
return True
except ValueError:
return False
class LoggedInView(TemplateView):
def get(self, request, name="root", *args, **kwargs):
if request.user.login_page:
return HttpResponseRedirect(request.user.login_page)
else:
return HttpResponseRedirect("/admin/")
class DashboardDynamicView(TemplateView):
head_template = r"""{% extends "admin/dashboard/dashboard_base.html" %}
{% load admin_static %}
{% block content %}
"""
head_wholePage_template = r"""{% extends "admin/wholePage.html" %}
{% load admin_static %}
{% block content %}
"""
tail_template = r"{% endblock %}"
def get(self, request, name="root", *args, **kwargs):
context = self.get_context_data(**kwargs)
context = getDashboardContext(request.user, context)
if name == "root":
user_dashboards = request.user.get_dashboards()
first_dasboard_name = user_dashboards[0].id
return self.singleDashboardView(request, first_dasboard_name, context)
# return self.multiDashboardView(request, context)
elif kwargs.get("wholePage", None):
return self.singleFullView(request, name, context)
else:
return self.singleDashboardView(request, name, context)
def readTemplate(self, fn):
TEMPLATE_DIRS = [XOS_DIR + "/templates/admin/dashboard/",
XOS_DIR + "/core/xoslib/dashboards/"]
for template_dir in TEMPLATE_DIRS:
pathname = os.path.join(template_dir, fn) + ".html"
if os.path.exists(pathname):
break
else:
return "failed to find %s in %s" % (fn, TEMPLATE_DIRS)
template = open(pathname, "r").read()
if (fn == "tenant"):
# fix for tenant view - it writes html to a div called tabs-5
template = '<div id="tabs-5"></div>' + template
return template
def embedDashboardUrl(self, url):
if url.startswith("template:"):
fn = url[9:]
return self.readTemplate(fn)
elif url.startswith("http"):
return '<iframe src="%s" width="100%%" height="100%%" style="min-height: 1024px;" frameBorder="0"></iframe>' % url
else:
return "don't know how to load dashboard %s" % url
def embedDashboardView(self, view, i=0):
body = ""
url = view.url
if (view.controllers.all().count() > 0):
body = body + 'Controller: <select id="dashselect-%d">' % i
body = body + '<option value="None">(select a controller)</option>'
for j, controllerdashboard in enumerate(view.controllerdashboardviews.all()):
body = body + '<option value="%d">%s</option>' % (j, controllerdashboard.controller.name)
body = body + '</select><hr>'
for j, controllerdashboard in enumerate(view.controllerdashboardviews.all()):
body = body + '<script type="text/template" id="dashtemplate-%d-%d">\n%s\n</script>\n' % (i,j, self.embedDashboardUrl(controllerdashboard.url));
body = body + '<div id="dashcontent-%d" class="dashcontent"></div>\n' % i
body = body + """<script>
$("#dashselect-%d").change(function() {
v=$("#dashselect-%d").val();
if (v=="None") {
$("#dashcontent-%d").html("");
return;
}
$("#dashcontent-%d").html( $("#dashtemplate-%d-" + v).html() );
});
//$("#dashcontent-%d").html( $("#dashtemplate-%d-0").html() );
</script>
""" % (i, i, i, i, i, i, i)
else:
body = body + self.embedDashboardUrl(url)
return body
def multiDashboardView(self, request, context):
head_template = self.head_template
tail_template = self.tail_template
dashboards = request.user.get_dashboards()
if not request.user.is_appuser:
customize = DashboardView.objects.filter(name="Customize")
if customize:
dashboards.append(customize[0])
tabs = []
bodies = []
i = 0
for view in dashboards:
if (not view.enabled):
continue
tabs.append('<li><a href="#dashtab-%d">%s</a></li>\n' % (i, view.name))
body = '<div id="dashtab-%d">%s</div>\n' % (i, self.embedDashboardView(view, i))
bodies.append(body)
i = i + 1
# embed content provider dashboards
for cp in ContentProvider.objects.all():
if request.user in cp.users.all():
tabs.append('<li><a href="#dashtab-%d">%s</a></li>\n' % (i, cp.name))
body = ""
body = body + '<div id="dashtab-%d">\n' % i
body = body + self.embedDashboardUrl("http:/admin/hpc/contentprovider/%s/%s/embeddedfilteredchange" % (cp.serviceProvider.hpcService.id, cp.id))
body = body + '</div>\n'
bodies.append(body)
i = i + 1
if (len(tabs) == 1) and (len(bodies) == 1):
# there is only one dashboard, so optimize out the tabbing
contents = bodies[0]
else:
contents = """
<div id="hometabs" >
<ul id="suit_form_tabs" class="nav nav-tabs nav-tabs-suit" data-tab-prefix="suit-tab">
%s
</ul>
%s
</div>
""" % ("\n".join(tabs), "\n".join(bodies))
t = template.Template(head_template + contents + self.tail_template)
response_kwargs = {}
response_kwargs.setdefault('content_type', self.content_type)
return self.response_class(
request=request,
template=t,
context=context,
**response_kwargs)
def singleDashboardView(self, request, id, context):
head_template = self.head_template
tail_template = self.tail_template
# if id is a number, load by datamodel,
# else look directly for the template
if(isInt(id)):
view = DashboardView.objects.get(id=id)
t = template.Template(head_template + self.embedDashboardView(view) + self.tail_template)
else:
t = template.Template(head_template + self.readTemplate("xos" + id) + self.tail_template)
response_kwargs = {}
response_kwargs.setdefault('content_type', self.content_type)
return self.response_class(
request=request,
template=t,
context=context,
**response_kwargs)
def singleFullView(self, request, id, context):
head_template = self.head_wholePage_template
tail_template = self.tail_template
view = DashboardView.objects.get(id=id)
t = template.Template(head_template + self.embedDashboardView(view) + self.tail_template)
response_kwargs = {}
response_kwargs.setdefault('content_type', self.content_type)
return self.response_class(
request=request,
template=t,
context=context,
**response_kwargs)
| true | true |
f72be85a54f072db1fb3df347bed8b5574b5043c | 3,782 | py | Python | fn_task_utils/fn_task_utils/components/task_utils_close_task.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 65 | 2017-12-04T13:58:32.000Z | 2022-03-24T18:33:17.000Z | fn_task_utils/fn_task_utils/components/task_utils_close_task.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 48 | 2018-03-02T19:17:14.000Z | 2022-03-09T22:00:38.000Z | fn_task_utils/fn_task_utils/components/task_utils_close_task.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 95 | 2018-01-11T16:23:39.000Z | 2022-03-21T11:34:29.000Z | # -*- coding: utf-8 -*-
# Copyright © IBM Corporation 2010, 2019
# pragma pylint: disable=unused-argument, no-self-use
"""Function implementation"""
import logging
from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError
from resilient_lib import ResultPayload
from fn_task_utils.lib.task_common import find_task_by_name, get_function_input
class FunctionComponent(ResilientComponent):
    """Component that implements Resilient function 'task_utils_close_task'."""

    def __init__(self, opts):
        """constructor provides access to the configuration options"""
        super(FunctionComponent, self).__init__(opts)
        self.options = opts.get("fn_task_utils", {})

    @handler("reload")
    def _reload(self, event, opts):
        """Configuration options have changed, save new values"""
        self.options = opts.get("fn_task_utils", {})

    @function("task_utils_close_task")
    def _task_utils_close_task_function(self, event, *args, **kwargs):
        """Function: Close either a System or Custom task via the REST API.

        Inputs (via kwargs):
            incident_id (number, required): incident containing the task.
            task_id (number, optional): id of the task to close.
            task_name (text, optional): name used to look the task up when
                no task_id is supplied.

        Yields StatusMessages for progress and finally a FunctionResult.
        """
        log = logging.getLogger(__name__)

        def close_task_status(task):
            """get_put callback: mark the task closed and return it for the PUT."""
            task["status"] = "C"
            log.debug("Changed status to closed for task with name %s" % task["name"])
            return task

        try:
            payload = ResultPayload("task_utils_close_task", **kwargs)

            # Get the function parameters:
            incident_id = get_function_input(kwargs, "incident_id")  # number
            task_id = get_function_input(kwargs, "task_id", optional=True)  # number
            task_name = get_function_input(kwargs, "task_name", optional=True)  # text

            log.info("incident_id: %s", incident_id)
            log.info("task_id: %s", task_id)
            log.info("task_name: %s", task_name)

            res_client = self.rest_client()

            if not task_name and not task_id:
                raise ValueError("Either a Task ID or a Task Name to search for must be provided.")

            if task_id:
                log.debug("Task ID was provided, using this to contact REST API")
            else:
                if task_name:
                    yield StatusMessage(
                        u"task_name was provided; Searching incident {} for first matching task with name '{}'".format(
                            incident_id, task_name))
                    task_id = find_task_by_name(res_client, incident_id, task_name)

                if not task_id:
                    raise ValueError(u"Could not find task with name {}".format(task_name))

            task_url = "/tasks/{}".format(task_id)

            try:
                res_client.get_put(task_url, lambda task: close_task_status(task))
            except Exception as close_exception:
                # BUGFIX: the original assigned a *tuple*
                # ("...Error: {}", close_exception) to err_msg instead of
                # formatting the message, so the raised ValueError carried a
                # mangled payload.  Format it properly.
                raise ValueError(
                    "Encountered exception while trying to close task. Error: {0}".format(close_exception))

            yield StatusMessage("Task {} has been closed".format(task_id))

            results = payload.done(
                success=True,
                content={
                    "task_id": task_id,
                    "task_name": task_name
                }
            )

            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
| 41.56044 | 119 | 0.608673 |
import logging
from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError
from resilient_lib import ResultPayload
from fn_task_utils.lib.task_common import find_task_by_name, get_function_input
class FunctionComponent(ResilientComponent):
def __init__(self, opts):
super(FunctionComponent, self).__init__(opts)
self.options = opts.get("fn_task_utils", {})
@handler("reload")
def _reload(self, event, opts):
self.options = opts.get("fn_task_utils", {})
@function("task_utils_close_task")
def _task_utils_close_task_function(self, event, *args, **kwargs):
def close_task_status(task):
task["status"] = "C"
log.debug("Changed status to closed for task with name %s" % task["name"])
return task
try:
payload = ResultPayload("task_utils_close_task", **kwargs)
incident_id = get_function_input(kwargs, "incident_id")
task_id = get_function_input(kwargs, "task_id", optional=True)
task_name = get_function_input(kwargs, "task_name", optional=True)
log = logging.getLogger(__name__)
log.info("incident_id: %s", incident_id)
log.info("task_id: %s", task_id)
log.info("task_name: %s", task_name)
res_client = self.rest_client()
if not task_name and not task_id:
raise ValueError("Either a Task ID or a Task Name to search for must be provided.")
if task_id:
log.debug("Task ID was provided, using this to contact REST API")
else:
if task_name:
yield StatusMessage(
u"task_name was provided; Searching incident {} for first matching task with name '{}'".format(
incident_id, task_name))
task_id = find_task_by_name(res_client, incident_id, task_name)
if not task_id:
raise ValueError(u"Could not find task with name {}".format(task_name))
task_url = "/tasks/{}".format(task_id)
try:
res_client.get_put(task_url, lambda task: close_task_status(task))
except Exception as close_exception:
err_msg = "Encountered exception while trying to close task. Error: {}", close_exception
raise ValueError(err_msg)
yield StatusMessage("Task {} has been closed".format(task_id))
results = payload.done(
success=True,
content={
"task_id": task_id,
"task_name": task_name
}
)
yield FunctionResult(results)
except Exception:
yield FunctionError()
| true | true |
f72be963c795ad4620dda7b2f24b5d80e2f23382 | 8,139 | py | Python | tests/test_templates.py | eon01/wagtail-metadata | 583968b67157ac7944631966919b523674568141 | [
"BSD-3-Clause"
] | null | null | null | tests/test_templates.py | eon01/wagtail-metadata | 583968b67157ac7944631966919b523674568141 | [
"BSD-3-Clause"
] | null | null | null | tests/test_templates.py | eon01/wagtail-metadata | 583968b67157ac7944631966919b523674568141 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from django.forms.utils import flatatt
from django.template import TemplateSyntaxError, engines
from django.test import RequestFactory, TestCase, override_settings
from django.utils.html import format_html
from wagtail.core.models import Site
from wagtail.images.models import Image
from wagtail.images.tests.utils import get_test_image_file
from tests.app.models import TestModel, TestPage
from wagtailmetadata.tags import get_meta_image_url
class TemplateCase(object):
    """Mixin exercising the wagtail-metadata meta-tag output.

    Concrete subclasses supply ``self.engine`` (a Django template engine)
    and the ``render_meta`` / ``render_with_model`` / ``render_with_error``
    hooks, so the same assertions run against every engine.
    """
    def setUp(self):
        # Fixtures: a site with a name, a fake request bound to it, an
        # image, a page carrying search metadata, and a plain model.
        self.site = Site.objects.first()
        self.site.site_name = 'Example site'
        self.site.save()
        self.factory = RequestFactory()
        self.request = self.factory.get('/test/')
        self.request.site = self.site
        self.image = Image.objects.create(
            title='Test Image',
            file=get_test_image_file(),
        )
        self.page = self.site.root_page.add_child(instance=TestPage(
            title='Test Page',
            search_image=self.image,
            search_description='Some test content description',
        ))
        self.test_model = TestModel.objects.create()
    def render(self, string, context=None, request_context=True):
        # Render *string* with self.engine, optionally injecting the request.
        if context is None:
            context = {}
        # Add a request to the template, to simulate a RequestContext
        if request_context:
            context['request'] = self.request
        template = self.engine.from_string(string)
        return template.render(context)
    def meta(self, attrs):
        # Build the expected HTML <meta ...> tag from an attribute dict.
        return format_html('<meta{0}>'.format(flatatt(attrs)))
    def test_twitter_render(self):
        # Twitter card tags: large-image card with title/description/image.
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'twitter:card', 'content': 'summary_large_image',
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:title',
            'content': self.page.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_twitter_no_image(self):
        # Without an image the card degrades to 'summary' and omits the image tag.
        self.page.search_image = None
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'twitter:card', 'content': 'summary',
        }), out)
        self.assertNotIn('twitter:image', out)
    def test_og_render(self):
        # OpenGraph tags: url, title, description, site name and image.
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'property': 'og:url', 'content': self.page.full_url
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:title',
            'content': self.page.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:site_name', 'content': self.site.site_name
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_og_no_image(self):
        # og:image is omitted entirely when the page has no search image.
        self.page.search_image = None
        out = self.render_meta()
        self.assertNotIn('og:image', out)
    def test_misc_render(self):
        # schema.org itemprop tags: url, name, description, image.
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'itemprop': 'url', 'content': self.page.full_url
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'name',
            'content': self.page.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_generic_render(self):
        # Plain <meta name="description"> is emitted as well.
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'description', 'content': self.page.search_description,
        }), out)
    def test_custom_model(self):
        # A non-page object supplies its metadata through get_meta_* methods.
        out = self.render_with_model()
        self.assertInHTML(self.meta({
            'itemprop': 'url',
            'content': self.test_model.get_meta_url()
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'name',
            'content': self.test_model.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'description',
            'content': self.test_model.get_meta_description()
        }), out)
    def fill_out_page_meta_fields(self):
        # Give the page fresh description/image values for the page_* tests.
        self.page.search_description = 'Hello, world'
        self.page.search_image = Image.objects.create(
            title='Page image', file=get_test_image_file())
    def test_page_twitter_render(self):
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'twitter:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_page_og_render(self):
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'property': 'og:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_page_misc_render(self):
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'itemprop': 'description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_page_generic_render(self):
        # The page title is also surfaced as a <title> element.
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML("<title>{}</title>".format(self.page.title), out)
        self.assertInHTML(self.meta({
            'name': 'description', 'content': self.page.search_description,
        }), out)
    def test_error_messages(self):
        # Passing an invalid object to the tag raises TemplateSyntaxError.
        self.assertRaises(TemplateSyntaxError, self.render_with_error)
    def test_get_meta_image_url_filter(self):
        # Default rendition is the original image.
        self.fill_out_page_meta_fields()
        result = get_meta_image_url(self.request, self.page.search_image)
        self.assertTrue(result.endswith("original.png"))
    @override_settings(WAGTAILMETADATA_IMAGE_FILTER="fill-10x20")
    def test_get_meta_image_url_filter_with_override(self):
        # The WAGTAILMETADATA_IMAGE_FILTER setting selects the rendition.
        self.fill_out_page_meta_fields()
        result = get_meta_image_url(self.request, self.page.search_image)
        self.assertTrue(result.endswith("fill-10x20.png"))
class TestJinja2(TemplateCase, TestCase):
    """Run the shared TemplateCase assertions through the Jinja2 engine."""

    engine = engines['jinja2']

    def render_meta(self):
        """Render the default meta_tags() call against self.page."""
        return self.render('{{ meta_tags() }}', context={'page': self.page})

    def render_with_model(self):
        """Render meta_tags for the standalone test model."""
        return self.render('{{ meta_tags(custom) }}',
                           context={'custom': self.test_model})

    def render_with_error(self):
        """Render meta_tags with an invalid (None) object."""
        return self.render('{{ meta_tags(custom) }}',
                           context={'custom': None})
class TestDjangoTemplateEngine(TemplateCase, TestCase):
    """Run the shared TemplateCase assertions through the Django engine."""

    engine = engines['django']

    def render_meta(self):
        """Render the default {% meta_tags %} call against self.page."""
        return self.render('{% load wagtailmetadata_tags %}{% meta_tags %}',
                           context={'self': self.page})

    def render_with_model(self):
        """Render meta_tags for the standalone test model."""
        return self.render('{% load wagtailmetadata_tags %}{% meta_tags custom %}',
                           context={'custom': self.test_model})

    def render_with_error(self):
        """Render meta_tags with an invalid (None) object."""
        return self.render('{% load wagtailmetadata_tags %}{% meta_tags custom %}',
                           context={'custom': None})
| 34.341772 | 120 | 0.621207 |
from django.forms.utils import flatatt
from django.template import TemplateSyntaxError, engines
from django.test import RequestFactory, TestCase, override_settings
from django.utils.html import format_html
from wagtail.core.models import Site
from wagtail.images.models import Image
from wagtail.images.tests.utils import get_test_image_file
from tests.app.models import TestModel, TestPage
from wagtailmetadata.tags import get_meta_image_url
class TemplateCase(object):
def setUp(self):
self.site = Site.objects.first()
self.site.site_name = 'Example site'
self.site.save()
self.factory = RequestFactory()
self.request = self.factory.get('/test/')
self.request.site = self.site
self.image = Image.objects.create(
title='Test Image',
file=get_test_image_file(),
)
self.page = self.site.root_page.add_child(instance=TestPage(
title='Test Page',
search_image=self.image,
search_description='Some test content description',
))
self.test_model = TestModel.objects.create()
def render(self, string, context=None, request_context=True):
if context is None:
context = {}
if request_context:
context['request'] = self.request
template = self.engine.from_string(string)
return template.render(context)
def meta(self, attrs):
return format_html('<meta{0}>'.format(flatatt(attrs)))
def test_twitter_render(self):
out = self.render_meta()
self.assertInHTML(self.meta({
'name': 'twitter:card', 'content': 'summary_large_image',
}), out)
self.assertInHTML(self.meta({
'name': 'twitter:title',
'content': self.page.get_meta_title(),
}), out)
self.assertInHTML(self.meta({
'name': 'twitter:description', 'content': self.page.search_description,
}), out)
self.assertInHTML(self.meta({
'name': 'twitter:image',
'content': get_meta_image_url(self.request, self.page.search_image),
}), out)
def test_twitter_no_image(self):
self.page.search_image = None
out = self.render_meta()
self.assertInHTML(self.meta({
'name': 'twitter:card', 'content': 'summary',
}), out)
self.assertNotIn('twitter:image', out)
def test_og_render(self):
out = self.render_meta()
self.assertInHTML(self.meta({
'property': 'og:url', 'content': self.page.full_url
}), out)
self.assertInHTML(self.meta({
'property': 'og:title',
'content': self.page.get_meta_title(),
}), out)
self.assertInHTML(self.meta({
'property': 'og:description', 'content': self.page.search_description,
}), out)
self.assertInHTML(self.meta({
'property': 'og:site_name', 'content': self.site.site_name
}), out)
self.assertInHTML(self.meta({
'property': 'og:image',
'content': get_meta_image_url(self.request, self.page.search_image),
}), out)
def test_og_no_image(self):
self.page.search_image = None
out = self.render_meta()
self.assertNotIn('og:image', out)
def test_misc_render(self):
out = self.render_meta()
self.assertInHTML(self.meta({
'itemprop': 'url', 'content': self.page.full_url
}), out)
self.assertInHTML(self.meta({
'itemprop': 'name',
'content': self.page.get_meta_title(),
}), out)
self.assertInHTML(self.meta({
'itemprop': 'description', 'content': self.page.search_description,
}), out)
self.assertInHTML(self.meta({
'itemprop': 'image',
'content': get_meta_image_url(self.request, self.page.search_image),
}), out)
def test_generic_render(self):
out = self.render_meta()
self.assertInHTML(self.meta({
'name': 'description', 'content': self.page.search_description,
}), out)
def test_custom_model(self):
out = self.render_with_model()
self.assertInHTML(self.meta({
'itemprop': 'url',
'content': self.test_model.get_meta_url()
}), out)
self.assertInHTML(self.meta({
'itemprop': 'name',
'content': self.test_model.get_meta_title(),
}), out)
self.assertInHTML(self.meta({
'itemprop': 'description',
'content': self.test_model.get_meta_description()
}), out)
def fill_out_page_meta_fields(self):
self.page.search_description = 'Hello, world'
self.page.search_image = Image.objects.create(
title='Page image', file=get_test_image_file())
def test_page_twitter_render(self):
self.fill_out_page_meta_fields()
out = self.render_meta()
self.assertInHTML(self.meta({
'name': 'twitter:description', 'content': self.page.search_description,
}), out)
self.assertInHTML(self.meta({
'name': 'twitter:image',
'content': get_meta_image_url(self.request, self.page.search_image),
}), out)
def test_page_og_render(self):
self.fill_out_page_meta_fields()
out = self.render_meta()
self.assertInHTML(self.meta({
'property': 'og:description', 'content': self.page.search_description,
}), out)
self.assertInHTML(self.meta({
'property': 'og:image',
'content': get_meta_image_url(self.request, self.page.search_image),
}), out)
def test_page_misc_render(self):
self.fill_out_page_meta_fields()
out = self.render_meta()
self.assertInHTML(self.meta({
'itemprop': 'description', 'content': self.page.search_description,
}), out)
self.assertInHTML(self.meta({
'itemprop': 'image',
'content': get_meta_image_url(self.request, self.page.search_image),
}), out)
def test_page_generic_render(self):
self.fill_out_page_meta_fields()
out = self.render_meta()
self.assertInHTML("<title>{}</title>".format(self.page.title), out)
self.assertInHTML(self.meta({
'name': 'description', 'content': self.page.search_description,
}), out)
def test_error_messages(self):
self.assertRaises(TemplateSyntaxError, self.render_with_error)
def test_get_meta_image_url_filter(self):
self.fill_out_page_meta_fields()
result = get_meta_image_url(self.request, self.page.search_image)
self.assertTrue(result.endswith("original.png"))
@override_settings(WAGTAILMETADATA_IMAGE_FILTER="fill-10x20")
def test_get_meta_image_url_filter_with_override(self):
self.fill_out_page_meta_fields()
result = get_meta_image_url(self.request, self.page.search_image)
self.assertTrue(result.endswith("fill-10x20.png"))
class TestJinja2(TemplateCase, TestCase):
engine = engines['jinja2']
def render_meta(self):
return self.render('{{ meta_tags() }}', context={'page': self.page})
def render_with_model(self):
return self.render('{{ meta_tags(custom) }}', context={'custom': self.test_model})
def render_with_error(self):
return self.render('{{ meta_tags(custom) }}', context={'custom': None})
class TestDjangoTemplateEngine(TemplateCase, TestCase):
engine = engines['django']
def render_meta(self):
return self.render('{% load wagtailmetadata_tags %}{% meta_tags %}', context={'self': self.page})
def render_with_model(self):
return self.render('{% load wagtailmetadata_tags %}{% meta_tags custom %}', context={'custom': self.test_model})
def render_with_error(self):
return self.render('{% load wagtailmetadata_tags %}{% meta_tags custom %}', context={'custom': None})
| true | true |
f72bea5f4fd55e05a650df7818ae1a13dacf5f50 | 1,392 | py | Python | tests/z_value_test.py | daineal/Anomaly-Detection | 437b926c03b2975f87c9e182f47a84c71b4abdb0 | [
"MIT"
] | 1 | 2017-04-30T08:05:14.000Z | 2017-04-30T08:05:14.000Z | tests/z_value_test.py | daineal/BestProject | 437b926c03b2975f87c9e182f47a84c71b4abdb0 | [
"MIT"
] | null | null | null | tests/z_value_test.py | daineal/BestProject | 437b926c03b2975f87c9e182f47a84c71b4abdb0 | [
"MIT"
] | null | null | null | from unittest import TestCase
from apps.algorithms.mean import Mean
from apps.algorithms.standart_deviation import StandartDeviation
from apps.algorithms.z_value import ZValue
__author__ = 'cenk'
class ZValueTest(TestCase):
def setUp(self):
pass
def test_algorithm_with_list(self):
data_list = [1, 2, 3, 4, 5]
standart_deviation = StandartDeviation()
standart_deviation_value = standart_deviation.calculate(data_list)
mean = Mean()
mean_value = mean.calculate(data_list)
print standart_deviation_value, mean_value
z_value = ZValue()
z1 = z_value.calculate(88, mean=100, standart_deviation=10)
z2 = z_value.calculate(112, mean=100, standart_deviation=10)
z3 = z_value.calculate(5, mean=100, standart_deviation=10)
print z1, z2, z3
def test_get_decimals(self):
z_value = ZValue()
z_value.calculate(88, mean=100, standart_deviation=10)
z_value.find_from_table()
def test_algorithm_with_tuple(self):
mean = Mean()
data_list = [("a", 1), ("b", 2), ("c", 3), ( "d", 4), ("e", 5)]
self.assertEquals(3, mean.calculate(data_list, is_tuple=True, index=1))
data_list = [("a", "a", 1), ("b", "b", 2), ("c", "c", 3), ("d", "d", 4), ("e", "e", 5)]
self.assertEquals(3.0, mean.calculate(data_list, is_tuple=True, index=2)) | 35.692308 | 95 | 0.641523 | from unittest import TestCase
from apps.algorithms.mean import Mean
from apps.algorithms.standart_deviation import StandartDeviation
from apps.algorithms.z_value import ZValue
__author__ = 'cenk'
class ZValueTest(TestCase):
def setUp(self):
pass
def test_algorithm_with_list(self):
data_list = [1, 2, 3, 4, 5]
standart_deviation = StandartDeviation()
standart_deviation_value = standart_deviation.calculate(data_list)
mean = Mean()
mean_value = mean.calculate(data_list)
print standart_deviation_value, mean_value
z_value = ZValue()
z1 = z_value.calculate(88, mean=100, standart_deviation=10)
z2 = z_value.calculate(112, mean=100, standart_deviation=10)
z3 = z_value.calculate(5, mean=100, standart_deviation=10)
print z1, z2, z3
def test_get_decimals(self):
z_value = ZValue()
z_value.calculate(88, mean=100, standart_deviation=10)
z_value.find_from_table()
def test_algorithm_with_tuple(self):
mean = Mean()
data_list = [("a", 1), ("b", 2), ("c", 3), ( "d", 4), ("e", 5)]
self.assertEquals(3, mean.calculate(data_list, is_tuple=True, index=1))
data_list = [("a", "a", 1), ("b", "b", 2), ("c", "c", 3), ("d", "d", 4), ("e", "e", 5)]
self.assertEquals(3.0, mean.calculate(data_list, is_tuple=True, index=2)) | false | true |
f72beaa7ee8d91281059cff457c016f1e6f65cb3 | 3,651 | py | Python | bindings/python/ensmallen/datasets/string/candidatussericytochromatiabacteriums15bmn24raac196.py | AnacletoLAB/ensmallen_graph | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 5 | 2021-02-17T00:44:45.000Z | 2021-08-09T16:41:47.000Z | bindings/python/ensmallen/datasets/string/candidatussericytochromatiabacteriums15bmn24raac196.py | AnacletoLAB/ensmallen_graph | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 18 | 2021-01-07T16:47:39.000Z | 2021-08-12T21:51:32.000Z | bindings/python/ensmallen/datasets/string/candidatussericytochromatiabacteriums15bmn24raac196.py | AnacletoLAB/ensmallen | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 3 | 2021-01-14T02:20:59.000Z | 2021-08-04T19:09:52.000Z | """
This file offers the methods to automatically retrieve the graph Candidatus Sericytochromatia bacterium S15B-MN24 RAAC_196.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def CandidatusSericytochromatiaBacteriumS15bMn24Raac196(
    directed: bool = False,
    preprocess: bool = True,
    load_nodes: bool = True,
    verbose: int = 2,
    cache: bool = True,
    cache_path: str = "graphs/string",
    version: str = "links.v11.5",
    **additional_graph_kwargs: Dict
) -> Graph:
    """Return the STRING graph for Candidatus Sericytochromatia bacterium
    S15B-MN24 RAAC_196, downloaded on first use.

    Parameters
    -------------------
    directed: bool = False
        Whether to load the graph as directed (default: undirected).
    preprocess: bool = True
        Whether to preprocess the graph for optimal load time and memory.
    load_nodes: bool = True
        Whether to load the node vocabulary, or treat nodes as a numeric range.
    verbose: int = 2
        Whether to show loading bars while retrieving and building the graph.
    cache: bool = True
        Whether to download and preprocess only once.
    cache_path: str = "graphs"
        Where to store the downloaded graphs.
    version: str = "links.v11.5"
        Graph version to retrieve; one of "homology.v11.5",
        "physical.links.v11.5" or "links.v11.5".
    additional_graph_kwargs: Dict
        Additional keyword arguments forwarded to the graph constructor.

    Returns
    -----------------------
    Instance of the requested STRING graph.

    References
    ---------------------
    Please cite Szklarczyk et al., "STRING v11", Nucleic Acids Research
    47(D1):D607-D613, 2019, if you use the data.
    """
    retriever = AutomaticallyRetrievedGraph(
        graph_name="CandidatusSericytochromatiaBacteriumS15bMn24Raac196",
        repository="string",
        version=version,
        directed=directed,
        preprocess=preprocess,
        load_nodes=load_nodes,
        verbose=verbose,
        cache=cache,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs,
    )
    return retriever()
| 34.771429 | 223 | 0.690222 | from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph
def CandidatusSericytochromatiaBacteriumS15bMn24Raac196(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
return AutomaticallyRetrievedGraph(
graph_name="CandidatusSericytochromatiaBacteriumS15bMn24Raac196",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| true | true |
f72beafa6216e6ee1923e3c7055a37cbcdd6d963 | 2,305 | py | Python | oop-intro/Game.py | emptyspace42/mycode | 834c0a2817ffffa92b90ec3bd05ef41cb73a8879 | [
"MIT"
] | 5 | 2018-05-21T18:16:13.000Z | 2018-05-22T22:41:40.000Z | oop-intro/Game.py | cazzara/mycode | 834c0a2817ffffa92b90ec3bd05ef41cb73a8879 | [
"MIT"
] | null | null | null | oop-intro/Game.py | cazzara/mycode | 834c0a2817ffffa92b90ec3bd05ef41cb73a8879 | [
"MIT"
] | null | null | null | from Player import Player
from time import sleep
from Cheater import Cheater_Loaded, Cheater_Swapper
from random import random
class Game:
    """Interactive two-player dice duel.

    NOTE: constructing a Game immediately runs the whole interactive
    session via main().
    """

    def __init__(self):
        self.main()

    def make_player(self):
        """Prompt for a name and cheater status; return a Player or a cheater.

        Cheater flavour is chosen at random (loaded dice vs. swapper).
        """
        name = input("Enter your name: ")
        cheat = input("Are you a cheater? (y/n)")
        if cheat[0].lower() != 'y':
            return Player(name)
        if int(random() * 10) <= 5:
            return Cheater_Loaded(name)
        return Cheater_Swapper(name)

    def play_hand(self, player):
        """Count down, roll the player's dice, and let cheaters cheat."""
        for count in ("1...", "2...", "3..."):
            print(count)
            sleep(1)
        player.roll()
        if self.is_cheater(player):
            player.cheat()

    def is_cheater(self, player):
        """True if *player* is any of the cheater subclasses."""
        return isinstance(player, (Cheater_Swapper, Cheater_Loaded))

    def main(self):
        """Run one full game: create two players, roll, announce the winner."""
        print("Welcome to the super fun dice game!")
        print("-----Player 1-----")
        first = self.make_player()
        print("-----Player 2-----")
        second = self.make_player()
        print("Alright, {} vs {}!!".format(first.get_name(), second.get_name()))
        print("*****Begin!*****")
        print("Player 1 Rolling:")
        self.play_hand(first)
        print("Player 2 Rolling:")
        self.play_hand(second)
        total_one = sum(first.get_dice())
        total_two = sum(second.get_dice())
        for player, total in ((first, total_one), (second, total_two)):
            print("{} rolled {}...Total: {}".format(
                player.get_name(), player.get_dice(), total))
        if total_one == total_two:
            print("A Draw! :O")
        else:
            winner = first if total_one > total_two else second
            if self.is_cheater(winner):
                print("{} Wins! (But you cheated!)".format(winner.get_name()))
            else:
                print("{} Wins!".format(winner.get_name()))
if __name__ == "__main__":
g = Game()
| 32.013889 | 98 | 0.527549 | from Player import Player
from time import sleep
from Cheater import Cheater_Loaded, Cheater_Swapper
from random import random
class Game:
def __init__(self):
self.main()
def make_player(self):
p = None
name = input("Enter your name: ")
cheat = input("Are you a cheater? (y/n)")
if cheat[0].lower() == 'y':
cheat_type = int(random()*10)
if cheat_type <= 5:
p = Cheater_Loaded(name)
else:
p = Cheater_Swapper(name)
else:
p = Player(name)
return p
def play_hand(self, player):
print("1...")
sleep(1)
print("2...")
sleep(1)
print("3...")
sleep(1)
player.roll()
if self.is_cheater(player):
player.cheat()
def is_cheater(self, player):
return isinstance(player, Cheater_Swapper) or isinstance(player, Cheater_Loaded)
def main(self):
print("Welcome to the super fun dice game!")
print("-----Player 1-----")
player1 = self.make_player()
print("-----Player 2-----")
player2 = self.make_player()
print("Alright, {} vs {}!!".format(player1.get_name(), player2.get_name()))
print("*****Begin!*****")
print("Player 1 Rolling:")
self.play_hand(player1)
print("Player 2 Rolling:")
self.play_hand(player2)
p1_total = sum(player1.get_dice())
p2_total = sum(player2.get_dice())
print("{} rolled {}...Total: {}".format(player1.get_name(), player1.get_dice(), p1_total))
print("{} rolled {}...Total: {}".format(player2.get_name(), player2.get_dice(), p2_total))
if p1_total == p2_total:
print("A Draw! :O")
elif p1_total > p2_total:
if self.is_cheater(player1):
print("{} Wins! (But you cheated!)".format(player1.get_name()))
else:
print("{} Wins!".format(player1.get_name()))
else:
if self.is_cheater(player2):
print("{} Wins! (But you cheated!)".format(player2.get_name()))
else:
print("{} Wins!".format(player2.get_name()))
if __name__ == "__main__":
g = Game()
| true | true |
f72bebf85db3d4c69b7d19ce58388cebfef5832f | 80 | py | Python | brian2/sphinxext/__init__.py | SimonAltrogge/brian2 | 6463c368a8277041051bf5ae4816f0dd5b6e057c | [
"BSD-2-Clause"
] | 674 | 2015-01-14T11:05:39.000Z | 2022-03-29T04:53:50.000Z | brian2/sphinxext/__init__.py | JongwanKim2090/brian2 | c212a57cb992b766786b5769ebb830ff12d8a8ad | [
"BSD-2-Clause"
] | 937 | 2015-01-05T13:24:22.000Z | 2022-03-25T13:10:13.000Z | brian2/sphinxext/__init__.py | JongwanKim2090/brian2 | c212a57cb992b766786b5769ebb830ff12d8a8ad | [
"BSD-2-Clause"
] | 237 | 2015-01-05T13:54:16.000Z | 2022-03-15T22:16:32.000Z | """
Brian-specific extension to the Sphinx documentation generation system.
"""
| 20 | 71 | 0.775 | true | true | |
f72bec6eb6fe20587d73322d3bca2f73d06296e7 | 24,462 | py | Python | XET.py | mezutelni/twrp-installer-xiaomi | 62ac87f316c70089e7188fa7b7f21d234c3643cd | [
"MIT"
] | 4 | 2017-10-09T20:23:56.000Z | 2019-06-27T03:59:26.000Z | XET.py | mezutelni/twrp-installer-xiaomi | 62ac87f316c70089e7188fa7b7f21d234c3643cd | [
"MIT"
] | 1 | 2017-12-01T19:12:44.000Z | 2018-01-06T14:42:47.000Z | XET.py | mezutelni/twrp-installer-xiaomi | 62ac87f316c70089e7188fa7b7f21d234c3643cd | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import os
import sys
import time
import urllib.request
import hashlib
try:
from colorama import Fore, Back, Style, init
except ModuleNotFoundError:
print ("You have no colorama installed, i will install it for you")
print
path = sys.executable
#path = path[:-11]
path = path.replace("python.exe","")
os.system(path+"/Scripts/pip install colorama")
print
print ("Ok, now you can restart script :)")
from colorama import Fore, Back, Style, init
from twrp import twrpInstaller
init()
# Detect the host OS and pick the matching terminal-clear command.
# NOTE(review): on any platform other than linux/linux2/win32 (e.g. macOS
# "darwin"), `clear` and `s` are never defined and later calls will fail.
if sys.platform == "linux" or sys.platform == "linux2":
    clear = lambda: os.system('clear')
    s = "l"
elif sys.platform == "win32":
    clear = lambda: os.system('cls')
    s = "w"
# some global variables
dashed_line = (Fore.MAGENTA + "--------------------------------------------------------------------" + Fore.RESET)
# NOTE(review): this runs "adb kill-server" once at import time (a side
# effect) and stores the shell exit *status*, not a reusable command.
killsystem = os.system("adb kill-server")
# this is path to /res/ folder and to .py file
resPath = os.path.abspath(os.path.dirname(__file__)) + os.sep + "res" + os.sep
filePath = os.path.abspath(os.path.dirname(__file__)) + os.sep
# resPath = os.path.dirname(sys.executable)+os.sep+"res"+os.sep
# filePath = os.path.dirname(sys.executable)+os.sep
# this is list of devices with official twrp support
devices = ["cancro", "libra", "ferrari", "aqua", "gemini", "virgo", "leo", "scorpio", "jason", "tiffany", "song",
"meri", "tisson", "capricorn", "natrium", "lithium", "chiron", "sagit", "hydrogen", "oxygen", "helium",
"HM2013023", "armani", "HM2014811", "HM2014813", "omega", "lcsh92_wet_jb9", "gucci", "dior", "hermes", "ido",
"land", "hennessy", "kate", "kenzo", "nikel", "prada", "markw", "ugg", "mido", "rolex", "santoni", "mocha",
"latte", "cappu","ugglite" ]
devicesDict = {'aries': "Mi 2", 'pisces': "Mi 3 TD", 'cancro': "Mi 3 W/Mi 4", 'libra': "Mi 4c",
'ferrari': "Mi 4i", 'aqua': "Mi 4s", 'gemini': "Mi 5", 'virgo': "Mi Note",
'leo': "Mi Note Pro", 'scorpio': "Mi Note 2", 'jason': "Mi Note 3", 'tiffany': "Mi 5x",
'song': "Mi 5c", 'meri': "Mi 5c", 'tissot': "Mi A1", 'capricorn': "Mi 5s", 'natrium': "Mi 5s+",
'lithium': "Mi MIX", 'chiron': "Mi MIX 2",'polaris':'Mi MIX 2s', 'sagit': "Mi 6", 'hydrogen': "Mi MAX",
'oxygen': "Mi MAX 2", 'helium': "Mi MAX PRO",
'HM2013023': "Redmi 1 - WCDMA",
'armani': "Redmi 1s - WCDMA", 'HM2014811': "Redmi 2 - WCDMA", 'HM2014813': "Redmi 2 - TD",
'omega': "Redmi PRO", 'lcsh92_wet_jb9': "Redmi note 1 - 3g-mtk", 'gucci': "Redmi note 1s",
'dior': "Redmi Note 1 - 4g", 'hermes': "Redmi Note 2", 'ido': "Redmi 3", 'land': "Redmi 3 S/X",
'hennessy': "Redmi Note 3 (MTK)", 'kate': "Redmi Note 3 Global",
'kenzo': "Redmi Note 3 Chinese", 'nikel': "Redmi Note 4", 'prada': "Redmi 4",
'markw': "Redmi 4 pro", 'ugg': "Redmi Note 5A", 'mido': "Redmi Note 4/4x", 'rolex': "Redmi 4a",
'santoni': "Redmi 4x", 'ugglite':'Redmi Note 5A','vince':'Redmi Note 5/5+','whyred':'Redmi Note 5 Pro',
'mocha': "Mi PAD", 'latte': "Mi PAD 2", 'cappu': "Mi PAD 3"}
googleApps = {
"youtube": "com.google.android.youtube",
"drive": "com.google.android.apps.docs",
"music": "com.google.android.music",
"maps": ":com.google.android.apps.maps",
"videos": "com.google.android.videos",
"photos": "com.google.android.apps.photos",
"chrome": "com.android.chrome",
"gmail": "com.google.android.gm",
"translate": "com.google.android.apps.translate",
"duo": "com.google.android.apps.tachyon"
}
miuiApps = {
"bugreport": "com.miui.bugreport",
"compass": "com.miui.compass",
"video": "com.miui.videoplayer",
"mail": "com.android.email",
"music": "com.miui.player",
"scanner": "com.xiaomi.scanner",
"browser": "com.android.browser",
"screenrecorder": "com.miui.screenrecorder",
"gallery": "com.miui.gallery",
"updater": "com.android.updater",
"midrop": "com.xiaomi.midrop",
"calendar": "com.android.calendar",
"miui assistant": "com.mi.android.globalpersonalassistant",
"notes": "com.miui.notes",
}
localmd5s = [
"f337d1707478d63315820a45030f547d", # 0.camera
"537e17e2585e731a1c26fbd81eb2affa", # 1.home
]
def getInt():
    """Prompt until the user enters a valid integer menu choice and return it.

    Uses a loop instead of the original recursion, so repeated bad input
    cannot grow the call stack (and the redundant int(getInt()) re-conversion
    is gone).
    """
    while True:
        try:
            return int(input(Back.BLUE + "choose: " + Back.RESET))
        except ValueError:
            print()
            print(Fore.RED + "Wrong, choose right option!" + Fore.RESET)
def mydevice():
    """Return the marketing name of the connected device, or "none".

    Reads ro.product.device from /system/build.prop over adb, extracts the
    codename after '=', and maps it through devicesDict.

    Fixes vs. the original: the temp file is read with a context manager
    (the original opened a second, never-closed handle just to call close()),
    and the value is extracted with partition('=') instead of the fragile
    lstrip('ro.product.device') character-set hack.
    """
    os.system("adb start-server")
    os.system("adb shell mount /system")
    os.system("adb shell \"cat /system/build.prop | grep ro.product.device=\" > tmp ")
    with open('tmp', 'r') as tmp_file:
        prop_line = tmp_file.read()
    os.remove("tmp")
    os.system("adb shell umount /system")
    # prop_line looks like "ro.product.device=<codename>"; keep the value part
    codename = ''.join(prop_line.partition('=')[2].split())
    clear()
    return devicesDict.get(codename, "none")
# Thanks to stackoverflow!
def md5(fname):
    """Return the hex MD5 digest of the file at *fname*, read in 4 KiB chunks."""
    digest = hashlib.md5()
    with open(fname, "rb") as handle:
        while True:
            chunk = handle.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def dpiChanger():
    """Interactively rewrite ro.sf.lcd_density in /system/build.prop over adb.

    Strips any existing ro.sf.lcd_density line, appends the new value,
    restores build.prop permissions, then returns to sTweaksMenu().
    NOTE(review): the nested double quotes in the adb shell commands do not
    nest on most shells - they appear to work only because the inner words
    need no quoting; confirm before changing these strings.
    """
    print(dashed_line)
    os.system("adb shell mount /system")
    print("Make sure that you made a build.prop backup! just in case")
    # dpi is taken verbatim from the user and spliced into a shell command
    dpi = input("Tell me what is your desired dpi: ")
    print("Ok, i'll change dpi to this value!")
    # drop the old density line into a copy, then move the copy back
    os.system('adb shell "grep -v "ro.sf.lcd_density" /system/build.prop > /system/build.prop.2"')
    os.system('adb shell "cp /system/build.prop.2 /system/build.prop"')
    # append the new density line (written as "ro.sf.lcd_density = <dpi>")
    os.system('adb shell "echo "ro.sf.lcd_density = ' + dpi + '" >> /system/build.prop"')
    # restore the usual 644 permissions on build.prop
    os.system('adb shell "chmod 644 /system/build.prop"')
    print("Dpi has been changed!" + Fore.RESET)
    os.system("adb shell umount /system")
    input("push enter to continue")
    print(dashed_line)
    sTweaksMenu()
def mix2Cam():
    """Download (if needed), verify and push the Mi MIX 2 camera apk to /system.

    On MD5 mismatch the local copy is deleted and the whole routine retries.
    Fix vs. the original: the local file path was built three inconsistent
    ways (`resPath + os.sep + "cam.apk"` with a doubled separator for the
    md5 check, `dirname(resPath)` for the existence check, plain
    concatenation for download/push); one os.path.join path is used for all.
    """
    print(dashed_line)
    # resPath already ends with os.sep; join normalises either way
    path = os.path.join(resPath, "cam.apk")
    os.system("adb shell mount /system")
    if not os.path.isfile(path):
        print(Fore.WHITE + "I need to download camera file first, be patient please" + Fore.RESET)
        urllib.request.urlretrieve('http://www1.zippyshare.com/d/T0XrorQl/9267/cam.apk', path)
    else:
        print(Fore.WHITE + "Ok, you have camera file already!" + Fore.RESET)
    if md5(path) == localmd5s[0]:
        os.system("adb push " + path + " /system/priv-app/MiuiCamera/MiuiCamera.apk")
        os.system("adb shell chmod 644 /system/priv-app/MiuiCamera/MiuiCamera.apk")
        print(Back.BLUE + "Your old camera is still here, backed up, just in case" + Back.RESET)
        os.system("adb shell umount /system")
        input(Fore.GREEN + "push enter to continue" + Fore.RESET)
        print(dashed_line)
        sTweaksMenu()
    else:
        print("But it's looks like it's broken, let me re-download it!")
        os.remove(path)
        mix2Cam()
def comMiuiHome():
    """Install a custom com.miui.home default-theme file over adb.

    Backs up the current theme file on the device, downloads the replacement
    on first use, verifies its MD5 and pushes it to /system.  On checksum
    mismatch the local copy is deleted and the routine retries.
    Fix vs. the original: the local file path was built three inconsistent
    ways (doubled os.sep for the md5 check, dirname(resPath) for the
    existence check); one os.path.join path is used throughout.
    """
    print(dashed_line)
    path = os.path.join(resPath, "com.miui.home")
    os.system("adb shell mount /system")
    # keep the stock theme next to the new one on the device
    os.system("adb shell mv /system/media/theme/default/com.miui.home /system/media/theme/default/com.miui.home.old")
    if not os.path.isfile(path):
        print(Fore.WHITE + "I need to download custom home file first, be patient please" + Fore.RESET)
        urllib.request.urlretrieve('http://www9.zippyshare.com/d/dRMuSMgW/9585/com.miui.home', path)
    else:
        print(Fore.WHITE + "Ok, you have custom home file already!" + Fore.RESET)
    if md5(path) == localmd5s[1]:
        os.system("adb push " + path + " /system/media/theme/default/com.miui.home")
        os.system("adb shell chmod 644 /system/media/theme/default/com.miui.home")
        print(Back.BLUE + "Your old com.miui.home is still here, backed up, just in case" + Back.RESET)
        os.system("adb shell umount /system")
        input(Fore.GREEN + "push enter to continue" + Fore.RESET)
        print(dashed_line)
        sTweaksMenu()
    else:
        os.remove(path)
        print("But it's looks like it's broken, let me re-download it!")
        comMiuiHome()
def bl():
    """Reboot to fastboot, report bootloader lock status, return to menu.

    Fix vs. the original: status was detected by peeking at a hard-coded
    character index (bl[72]) of the captured output, which breaks on any
    format variation, and the results file handle was never closed.
    fastboot prints a line like "(bootloader) Device unlocked: true"
    (assumed format - the original indexed into the same text).
    """
    os.system("adb reboot bootloader")
    clear()
    print(dashed_line)
    print("Your bootloader status is: ")
    os.system('fastboot oem device-info > results.txt 2>&1')
    with open('results.txt', 'r') as result_file:
        info = result_file.read()
    os.remove('results.txt')
    if 'unlocked: true' in info:
        print(Fore.GREEN + "Unlocked" + Fore.RESET)
    elif 'unlocked: false' in info:
        print(Fore.RED + "Locked" + Fore.RESET)
    else:
        # fastboot output did not match the expected format
        print(Fore.RED + "Unknown" + Fore.RESET)
    print()
    input(Back.BLUE + "Push enter to exit" + Back.RESET)
    menu()
def sideloader():
    """Repeatedly adb-sideload user-supplied files until the user stops."""
    while True:
        print(dashed_line)
        print(Fore.WHITE + "Due to problems with adb sideload implementation, you have to start sideload on your phone manually!" + Fore.RESET)
        target = input(Back.BLUE + "Drag and drop your file here: " + Back.RESET)
        os.system("adb sideload " + target)
        answer = str(input("Do you want to sideload next file? (y/n)")).lower()
        if answer == 'n':
            print(Fore.WHITE + "Ok, we'll go back now" + Fore.RESET)
            input("Push enter to continue")
            print(dashed_line)
            menu()
        elif answer == "y":
            # loop around for another file
            print(Fore.WHITE + "Ok! so here we go again" + Fore.RESET)
        else:
            print(Fore.RED + "Wrong option, so we will stop now, if u want to continue sideloading, just re launch this option from menu" + Fore.RESET)
            print(dashed_line)
            time.sleep(5)
            menu()
def remover(appList):
    """Show a numbered uninstall menu for *appList* ({label: package}) and
    uninstall the chosen package for user 0 via adb.

    Fix vs. the original: after a successful uninstall it called
    removermiui()/removergoogle(), which are not defined anywhere in this
    file, raising NameError; we simply re-show the same menu instead.  The
    manual index bookkeeping is replaced with a direct list lookup (relies,
    like the original, on dict insertion order).
    """
    print(dashed_line + Fore.LIGHTCYAN_EX)
    for pos, label in enumerate(appList, start=1):
        print("%i. %s" % (pos, label.capitalize()))
    print()
    print("0. Exit")
    case = getInt()
    if case == 0:
        clear()
        sTweaksMenu()
        return
    packages = list(appList.values())
    if 1 <= case <= len(packages):
        pckg = packages[case - 1]
        clear()
        print(dashed_line + Fore.GREEN)
        os.system("adb shell \"pm uninstall -k --user 0 %s\"" % pckg)
        print(pckg)
    # re-show the same menu (also covers out-of-range choices, which the
    # original silently ignored)
    remover(appList)
def appremover():
    """Top-level bloatware-removal menu: MIUI apps, Google apps, or both.

    Fix vs. the original: the exit branch was written as
    ``elif case==0`` *inside* the "remove everything" confirmation, where
    ``case`` had been rebound to the Y/N string - dead code - so menu
    option "0. Exit" silently did nothing.  It is now handled in the
    outer integer dispatch.
    """
    print(dashed_line)
    print(Fore.YELLOW + "| X.E.T |")
    print("| App remover menu |")
    print(dashed_line)
    print(Fore.CYAN + "| 1. Miui Apps")
    print(dashed_line)
    print(Fore.CYAN + "| 2. Google Apps")
    print(dashed_line)
    print(Fore.CYAN + "| 3. Full")
    print(Fore.RED + "| ^This one will remove all possible google and miui apps" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "| 0. Exit")
    print(dashed_line)
    case = getInt()
    if case == 1:
        clear()
        remover(miuiApps)
    elif case == 2:
        clear()
        remover(googleApps)
    elif case == 3:
        # confirm, then uninstall every known Google and MIUI package
        apps = list(googleApps) + list(miuiApps)
        pckgs = list(googleApps.values()) + list(miuiApps.values())
        print(Fore.RED + "Are you sure you want to remove: %s?" % ', '.join(apps))
        answer = input(Back.BLUE + "Y/N: " + Back.RESET)
        if answer.lower() == "y":
            for pckg in pckgs:
                os.system("adb shell \" pm uninstall -k --user 0 %s\"" % pckg)
            clear()
            print(dashed_line)
            print("Everything seems to be removed")
            input("Press enter to go back")
            sTweaksMenu()
        elif answer.lower() == "n":
            sTweaksMenu()
    elif case == 0:
        sTweaksMenu()
def rbMenu():
    """Reboot menu: reboot the attached device into recovery, bootloader,
    system or adb-sideload mode, or boot a TWRP image without installing it.
    Loops back into itself / the main menu after each action."""
    clear()
    print(mydevice())
    print(dashed_line)
    print(Fore.YELLOW + "|                               X.E.T                              |")
    print("|                            REBOOT MENU                           |")
    print("|     Some devices, like RN3P might have problems with reboots     |")
    print("|      from system, but reboots should work from adb/fastboot     |")
    print(dashed_line + Fore.RESET)
    print(Fore.CYAN + "|1. Reboot to recovery                                             |")
    print(Fore.WHITE + "|Reboot to recovery using ADB (so make sure to turn on debugging)  |" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "|2. Reboot to fastboot                                             |")
    print(Fore.WHITE + "|Reboot to fastboot using ADB (so make sure to turn on debugging)  |" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "|3. Reboot to system                                               |")
    print(Fore.WHITE + "|Reboot to system using ADB (so make sure to turn on debugging)    |" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "|4. Reboot to system                                               |")
    print(Fore.WHITE + "|Reboot to system using Fastboot mode!                             |" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "|5. Reboot to adb-sideload                                         |")
    print(Fore.WHITE + "|Reboot to sideload using ADB-root (so use it when in recovery)    |" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "|6. Boot twrp from file                                            |")
    print(Fore.WHITE + "|You can use it when you dont want to install it                   |" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "|0. Back to main menu                                              |")
    print(dashed_line + Fore.RESET)
    case = getInt()
    if case == 1:
        clear()
        os.system('adb reboot recovery')
        os.system('adb kill-server')
        rbMenu()
    elif case == 2:
        clear()
        os.system('adb reboot bootloader')
        os.system('adb kill-server')
        rbMenu()
    elif case == 3:
        clear()
        os.system('adb reboot')
        os.system('adb kill-server')
        rbMenu()
    elif case == 4:
        clear()
        os.system('fastboot reboot')
        menu()
    elif case == 5:
        clear()
        os.system('adb reboot sideload')
        menu()
    elif case == 6:
        clear()
        twrp = input("Put twrp file here: ")
        # Boots the given TWRP image once, without flashing it.
        os.system('fastboot boot '+twrp)
        menu()
    elif case == 0:
        killsystem  # NOTE(review): no-op — killsystem holds the exit code of the os.system('adb kill-server') run at import time; presumably a fresh kill was intended
        clear()
        menu()
    else:
        clear()
        print(Fore.RED + "Error you should choose right option!" + Fore.RESET)
        input("push enter to continue")
        rbMenu()
# Tweaks
def sTweaksMenu():
    """System tweaks menu: build.prop backup/restore, DPI change, ported
    MIX 2 camera, modified launcher, Camera2 API flag and the system app
    remover.  All tweaks are applied over adb with /system mounted, so
    they are meant to be run from recovery."""
    clear()
    print(mydevice())
    print(dashed_line)
    print(Fore.YELLOW + "|                               X.E.T                              |")
    print("|                        SYSTEM TWEEKS MENU                        |")
    print(dashed_line)
    print(Fore.CYAN + "|1. Build.prop backup                                              |")
    print(Fore.WHITE + "|Use it to backup your build.prop file!                            |")
    print(dashed_line)
    print(Fore.CYAN + "|2. Build.prop restore                                             |")
    print(Fore.WHITE + "|Use it to restore your build.prop file!                           |")
    print(dashed_line)
    print(Fore.CYAN + "|3. Change DPI                                                     |")
    print(Fore.WHITE + "|For changing dpi more than once, you have to restore build.prop!  |")
    print(dashed_line)
    print(Fore.CYAN + "|4. Install mix 2 camera                                           |")
    print(Fore.WHITE + "|Mix 2 camera ported for all Xiaomi devices;Tested only on miui9   |")
    print(dashed_line)
    print(Fore.CYAN + "|5. Install modified com.miui.home (desktop grid up to 10x10)      |")
    print(Fore.WHITE + "|Miui 9 exclusive                                                  |")
    print(dashed_line)
    print(Fore.CYAN + "|6. Activate Camera 2 API                                          |")
    print(Fore.WHITE + "|Use it to activate cam2api in your build.prop                     |")
    print(dashed_line)
    print(Fore.CYAN + "|7. System apps remover                                            |")
    print(Fore.WHITE + "|Remove google/miui apss without root, from system                 |")
    print(dashed_line)
    print(Fore.CYAN + "|0. Back to main menu                                              |")
    print(dashed_line)
    case = getInt()
    if case == 1:
        # Pull /system/build.prop into the local res/ folder as a backup.
        clear()
        print(dashed_line)
        os.system("adb shell mount /system")
        os.system("adb pull /system/build.prop " + resPath + "build.prop")
        print(Fore.WHITE + "Backup complete! Your build.prop is now in res folder!" + Fore.RESET)
        os.system("adb shell umount /system")
        input("push enter to continue")
        print(dashed_line)
        sTweaksMenu()
    elif case == 2:
        # Push the saved build.prop back and restore its permissions.
        clear()
        print(dashed_line)
        os.system("adb shell mount /system")
        os.system("adb push " + resPath + "build.prop /system/build.prop")
        os.system('adb shell "chmod 644 /system/build.prop"')
        print(Fore.WHITE + "Restore complete!" + Fore.RESET)
        os.system("adb shell umount /system")
        input("push enter to continue")
        print(dashed_line)
        sTweaksMenu()
    elif case == 3:
        clear()
        dpiChanger()
    elif case == 4:
        clear()
        mix2Cam()
    elif case == 5:
        clear()
        comMiuiHome()
    elif case == 6:
        # Appends the HAL3 flag; applying twice leaves a duplicate line.
        clear()
        os.system("adb shell mount /system")
        os.system('adb shell "echo persist.camera.HAL3.enabled=1 >> /system/build.prop"')
        print("You have enabled Camera 2 API YAY!")
        os.system("adb shell umount /system")
        input("push enter to continue")
        sTweaksMenu()
    elif case == 7:
        clear()
        appremover()
    elif case == 8:
        # NOTE(review): hidden option (not listed in the menu) — autoroot()
        # is not defined anywhere in this file, so choosing 8 raises
        # NameError.  TODO: confirm whether autoroot was removed on purpose.
        clear()
        autoroot()
    elif case == 0:
        killsystem  # NOTE(review): no-op — evaluates the import-time os.system() result, kills nothing
        clear()
        menu()
    else:
        clear()
        print(Fore.RED + "Error you should choose right option!" + Fore.RESET)
        input("push enter to continue")
        sTweaksMenu()
# about
def aboutMenu():
    """Static 'About' menu: script description, contact links, donation
    info and credits.  Purely informational; every branch loops back here
    (0 returns to the main menu)."""
    clear()
    print(mydevice())
    print(dashed_line)
    print(Fore.YELLOW + "|                               X.E.T                              |")
    print("|                               About                              |")
    print(dashed_line)
    print(Fore.CYAN + "|1. About script                                                   |")
    print(dashed_line)
    print(Fore.CYAN + "|2. Contact                                                        |")
    print(dashed_line)
    print(Fore.CYAN + "|3. Donations                                                      |")
    print(dashed_line)
    print(Fore.CYAN + "|4. Credits                                                        |")
    print(dashed_line)
    print(Fore.CYAN + "|0. Back                                                           |")
    print(dashed_line)
    case = getInt()
    if case == 1:
        print(dashed_line)
        print("Simply script, created by student, to make some tweaks easier to apply")
        print("First script purpose was to only automatize twrp installing (that's why repo is called twrp-installer)")
        print("Script is aiming to support Xiaomi devices(Some features are universal) on both Windows and Linux")
        print("When more test will be made, there will be stable executable version avalible for Windows")
        print(dashed_line)
        input()  # wait for Enter before redrawing the menu
        aboutMenu()
    elif case == 2:
        print(dashed_line)
        print("U can contact me on various sites, mostly under nickname Mezutelni")
        print("- github.com/mezutelni/")
        print("- miuipolska.pl/forum/profile/7082-mezutelni/")
        print("- forum.xda-developers.com/member.php?u=6270598")
        print(dashed_line)
        input()
        aboutMenu()
    elif case == 3:
        print(dashed_line)
        print(
            "If you want to buy me a beer, or keep my servers online, or simply say Thank You, please consider Donation for me")
        print("You can do it by PayPal on PayPal.me/Mezutelni or by contacting with me directly (see contact)")
        print(dashed_line)
        input()
        aboutMenu()
    elif case == 4:
        print(dashed_line)
        print("Thanks to: ")
        print("- Facebook group \" Złomowisko Rudej\" for inspiration and help with testing")
        print("- MiuiPolska forum society for help with testing and trusting me")
        print("- Orjon from MiuiPolska for idea and alpha code for google's app remover")
        print(dashed_line)
        input()
        aboutMenu()
    elif case == 0:
        menu()
    else:
        # Any other number: just redraw the menu.
        aboutMenu()
# main
def menu():
    """Main menu and program hub: dispatches to the reboot menu, system
    tweaks, TWRP installer, bootloader check, adb sideloader and the about
    pages.  Option 0 exits the process."""
    clear()
    print(mydevice())
    print(dashed_line)
    print(Fore.YELLOW + "|                               X.E.T                              |")
    print("|                       Xiaomi Essential Tools                     |")
    print(dashed_line + Fore.RESET)
    print(Fore.CYAN + "|1. Reboot menu                                                    |")
    print(Fore.WHITE + "|Simple reboot menu, to make your life more comfortable!           |" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "|2. System tweaks                                                  |")
    print(Fore.WHITE + "|Here you can find system tweaks, they are all applied in recovery!|" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "|3. Install Recovery                                               |")
    print(Fore.WHITE + "|Use it to install recovery | Due to server problems, auto installer is off for now|" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "|4. Check bootloader status (locked/unlocked)                      |")
    print(Fore.WHITE + "|You have to be in fastboot mode to make it work                   |" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "|5. ADB sideloader                                                 |")
    print(Fore.WHITE + "|Start in recovery, then use it to flash all zips you want!        |" + Fore.RESET)
    print(dashed_line)
    print(Fore.CYAN + "|9. About                                                          |")
    print(dashed_line)
    print(Fore.CYAN + "|0. Exit                                                           |")
    print(dashed_line + Fore.RESET)
    case = getInt()
    if case == 1:
        killsystem  # NOTE(review): no-op — killsystem is the exit code of the os.system('adb kill-server') run at import time, not a callable
        rbMenu()
    elif case == 2:
        killsystem  # NOTE(review): no-op, see above
        sTweaksMenu()
    elif case == 3:
        killsystem  # NOTE(review): no-op, see above
        # Delegates to the external TWRP installer with the detected
        # device codename and the platform flag ('l'/'w').
        twrpInstaller(mydevice(), s)
        menu()
    elif case == 4:
        clear()
        bl()
        input("push enter to continue")
        menu()
    elif case == 5:
        killsystem  # NOTE(review): no-op, see above
        clear()
        sideloader()
    elif case == 9:
        clear()
        aboutMenu()
    elif case == 0:
        killsystem  # NOTE(review): no-op, see above
        print(Fore.GREEN + "Consider a donation for me to keep my servers up!")
        print("www.paypal.me/Mezutelni")
        sys.exit()
    else:
        clear()
        print("Error choose right option\n" + Fore.RESET)
        input("push enter to continue")
        menu()
# Module entry point: start the interactive main menu on import/run.
menu()
| 39.518578 | 149 | 0.539245 |
import os
import sys
import time
import urllib.request
import hashlib
try:
from colorama import Fore, Back, Style, init
except ModuleNotFoundError:
print ("You have no colorama installed, i will install it for you")
print
path = sys.executable
path = path.replace("python.exe","")
os.system(path+"/Scripts/pip install colorama")
print
print ("Ok, now you can restart script :)")
from colorama import Fore, Back, Style, init
from twrp import twrpInstaller
init()
if sys.platform == "linux" or sys.platform == "linux2":
clear = lambda: os.system('clear')
s = "l"
elif sys.platform == "win32":
clear = lambda: os.system('cls')
s = "w"
# some global variables
dashed_line = (Fore.MAGENTA + "--------------------------------------------------------------------" + Fore.RESET)
killsystem = os.system("adb kill-server")
# this is path to /res/ folder and to .py file
resPath = os.path.abspath(os.path.dirname(__file__)) + os.sep + "res" + os.sep
filePath = os.path.abspath(os.path.dirname(__file__)) + os.sep
# resPath = os.path.dirname(sys.executable)+os.sep+"res"+os.sep
# filePath = os.path.dirname(sys.executable)+os.sep
# this is list of devices with official twrp support
devices = ["cancro", "libra", "ferrari", "aqua", "gemini", "virgo", "leo", "scorpio", "jason", "tiffany", "song",
"meri", "tisson", "capricorn", "natrium", "lithium", "chiron", "sagit", "hydrogen", "oxygen", "helium",
"HM2013023", "armani", "HM2014811", "HM2014813", "omega", "lcsh92_wet_jb9", "gucci", "dior", "hermes", "ido",
"land", "hennessy", "kate", "kenzo", "nikel", "prada", "markw", "ugg", "mido", "rolex", "santoni", "mocha",
"latte", "cappu","ugglite" ]
devicesDict = {'aries': "Mi 2", 'pisces': "Mi 3 TD", 'cancro': "Mi 3 W/Mi 4", 'libra': "Mi 4c",
'ferrari': "Mi 4i", 'aqua': "Mi 4s", 'gemini': "Mi 5", 'virgo': "Mi Note",
'leo': "Mi Note Pro", 'scorpio': "Mi Note 2", 'jason': "Mi Note 3", 'tiffany': "Mi 5x",
'song': "Mi 5c", 'meri': "Mi 5c", 'tissot': "Mi A1", 'capricorn': "Mi 5s", 'natrium': "Mi 5s+",
'lithium': "Mi MIX", 'chiron': "Mi MIX 2",'polaris':'Mi MIX 2s', 'sagit': "Mi 6", 'hydrogen': "Mi MAX",
'oxygen': "Mi MAX 2", 'helium': "Mi MAX PRO",
'HM2013023': "Redmi 1 - WCDMA",
'armani': "Redmi 1s - WCDMA", 'HM2014811': "Redmi 2 - WCDMA", 'HM2014813': "Redmi 2 - TD",
'omega': "Redmi PRO", 'lcsh92_wet_jb9': "Redmi note 1 - 3g-mtk", 'gucci': "Redmi note 1s",
'dior': "Redmi Note 1 - 4g", 'hermes': "Redmi Note 2", 'ido': "Redmi 3", 'land': "Redmi 3 S/X",
'hennessy': "Redmi Note 3 (MTK)", 'kate': "Redmi Note 3 Global",
'kenzo': "Redmi Note 3 Chinese", 'nikel': "Redmi Note 4", 'prada': "Redmi 4",
'markw': "Redmi 4 pro", 'ugg': "Redmi Note 5A", 'mido': "Redmi Note 4/4x", 'rolex': "Redmi 4a",
'santoni': "Redmi 4x", 'ugglite':'Redmi Note 5A','vince':'Redmi Note 5/5+','whyred':'Redmi Note 5 Pro',
'mocha': "Mi PAD", 'latte': "Mi PAD 2", 'cappu': "Mi PAD 3"}
googleApps = {
"youtube": "com.google.android.youtube",
"drive": "com.google.android.apps.docs",
"music": "com.google.android.music",
"maps": ":com.google.android.apps.maps",
"videos": "com.google.android.videos",
"photos": "com.google.android.apps.photos",
"chrome": "com.android.chrome",
"gmail": "com.google.android.gm",
"translate": "com.google.android.apps.translate",
"duo": "com.google.android.apps.tachyon"
}
miuiApps = {
"bugreport": "com.miui.bugreport",
"compass": "com.miui.compass",
"video": "com.miui.videoplayer",
"mail": "com.android.email",
"music": "com.miui.player",
"scanner": "com.xiaomi.scanner",
"browser": "com.android.browser",
"screenrecorder": "com.miui.screenrecorder",
"gallery": "com.miui.gallery",
"updater": "com.android.updater",
"midrop": "com.xiaomi.midrop",
"calendar": "com.android.calendar",
"miui assistant": "com.mi.android.globalpersonalassistant",
"notes": "com.miui.notes",
}
localmd5s = [
"f337d1707478d63315820a45030f547d", # 0.camera
"537e17e2585e731a1c26fbd81eb2affa", # 1.home
]
def getInt():
    """Prompt until the user enters a whole number; return it as an int.

    Re-prompts (recursively) after printing an error on non-numeric input.
    """
    try:
        return int(input(Back.BLUE + "choose: " + Back.RESET))
    except ValueError:
        print()
        print(Fore.RED+"Wrong, choose right option!"+Fore.RESET)
        # getInt() already returns an int, so the original extra int()
        # cast around the recursive call was redundant.
        return getInt()
def mydevice():
    """Return a human-readable name for the attached device.

    Reads ro.product.device from /system/build.prop over adb and maps the
    codename through devicesDict; returns "none" for unknown codenames.
    Side effects: starts the adb server and mounts/unmounts /system.
    """
    os.system("adb start-server")
    os.system("adb shell mount /system")
    os.system("adb shell \"cat /system/build.prop | grep ro.product.device=\" > tmp ")
    # BUGFIX: the original leaked two file handles here (an unclosed
    # open().read() plus a second open().close() on a *new* handle);
    # use a context manager instead.
    with open('tmp', 'r') as f:
        glob_device = f.read()
    os.remove("tmp")
    os.system("adb shell umount /system")
    # The captured line looks like "ro.product.device=<codename>"; take
    # everything after '=' and strip whitespace.  (The original
    # lstrip(char-set)[1:] trick only worked because '=' is not in the
    # stripped character set.)
    codename = ''.join(glob_device.partition('=')[2].split())
    clear()
    # Map codename -> marketing name; unknown (or empty on adb failure)
    # codenames yield "none", matching the original fall-through.
    return devicesDict.get(codename, "none")
# Thanks to stackoverflow!
def md5(fname):
    """Return the hex MD5 digest of the file at *fname*, read in chunks."""
    digest = hashlib.md5()
    with open(fname, "rb") as fh:
        while True:
            block = fh.read(4096)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()
def dpiChanger():
    """Rewrite ro.sf.lcd_density in /system/build.prop to a user-chosen DPI.

    Filters the existing density line out with grep and appends a new one,
    which is why the menu warns that build.prop must be restored before
    changing DPI a second time."""
    print(dashed_line)
    os.system("adb shell mount /system")
    print("Make sure that you made a build.prop backup! just in case")
    dpi = input("Tell me what is your desired dpi: ")  # not validated as numeric
    print("Ok, i'll change dpi to this value!")
    # grep -v drops every line containing the density key into a temp copy
    # which then replaces the original file.
    os.system('adb shell "grep -v "ro.sf.lcd_density" /system/build.prop > /system/build.prop.2"')
    os.system('adb shell "cp /system/build.prop.2 /system/build.prop"')
    os.system('adb shell "echo "ro.sf.lcd_density = ' + dpi + '" >> /system/build.prop"')
    os.system('adb shell "chmod 644 /system/build.prop"')
    print("Dpi has been changed!" + Fore.RESET)
    os.system("adb shell umount /system")
    input("push enter to continue")
    print(dashed_line)
    sTweaksMenu()
def mix2Cam():
    """Install the ported Mi MIX 2 camera APK over the stock MIUI camera.

    Downloads res/cam.apk if missing, verifies it against localmd5s[0],
    and pushes it to /system/priv-app; a corrupt download is deleted and
    re-fetched recursively."""
    print(dashed_line)
    # NOTE(review): resPath already ends with os.sep, so this produces a
    # doubled separator ("res//cam.apk") — harmless on both OSes.
    path = resPath + os.sep + "cam.apk"
    os.system("adb shell mount /system")
    isf = os.path.isfile(os.path.dirname(resPath) + os.sep + "cam.apk")
    if not isf:
        print(Fore.WHITE + "I need to download camera file first, be patient please" + Fore.RESET)
        urllib.request.urlretrieve('http://www1.zippyshare.com/d/T0XrorQl/9267/cam.apk', resPath + 'cam.apk')
    elif isf:
        print(Fore.WHITE + "Ok, you have camera file already!" + Fore.RESET)
    # Gate the install on a known-good checksum of the downloaded APK.
    md5sum = md5(path)
    if md5sum == localmd5s[0]:
        os.system("adb push " + resPath + "cam.apk /system/priv-app/MiuiCamera/MiuiCamera.apk")
        os.system("adb shell chmod 644 /system/priv-app/MiuiCamera/MiuiCamera.apk")
        print(Back.BLUE + "Your old camera is still here, backed up, just in case" + Back.RESET)
        os.system("adb shell umount /system")
        input(Fore.GREEN + "push enter to continue" + Fore.RESET)
        print(dashed_line)
        sTweaksMenu()
    else:
        # Checksum mismatch: discard the file and retry from scratch.
        print("But it's looks like it's broken, let me re-download it!")
        os.remove(path)
        mix2Cam()
def comMiuiHome():
    """Install a modified com.miui.home theme (desktop grid up to 10x10).

    Renames the stock theme aside, downloads res/com.miui.home if missing,
    verifies it against localmd5s[1] and pushes it into the default theme
    directory; a corrupt download is deleted and re-fetched recursively."""
    print(dashed_line)
    # NOTE(review): doubled os.sep, same as in mix2Cam — harmless.
    path = resPath + os.sep + "com.miui.home"
    os.system("adb shell mount /system")
    # Back up the stock theme *before* the download/checksum succeed.
    os.system("adb shell mv /system/media/theme/default/com.miui.home /system/media/theme/default/com.miui.home.old")
    isf = os.path.isfile(os.path.dirname(resPath) + os.sep + "com.miui.home")
    if not isf:
        print(Fore.WHITE + "I need to download custom home file first, be patient please" + Fore.RESET)
        urllib.request.urlretrieve('http://www9.zippyshare.com/d/dRMuSMgW/9585/com.miui.home', resPath + 'com.miui.home')
    elif isf:
        print(Fore.WHITE + "Ok, you have custom home file already!" + Fore.RESET)
    # Gate the install on a known-good checksum of the downloaded file.
    md5sum = md5(path)
    if md5sum == localmd5s[1]:
        os.system("adb push " + resPath + "com.miui.home /system/media/theme/default/com.miui.home")
        os.system("adb shell chmod 644 /system/media/theme/default/com.miui.home")
        print(Back.BLUE + "Your old com.miui.home is still here, backed up, just in case" + Back.RESET)
        os.system("adb shell umount /system")
        input(Fore.GREEN +"push enter to continue" + Fore.RESET)
        print(dashed_line)
        sTweaksMenu()
    else:
        # Checksum mismatch: discard the file and retry from scratch.
        os.remove(path)
        print("But it's looks like it's broken, let me re-download it!")
        comMiuiHome()
def bl():
    """Reboot into fastboot and report the bootloader lock state.

    Captures `fastboot oem device-info` output and inspects a fixed
    character offset to decide locked vs unlocked, then returns to the
    main menu."""
    os.system("adb reboot bootloader")
    clear()
    print(dashed_line)
    print("Your bootloader status is: ")
    os.system('fastboot oem device-info > results.txt 2>&1')
    bl = open('results.txt', 'r').read()  # NOTE(review): file handle is never closed
    os.remove('results.txt')
    # NOTE(review): offset 72 is assumed to hit the 't'/'f' of
    # "Device unlocked: true/false" — fragile across fastboot versions; a
    # substring search would be safer.  TODO confirm on current fastboot.
    if bl[72] == "t":
        bl = "Unlocked"
        print(Fore.GREEN + bl + Fore.RESET)
    elif bl[72] == "f":
        bl = "Locked"
        print(Fore.RED + bl + Fore.RESET)
    print()
    input(Back.BLUE + "Push enter to exit" + Back.RESET)
    menu()
def sideloader():
    """Repeatedly adb-sideload user-supplied zip files.

    The user must start sideload mode on the phone themselves; after each
    flash they are asked whether to continue ('y'), go back ('n'), or —
    on any other answer — the loop aborts back to the main menu."""
    while (True):
        print(dashed_line)
        print(
            Fore.WHITE + "Due to problems with adb sideload implementation, you have to start sideload on your phone manually!" + Fore.RESET)
        # Most terminals paste a dragged file as its full path.
        sideloadFile = input(Back.BLUE + "Drag and drop your file here: " + Back.RESET)
        os.system("adb sideload " + sideloadFile)
        ifContinue = input("Do you want to sideload next file? (y/n)")
        ifContinue = str(ifContinue).lower()
        if ifContinue == 'n':
            print(Fore.WHITE + "Ok, we'll go back now" + Fore.RESET)
            input("Push enter to continue")
            print(dashed_line)
            menu()
        elif ifContinue == "y":
            print(Fore.WHITE + "Ok! so here we go again" + Fore.RESET)
        else:
            print(
                Fore.RED + "Wrong option, so we will stop now, if u want to continue sideloading, just re launch this option from menu" + Fore.RESET)
            print(dashed_line)
            time.sleep(5)
            menu()
def remover(appList):
print(dashed_line + Fore.LIGHTCYAN_EX)
i = 1
for key, values in appList.items():
print("%i. %s" % (i, key.capitalize()))
i = i + 1
print()
print("0. Exit")
case = getInt()
i = 0
if case == 0:
clear()
sTweaksMenu()
else:
for key, values in appList.items():
pckg = values
if case == i + 1:
clear()
print(dashed_line + Fore.GREEN)
os.system("adb shell \"pm uninstall -k --user 0 %s\"" % pckg)
print (pckg)
if appList==miuiApps:
removermiui()
elif appList==googleApps:
removergoogle()
else:
i = i + 1
continue
def appremover():
print(dashed_line)
print(Fore.YELLOW + "| X.E.T |")
print("| App remover menu |")
print(dashed_line)
print(Fore.CYAN + "| 1. Miui Apps")
print(dashed_line)
print(Fore.CYAN +"| 2. Google Apps")
print(dashed_line)
print(Fore.CYAN +"| 3. Full")
print(Fore.RED + "| ^This one will remove all possible google and miui apps"+Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "| 0. Exit")
print(dashed_line)
case = getInt()
if case == 1:
clear()
remover(miuiApps)
elif case == 2:
clear()
remover(googleApps)
elif case == 3:
apps = list("")
pckg = list("")
i = 0
for key, values in googleApps.items():
apps.append(key)
pckg.append(values)
i = i + 1
continue
for key, values in miuiApps.items():
apps.append(key)
pckg.append(values)
i = i + 1
continue
print(Fore.RED + "Are you sure you want to remove: %s?" % ', '.join(apps))
case = input(Back.BLUE + "Y/N: " + Back.RESET)
if case.lower() == "y":
for x in pckg:
os.system("adb shell \" pm uninstall -k --user 0 %s\"" % x)
clear()
print(dashed_line)
print("Everything seems to be removed")
input("Press enter to go back")
sTweaksMenu()
elif case.lower() == "n":
sTweaksMenu()
elif case==0:
sTweaksMenu()
def rbMenu():
clear()
print(mydevice())
print(dashed_line)
print(Fore.YELLOW + "| X.E.T |")
print("| REBOOT MENU |")
print("| Some devices, like RN3P might have problems with reboots |")
print("| from system, but reboots should work from adb/fastboot |")
print(dashed_line + Fore.RESET)
print(Fore.CYAN + "|1. Reboot to recovery |")
print(Fore.WHITE + "|Reboot to recovery using ADB (so make sure to turn on debugging) |" + Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "|2. Reboot to fastboot |")
print(Fore.WHITE + "|Reboot to fastboot using ADB (so make sure to turn on debugging) |" + Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "|3. Reboot to system |")
print(Fore.WHITE + "|Reboot to system using ADB (so make sure to turn on debugging) |" + Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "|4. Reboot to system |")
print(Fore.WHITE + "|Reboot to system using Fastboot mode! |" + Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "|5. Reboot to adb-sideload |")
print(Fore.WHITE + "|Reboot to sideload using ADB-root (so use it when in recovery) |" + Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "|6. Boot twrp from file |")
print(Fore.WHITE + "|You can use it when you dont want to install it |" + Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "|0. Back to main menu |")
print(dashed_line + Fore.RESET)
case = getInt()
if case == 1:
clear()
os.system('adb reboot recovery')
os.system('adb kill-server')
rbMenu()
elif case == 2:
clear()
os.system('adb reboot bootloader')
os.system('adb kill-server')
rbMenu()
elif case == 3:
clear()
os.system('adb reboot')
os.system('adb kill-server')
rbMenu()
elif case == 4:
clear()
os.system('fastboot reboot')
menu()
elif case == 5:
clear()
os.system('adb reboot sideload')
menu()
elif case == 6:
clear()
twrp = input("Put twrp file here: ")
os.system('fastboot boot '+twrp)
menu()
elif case == 0:
killsystem
clear()
menu()
else:
clear()
print(Fore.RED + "Error you should choose right option!" + Fore.RESET)
input("push enter to continue")
rbMenu()
# Tweaks
def sTweaksMenu():
clear()
print(mydevice())
print(dashed_line)
print(Fore.YELLOW + "| X.E.T |")
print("| SYSTEM TWEEKS MENU |")
print(dashed_line)
print(Fore.CYAN + "|1. Build.prop backup |")
print(Fore.WHITE + "|Use it to backup your build.prop file! |")
print(dashed_line)
print(Fore.CYAN + "|2. Build.prop restore |")
print(Fore.WHITE + "|Use it to restore your build.prop file! |")
print(dashed_line)
print(Fore.CYAN + "|3. Change DPI |")
print(Fore.WHITE + "|For changing dpi more than once, you have to restore build.prop! |")
print(dashed_line)
print(Fore.CYAN + "|4. Install mix 2 camera |")
print(Fore.WHITE + "|Mix 2 camera ported for all Xiaomi devices;Tested only on miui9 |")
print(dashed_line)
print(Fore.CYAN + "|5. Install modified com.miui.home (desktop grid up to 10x10) |")
print(Fore.WHITE + "|Miui 9 exclusive |")
print(dashed_line)
print(Fore.CYAN + "|6. Activate Camera 2 API |")
print(Fore.WHITE + "|Use it to activate cam2api in your build.prop |")
print(dashed_line)
print(Fore.CYAN + "|7. System apps remover |")
print(Fore.WHITE + "|Remove google/miui apss without root, from system |")
print(dashed_line)
print(Fore.CYAN + "|0. Back to main menu |")
print(dashed_line)
case = getInt()
if case == 1:
clear()
print(dashed_line)
os.system("adb shell mount /system")
os.system("adb pull /system/build.prop " + resPath + "build.prop")
print(Fore.WHITE + "Backup complete! Your build.prop is now in res folder!" + Fore.RESET)
os.system("adb shell umount /system")
input("push enter to continue")
print(dashed_line)
sTweaksMenu()
elif case == 2:
clear()
print(dashed_line)
os.system("adb shell mount /system")
os.system("adb push " + resPath + "build.prop /system/build.prop")
os.system('adb shell "chmod 644 /system/build.prop"')
print(Fore.WHITE + "Restore complete!" + Fore.RESET)
os.system("adb shell umount /system")
input("push enter to continue")
print(dashed_line)
sTweaksMenu()
elif case == 3:
clear()
dpiChanger()
elif case == 4:
clear()
mix2Cam()
elif case == 5:
clear()
comMiuiHome()
elif case == 6:
clear()
os.system("adb shell mount /system")
os.system('adb shell "echo persist.camera.HAL3.enabled=1 >> /system/build.prop"')
print("You have enabled Camera 2 API YAY!")
os.system("adb shell umount /system")
input("push enter to continue")
sTweaksMenu()
elif case == 7:
clear()
appremover()
elif case == 8:
clear()
autoroot()
elif case == 0:
killsystem
clear()
menu()
else:
clear()
print(Fore.RED + "Error you should choose right option!" + Fore.RESET)
input("push enter to continue")
sTweaksMenu()
# about
def aboutMenu():
clear()
print(mydevice())
print(dashed_line)
print(Fore.YELLOW + "| X.E.T |")
print("| About |")
print(dashed_line)
print(Fore.CYAN + "|1. About script |")
print(dashed_line)
print(Fore.CYAN + "|2. Contact |")
print(dashed_line)
print(Fore.CYAN + "|3. Donations |")
print(dashed_line)
print(Fore.CYAN + "|4. Credits |")
print(dashed_line)
print(Fore.CYAN + "|0. Back |")
print(dashed_line)
case = getInt()
if case == 1:
print(dashed_line)
print("Simply script, created by student, to make some tweaks easier to apply")
print("First script purpose was to only automatize twrp installing (that's why repo is called twrp-installer)")
print("Script is aiming to support Xiaomi devices(Some features are universal) on both Windows and Linux")
print("When more test will be made, there will be stable executable version avalible for Windows")
print(dashed_line)
input()
aboutMenu()
elif case == 2:
print(dashed_line)
print("U can contact me on various sites, mostly under nickname Mezutelni")
print("- github.com/mezutelni/")
print("- miuipolska.pl/forum/profile/7082-mezutelni/")
print("- forum.xda-developers.com/member.php?u=6270598")
print(dashed_line)
input()
aboutMenu()
elif case == 3:
print(dashed_line)
print(
"If you want to buy me a beer, or keep my servers online, or simply say Thank You, please consider Donation for me")
print("You can do it by PayPal on PayPal.me/Mezutelni or by contacting with me directly (see contact)")
print(dashed_line)
input()
aboutMenu()
elif case == 4:
print(dashed_line)
print("Thanks to: ")
print("- Facebook group \" Złomowisko Rudej\" for inspiration and help with testing")
print("- MiuiPolska forum society for help with testing and trusting me")
print("- Orjon from MiuiPolska for idea and alpha code for google's app remover")
print(dashed_line)
input()
aboutMenu()
elif case == 0:
menu()
else:
aboutMenu()
# main
def menu():
clear()
print(mydevice())
print(dashed_line)
print(Fore.YELLOW + "| X.E.T |")
print("| Xiaomi Essential Tools |")
print(dashed_line + Fore.RESET)
print(Fore.CYAN + "|1. Reboot menu |")
print(Fore.WHITE + "|Simple reboot menu, to make your life more comfortable! |" + Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "|2. System tweaks |")
print(Fore.WHITE + "|Here you can find system tweaks, they are all applied in recovery!|" + Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "|3. Install Recovery |")
print(Fore.WHITE + "|Use it to install recovery | Due to server problems, auto installer is off for now|" + Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "|4. Check bootloader status (locked/unlocked) |")
print(Fore.WHITE + "|You have to be in fastboot mode to make it work |" + Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "|5. ADB sideloader |")
print(Fore.WHITE + "|Start in recovery, then use it to flash all zips you want! |" + Fore.RESET)
print(dashed_line)
print(Fore.CYAN + "|9. About |")
print(dashed_line)
print(Fore.CYAN + "|0. Exit |")
print(dashed_line + Fore.RESET)
case = getInt()
if case == 1:
killsystem
rbMenu()
elif case == 2:
killsystem
sTweaksMenu()
elif case == 3:
killsystem
twrpInstaller(mydevice(), s)
menu()
elif case == 4:
clear()
bl()
input("push enter to continue")
menu()
elif case == 5:
killsystem
clear()
sideloader()
elif case == 9:
clear()
aboutMenu()
elif case == 0:
killsystem
print(Fore.GREEN + "Consider a donation for me to keep my servers up!")
print("www.paypal.me/Mezutelni")
sys.exit()
else:
clear()
print("Error choose right option\n" + Fore.RESET)
input("push enter to continue")
menu()
menu()
| true | true |
f72becd780ca4f371e5aa799094ea59aac89e645 | 27,367 | py | Python | ietf/liaisons/forms.py | hassanakbar4/ietfdb | cabee059092ae776015410640226064331c293b7 | [
"BSD-3-Clause"
] | 2 | 2022-03-12T04:37:08.000Z | 2022-03-13T00:48:39.000Z | ietf/liaisons/forms.py | hassanakbar4/ietfdb | cabee059092ae776015410640226064331c293b7 | [
"BSD-3-Clause"
] | 39 | 2021-05-31T21:10:14.000Z | 2022-03-07T16:07:14.000Z | ietf/liaisons/forms.py | hassanakbar4/ietfdb | cabee059092ae776015410640226064331c293b7 | [
"BSD-3-Clause"
] | 2 | 2021-10-05T12:48:20.000Z | 2021-11-08T11:38:35.000Z | # Copyright The IETF Trust 2011-2020, All Rights Reserved
# -*- coding: utf-8 -*-
import io
import datetime, os
import operator
from typing import Union # pyflakes:ignore
from email.utils import parseaddr
from form_utils.forms import BetterModelForm
from django import forms
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db.models.query import QuerySet
from django.forms.utils import ErrorList
from django.db.models import Q
#from django.forms.widgets import RadioFieldRenderer
from django.core.validators import validate_email
import debug # pyflakes:ignore
from ietf.ietfauth.utils import has_role
from ietf.name.models import DocRelationshipName
from ietf.liaisons.utils import get_person_for_user,is_authorized_individual
from ietf.liaisons.widgets import ButtonWidget,ShowAttachmentsWidget
from ietf.liaisons.models import (LiaisonStatement,
LiaisonStatementEvent,LiaisonStatementAttachment,LiaisonStatementPurposeName)
from ietf.liaisons.fields import SearchableLiaisonStatementsField
from ietf.group.models import Group
from ietf.person.models import Email
from ietf.person.fields import SearchableEmailField
from ietf.doc.models import Document, DocAlias
from ietf.utils.fields import DatepickerDateField
from functools import reduce
'''
NOTES:
Authorized individuals are people (in our Person table) who are authorized to send
messages on behalf of some other group - they have a formal role in the other group,
whereas the liaison manager has a formal role with the IETF (or more correctly,
with the IAB).
'''
# -------------------------------------------------
# Helper Functions
# -------------------------------------------------
def liaison_manager_sdos(person):
    """Return the distinct active SDO groups in which *person* holds the
    'liaiman' (liaison manager) role."""
    return Group.objects.filter(type="sdo", state="active", role__person=person, role__name="liaiman").distinct()
def flatten_choices(choices):
    """Collapse grouped select choices into one flat option list.

    *choices* is a sequence of (optgroup_label, options) pairs; the result
    contains every option in order with the group labels dropped.
    """
    return [option for _, group_options in choices for option in group_options]
def get_internal_choices(user):
    """Build grouped select-widget choices of internal IETF groups.

    Returns a list of (optgroup label, [(pk, label), ...]) pairs covering
    the groups the user has permissions for; with user == None all active
    internal groups are offered.
    """
    groups = get_groups_for_person(user.person if user else None)

    def _pairs(queryset, make_label):
        return [(g.pk, make_label(g)) for g in queryset]

    return [
        ('Main IETF Entities',
         _pairs(groups.filter(acronym__in=('ietf', 'iesg', 'iab')),
                lambda g: 'The {}'.format(g.acronym.upper()))),
        ('IETF Areas',
         _pairs(groups.filter(type='area'),
                lambda g: '{} - {}'.format(g.acronym, g.name))),
        ('IETF Working Groups',
         _pairs(groups.filter(type='wg'),
                lambda g: '{} - {}'.format(g.acronym, g.name))),
    ]
def get_groups_for_person(person):
    '''Returns queryset of internal Groups the person has interesting roles in.
    This is a refactor of IETFHierarchyManager.get_entities_for_person(). If Person
    is None or Secretariat or Liaison Manager all internal IETF groups are returned.
    '''
    if person == None or has_role(person.user, "Secretariat") or has_role(person.user, "Liaison Manager"):
        # collect all internal IETF groups
        queries = [Q(acronym__in=('ietf','iesg','iab')),
                   Q(type='area',state='active'),
                   Q(type='wg',state='active')]
    else:
        # Interesting roles, as Group queries: IETF chair, IAB chair or
        # executive director, area director, WG chair/secretary, and WGs
        # whose parent area the person is AD of.
        queries = [Q(role__person=person,role__name='chair',acronym='ietf'),
                   Q(role__person=person,role__name__in=('chair','execdir'),acronym='iab'),
                   Q(role__person=person,role__name='ad',type='area',state='active'),
                   Q(role__person=person,role__name__in=('chair','secretary'),type='wg',state='active'),
                   Q(parent__role__person=person,parent__role__name='ad',type='wg',state='active')]
    # OR all the Q objects together into one filter; distinct() guards
    # against duplicates from the role joins.
    return Group.objects.filter(reduce(operator.or_,queries)).order_by('acronym').distinct()
def liaison_form_factory(request, type=None, **kwargs):
    """Pick and instantiate the right liaison statement entry form.

    An 'instance' kwarg means an existing statement is being edited;
    otherwise the form follows the statement direction ('incoming' or
    'outgoing').  Returns None for an unrecognized type.
    """
    user = request.user
    if kwargs.get('instance'):
        return EditLiaisonForm(user, **kwargs)
    form_class = {
        'incoming': IncomingLiaisonForm,
        'outgoing': OutgoingLiaisonForm,
    }.get(type)
    if form_class is None:
        return None
    return form_class(user, **kwargs)
def validate_emails(value):
    """Validate a comma (or CRLF) separated list of email addresses.

    Raises forms.ValidationError naming the first invalid or non-ASCII
    address; returns None when every entry is acceptable.
    """
    value = value.strip()               # strip surrounding whitespace
    if '\r\n' in value:                 # cc_contacts uses newline separators
        value = value.replace('\r\n',',')
    value = value.rstrip(',')           # strip trailing comma(s)
    emails = value.split(',')
    for email in emails:
        # BUGFIX: inputs such as "a@x,\r\nb@y" leave empty fragments after
        # the newline->comma rewrite; skip blanks instead of rejecting the
        # whole field with "Invalid email address: ".
        if not email.strip():
            continue
        name, addr = parseaddr(email)
        try:
            validate_email(addr)
        except ValidationError:
            raise forms.ValidationError('Invalid email address: %s' % addr)
        try:
            addr.encode('ascii')
        except UnicodeEncodeError as e:
            raise forms.ValidationError('Invalid email address: %s (check character %d)' % (addr,e.start))
# -------------------------------------------------
# Form Classes
# -------------------------------------------------
class AddCommentForm(forms.Form):
    '''Form for adding a (possibly private) comment to a liaison statement.'''
    comment = forms.CharField(required=True, widget=forms.Textarea, strip=False)
    private = forms.BooleanField(label="Private comment", required=False,help_text="If this box is checked the comment will not appear in the statement's public history view.")
# class RadioRenderer(RadioFieldRenderer):
# def render(self):
# output = []
# for widget in self:
# output.append(format_html(force_text(widget)))
# return mark_safe('\n'.join(output))
class SearchLiaisonForm(forms.Form):
    '''Search form for liaison statements.

    Expects initial keyword argument queryset which then gets filtered based
    on the bound form data (see get_results()).
    '''
    text = forms.CharField(required=False)
    # scope = forms.ChoiceField(choices=(("all", "All text fields"), ("title", "Title field")), required=False, initial='title')
    source = forms.CharField(required=False)
    destination = forms.CharField(required=False)
    start_date = DatepickerDateField(date_format="yyyy-mm-dd", picker_settings={"autoclose": "1" }, label='Start date', required=False)
    end_date = DatepickerDateField(date_format="yyyy-mm-dd", picker_settings={"autoclose": "1" }, label='End date', required=False)

    def __init__(self, *args, **kwargs):
        self.queryset = kwargs.pop('queryset')
        super(SearchLiaisonForm, self).__init__(*args, **kwargs)

    def get_results(self):
        '''Return self.queryset filtered by the bound search criteria.'''
        results = self.queryset
        if self.is_bound:
            query = self.cleaned_data.get('text')
            if query:
                q = (Q(title__icontains=query) |
                    Q(from_contact__address__icontains=query) |
                    Q(to_contacts__icontains=query) |
                    Q(other_identifiers__icontains=query) |
                    Q(body__icontains=query) |
                    Q(attachments__title__icontains=query,liaisonstatementattachment__removed=False) |
                    Q(technical_contacts__icontains=query) |
                    Q(action_holder_contacts__icontains=query) |
                    # FIX: was Q(cc_contacts=query) (exact match), inconsistent
                    # with the substring search used for every other field here
                    Q(cc_contacts__icontains=query) |
                    Q(response_contacts__icontains=query))
                results = results.filter(q)

            source = self.cleaned_data.get('source')
            if source:
                source_list = source.split(',')
                if len(source_list) > 1:
                    results = results.filter(Q(from_groups__acronym__in=source_list))
                else:
                    results = results.filter(Q(from_groups__name__icontains=source) | Q(from_groups__acronym__iexact=source))

            destination = self.cleaned_data.get('destination')
            if destination:
                destination_list = destination.split(',')
                if len(destination_list) > 1:
                    results = results.filter(Q(to_groups__acronym__in=destination_list))
                else:
                    results = results.filter(Q(to_groups__name__icontains=destination) | Q(to_groups__acronym__iexact=destination))

            start_date = self.cleaned_data.get('start_date')
            end_date = self.cleaned_data.get('end_date')
            events = None
            if start_date:
                events = LiaisonStatementEvent.objects.filter(type='posted', time__gte=start_date)
                if end_date:
                    events = events.filter(time__lte=end_date)
            elif end_date:
                events = LiaisonStatementEvent.objects.filter(type='posted', time__lte=end_date)
            # FIX: was `if events:` -- an empty queryset is falsy, so a date
            # range that matched no events returned ALL statements instead of
            # none.  Filter whenever a date constraint was actually given.
            if events is not None:
                results = results.filter(liaisonstatementevent__in=events)

        results = results.distinct().order_by('title')
        return results
class CustomModelMultipleChoiceField(forms.ModelMultipleChoiceField):
    '''ModelMultipleChoiceField that passes a QuerySet value through unchanged
    (for use in widget.render); other iterables of non-model values are
    prepared element by element.'''

    def prepare_value(self, value):
        if isinstance(value, QuerySet):
            return value
        prepare = super(CustomModelMultipleChoiceField, self).prepare_value
        iterable = hasattr(value, '__iter__')
        if iterable and not isinstance(value, str) and not hasattr(value, '_meta'):
            return [prepare(v) for v in value]
        return prepare(value)
class LiaisonModelForm(BetterModelForm):
    '''Base ModelForm for creating and editing liaison statements.

    Specify fields which require a custom widget or that are not part of the model.
    NOTE: from_groups and to_groups are marked as not required because select2 has
    a problem with validating; their presence is enforced in clean_from_groups()
    and clean_to_groups() instead.

    Subclasses must implement is_approved(), set_from_fields() and set_to_fields().
    '''
    from_groups = forms.ModelMultipleChoiceField(queryset=Group.objects.all(),label='Groups',required=False)
    from_contact = forms.EmailField() # type: Union[forms.EmailField, SearchableEmailField]
    to_contacts = forms.CharField(label="Contacts", widget=forms.Textarea(attrs={'rows':'3', }), strip=False)
    to_groups = forms.ModelMultipleChoiceField(queryset=Group.objects,label='Groups',required=False)
    deadline = DatepickerDateField(date_format="yyyy-mm-dd", picker_settings={"autoclose": "1" }, label='Deadline', required=True)
    related_to = SearchableLiaisonStatementsField(label='Related Liaison Statement', required=False)
    # FIX: pass the callable, not datetime.date.today() -- calling it here
    # evaluates once at import time, freezing the default to the date the
    # server process started instead of the current date per form.
    submitted_date = DatepickerDateField(date_format="yyyy-mm-dd", picker_settings={"autoclose": "1" }, label='Submission date', required=True, initial=datetime.date.today)
    attachments = CustomModelMultipleChoiceField(queryset=Document.objects,label='Attachments', widget=ShowAttachmentsWidget, required=False)
    attach_title = forms.CharField(label='Title', required=False)
    attach_file = forms.FileField(label='File', required=False)
    attach_button = forms.CharField(label='',
                                    widget=ButtonWidget(label='Attach', show_on='id_attachments',
                                                        require=['id_attach_title', 'id_attach_file'],
                                                        required_label='title and file'),
                                    required=False)

    class Meta:
        model = LiaisonStatement
        exclude = ('attachments','state','from_name','to_name')
        fieldsets = [('From', {'fields': ['from_groups','from_contact', 'response_contacts'], 'legend': ''}),
                     ('To', {'fields': ['to_groups','to_contacts'], 'legend': ''}),
                     ('Other email addresses', {'fields': ['technical_contacts','action_holder_contacts','cc_contacts'], 'legend': ''}),
                     ('Purpose', {'fields':['purpose', 'deadline'], 'legend': ''}),
                     ('Reference', {'fields': ['other_identifiers','related_to'], 'legend': ''}),
                     ('Liaison Statement', {'fields': ['title', 'submitted_date', 'body', 'attachments'], 'legend': ''}),
                     ('Add attachment', {'fields': ['attach_title', 'attach_file', 'attach_button'], 'legend': ''})]

    def __init__(self, user, *args, **kwargs):
        '''Set up per-user field state.  user is the requesting Django user.'''
        super(LiaisonModelForm, self).__init__(*args, **kwargs)
        self.user = user
        self.edit = False
        self.person = get_person_for_user(user)
        self.is_new = not self.instance.pk

        self.fields["from_groups"].widget.attrs["placeholder"] = "Type in name to search for group"
        self.fields["to_groups"].widget.attrs["placeholder"] = "Type in name to search for group"
        self.fields["to_contacts"].label = 'Contacts'
        self.fields["other_identifiers"].widget.attrs["rows"] = 2

        # add email validators
        for field in ['from_contact','to_contacts','technical_contacts','action_holder_contacts','cc_contacts']:
            if field in self.fields:
                self.fields[field].validators.append(validate_emails)

        self.set_from_fields()
        self.set_to_fields()

    def clean_from_groups(self):
        '''Enforce presence here because the field itself is required=False (select2).'''
        from_groups = self.cleaned_data.get('from_groups')
        if not from_groups:
            raise forms.ValidationError('You must specify a From Group')
        return from_groups

    def clean_to_groups(self):
        '''Enforce presence here because the field itself is required=False (select2).'''
        to_groups = self.cleaned_data.get('to_groups')
        if not to_groups:
            raise forms.ValidationError('You must specify a To Group')
        return to_groups

    def clean_from_contact(self):
        '''Resolve the address to an Email object; record an origin if it has none.'''
        contact = self.cleaned_data.get('from_contact')
        from_groups = self.cleaned_data.get('from_groups')
        try:
            email = Email.objects.get(address=contact)
            if not email.origin:
                email.origin = "liaison: %s" % (','.join([ g.acronym for g in from_groups.all() ]))
                email.save()
        except ObjectDoesNotExist:
            raise forms.ValidationError('Email address does not exist')
        return email

    # Note to future person: This is the wrong place to fix the new lines
    # in cc_contacts and to_contacts. Those belong in the save function.
    # Or at least somewhere other than here.
    def clean_cc_contacts(self):
        '''Return a comma separated list of addresses'''
        cc_contacts = self.cleaned_data.get('cc_contacts')
        cc_contacts = cc_contacts.replace('\r\n',',')
        cc_contacts = cc_contacts.rstrip(',')
        return cc_contacts

    ## to_contacts can also have new lines
    def clean_to_contacts(self):
        '''Return a comma separated list of addresses'''
        to_contacts = self.cleaned_data.get('to_contacts')
        to_contacts = to_contacts.replace('\r\n',',')
        to_contacts = to_contacts.rstrip(',')
        return to_contacts

    def clean(self):
        '''Cross-field validation: a body or at least one attachment is required,
        and a statement whose purpose is "response" must reference a related one.'''
        if not self.cleaned_data.get('body', None) and not self.has_attachments():
            self._errors['body'] = ErrorList(['You must provide a body or attachment files'])
            self._errors['attachments'] = ErrorList(['You must provide a body or attachment files'])

        # if purpose=response there must be a related statement
        purpose = LiaisonStatementPurposeName.objects.get(slug='response')
        if self.cleaned_data.get('purpose') == purpose and not self.cleaned_data.get('related_to'):
            self._errors['related_to'] = ErrorList(['You must provide a related statement when purpose is In Response'])
        return self.cleaned_data

    def full_clean(self):
        '''Temporarily adjust the deadline requirement (depends on purpose) around validation.'''
        self.set_required_fields()
        super(LiaisonModelForm, self).full_clean()
        self.reset_required_fields()

    def has_attachments(self):
        '''True when the request carries at least one attach_file_N with a matching title.'''
        return any(key.startswith('attach_file_') and key.replace('file', 'title') in self.data
                   for key in self.files)

    def is_approved(self):
        '''Subclass hook: return True if the statement is approved for posting.'''
        # FIX: was `assert NotImplemented`, which is always a silent no-op
        # because NotImplemented is truthy; subclasses must override this.
        raise NotImplementedError

    def save(self, *args, **kwargs):
        '''Save the statement, manage its state transitions, then persist
        related statements, attachments and tags.'''
        super(LiaisonModelForm, self).save(*args,**kwargs)

        # set state for new statements
        if self.is_new:
            self.instance.change_state(state_id='pending',person=self.person)
            if self.is_approved():
                self.instance.change_state(state_id='posted',person=self.person)
        else:
            # create modified event
            LiaisonStatementEvent.objects.create(
                type_id='modified',
                by=self.person,
                statement=self.instance,
                desc='Statement Modified'
            )

        self.save_related_liaisons()
        self.save_attachments()
        self.save_tags()

        return self.instance

    def save_attachments(self):
        '''Saves new attachments.

        Files come in with keys like "attach_file_N" where N is index of attachments
        displayed in the form. The attachment title is in the corresponding
        request.POST[attach_title_N]
        '''
        written = self.instance.attachments.all().count()
        for key in list(self.files.keys()):
            title_key = key.replace('file', 'title')
            attachment_title = self.data.get(title_key)
            if not key.startswith('attach_file_') or title_key not in self.data:
                continue

            attached_file = self.files.get(key)
            extension=attached_file.name.rsplit('.', 1)
            if len(extension) > 1:
                extension = '.' + extension[1]
            else:
                extension = ''

            written += 1
            name = self.instance.name() + ("-attachment-%s" % written)
            attach, created = Document.objects.get_or_create(
                name = name,
                defaults=dict(
                    title = attachment_title,
                    type_id = "liai-att",
                    uploaded_filename = name + extension,
                    )
                )
            if created:
                DocAlias.objects.create(name=attach.name).docs.add(attach)
            LiaisonStatementAttachment.objects.create(statement=self.instance,document=attach)
            attach_file = io.open(os.path.join(settings.LIAISON_ATTACH_PATH, attach.name + extension), 'wb')
            attach_file.write(attached_file.read())
            attach_file.close()

            if not self.is_new:
                # create modified event
                LiaisonStatementEvent.objects.create(
                    type_id='modified',
                    by=self.person,
                    statement=self.instance,
                    desc='Added attachment: {}'.format(attachment_title)
                )

    def save_related_liaisons(self):
        '''Synchronize the set of related statements with the submitted list.'''
        rel = DocRelationshipName.objects.get(slug='refold')
        new_related = self.cleaned_data.get('related_to', [])
        # add new ones
        for stmt in new_related:
            self.instance.source_of_set.get_or_create(target=stmt,relationship=rel)
        # delete removed ones
        for related in self.instance.source_of_set.all():
            if related.target not in new_related:
                related.delete()

    def save_tags(self):
        '''Create tags as needed'''
        if self.instance.deadline and not self.instance.tags.filter(slug='taken'):
            self.instance.tags.add('required')

    def set_from_fields(self):
        '''Subclass hook: configure from_groups/from_contact for the requesting user.'''
        # FIX: was `assert NotImplemented` (always a no-op); see is_approved().
        raise NotImplementedError

    def set_required_fields(self):
        '''A deadline is only mandatory for action/comment purposes.'''
        purpose = self.data.get('purpose', None)
        self.fields['deadline'].required = purpose in ['action', 'comment']

    def reset_required_fields(self):
        self.fields['deadline'].required = True

    def set_to_fields(self):
        '''Subclass hook: configure to_groups/to_contacts for the requesting user.'''
        # FIX: was `assert NotImplemented` (always a no-op); see is_approved().
        raise NotImplementedError
class IncomingLiaisonForm(LiaisonModelForm):
    '''Liaison statement form for statements sent from an outside (SDO) group
    to the IETF.'''

    def clean(self):
        if 'send' in self.data and self.get_post_only():
            raise forms.ValidationError('As an IETF Liaison Manager you can not send incoming liaison statements, you only can post them')
        return super(IncomingLiaisonForm, self).clean()

    def is_approved(self):
        '''Incoming Liaison Statements do not required approval'''
        return True

    def get_post_only(self):
        '''True when the user may only post, not send, the statement.'''
        sources = self.cleaned_data.get('from_groups')
        return not (has_role(self.user, "Secretariat")
                    or is_authorized_individual(self.user, sources))

    def set_from_fields(self):
        '''Set from_groups and from_contact options and initial value based on user
        accessing the form.'''
        if has_role(self.user, "Secretariat"):
            sdos = Group.objects.filter(type="sdo", state="active").order_by('name')
        else:
            sdos = Group.objects.filter(type="sdo", state="active", role__person=self.person, role__name__in=("liaiman", "auth")).distinct().order_by('name')
            self.fields['from_contact'].initial = self.person.role_set.filter(group=sdos[0]).first().email.address
            self.fields['from_contact'].widget.attrs['readonly'] = True
        self.fields['from_groups'].queryset = sdos
        self.fields['from_groups'].widget.submitter = str(self.person)

        # if there's only one possibility make it the default
        if len(sdos) == 1:
            self.fields['from_groups'].initial = sdos

    def set_to_fields(self):
        '''Set to_groups and to_contacts options and initial value based on user
        accessing the form. For incoming Liaisons, to_groups choices is the full set.
        '''
        self.fields['to_groups'].choices = get_internal_choices(None)
class OutgoingLiaisonForm(LiaisonModelForm):
    '''Liaison statement form for statements sent from the IETF to an outside
    (SDO) group.'''
    from_contact = SearchableEmailField(only_users=True)
    approved = forms.BooleanField(label="Obtained prior approval", required=False)

    class Meta:
        model = LiaisonStatement
        exclude = ('attachments','state','from_name','to_name','action_holder_contacts')
        # add approved field, no action_holder_contacts
        fieldsets = [('From', {'fields': ['from_groups','from_contact','response_contacts','approved'], 'legend': ''}),
                     ('To', {'fields': ['to_groups','to_contacts'], 'legend': ''}),
                     ('Other email addresses', {'fields': ['technical_contacts','cc_contacts'], 'legend': ''}),
                     ('Purpose', {'fields':['purpose', 'deadline'], 'legend': ''}),
                     ('Reference', {'fields': ['other_identifiers','related_to'], 'legend': ''}),
                     ('Liaison Statement', {'fields': ['title', 'submitted_date', 'body', 'attachments'], 'legend': ''}),
                     ('Add attachment', {'fields': ['attach_title', 'attach_file', 'attach_button'], 'legend': ''})]

    def is_approved(self):
        '''Posted immediately only when prior approval was obtained.'''
        return self.cleaned_data['approved']

    def set_from_fields(self):
        '''Set from_groups and from_contact options and initial value based on user
        accessing the form'''
        group_choices = get_internal_choices(self.user)
        self.fields['from_groups'].choices = group_choices

        # set initial value if only one entry
        flat = flatten_choices(group_choices)
        if len(flat) == 1:
            self.fields['from_groups'].initial = [flat[0][0]]

        if has_role(self.user, "Secretariat"):
            return

        liaiman_roles = self.person.role_set.filter(name='liaiman',group__state='active')
        leadership_roles = self.person.role_set.filter(name__in=('ad','chair'),group__state='active')
        if liaiman_roles:
            email = liaiman_roles.first().email.address
        elif leadership_roles:
            email = leadership_roles.first().email.address
        else:
            email = self.person.email_address()
        self.fields['from_contact'].initial = email
        self.fields['from_contact'].widget.attrs['readonly'] = True

    def set_to_fields(self):
        '''Set to_groups and to_contacts options and initial value based on user
        accessing the form'''
        # set options. if the user is a Liaison Manager and nothing more, reduce set to his SDOs
        if has_role(self.user, "Liaison Manager") and not self.person.role_set.filter(name__in=('ad','chair'),group__state='active'):
            sdos = Group.objects.filter(type="sdo", state="active", role__person=self.person, role__name="liaiman").distinct().order_by('name')
        else:
            # get all outgoing entities
            sdos = Group.objects.filter(type="sdo", state="active").order_by('name')
        self.fields['to_groups'].queryset = sdos

        # set initial
        if has_role(self.user, "Liaison Manager"):
            self.fields['to_groups'].initial = [sdos.first()]
class EditLiaisonForm(LiaisonModelForm):
    '''Form for editing an existing liaison statement (incoming or outgoing).'''

    def __init__(self, *args, **kwargs):
        super(EditLiaisonForm, self).__init__(*args, **kwargs)
        self.edit = True
        # only show attachments that have not been marked removed
        self.fields['attachments'].initial = self.instance.liaisonstatementattachment_set.exclude(removed=True)
        related = [ str(x.pk) for x in self.instance.source_of_set.all() ]
        self.fields['related_to'].initial = ','.join(related)
        self.fields['submitted_date'].initial = self.instance.submitted

    def save(self, *args, **kwargs):
        '''Save the statement; if the submission date was edited, also update
        the time of the corresponding "submitted" event.'''
        super(EditLiaisonForm, self).save(*args,**kwargs)
        if self.has_changed() and 'submitted_date' in self.changed_data:
            # NOTE(review): assumes a 'submitted' event always exists; first()
            # returns None otherwise and the next line would fail -- confirm
            event = self.instance.liaisonstatementevent_set.filter(type='submitted').first()
            event.time = self.cleaned_data.get('submitted_date')
            event.save()
        return self.instance

    def set_from_fields(self):
        '''Set from_groups and from_contact options and initial value based on user
        accessing the form.'''
        if self.instance.is_outgoing():
            self.fields['from_groups'].choices = get_internal_choices(self.user)
        else:
            # incoming statement: choices are SDO groups the user may act for
            if has_role(self.user, "Secretariat"):
                queryset = Group.objects.filter(type="sdo").order_by('name')
            else:
                queryset = Group.objects.filter(type="sdo", role__person=self.person, role__name__in=("liaiman", "auth")).distinct().order_by('name')
            self.fields['from_contact'].widget.attrs['readonly'] = True
            self.fields['from_groups'].queryset = queryset

    def set_to_fields(self):
        '''Set to_groups and to_contacts options and initial value based on user
        accessing the form. For incoming Liaisons, to_groups choices is the full set.
        '''
        if self.instance.is_outgoing():
            # if the user is a Liaison Manager and nothing more, reduce to set to his SDOs
            if has_role(self.user, "Liaison Manager") and not self.person.role_set.filter(name__in=('ad','chair'),group__state='active'):
                queryset = Group.objects.filter(type="sdo", role__person=self.person, role__name="liaiman").distinct().order_by('name')
            else:
                # get all outgoing entities
                queryset = Group.objects.filter(type="sdo").order_by('name')
            self.fields['to_groups'].queryset = queryset
        else:
            self.fields['to_groups'].choices = get_internal_choices(None)
class EditAttachmentForm(forms.Form):
    '''Form for editing the title of a liaison statement attachment.'''
    title = forms.CharField(max_length=255)
| 47.512153 | 176 | 0.637227 |
import io
import datetime, os
import operator
from typing import Union
from email.utils import parseaddr
from form_utils.forms import BetterModelForm
from django import forms
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db.models.query import QuerySet
from django.forms.utils import ErrorList
from django.db.models import Q
from django.core.validators import validate_email
import debug
from ietf.ietfauth.utils import has_role
from ietf.name.models import DocRelationshipName
from ietf.liaisons.utils import get_person_for_user,is_authorized_individual
from ietf.liaisons.widgets import ButtonWidget,ShowAttachmentsWidget
from ietf.liaisons.models import (LiaisonStatement,
LiaisonStatementEvent,LiaisonStatementAttachment,LiaisonStatementPurposeName)
from ietf.liaisons.fields import SearchableLiaisonStatementsField
from ietf.group.models import Group
from ietf.person.models import Email
from ietf.person.fields import SearchableEmailField
from ietf.doc.models import Document, DocAlias
from ietf.utils.fields import DatepickerDateField
from functools import reduce
def liaison_manager_sdos(person):
    '''Return the active SDO groups for which person is a Liaison Manager.'''
    return Group.objects.filter(type="sdo", state="active", role__person=person, role__name="liaiman").distinct()
def flatten_choices(choices):
    '''Flatten Django-style grouped choices (optgroup_label, options) pairs
    into a single flat list of option tuples.'''
    return [option for _, options in choices for option in options]
def get_internal_choices(user):
    '''Return grouped form choices of internal IETF groups for the given user
    (user=None means the unrestricted set).'''
    groups = get_groups_for_person(user.person if user else None)
    main = [(g.pk, 'The {}'.format(g.acronym.upper()))
            for g in groups.filter(acronym__in=('ietf', 'iesg', 'iab'))]
    areas = [(g.pk, '{} - {}'.format(g.acronym, g.name))
             for g in groups.filter(type='area')]
    wgs = [(g.pk, '{} - {}'.format(g.acronym, g.name))
           for g in groups.filter(type='wg')]
    return [('Main IETF Entities', main),
            ('IETF Areas', areas),
            ('IETF Working Groups', wgs)]
def get_groups_for_person(person):
    '''Returns queryset of internal Groups the person has interesting roles in.

    If person is None, or is Secretariat or a Liaison Manager, all internal
    IETF groups (ietf/iesg/iab, active areas, active WGs) are returned.
    '''
    # FIX: idiomatic identity check -- `person == None` needlessly routes
    # through the model's equality machinery.
    if person is None or has_role(person.user, "Secretariat") or has_role(person.user, "Liaison Manager"):
        # collect all internal IETF groups
        queries = [Q(acronym__in=('ietf','iesg','iab')),
                   Q(type='area',state='active'),
                   Q(type='wg',state='active')]
    else:
        # groups where the person holds an interesting role, as Group queries
        queries = [Q(role__person=person,role__name='chair',acronym='ietf'),
                   Q(role__person=person,role__name__in=('chair','execdir'),acronym='iab'),
                   Q(role__person=person,role__name='ad',type='area',state='active'),
                   Q(role__person=person,role__name__in=('chair','secretary'),type='wg',state='active'),
                   Q(parent__role__person=person,parent__role__name='ad',type='wg',state='active')]
    return Group.objects.filter(reduce(operator.or_,queries)).order_by('acronym').distinct()
def liaison_form_factory(request, type=None, **kwargs):
    '''Return the appropriate Liaison entry form.

    An existing statement (instance kwarg) is edited with EditLiaisonForm;
    otherwise type ('incoming'/'outgoing') selects the form class.
    Returns None when no form applies.
    '''
    user = request.user
    if kwargs.get('instance',None):
        return EditLiaisonForm(user, **kwargs)
    elif type == 'incoming':
        return IncomingLiaisonForm(user, **kwargs)
    elif type == 'outgoing':
        return OutgoingLiaisonForm(user, **kwargs)
    return None
def validate_emails(value):
    '''Custom validator for a comma- (or CRLF-) separated list of email addresses.

    Raises forms.ValidationError for any address that fails Django's
    validate_email check or that is not pure ASCII.
    '''
    value = value.strip()  # strip surrounding whitespace
    if '\r\n' in value:  # cc_contacts may contain CRLF separators
        value = value.replace('\r\n',',')
    value = value.rstrip(',')  # drop trailing comma so split() yields no empty item
    emails = value.split(',')
    for email in emails:
        name, addr = parseaddr(email)
        try:
            validate_email(addr)
        except ValidationError:
            raise forms.ValidationError('Invalid email address: %s' % addr)
        try:
            addr.encode('ascii')
        except UnicodeEncodeError as e:
            raise forms.ValidationError('Invalid email address: %s (check character %d)' % (addr,e.start))
class AddCommentForm(forms.Form):
    '''Form for adding a (possibly private) comment to a liaison statement.'''
    comment = forms.CharField(required=True, widget=forms.Textarea, strip=False)
    private = forms.BooleanField(label="Private comment", required=False,help_text="If this box is checked the comment will not appear in the statement's public history view.")
# class RadioRenderer(RadioFieldRenderer):
# def render(self):
# output = []
# for widget in self:
# output.append(format_html(force_text(widget)))
# return mark_safe('\n'.join(output))
class SearchLiaisonForm(forms.Form):
text = forms.CharField(required=False)
# scope = forms.ChoiceField(choices=(("all", "All text fields"), ("title", "Title field")), required=False, initial='title')
source = forms.CharField(required=False)
destination = forms.CharField(required=False)
start_date = DatepickerDateField(date_format="yyyy-mm-dd", picker_settings={"autoclose": "1" }, label='Start date', required=False)
end_date = DatepickerDateField(date_format="yyyy-mm-dd", picker_settings={"autoclose": "1" }, label='End date', required=False)
def __init__(self, *args, **kwargs):
self.queryset = kwargs.pop('queryset')
super(SearchLiaisonForm, self).__init__(*args, **kwargs)
def get_results(self):
results = self.queryset
if self.is_bound:
query = self.cleaned_data.get('text')
if query:
q = (Q(title__icontains=query) |
Q(from_contact__address__icontains=query) |
Q(to_contacts__icontains=query) |
Q(other_identifiers__icontains=query) |
Q(body__icontains=query) |
Q(attachments__title__icontains=query,liaisonstatementattachment__removed=False) |
Q(technical_contacts__icontains=query) |
Q(action_holder_contacts__icontains=query) |
Q(cc_contacts=query) |
Q(response_contacts__icontains=query))
results = results.filter(q)
source = self.cleaned_data.get('source')
if source:
source_list = source.split(',')
if len(source_list) > 1:
results = results.filter(Q(from_groups__acronym__in=source_list))
else:
results = results.filter(Q(from_groups__name__icontains=source) | Q(from_groups__acronym__iexact=source))
destination = self.cleaned_data.get('destination')
if destination:
destination_list = destination.split(',')
if len(destination_list) > 1:
results = results.filter(Q(to_groups__acronym__in=destination_list))
else:
results = results.filter(Q(to_groups__name__icontains=destination) | Q(to_groups__acronym__iexact=destination))
start_date = self.cleaned_data.get('start_date')
end_date = self.cleaned_data.get('end_date')
events = None
if start_date:
events = LiaisonStatementEvent.objects.filter(type='posted', time__gte=start_date)
if end_date:
events = events.filter(time__lte=end_date)
elif end_date:
events = LiaisonStatementEvent.objects.filter(type='posted', time__lte=end_date)
if events:
results = results.filter(liaisonstatementevent__in=events)
results = results.distinct().order_by('title')
return results
class CustomModelMultipleChoiceField(forms.ModelMultipleChoiceField):
def prepare_value(self, value):
if isinstance(value, QuerySet):
return value
if (hasattr(value, '__iter__') and
not isinstance(value, str) and
not hasattr(value, '_meta')):
return [super(CustomModelMultipleChoiceField, self).prepare_value(v) for v in value]
return super(CustomModelMultipleChoiceField, self).prepare_value(value)
class LiaisonModelForm(BetterModelForm):
from_groups = forms.ModelMultipleChoiceField(queryset=Group.objects.all(),label='Groups',required=False)
from_contact = forms.EmailField() # type: Union[forms.EmailField, SearchableEmailField]
to_contacts = forms.CharField(label="Contacts", widget=forms.Textarea(attrs={'rows':'3', }), strip=False)
to_groups = forms.ModelMultipleChoiceField(queryset=Group.objects,label='Groups',required=False)
deadline = DatepickerDateField(date_format="yyyy-mm-dd", picker_settings={"autoclose": "1" }, label='Deadline', required=True)
related_to = SearchableLiaisonStatementsField(label='Related Liaison Statement', required=False)
submitted_date = DatepickerDateField(date_format="yyyy-mm-dd", picker_settings={"autoclose": "1" }, label='Submission date', required=True, initial=datetime.date.today())
attachments = CustomModelMultipleChoiceField(queryset=Document.objects,label='Attachments', widget=ShowAttachmentsWidget, required=False)
attach_title = forms.CharField(label='Title', required=False)
attach_file = forms.FileField(label='File', required=False)
attach_button = forms.CharField(label='',
widget=ButtonWidget(label='Attach', show_on='id_attachments',
require=['id_attach_title', 'id_attach_file'],
required_label='title and file'),
required=False)
class Meta:
model = LiaisonStatement
exclude = ('attachments','state','from_name','to_name')
fieldsets = [('From', {'fields': ['from_groups','from_contact', 'response_contacts'], 'legend': ''}),
('To', {'fields': ['to_groups','to_contacts'], 'legend': ''}),
('Other email addresses', {'fields': ['technical_contacts','action_holder_contacts','cc_contacts'], 'legend': ''}),
('Purpose', {'fields':['purpose', 'deadline'], 'legend': ''}),
('Reference', {'fields': ['other_identifiers','related_to'], 'legend': ''}),
('Liaison Statement', {'fields': ['title', 'submitted_date', 'body', 'attachments'], 'legend': ''}),
('Add attachment', {'fields': ['attach_title', 'attach_file', 'attach_button'], 'legend': ''})]
def __init__(self, user, *args, **kwargs):
super(LiaisonModelForm, self).__init__(*args, **kwargs)
self.user = user
self.edit = False
self.person = get_person_for_user(user)
self.is_new = not self.instance.pk
self.fields["from_groups"].widget.attrs["placeholder"] = "Type in name to search for group"
self.fields["to_groups"].widget.attrs["placeholder"] = "Type in name to search for group"
self.fields["to_contacts"].label = 'Contacts'
self.fields["other_identifiers"].widget.attrs["rows"] = 2
# add email validators
for field in ['from_contact','to_contacts','technical_contacts','action_holder_contacts','cc_contacts']:
if field in self.fields:
self.fields[field].validators.append(validate_emails)
self.set_from_fields()
self.set_to_fields()
def clean_from_groups(self):
from_groups = self.cleaned_data.get('from_groups')
if not from_groups:
raise forms.ValidationError('You must specify a From Group')
return from_groups
def clean_to_groups(self):
to_groups = self.cleaned_data.get('to_groups')
if not to_groups:
raise forms.ValidationError('You must specify a To Group')
return to_groups
def clean_from_contact(self):
contact = self.cleaned_data.get('from_contact')
from_groups = self.cleaned_data.get('from_groups')
try:
email = Email.objects.get(address=contact)
if not email.origin:
email.origin = "liaison: %s" % (','.join([ g.acronym for g in from_groups.all() ]))
email.save()
except ObjectDoesNotExist:
raise forms.ValidationError('Email address does not exist')
return email
# Note to future person: This is the wrong place to fix the new lines
# in cc_contacts and to_contacts. Those belong in the save function.
# Or at least somewhere other than here.
def clean_cc_contacts(self):
cc_contacts = self.cleaned_data.get('cc_contacts')
cc_contacts = cc_contacts.replace('\r\n',',')
cc_contacts = cc_contacts.rstrip(',')
return cc_contacts
## to_contacts can also have new lines
def clean_to_contacts(self):
to_contacts = self.cleaned_data.get('to_contacts')
to_contacts = to_contacts.replace('\r\n',',')
to_contacts = to_contacts.rstrip(',')
return to_contacts
def clean(self):
if not self.cleaned_data.get('body', None) and not self.has_attachments():
self._errors['body'] = ErrorList(['You must provide a body or attachment files'])
self._errors['attachments'] = ErrorList(['You must provide a body or attachment files'])
# if purpose=response there must be a related statement
purpose = LiaisonStatementPurposeName.objects.get(slug='response')
if self.cleaned_data.get('purpose') == purpose and not self.cleaned_data.get('related_to'):
self._errors['related_to'] = ErrorList(['You must provide a related statement when purpose is In Response'])
return self.cleaned_data
def full_clean(self):
self.set_required_fields()
super(LiaisonModelForm, self).full_clean()
self.reset_required_fields()
def has_attachments(self):
for key in list(self.files.keys()):
if key.startswith('attach_file_') and key.replace('file', 'title') in list(self.data.keys()):
return True
return False
def is_approved(self):
assert NotImplemented
def save(self, *args, **kwargs):
super(LiaisonModelForm, self).save(*args,**kwargs)
# set state for new statements
if self.is_new:
self.instance.change_state(state_id='pending',person=self.person)
if self.is_approved():
self.instance.change_state(state_id='posted',person=self.person)
else:
# create modified event
LiaisonStatementEvent.objects.create(
type_id='modified',
by=self.person,
statement=self.instance,
desc='Statement Modified'
)
self.save_related_liaisons()
self.save_attachments()
self.save_tags()
return self.instance
def save_attachments(self):
written = self.instance.attachments.all().count()
for key in list(self.files.keys()):
title_key = key.replace('file', 'title')
attachment_title = self.data.get(title_key)
if not key.startswith('attach_file_') or not title_key in list(self.data.keys()):
continue
attached_file = self.files.get(key)
extension=attached_file.name.rsplit('.', 1)
if len(extension) > 1:
extension = '.' + extension[1]
else:
extension = ''
written += 1
name = self.instance.name() + ("-attachment-%s" % written)
attach, created = Document.objects.get_or_create(
name = name,
defaults=dict(
title = attachment_title,
type_id = "liai-att",
uploaded_filename = name + extension,
)
)
if created:
DocAlias.objects.create(name=attach.name).docs.add(attach)
LiaisonStatementAttachment.objects.create(statement=self.instance,document=attach)
attach_file = io.open(os.path.join(settings.LIAISON_ATTACH_PATH, attach.name + extension), 'wb')
attach_file.write(attached_file.read())
attach_file.close()
if not self.is_new:
# create modified event
LiaisonStatementEvent.objects.create(
type_id='modified',
by=self.person,
statement=self.instance,
desc='Added attachment: {}'.format(attachment_title)
)
def save_related_liaisons(self):
rel = DocRelationshipName.objects.get(slug='refold')
new_related = self.cleaned_data.get('related_to', [])
# add new ones
for stmt in new_related:
self.instance.source_of_set.get_or_create(target=stmt,relationship=rel)
# delete removed ones
for related in self.instance.source_of_set.all():
if related.target not in new_related:
related.delete()
def save_tags(self):
if self.instance.deadline and not self.instance.tags.filter(slug='taken'):
self.instance.tags.add('required')
def set_from_fields(self):
assert NotImplemented
def set_required_fields(self):
purpose = self.data.get('purpose', None)
if purpose in ['action', 'comment']:
self.fields['deadline'].required = True
else:
self.fields['deadline'].required = False
def reset_required_fields(self):
self.fields['deadline'].required = True
def set_to_fields(self):
assert NotImplemented
class IncomingLiaisonForm(LiaisonModelForm):
def clean(self):
if 'send' in list(self.data.keys()) and self.get_post_only():
raise forms.ValidationError('As an IETF Liaison Manager you can not send incoming liaison statements, you only can post them')
return super(IncomingLiaisonForm, self).clean()
def is_approved(self):
return True
def get_post_only(self):
from_groups = self.cleaned_data.get('from_groups')
if has_role(self.user, "Secretariat") or is_authorized_individual(self.user,from_groups):
return False
return True
def set_from_fields(self):
if has_role(self.user, "Secretariat"):
queryset = Group.objects.filter(type="sdo", state="active").order_by('name')
else:
queryset = Group.objects.filter(type="sdo", state="active", role__person=self.person, role__name__in=("liaiman", "auth")).distinct().order_by('name')
self.fields['from_contact'].initial = self.person.role_set.filter(group=queryset[0]).first().email.address
self.fields['from_contact'].widget.attrs['readonly'] = True
self.fields['from_groups'].queryset = queryset
self.fields['from_groups'].widget.submitter = str(self.person)
# if there's only one possibility make it the default
if len(queryset) == 1:
self.fields['from_groups'].initial = queryset
def set_to_fields(self):
self.fields['to_groups'].choices = get_internal_choices(None)
class OutgoingLiaisonForm(LiaisonModelForm):
from_contact = SearchableEmailField(only_users=True)
approved = forms.BooleanField(label="Obtained prior approval", required=False)
class Meta:
model = LiaisonStatement
exclude = ('attachments','state','from_name','to_name','action_holder_contacts')
fieldsets = [('From', {'fields': ['from_groups','from_contact','response_contacts','approved'], 'legend': ''}),
('To', {'fields': ['to_groups','to_contacts'], 'legend': ''}),
('Other email addresses', {'fields': ['technical_contacts','cc_contacts'], 'legend': ''}),
('Purpose', {'fields':['purpose', 'deadline'], 'legend': ''}),
('Reference', {'fields': ['other_identifiers','related_to'], 'legend': ''}),
('Liaison Statement', {'fields': ['title', 'submitted_date', 'body', 'attachments'], 'legend': ''}),
('Add attachment', {'fields': ['attach_title', 'attach_file', 'attach_button'], 'legend': ''})]
def is_approved(self):
return self.cleaned_data['approved']
def set_from_fields(self):
choices = get_internal_choices(self.user)
self.fields['from_groups'].choices = choices
flat_choices = flatten_choices(choices)
if len(flat_choices) == 1:
self.fields['from_groups'].initial = [flat_choices[0][0]]
if has_role(self.user, "Secretariat"):
return
if self.person.role_set.filter(name='liaiman',group__state='active'):
email = self.person.role_set.filter(name='liaiman',group__state='active').first().email.address
elif self.person.role_set.filter(name__in=('ad','chair'),group__state='active'):
email = self.person.role_set.filter(name__in=('ad','chair'),group__state='active').first().email.address
else:
email = self.person.email_address()
self.fields['from_contact'].initial = email
self.fields['from_contact'].widget.attrs['readonly'] = True
def set_to_fields(self):
if has_role(self.user, "Liaison Manager") and not self.person.role_set.filter(name__in=('ad','chair'),group__state='active'):
queryset = Group.objects.filter(type="sdo", state="active", role__person=self.person, role__name="liaiman").distinct().order_by('name')
else:
queryset = Group.objects.filter(type="sdo", state="active").order_by('name')
self.fields['to_groups'].queryset = queryset
if has_role(self.user, "Liaison Manager"):
self.fields['to_groups'].initial = [queryset.first()]
class EditLiaisonForm(LiaisonModelForm):
def __init__(self, *args, **kwargs):
super(EditLiaisonForm, self).__init__(*args, **kwargs)
self.edit = True
self.fields['attachments'].initial = self.instance.liaisonstatementattachment_set.exclude(removed=True)
related = [ str(x.pk) for x in self.instance.source_of_set.all() ]
self.fields['related_to'].initial = ','.join(related)
self.fields['submitted_date'].initial = self.instance.submitted
def save(self, *args, **kwargs):
super(EditLiaisonForm, self).save(*args,**kwargs)
if self.has_changed() and 'submitted_date' in self.changed_data:
event = self.instance.liaisonstatementevent_set.filter(type='submitted').first()
event.time = self.cleaned_data.get('submitted_date')
event.save()
return self.instance
def set_from_fields(self):
if self.instance.is_outgoing():
self.fields['from_groups'].choices = get_internal_choices(self.user)
else:
if has_role(self.user, "Secretariat"):
queryset = Group.objects.filter(type="sdo").order_by('name')
else:
queryset = Group.objects.filter(type="sdo", role__person=self.person, role__name__in=("liaiman", "auth")).distinct().order_by('name')
self.fields['from_contact'].widget.attrs['readonly'] = True
self.fields['from_groups'].queryset = queryset
def set_to_fields(self):
if self.instance.is_outgoing():
if has_role(self.user, "Liaison Manager") and not self.person.role_set.filter(name__in=('ad','chair'),group__state='active'):
queryset = Group.objects.filter(type="sdo", role__person=self.person, role__name="liaiman").distinct().order_by('name')
else:
queryset = Group.objects.filter(type="sdo").order_by('name')
self.fields['to_groups'].queryset = queryset
else:
self.fields['to_groups'].choices = get_internal_choices(None)
class EditAttachmentForm(forms.Form):
title = forms.CharField(max_length=255)
| true | true |
f72bedbab95f862cc621615212d09c74118e2e36 | 18,978 | py | Python | benchexec/tools/ultimate.py | mikhailramalho/benchexec | 5fc5180c26e0fa18e137b142c5890a3dd7cab795 | [
"Apache-2.0"
] | null | null | null | benchexec/tools/ultimate.py | mikhailramalho/benchexec | 5fc5180c26e0fa18e137b142c5890a3dd7cab795 | [
"Apache-2.0"
] | null | null | null | benchexec/tools/ultimate.py | mikhailramalho/benchexec | 5fc5180c26e0fa18e137b142c5890a3dd7cab795 | [
"Apache-2.0"
] | null | null | null | """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2015 Daniel Dietsch
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
import logging
import os
import re
import subprocess
import sys
import benchexec.result as result
import benchexec.tools.template
import benchexec.util as util
from benchexec import BenchExecException
from benchexec.model import MEMLIMIT
from benchexec.tools.template import UnsupportedFeatureException
_OPTION_NO_WRAPPER = "--force-no-wrapper"
_SVCOMP17_VERSIONS = {"f7c3ed31"}
_SVCOMP17_FORBIDDEN_FLAGS = {"--full-output", "--architecture"}
_ULTIMATE_VERSION_REGEX = re.compile(r"^Version is (.*)$", re.MULTILINE)
# .jar files that are used as launcher arguments with most recent .jar first
_LAUNCHER_JARS = ["plugins/org.eclipse.equinox.launcher_1.3.100.v20150511-1540.jar"]
class UltimateTool(benchexec.tools.template.BaseTool):
"""
Abstract tool info for Ultimate-based tools.
"""
REQUIRED_PATHS = [
"artifacts.xml",
"config",
"configuration",
"cvc4",
"cvc4nyu",
"cvc4-LICENSE",
"features",
"LICENSE",
"LICENSE.GPL",
"LICENSE.GPL.LESSER",
"mathsat",
"mathsat-LICENSE",
"p2",
"plugins",
"README",
"Ultimate",
"Ultimate.ini",
"Ultimate.py",
"z3",
"z3-LICENSE",
]
REQUIRED_PATHS_SVCOMP17 = []
def __init__(self):
self._uses_propertyfile = False
@functools.lru_cache()
def executable(self):
exe = util.find_executable("Ultimate.py")
for (dirpath, dirnames, filenames) in os.walk(exe):
if "Ultimate" in filenames and "plugins" in dirnames:
return exe
break
# possibly another Ultimate.py was found, check in the current dir
current = os.getcwd()
for (dirpath, dirnames, filenames) in os.walk(current):
if (
"Ultimate" in filenames
and "Ultimate.py" in filenames
and "plugins" in dirnames
):
return "./Ultimate.py"
break
sys.exit(
"ERROR: Could not find Ultimate executable in '{0}' or '{1}'".format(
str(exe), str(current)
)
)
def _ultimate_version(self, executable):
data_dir = os.path.join(os.path.dirname(executable), "data")
launcher_jar = self._get_current_launcher_jar(executable)
cmds = [
# 2
[
self.get_java(),
"-Xss4m",
"-jar",
launcher_jar,
"-data",
"@noDefault",
"-ultimatedata",
data_dir,
"--version",
],
# 1
[
self.get_java(),
"-Xss4m",
"-jar",
launcher_jar,
"-data",
data_dir,
"--version",
],
]
self.api = len(cmds)
for cmd in cmds:
version = self._query_ultimate_version(cmd, self.api)
if version != "":
return version
self.api = self.api - 1
raise BenchExecException("Could not determine Ultimate version")
def _query_ultimate_version(self, cmd, api):
try:
process = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
(stdout, stderr) = process.communicate()
except OSError as e:
logging.warning(
"Cannot run Java to determine Ultimate version (API %s): %s",
api,
e.strerror,
)
return ""
stdout = util.decode_to_string(stdout).strip()
if stderr or process.returncode:
logging.warning(
"Cannot determine Ultimate version (API %s).\n"
"Command was: %s\n"
"Exit code: %s\n"
"Error output: %s\n"
"Standard output: %s",
api,
" ".join(map(util.escape_string_shell, cmd)),
process.returncode,
util.decode_to_string(stderr),
stdout,
)
return ""
version_ultimate_match = _ULTIMATE_VERSION_REGEX.search(stdout)
if not version_ultimate_match:
logging.warning(
"Cannot determine Ultimate version (API %s), output was: %s",
api,
stdout,
)
return ""
return version_ultimate_match.group(1)
@functools.lru_cache()
def _get_current_launcher_jar(self, executable):
ultimatedir = os.path.dirname(executable)
for jar in _LAUNCHER_JARS:
launcher_jar = os.path.join(ultimatedir, jar)
if os.path.isfile(launcher_jar):
return launcher_jar
raise FileNotFoundError(
"No suitable launcher jar found in {0}".format(ultimatedir)
)
@functools.lru_cache()
def version(self, executable):
wrapper_version = self._version_from_tool(executable)
if wrapper_version in _SVCOMP17_VERSIONS:
# Keep reported version number for old versions as they were before
return wrapper_version
ultimate_version = self._ultimate_version(executable)
return ultimate_version + "-" + wrapper_version
@functools.lru_cache()
def _is_svcomp17_version(self, executable):
return self.version(executable) in _SVCOMP17_VERSIONS
@functools.lru_cache()
def _requires_ultimate_data(self, executable):
if self._is_svcomp17_version(executable):
return False
version = self.version(executable)
ult, wrapper = version.split("-")
major, minor, patch = ult.split(".")
# all versions before 0.1.24 do not require ultimatedata
return not (int(major) == 0 and int(minor) < 2 and int(patch) < 24)
def cmdline(self, executable, options, tasks, propertyfile=None, rlimits=None):
if rlimits is None:
rlimits = {}
self._uses_propertyfile = propertyfile is not None
if _OPTION_NO_WRAPPER in options:
# do not use old wrapper script even if property file is given
self._uses_propertyfile = False
propertyfile = None
options.remove(_OPTION_NO_WRAPPER)
if self._is_svcomp17_version(executable):
assert propertyfile
cmdline = [executable, propertyfile]
cmdline += [
option for option in options if option not in _SVCOMP17_FORBIDDEN_FLAGS
]
cmdline.append("--full-output")
cmdline += tasks
self.__assert_cmdline(
cmdline,
"cmdline contains empty or None argument when using SVCOMP17 mode: ",
)
return cmdline
if self._uses_propertyfile:
# use the old wrapper script if a property file is given
cmdline = [executable, "--spec", propertyfile]
if tasks:
cmdline += ["--file"] + tasks
cmdline += options
self.__assert_cmdline(
cmdline,
"cmdline contains empty or None argument when using default SVCOMP mode: ",
)
return cmdline
# if no property file is given and toolchain (-tc) is, use ultimate directly
if "-tc" in options or "--toolchain" in options:
# ignore executable (old executable is just around for backwards compatibility)
mem_bytes = rlimits.get(MEMLIMIT, None)
cmdline = [self.get_java()]
# -ea has to be given directly to java
if "-ea" in options:
options = [e for e in options if e != "-ea"]
cmdline += ["-ea"]
if mem_bytes:
cmdline += ["-Xmx" + str(mem_bytes)]
cmdline += ["-Xss4m"]
cmdline += ["-jar", self._get_current_launcher_jar(executable)]
if self._requires_ultimate_data(executable):
if "-ultimatedata" not in options and "-data" not in options:
if self.api == 2:
cmdline += [
"-data",
"@noDefault",
"-ultimatedata",
os.path.join(os.path.dirname(executable), "data"),
]
if self.api == 1:
raise ValueError(
"Illegal option -ultimatedata for API {} and Ultimate version {}".format(
self.api, self.version(executable)
)
)
elif "-ultimatedata" in options and "-data" not in options:
if self.api == 2:
cmdline += ["-data", "@noDefault"]
if self.api == 1:
raise ValueError(
"Illegal option -ultimatedata for API {} and Ultimate version {}".format(
self.api, self.version(executable)
)
)
else:
if "-data" not in options:
if self.api == 2 or self.api == 1:
cmdline += [
"-data",
os.path.join(os.path.dirname(executable), "data"),
]
cmdline += options
if tasks:
cmdline += ["-i"] + tasks
self.__assert_cmdline(
cmdline,
"cmdline contains empty or None argument when using Ultimate raw mode: ",
)
return cmdline
# there is no way to run ultimate; not enough parameters
raise UnsupportedFeatureException(
"Unsupported argument combination: options={} propertyfile={} rlimits={}".format(
options, propertyfile, rlimits
)
)
def __assert_cmdline(self, cmdline, msg):
assert all(cmdline), msg + str(cmdline)
pass
def program_files(self, executable):
paths = (
self.REQUIRED_PATHS_SVCOMP17
if self._is_svcomp17_version(executable)
else self.REQUIRED_PATHS
)
return [executable] + self._program_files_from_executable(executable, paths)
def determine_result(self, returncode, returnsignal, output, is_timeout):
if self._uses_propertyfile:
return self._determine_result_with_propertyfile(
returncode, returnsignal, output, is_timeout
)
return self._determine_result_without_propertyfile(
returncode, returnsignal, output, is_timeout
)
def _determine_result_without_propertyfile(
self, returncode, returnsignal, output, is_timeout
):
# special strings in ultimate output
treeautomizer_sat = "TreeAutomizerSatResult"
treeautomizer_unsat = "TreeAutomizerUnsatResult"
unsupported_syntax_errorstring = "ShortDescription: Unsupported Syntax"
incorrect_syntax_errorstring = "ShortDescription: Incorrect Syntax"
type_errorstring = "Type Error"
witness_errorstring = "InvalidWitnessErrorResult"
exception_errorstring = "ExceptionOrErrorResult"
safety_string = "Ultimate proved your program to be correct"
all_spec_string = "AllSpecificationsHoldResult"
unsafety_string = "Ultimate proved your program to be incorrect"
mem_deref_false_string = "pointer dereference may fail"
mem_deref_false_string_2 = "array index can be out of bounds"
mem_free_false_string = "free of unallocated memory possible"
mem_memtrack_false_string = "not all allocated memory was freed"
termination_false_string = (
"Found a nonterminating execution for the following "
"lasso shaped sequence of statements"
)
termination_true_string = "TerminationAnalysisResult: Termination proven"
ltl_false_string = "execution that violates the LTL property"
ltl_true_string = "Buchi Automizer proved that the LTL property"
overflow_false_string = "overflow possible"
for line in output:
if line.find(unsupported_syntax_errorstring) != -1:
return "ERROR: UNSUPPORTED SYNTAX"
if line.find(incorrect_syntax_errorstring) != -1:
return "ERROR: INCORRECT SYNTAX"
if line.find(type_errorstring) != -1:
return "ERROR: TYPE ERROR"
if line.find(witness_errorstring) != -1:
return "ERROR: INVALID WITNESS FILE"
if line.find(exception_errorstring) != -1:
return "ERROR: EXCEPTION"
if self._contains_overapproximation_result(line):
return "UNKNOWN: OverapproxCex"
if line.find(termination_false_string) != -1:
return result.RESULT_FALSE_TERMINATION
if line.find(termination_true_string) != -1:
return result.RESULT_TRUE_PROP
if line.find(ltl_false_string) != -1:
return "FALSE(valid-ltl)"
if line.find(ltl_true_string) != -1:
return result.RESULT_TRUE_PROP
if line.find(unsafety_string) != -1:
return result.RESULT_FALSE_REACH
if line.find(mem_deref_false_string) != -1:
return result.RESULT_FALSE_DEREF
if line.find(mem_deref_false_string_2) != -1:
return result.RESULT_FALSE_DEREF
if line.find(mem_free_false_string) != -1:
return result.RESULT_FALSE_FREE
if line.find(mem_memtrack_false_string) != -1:
return result.RESULT_FALSE_MEMTRACK
if line.find(overflow_false_string) != -1:
return result.RESULT_FALSE_OVERFLOW
if line.find(safety_string) != -1 or line.find(all_spec_string) != -1:
return result.RESULT_TRUE_PROP
if line.find(treeautomizer_unsat) != -1:
return "unsat"
if line.find(treeautomizer_sat) != -1 or line.find(all_spec_string) != -1:
return "sat"
return result.RESULT_UNKNOWN
def _contains_overapproximation_result(self, line):
triggers = [
"Reason: overapproximation of",
"Reason: overapproximation of bitwiseAnd",
"Reason: overapproximation of bitwiseOr",
"Reason: overapproximation of bitwiseXor",
"Reason: overapproximation of shiftLeft",
"Reason: overapproximation of shiftRight",
"Reason: overapproximation of bitwiseComplement",
]
for trigger in triggers:
if line.find(trigger) != -1:
return True
return False
def _determine_result_with_propertyfile(
self, returncode, returnsignal, output, is_timeout
):
for line in output:
if line.startswith("FALSE(valid-free)"):
return result.RESULT_FALSE_FREE
elif line.startswith("FALSE(valid-deref)"):
return result.RESULT_FALSE_DEREF
elif line.startswith("FALSE(valid-memtrack)"):
return result.RESULT_FALSE_MEMTRACK
elif line.startswith("FALSE(valid-memcleanup)"):
return result.RESULT_FALSE_MEMCLEANUP
elif line.startswith("FALSE(TERM)"):
return result.RESULT_FALSE_TERMINATION
elif line.startswith("FALSE(OVERFLOW)"):
return result.RESULT_FALSE_OVERFLOW
elif line.startswith("FALSE"):
return result.RESULT_FALSE_REACH
elif line.startswith("TRUE"):
return result.RESULT_TRUE_PROP
elif line.startswith("UNKNOWN"):
return result.RESULT_UNKNOWN
elif line.startswith("ERROR"):
status = result.RESULT_ERROR
if line.startswith("ERROR: INVALID WITNESS FILE"):
status += " (invalid witness file)"
return status
return result.RESULT_UNKNOWN
def get_value_from_output(self, lines, identifier):
# search for the text in output and get its value,
# stop after the first line, that contains the searched text
for line in lines:
if identifier in line:
start_position = line.find("=") + 1
return line[start_position:].strip()
return None
@functools.lru_cache(maxsize=1)
def get_java(self):
candidates = [
"java",
"/usr/bin/java",
"/opt/oracle-jdk-bin-1.8.0.202/bin/java",
"/usr/lib/jvm/java-8-openjdk-amd64/bin/java",
]
for c in candidates:
candidate = self.which(c)
if not candidate:
continue
try:
process = subprocess.Popen(
[candidate, "-version"],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
(stdout, stderr) = process.communicate()
except OSError as e:
continue
stdout = util.decode_to_string(stdout).strip()
if not stdout:
continue
if "1.8" in stdout:
return candidate
raise BenchExecException(
"Could not find a suitable Java version: Need Java 1.8"
)
def which(self, program):
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
| 37.431953 | 101 | 0.566077 |
import functools
import logging
import os
import re
import subprocess
import sys
import benchexec.result as result
import benchexec.tools.template
import benchexec.util as util
from benchexec import BenchExecException
from benchexec.model import MEMLIMIT
from benchexec.tools.template import UnsupportedFeatureException
_OPTION_NO_WRAPPER = "--force-no-wrapper"
_SVCOMP17_VERSIONS = {"f7c3ed31"}
_SVCOMP17_FORBIDDEN_FLAGS = {"--full-output", "--architecture"}
_ULTIMATE_VERSION_REGEX = re.compile(r"^Version is (.*)$", re.MULTILINE)
_LAUNCHER_JARS = ["plugins/org.eclipse.equinox.launcher_1.3.100.v20150511-1540.jar"]
class UltimateTool(benchexec.tools.template.BaseTool):
REQUIRED_PATHS = [
"artifacts.xml",
"config",
"configuration",
"cvc4",
"cvc4nyu",
"cvc4-LICENSE",
"features",
"LICENSE",
"LICENSE.GPL",
"LICENSE.GPL.LESSER",
"mathsat",
"mathsat-LICENSE",
"p2",
"plugins",
"README",
"Ultimate",
"Ultimate.ini",
"Ultimate.py",
"z3",
"z3-LICENSE",
]
REQUIRED_PATHS_SVCOMP17 = []
def __init__(self):
self._uses_propertyfile = False
@functools.lru_cache()
def executable(self):
exe = util.find_executable("Ultimate.py")
for (dirpath, dirnames, filenames) in os.walk(exe):
if "Ultimate" in filenames and "plugins" in dirnames:
return exe
break
current = os.getcwd()
for (dirpath, dirnames, filenames) in os.walk(current):
if (
"Ultimate" in filenames
and "Ultimate.py" in filenames
and "plugins" in dirnames
):
return "./Ultimate.py"
break
sys.exit(
"ERROR: Could not find Ultimate executable in '{0}' or '{1}'".format(
str(exe), str(current)
)
)
def _ultimate_version(self, executable):
data_dir = os.path.join(os.path.dirname(executable), "data")
launcher_jar = self._get_current_launcher_jar(executable)
cmds = [
[
self.get_java(),
"-Xss4m",
"-jar",
launcher_jar,
"-data",
"@noDefault",
"-ultimatedata",
data_dir,
"--version",
],
[
self.get_java(),
"-Xss4m",
"-jar",
launcher_jar,
"-data",
data_dir,
"--version",
],
]
self.api = len(cmds)
for cmd in cmds:
version = self._query_ultimate_version(cmd, self.api)
if version != "":
return version
self.api = self.api - 1
raise BenchExecException("Could not determine Ultimate version")
def _query_ultimate_version(self, cmd, api):
try:
process = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
(stdout, stderr) = process.communicate()
except OSError as e:
logging.warning(
"Cannot run Java to determine Ultimate version (API %s): %s",
api,
e.strerror,
)
return ""
stdout = util.decode_to_string(stdout).strip()
if stderr or process.returncode:
logging.warning(
"Cannot determine Ultimate version (API %s).\n"
"Command was: %s\n"
"Exit code: %s\n"
"Error output: %s\n"
"Standard output: %s",
api,
" ".join(map(util.escape_string_shell, cmd)),
process.returncode,
util.decode_to_string(stderr),
stdout,
)
return ""
version_ultimate_match = _ULTIMATE_VERSION_REGEX.search(stdout)
if not version_ultimate_match:
logging.warning(
"Cannot determine Ultimate version (API %s), output was: %s",
api,
stdout,
)
return ""
return version_ultimate_match.group(1)
@functools.lru_cache()
def _get_current_launcher_jar(self, executable):
ultimatedir = os.path.dirname(executable)
for jar in _LAUNCHER_JARS:
launcher_jar = os.path.join(ultimatedir, jar)
if os.path.isfile(launcher_jar):
return launcher_jar
raise FileNotFoundError(
"No suitable launcher jar found in {0}".format(ultimatedir)
)
@functools.lru_cache()
def version(self, executable):
wrapper_version = self._version_from_tool(executable)
if wrapper_version in _SVCOMP17_VERSIONS:
return wrapper_version
ultimate_version = self._ultimate_version(executable)
return ultimate_version + "-" + wrapper_version
@functools.lru_cache()
def _is_svcomp17_version(self, executable):
return self.version(executable) in _SVCOMP17_VERSIONS
@functools.lru_cache()
def _requires_ultimate_data(self, executable):
if self._is_svcomp17_version(executable):
return False
version = self.version(executable)
ult, wrapper = version.split("-")
major, minor, patch = ult.split(".")
return not (int(major) == 0 and int(minor) < 2 and int(patch) < 24)
def cmdline(self, executable, options, tasks, propertyfile=None, rlimits=None):
if rlimits is None:
rlimits = {}
self._uses_propertyfile = propertyfile is not None
if _OPTION_NO_WRAPPER in options:
self._uses_propertyfile = False
propertyfile = None
options.remove(_OPTION_NO_WRAPPER)
if self._is_svcomp17_version(executable):
assert propertyfile
cmdline = [executable, propertyfile]
cmdline += [
option for option in options if option not in _SVCOMP17_FORBIDDEN_FLAGS
]
cmdline.append("--full-output")
cmdline += tasks
self.__assert_cmdline(
cmdline,
"cmdline contains empty or None argument when using SVCOMP17 mode: ",
)
return cmdline
if self._uses_propertyfile:
cmdline = [executable, "--spec", propertyfile]
if tasks:
cmdline += ["--file"] + tasks
cmdline += options
self.__assert_cmdline(
cmdline,
"cmdline contains empty or None argument when using default SVCOMP mode: ",
)
return cmdline
if "-tc" in options or "--toolchain" in options:
mem_bytes = rlimits.get(MEMLIMIT, None)
cmdline = [self.get_java()]
if "-ea" in options:
options = [e for e in options if e != "-ea"]
cmdline += ["-ea"]
if mem_bytes:
cmdline += ["-Xmx" + str(mem_bytes)]
cmdline += ["-Xss4m"]
cmdline += ["-jar", self._get_current_launcher_jar(executable)]
if self._requires_ultimate_data(executable):
if "-ultimatedata" not in options and "-data" not in options:
if self.api == 2:
cmdline += [
"-data",
"@noDefault",
"-ultimatedata",
os.path.join(os.path.dirname(executable), "data"),
]
if self.api == 1:
raise ValueError(
"Illegal option -ultimatedata for API {} and Ultimate version {}".format(
self.api, self.version(executable)
)
)
elif "-ultimatedata" in options and "-data" not in options:
if self.api == 2:
cmdline += ["-data", "@noDefault"]
if self.api == 1:
raise ValueError(
"Illegal option -ultimatedata for API {} and Ultimate version {}".format(
self.api, self.version(executable)
)
)
else:
if "-data" not in options:
if self.api == 2 or self.api == 1:
cmdline += [
"-data",
os.path.join(os.path.dirname(executable), "data"),
]
cmdline += options
if tasks:
cmdline += ["-i"] + tasks
self.__assert_cmdline(
cmdline,
"cmdline contains empty or None argument when using Ultimate raw mode: ",
)
return cmdline
raise UnsupportedFeatureException(
"Unsupported argument combination: options={} propertyfile={} rlimits={}".format(
options, propertyfile, rlimits
)
)
def __assert_cmdline(self, cmdline, msg):
assert all(cmdline), msg + str(cmdline)
pass
def program_files(self, executable):
paths = (
self.REQUIRED_PATHS_SVCOMP17
if self._is_svcomp17_version(executable)
else self.REQUIRED_PATHS
)
return [executable] + self._program_files_from_executable(executable, paths)
def determine_result(self, returncode, returnsignal, output, is_timeout):
if self._uses_propertyfile:
return self._determine_result_with_propertyfile(
returncode, returnsignal, output, is_timeout
)
return self._determine_result_without_propertyfile(
returncode, returnsignal, output, is_timeout
)
def _determine_result_without_propertyfile(
self, returncode, returnsignal, output, is_timeout
):
treeautomizer_sat = "TreeAutomizerSatResult"
treeautomizer_unsat = "TreeAutomizerUnsatResult"
unsupported_syntax_errorstring = "ShortDescription: Unsupported Syntax"
incorrect_syntax_errorstring = "ShortDescription: Incorrect Syntax"
type_errorstring = "Type Error"
witness_errorstring = "InvalidWitnessErrorResult"
exception_errorstring = "ExceptionOrErrorResult"
safety_string = "Ultimate proved your program to be correct"
all_spec_string = "AllSpecificationsHoldResult"
unsafety_string = "Ultimate proved your program to be incorrect"
mem_deref_false_string = "pointer dereference may fail"
mem_deref_false_string_2 = "array index can be out of bounds"
mem_free_false_string = "free of unallocated memory possible"
mem_memtrack_false_string = "not all allocated memory was freed"
termination_false_string = (
"Found a nonterminating execution for the following "
"lasso shaped sequence of statements"
)
termination_true_string = "TerminationAnalysisResult: Termination proven"
ltl_false_string = "execution that violates the LTL property"
ltl_true_string = "Buchi Automizer proved that the LTL property"
overflow_false_string = "overflow possible"
for line in output:
if line.find(unsupported_syntax_errorstring) != -1:
return "ERROR: UNSUPPORTED SYNTAX"
if line.find(incorrect_syntax_errorstring) != -1:
return "ERROR: INCORRECT SYNTAX"
if line.find(type_errorstring) != -1:
return "ERROR: TYPE ERROR"
if line.find(witness_errorstring) != -1:
return "ERROR: INVALID WITNESS FILE"
if line.find(exception_errorstring) != -1:
return "ERROR: EXCEPTION"
if self._contains_overapproximation_result(line):
return "UNKNOWN: OverapproxCex"
if line.find(termination_false_string) != -1:
return result.RESULT_FALSE_TERMINATION
if line.find(termination_true_string) != -1:
return result.RESULT_TRUE_PROP
if line.find(ltl_false_string) != -1:
return "FALSE(valid-ltl)"
if line.find(ltl_true_string) != -1:
return result.RESULT_TRUE_PROP
if line.find(unsafety_string) != -1:
return result.RESULT_FALSE_REACH
if line.find(mem_deref_false_string) != -1:
return result.RESULT_FALSE_DEREF
if line.find(mem_deref_false_string_2) != -1:
return result.RESULT_FALSE_DEREF
if line.find(mem_free_false_string) != -1:
return result.RESULT_FALSE_FREE
if line.find(mem_memtrack_false_string) != -1:
return result.RESULT_FALSE_MEMTRACK
if line.find(overflow_false_string) != -1:
return result.RESULT_FALSE_OVERFLOW
if line.find(safety_string) != -1 or line.find(all_spec_string) != -1:
return result.RESULT_TRUE_PROP
if line.find(treeautomizer_unsat) != -1:
return "unsat"
if line.find(treeautomizer_sat) != -1 or line.find(all_spec_string) != -1:
return "sat"
return result.RESULT_UNKNOWN
def _contains_overapproximation_result(self, line):
triggers = [
"Reason: overapproximation of",
"Reason: overapproximation of bitwiseAnd",
"Reason: overapproximation of bitwiseOr",
"Reason: overapproximation of bitwiseXor",
"Reason: overapproximation of shiftLeft",
"Reason: overapproximation of shiftRight",
"Reason: overapproximation of bitwiseComplement",
]
for trigger in triggers:
if line.find(trigger) != -1:
return True
return False
def _determine_result_with_propertyfile(
self, returncode, returnsignal, output, is_timeout
):
for line in output:
if line.startswith("FALSE(valid-free)"):
return result.RESULT_FALSE_FREE
elif line.startswith("FALSE(valid-deref)"):
return result.RESULT_FALSE_DEREF
elif line.startswith("FALSE(valid-memtrack)"):
return result.RESULT_FALSE_MEMTRACK
elif line.startswith("FALSE(valid-memcleanup)"):
return result.RESULT_FALSE_MEMCLEANUP
elif line.startswith("FALSE(TERM)"):
return result.RESULT_FALSE_TERMINATION
elif line.startswith("FALSE(OVERFLOW)"):
return result.RESULT_FALSE_OVERFLOW
elif line.startswith("FALSE"):
return result.RESULT_FALSE_REACH
elif line.startswith("TRUE"):
return result.RESULT_TRUE_PROP
elif line.startswith("UNKNOWN"):
return result.RESULT_UNKNOWN
elif line.startswith("ERROR"):
status = result.RESULT_ERROR
if line.startswith("ERROR: INVALID WITNESS FILE"):
status += " (invalid witness file)"
return status
return result.RESULT_UNKNOWN
def get_value_from_output(self, lines, identifier):
for line in lines:
if identifier in line:
start_position = line.find("=") + 1
return line[start_position:].strip()
return None
@functools.lru_cache(maxsize=1)
def get_java(self):
candidates = [
"java",
"/usr/bin/java",
"/opt/oracle-jdk-bin-1.8.0.202/bin/java",
"/usr/lib/jvm/java-8-openjdk-amd64/bin/java",
]
for c in candidates:
candidate = self.which(c)
if not candidate:
continue
try:
process = subprocess.Popen(
[candidate, "-version"],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
(stdout, stderr) = process.communicate()
except OSError as e:
continue
stdout = util.decode_to_string(stdout).strip()
if not stdout:
continue
if "1.8" in stdout:
return candidate
raise BenchExecException(
"Could not find a suitable Java version: Need Java 1.8"
)
def which(self, program):
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
| true | true |
f72beded7ef5a032d409d3786dee6fcc951198a6 | 18,914 | py | Python | site-packages/neutronclient/osc/v2/fwaas/firewallrule.py | hariza17/freezer_libraries | e0bd890eba5e7438976fb3b4d66c41c128bab790 | [
"PSF-2.0"
] | null | null | null | site-packages/neutronclient/osc/v2/fwaas/firewallrule.py | hariza17/freezer_libraries | e0bd890eba5e7438976fb3b4d66c41c128bab790 | [
"PSF-2.0"
] | null | null | null | site-packages/neutronclient/osc/v2/fwaas/firewallrule.py | hariza17/freezer_libraries | e0bd890eba5e7438976fb3b4d66c41c128bab790 | [
"PSF-2.0"
] | null | null | null | # Copyright 2016-2017 FUJITSU LIMITED
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
import logging
from cliff import columns as cliff_columns
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils
from osc_lib.utils import columns as column_util
from neutronclient._i18n import _
from neutronclient.common import utils as nc_utils
from neutronclient.osc import utils as osc_utils
from neutronclient.osc.v2.fwaas import constants as const
LOG = logging.getLogger(__name__)
# Maps firewall-rule API attribute names to display column headers.  The
# third element tells osc_lib's column_util in which listings the column
# appears (short output, long output, or both).
_attr_map = (
    ('id', 'ID', column_util.LIST_BOTH),
    ('name', 'Name', column_util.LIST_BOTH),
    ('enabled', 'Enabled', column_util.LIST_BOTH),
    ('summary', 'Summary', column_util.LIST_SHORT_ONLY),
    ('description', 'Description', column_util.LIST_LONG_ONLY),
    ('ip_version', 'IP Version', column_util.LIST_LONG_ONLY),
    ('action', 'Action', column_util.LIST_LONG_ONLY),
    ('protocol', 'Protocol', column_util.LIST_LONG_ONLY),
    ('source_ip_address', 'Source IP Address', column_util.LIST_LONG_ONLY),
    ('source_port', 'Source Port', column_util.LIST_LONG_ONLY),
    ('destination_ip_address', 'Destination IP Address',
     column_util.LIST_LONG_ONLY),
    ('destination_port', 'Destination Port', column_util.LIST_LONG_ONLY),
    ('shared', 'Shared', column_util.LIST_LONG_ONLY),
    ('tenant_id', 'Project', column_util.LIST_LONG_ONLY),
    ('source_firewall_group_id', 'Source Firewall Group ID',
     column_util.LIST_LONG_ONLY),
    ('destination_firewall_group_id', 'Destination Firewall Group ID',
     column_util.LIST_LONG_ONLY),
)
def _get_common_parser(parser):
    """Add the firewall-rule options shared by the create and set commands.

    Most settable attributes come in a mutually exclusive pair with a
    ``--no-...`` counterpart, so one invocation can either assign a value
    or detach it, but never both.

    :param parser: the argparse (sub)parser to extend
    :returns: the same parser, for chaining
    """
    parser.add_argument(
        '--name',
        metavar='<name>',
        help=_('Name of the firewall rule'))
    parser.add_argument(
        '--description',
        metavar='<description>',
        help=_('Description of the firewall rule'))
    parser.add_argument(
        '--protocol',
        choices=['tcp', 'udp', 'icmp', 'any'],
        type=nc_utils.convert_to_lowercase,
        help=_('Protocol for the firewall rule'))
    parser.add_argument(
        '--action',
        choices=['allow', 'deny', 'reject'],
        type=nc_utils.convert_to_lowercase,
        help=_('Action for the firewall rule'))
    parser.add_argument(
        '--ip-version',
        metavar='<ip-version>',
        choices=['4', '6'],
        help=_('Set IP version 4 or 6 (default is 4)'))
    # Source/destination address and port: value vs. explicit detach.
    src_ip_group = parser.add_mutually_exclusive_group()
    src_ip_group.add_argument(
        '--source-ip-address',
        metavar='<source-ip-address>',
        help=_('Source IP address or subnet'))
    src_ip_group.add_argument(
        '--no-source-ip-address',
        action='store_true',
        help=_('Detach source IP address'))
    dst_ip_group = parser.add_mutually_exclusive_group()
    dst_ip_group.add_argument(
        '--destination-ip-address',
        metavar='<destination-ip-address>',
        help=_('Destination IP address or subnet'))
    dst_ip_group.add_argument(
        '--no-destination-ip-address',
        action='store_true',
        help=_('Detach destination IP address'))
    src_port_group = parser.add_mutually_exclusive_group()
    src_port_group.add_argument(
        '--source-port',
        metavar='<source-port>',
        help=_('Source port number or range'
               '(integer in [1, 65535] or range like 123:456)'))
    src_port_group.add_argument(
        '--no-source-port',
        action='store_true',
        help=_('Detach source port number or range'))
    dst_port_group = parser.add_mutually_exclusive_group()
    dst_port_group.add_argument(
        '--destination-port',
        metavar='<destination-port>',
        help=_('Destination port number or range'
               '(integer in [1, 65535] or range like 123:456)'))
    dst_port_group.add_argument(
        '--no-destination-port',
        action='store_true',
        help=_('Detach destination port number or range'))
    # Sharing: --public/--private are the deprecated spellings of
    # --share/--no-share; all four are mutually exclusive.
    shared_group = parser.add_mutually_exclusive_group()
    shared_group.add_argument(
        '--public',
        action='store_true',
        help=_('Make the firewall policy public, which allows it to be '
               'used in all projects (as opposed to the default, '
               'which is to restrict its use to the current project). '
               'This option is deprecated and would be removed in R Release'))
    shared_group.add_argument(
        '--private',
        action='store_true',
        help=_(
            'Restrict use of the firewall rule to the current project.'
            'This option is deprecated and would be removed in R release.'))
    shared_group.add_argument(
        '--share',
        action='store_true',
        help=_('Share the firewall rule to be used in all projects '
               '(by default, it is restricted to be used by the '
               'current project).'))
    shared_group.add_argument(
        '--no-share',
        action='store_true',
        help=_('Restrict use of the firewall rule to the current project'))
    enable_group = parser.add_mutually_exclusive_group()
    enable_group.add_argument(
        '--enable-rule',
        action='store_true',
        help=_('Enable this rule (default is enabled)'))
    enable_group.add_argument(
        '--disable-rule',
        action='store_true',
        help=_('Disable this rule'))
    # Firewall-group association: name/ID vs. explicit detach.
    src_fwg_group = parser.add_mutually_exclusive_group()
    src_fwg_group.add_argument(
        '--source-firewall-group',
        metavar='<source-firewall-group>',
        help=_('Source firewall group (name or ID)'))
    src_fwg_group.add_argument(
        '--no-source-firewall-group',
        action='store_true',
        help=_('No associated destination firewall group'))
    dst_fwg_group = parser.add_mutually_exclusive_group()
    dst_fwg_group.add_argument(
        '--destination-firewall-group',
        metavar='<destination-firewall-group>',
        help=_('Destination firewall group (name or ID)'))
    dst_fwg_group.add_argument(
        '--no-destination-firewall-group',
        action='store_true',
        help=_('No associated destination firewall group'))
    return parser
def _get_common_attrs(client_manager, parsed_args, is_create=True):
    """Build the API request attributes from the parsed CLI arguments.

    Only flags the user actually supplied end up in the returned dict, so
    an update never clobbers unrelated fields.  Each ``--no-...`` flag maps
    to an explicit ``None`` (detach), and firewall-group names are resolved
    to IDs via the neutron client.

    :param client_manager: OSC client manager (provides identity + neutron)
    :param parsed_args: argparse namespace from the command parser
    :param is_create: when True, also resolve ``--project`` to a tenant id
    :returns: dict of firewall-rule attributes for the API body
    """
    attrs = {}
    client = client_manager.neutronclient
    if is_create:
        if 'project' in parsed_args and parsed_args.project is not None:
            attrs['tenant_id'] = osc_utils.find_project(
                client_manager.identity,
                parsed_args.project,
                parsed_args.project_domain,
            ).id
    if parsed_args.name:
        attrs['name'] = str(parsed_args.name)
    if parsed_args.description:
        attrs['description'] = str(parsed_args.description)
    if parsed_args.protocol:
        protocol = parsed_args.protocol
        # The API represents "any protocol" as null, not the string 'any'.
        attrs['protocol'] = None if protocol == 'any' else protocol
    if parsed_args.action:
        attrs['action'] = parsed_args.action
    if parsed_args.ip_version:
        attrs['ip_version'] = str(parsed_args.ip_version)
    if parsed_args.source_port:
        attrs['source_port'] = parsed_args.source_port
    if parsed_args.no_source_port:
        attrs['source_port'] = None
    if parsed_args.source_ip_address:
        attrs['source_ip_address'] = parsed_args.source_ip_address
    if parsed_args.no_source_ip_address:
        attrs['source_ip_address'] = None
    if parsed_args.destination_port:
        attrs['destination_port'] = str(parsed_args.destination_port)
    if parsed_args.no_destination_port:
        attrs['destination_port'] = None
    if parsed_args.destination_ip_address:
        attrs['destination_ip_address'] = str(
            parsed_args.destination_ip_address)
    if parsed_args.no_destination_ip_address:
        attrs['destination_ip_address'] = None
    if parsed_args.enable_rule:
        attrs['enabled'] = True
    if parsed_args.disable_rule:
        attrs['enabled'] = False
    # --public/--private are the deprecated aliases of --share/--no-share.
    if parsed_args.share or parsed_args.public:
        attrs['shared'] = True
    if parsed_args.no_share or parsed_args.private:
        attrs['shared'] = False
    if parsed_args.source_firewall_group:
        attrs['source_firewall_group_id'] = client.find_resource(
            const.FWG, parsed_args.source_firewall_group,
            cmd_resource=const.CMD_FWG)['id']
    if parsed_args.no_source_firewall_group:
        attrs['source_firewall_group_id'] = None
    if parsed_args.destination_firewall_group:
        attrs['destination_firewall_group_id'] = client.find_resource(
            const.FWG, parsed_args.destination_firewall_group,
            cmd_resource=const.CMD_FWG)['id']
    if parsed_args.no_destination_firewall_group:
        attrs['destination_firewall_group_id'] = None
    return attrs
class ProtocolColumn(cliff_columns.FormattableColumn):
    """Column formatter that renders a null protocol as 'any'."""

    def human_readable(self):
        # A falsy value (the API's null protocol) is displayed as 'any'.
        return self._value or 'any'
_formatters = {'protocol': ProtocolColumn}
class CreateFirewallRule(command.ShowOne):
    _description = _("Create a new firewall rule")
    def get_parser(self, prog_name):
        """Return the parser extended with the common rule options."""
        parser = super(CreateFirewallRule, self).get_parser(prog_name)
        _get_common_parser(parser)
        osc_utils.add_project_owner_option_to_parser(parser)
        return parser
    def take_action(self, parsed_args):
        """Create the rule via the neutron API and return it for display."""
        client = self.app.client_manager.neutronclient
        attrs = _get_common_attrs(self.app.client_manager, parsed_args)
        obj = client.create_fwaas_firewall_rule(
            {const.FWR: attrs})[const.FWR]
        columns, display_columns = column_util.get_columns(obj, _attr_map)
        data = utils.get_dict_properties(obj, columns, formatters=_formatters)
        return display_columns, data
class DeleteFirewallRule(command.Command):
    _description = _("Delete firewall rule(s)")
    def get_parser(self, prog_name):
        parser = super(DeleteFirewallRule, self).get_parser(prog_name)
        parser.add_argument(
            const.FWR,
            metavar='<firewall-rule>',
            nargs='+',
            help=_('Firewall rule(s) to delete (name or ID)'))
        return parser
    def take_action(self, parsed_args):
        """Delete each given rule; keep going on failure and report a
        summary error at the end so one bad rule does not abort the rest."""
        client = self.app.client_manager.neutronclient
        result = 0  # number of rules that failed to delete
        for fwr in parsed_args.firewall_rule:
            try:
                fwr_id = client.find_resource(
                    const.FWR, fwr, cmd_resource=const.CMD_FWR)['id']
                client.delete_fwaas_firewall_rule(fwr_id)
            except Exception as e:
                result += 1
                LOG.error(_("Failed to delete Firewall rule with "
                            "name or ID '%(firewall_rule)s': %(e)s"),
                          {const.FWR: fwr, 'e': e})
        if result > 0:
            total = len(parsed_args.firewall_rule)
            msg = (_("%(result)s of %(total)s firewall rule(s) failed "
                     "to delete.") % {'result': result, 'total': total})
            raise exceptions.CommandError(msg)
class ListFirewallRule(command.Lister):
    _description = _("List firewall rules that belong to a given tenant")
    def get_parser(self, prog_name):
        parser = super(ListFirewallRule, self).get_parser(prog_name)
        parser.add_argument(
            '--long',
            action='store_true',
            default=False,
            help=_("List additional fields in output")
        )
        return parser
    def extend_list(self, data, parsed_args):
        """Return a copy of *data* with a human-readable 'summary' field
        (protocol, source, destination, action) added to every rule.

        ``parsed_args`` is currently unused here — presumably kept for
        interface symmetry with other list commands; confirm before removing.
        """
        ext_data = copy.deepcopy(data)
        for d in ext_data:
            protocol = d['protocol'].upper() if d['protocol'] else 'ANY'
            src_ip = 'none specified'
            dst_ip = 'none specified'
            src_port = '(none specified)'
            dst_port = '(none specified)'
            if 'source_ip_address' in d and d['source_ip_address']:
                src_ip = str(d['source_ip_address']).lower()
            if 'source_port' in d and d['source_port']:
                src_port = '(' + str(d['source_port']).lower() + ')'
            if 'destination_ip_address' in d and d['destination_ip_address']:
                dst_ip = str(d['destination_ip_address']).lower()
            if 'destination_port' in d and d['destination_port']:
                dst_port = '(' + str(d['destination_port']).lower() + ')'
            action = d['action'] if d.get('action') else 'no-action'
            src = 'source(port): ' + src_ip + src_port
            dst = 'dest(port): ' + dst_ip + dst_port
            d['summary'] = ',\n '.join([protocol, src, dst, action])
        return ext_data
    def take_action(self, parsed_args):
        """Fetch all rules and render them through the attribute map."""
        client = self.app.client_manager.neutronclient
        obj = client.list_fwaas_firewall_rules()[const.FWRS]
        obj_extend = self.extend_list(obj, parsed_args)
        headers, columns = column_util.get_column_definitions(
            _attr_map, long_listing=parsed_args.long)
        return (headers, (utils.get_dict_properties(
            s, columns, formatters=_formatters) for s in obj_extend))
class SetFirewallRule(command.Command):
    _description = _("Set firewall rule properties")
    def get_parser(self, prog_name):
        parser = super(SetFirewallRule, self).get_parser(prog_name)
        _get_common_parser(parser)
        parser.add_argument(
            const.FWR,
            metavar='<firewall-rule>',
            help=_('Firewall rule to set (name or ID)'))
        return parser
    def take_action(self, parsed_args):
        """Resolve the rule to an ID and apply the requested attribute
        changes; wrap any API failure in a CommandError."""
        client = self.app.client_manager.neutronclient
        attrs = _get_common_attrs(self.app.client_manager,
                                  parsed_args, is_create=False)
        fwr_id = client.find_resource(
            const.FWR, parsed_args.firewall_rule,
            cmd_resource=const.CMD_FWR)['id']
        try:
            client.update_fwaas_firewall_rule(fwr_id, {const.FWR: attrs})
        except Exception as e:
            msg = (_("Failed to set firewall rule '%(rule)s': %(e)s")
                   % {'rule': parsed_args.firewall_rule, 'e': e})
            raise exceptions.CommandError(msg)
class ShowFirewallRule(command.ShowOne):
    _description = _("Display firewall rule details")
    def get_parser(self, prog_name):
        parser = super(ShowFirewallRule, self).get_parser(prog_name)
        parser.add_argument(
            const.FWR,
            metavar='<firewall-rule>',
            help=_('Firewall rule to display (name or ID)'))
        return parser
    def take_action(self, parsed_args):
        """Resolve the rule to an ID, fetch it and return it for display."""
        client = self.app.client_manager.neutronclient
        fwr_id = client.find_resource(
            const.FWR, parsed_args.firewall_rule,
            cmd_resource=const.CMD_FWR)['id']
        obj = client.show_fwaas_firewall_rule(fwr_id)[const.FWR]
        columns, display_columns = column_util.get_columns(obj, _attr_map)
        data = utils.get_dict_properties(obj, columns, formatters=_formatters)
        return (display_columns, data)
class UnsetFirewallRule(command.Command):
    """Reset individual firewall-rule properties to their detached state."""

    _description = _("Unset firewall rule properties")

    def get_parser(self, prog_name):
        parser = super(UnsetFirewallRule, self).get_parser(prog_name)
        parser.add_argument(
            const.FWR,
            metavar='<firewall-rule>',
            help=_('Firewall rule to unset (name or ID)'))
        parser.add_argument(
            '--source-ip-address',
            action='store_true',
            help=_('Source IP address or subnet'))
        parser.add_argument(
            '--destination-ip-address',
            action='store_true',
            help=_('Destination IP address or subnet'))
        parser.add_argument(
            '--source-port',
            action='store_true',
            help=_('Source port number or range'
                   '(integer in [1, 65535] or range like 123:456)'))
        parser.add_argument(
            '--destination-port',
            action='store_true',
            help=_('Destination port number or range'
                   '(integer in [1, 65535] or range like 123:456)'))
        parser.add_argument(
            '--share',
            action='store_true',
            help=_('Restrict use of the firewall rule to the current project'))
        parser.add_argument(
            '--public',
            action='store_true',
            help=_('Restrict use of the firewall rule to the current project. '
                   'This option is deprecated and would be removed in '
                   'R Release.'))
        parser.add_argument(
            '--enable-rule',
            action='store_true',
            help=_('Disable this rule'))
        parser.add_argument(
            '--source-firewall-group',
            action='store_true',
            help=_('Source firewall group (name or ID)'))
        parser.add_argument(
            '--destination-firewall-group',
            action='store_true',
            help=_('Destination firewall group (name or ID)'))
        return parser

    def _get_attrs(self, client_manager, parsed_args):
        """Map each requested unset flag to its explicit reset value."""
        attrs = {}
        if parsed_args.source_ip_address:
            attrs['source_ip_address'] = None
        if parsed_args.source_port:
            attrs['source_port'] = None
        if parsed_args.destination_ip_address:
            attrs['destination_ip_address'] = None
        if parsed_args.destination_port:
            attrs['destination_port'] = None
        if parsed_args.share or parsed_args.public:
            attrs['shared'] = False
        if parsed_args.enable_rule:
            attrs['enabled'] = False
        if parsed_args.source_firewall_group:
            attrs['source_firewall_group_id'] = None
        # BUG FIX: this previously re-tested ``source_firewall_group``
        # (copy-paste error), so ``--destination-firewall-group`` alone
        # never detached anything and ``--source-firewall-group`` detached
        # both associations.
        if parsed_args.destination_firewall_group:
            attrs['destination_firewall_group_id'] = None
        return attrs

    def take_action(self, parsed_args):
        """Resolve the rule to an ID and apply the unset attributes."""
        client = self.app.client_manager.neutronclient
        attrs = self._get_attrs(self.app.client_manager, parsed_args)
        fwr_id = client.find_resource(
            const.FWR, parsed_args.firewall_rule,
            cmd_resource=const.CMD_FWR)['id']
        try:
            client.update_fwaas_firewall_rule(fwr_id, {const.FWR: attrs})
        except Exception as e:
            msg = (_("Failed to unset firewall rule '%(rule)s': %(e)s")
                   % {'rule': parsed_args.firewall_rule, 'e': e})
            raise exceptions.CommandError(msg)
| 39.987315 | 79 | 0.635297 |
import copy
import logging
from cliff import columns as cliff_columns
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils
from osc_lib.utils import columns as column_util
from neutronclient._i18n import _
from neutronclient.common import utils as nc_utils
from neutronclient.osc import utils as osc_utils
from neutronclient.osc.v2.fwaas import constants as const
LOG = logging.getLogger(__name__)
_attr_map = (
('id', 'ID', column_util.LIST_BOTH),
('name', 'Name', column_util.LIST_BOTH),
('enabled', 'Enabled', column_util.LIST_BOTH),
('summary', 'Summary', column_util.LIST_SHORT_ONLY),
('description', 'Description', column_util.LIST_LONG_ONLY),
('ip_version', 'IP Version', column_util.LIST_LONG_ONLY),
('action', 'Action', column_util.LIST_LONG_ONLY),
('protocol', 'Protocol', column_util.LIST_LONG_ONLY),
('source_ip_address', 'Source IP Address', column_util.LIST_LONG_ONLY),
('source_port', 'Source Port', column_util.LIST_LONG_ONLY),
('destination_ip_address', 'Destination IP Address',
column_util.LIST_LONG_ONLY),
('destination_port', 'Destination Port', column_util.LIST_LONG_ONLY),
('shared', 'Shared', column_util.LIST_LONG_ONLY),
('tenant_id', 'Project', column_util.LIST_LONG_ONLY),
('source_firewall_group_id', 'Source Firewall Group ID',
column_util.LIST_LONG_ONLY),
('destination_firewall_group_id', 'Destination Firewall Group ID',
column_util.LIST_LONG_ONLY),
)
def _get_common_parser(parser):
parser.add_argument(
'--name',
metavar='<name>',
help=_('Name of the firewall rule'))
parser.add_argument(
'--description',
metavar='<description>',
help=_('Description of the firewall rule'))
parser.add_argument(
'--protocol',
choices=['tcp', 'udp', 'icmp', 'any'],
type=nc_utils.convert_to_lowercase,
help=_('Protocol for the firewall rule'))
parser.add_argument(
'--action',
choices=['allow', 'deny', 'reject'],
type=nc_utils.convert_to_lowercase,
help=_('Action for the firewall rule'))
parser.add_argument(
'--ip-version',
metavar='<ip-version>',
choices=['4', '6'],
help=_('Set IP version 4 or 6 (default is 4)'))
src_ip_group = parser.add_mutually_exclusive_group()
src_ip_group.add_argument(
'--source-ip-address',
metavar='<source-ip-address>',
help=_('Source IP address or subnet'))
src_ip_group.add_argument(
'--no-source-ip-address',
action='store_true',
help=_('Detach source IP address'))
dst_ip_group = parser.add_mutually_exclusive_group()
dst_ip_group.add_argument(
'--destination-ip-address',
metavar='<destination-ip-address>',
help=_('Destination IP address or subnet'))
dst_ip_group.add_argument(
'--no-destination-ip-address',
action='store_true',
help=_('Detach destination IP address'))
src_port_group = parser.add_mutually_exclusive_group()
src_port_group.add_argument(
'--source-port',
metavar='<source-port>',
help=_('Source port number or range'
'(integer in [1, 65535] or range like 123:456)'))
src_port_group.add_argument(
'--no-source-port',
action='store_true',
help=_('Detach source port number or range'))
dst_port_group = parser.add_mutually_exclusive_group()
dst_port_group.add_argument(
'--destination-port',
metavar='<destination-port>',
help=_('Destination port number or range'
'(integer in [1, 65535] or range like 123:456)'))
dst_port_group.add_argument(
'--no-destination-port',
action='store_true',
help=_('Detach destination port number or range'))
shared_group = parser.add_mutually_exclusive_group()
shared_group.add_argument(
'--public',
action='store_true',
help=_('Make the firewall policy public, which allows it to be '
'used in all projects (as opposed to the default, '
'which is to restrict its use to the current project). '
'This option is deprecated and would be removed in R Release'))
shared_group.add_argument(
'--private',
action='store_true',
help=_(
'Restrict use of the firewall rule to the current project.'
'This option is deprecated and would be removed in R release.'))
shared_group.add_argument(
'--share',
action='store_true',
help=_('Share the firewall rule to be used in all projects '
'(by default, it is restricted to be used by the '
'current project).'))
shared_group.add_argument(
'--no-share',
action='store_true',
help=_('Restrict use of the firewall rule to the current project'))
enable_group = parser.add_mutually_exclusive_group()
enable_group.add_argument(
'--enable-rule',
action='store_true',
help=_('Enable this rule (default is enabled)'))
enable_group.add_argument(
'--disable-rule',
action='store_true',
help=_('Disable this rule'))
src_fwg_group = parser.add_mutually_exclusive_group()
src_fwg_group.add_argument(
'--source-firewall-group',
metavar='<source-firewall-group>',
help=_('Source firewall group (name or ID)'))
src_fwg_group.add_argument(
'--no-source-firewall-group',
action='store_true',
help=_('No associated destination firewall group'))
dst_fwg_group = parser.add_mutually_exclusive_group()
dst_fwg_group.add_argument(
'--destination-firewall-group',
metavar='<destination-firewall-group>',
help=_('Destination firewall group (name or ID)'))
dst_fwg_group.add_argument(
'--no-destination-firewall-group',
action='store_true',
help=_('No associated destination firewall group'))
return parser
def _get_common_attrs(client_manager, parsed_args, is_create=True):
attrs = {}
client = client_manager.neutronclient
if is_create:
if 'project' in parsed_args and parsed_args.project is not None:
attrs['tenant_id'] = osc_utils.find_project(
client_manager.identity,
parsed_args.project,
parsed_args.project_domain,
).id
if parsed_args.name:
attrs['name'] = str(parsed_args.name)
if parsed_args.description:
attrs['description'] = str(parsed_args.description)
if parsed_args.protocol:
protocol = parsed_args.protocol
attrs['protocol'] = None if protocol == 'any' else protocol
if parsed_args.action:
attrs['action'] = parsed_args.action
if parsed_args.ip_version:
attrs['ip_version'] = str(parsed_args.ip_version)
if parsed_args.source_port:
attrs['source_port'] = parsed_args.source_port
if parsed_args.no_source_port:
attrs['source_port'] = None
if parsed_args.source_ip_address:
attrs['source_ip_address'] = parsed_args.source_ip_address
if parsed_args.no_source_ip_address:
attrs['source_ip_address'] = None
if parsed_args.destination_port:
attrs['destination_port'] = str(parsed_args.destination_port)
if parsed_args.no_destination_port:
attrs['destination_port'] = None
if parsed_args.destination_ip_address:
attrs['destination_ip_address'] = str(
parsed_args.destination_ip_address)
if parsed_args.no_destination_ip_address:
attrs['destination_ip_address'] = None
if parsed_args.enable_rule:
attrs['enabled'] = True
if parsed_args.disable_rule:
attrs['enabled'] = False
if parsed_args.share or parsed_args.public:
attrs['shared'] = True
if parsed_args.no_share or parsed_args.private:
attrs['shared'] = False
if parsed_args.source_firewall_group:
attrs['source_firewall_group_id'] = client.find_resource(
const.FWG, parsed_args.source_firewall_group,
cmd_resource=const.CMD_FWG)['id']
if parsed_args.no_source_firewall_group:
attrs['source_firewall_group_id'] = None
if parsed_args.destination_firewall_group:
attrs['destination_firewall_group_id'] = client.find_resource(
const.FWG, parsed_args.destination_firewall_group,
cmd_resource=const.CMD_FWG)['id']
if parsed_args.no_destination_firewall_group:
attrs['destination_firewall_group_id'] = None
return attrs
class ProtocolColumn(cliff_columns.FormattableColumn):
def human_readable(self):
return self._value if self._value else 'any'
_formatters = {'protocol': ProtocolColumn}
class CreateFirewallRule(command.ShowOne):
_description = _("Create a new firewall rule")
def get_parser(self, prog_name):
parser = super(CreateFirewallRule, self).get_parser(prog_name)
_get_common_parser(parser)
osc_utils.add_project_owner_option_to_parser(parser)
return parser
def take_action(self, parsed_args):
client = self.app.client_manager.neutronclient
attrs = _get_common_attrs(self.app.client_manager, parsed_args)
obj = client.create_fwaas_firewall_rule(
{const.FWR: attrs})[const.FWR]
columns, display_columns = column_util.get_columns(obj, _attr_map)
data = utils.get_dict_properties(obj, columns, formatters=_formatters)
return display_columns, data
class DeleteFirewallRule(command.Command):
_description = _("Delete firewall rule(s)")
def get_parser(self, prog_name):
parser = super(DeleteFirewallRule, self).get_parser(prog_name)
parser.add_argument(
const.FWR,
metavar='<firewall-rule>',
nargs='+',
help=_('Firewall rule(s) to delete (name or ID)'))
return parser
def take_action(self, parsed_args):
client = self.app.client_manager.neutronclient
result = 0
for fwr in parsed_args.firewall_rule:
try:
fwr_id = client.find_resource(
const.FWR, fwr, cmd_resource=const.CMD_FWR)['id']
client.delete_fwaas_firewall_rule(fwr_id)
except Exception as e:
result += 1
LOG.error(_("Failed to delete Firewall rule with "
"name or ID '%(firewall_rule)s': %(e)s"),
{const.FWR: fwr, 'e': e})
if result > 0:
total = len(parsed_args.firewall_rule)
msg = (_("%(result)s of %(total)s firewall rule(s) failed "
"to delete.") % {'result': result, 'total': total})
raise exceptions.CommandError(msg)
class ListFirewallRule(command.Lister):
_description = _("List firewall rules that belong to a given tenant")
def get_parser(self, prog_name):
parser = super(ListFirewallRule, self).get_parser(prog_name)
parser.add_argument(
'--long',
action='store_true',
default=False,
help=_("List additional fields in output")
)
return parser
def extend_list(self, data, parsed_args):
ext_data = copy.deepcopy(data)
for d in ext_data:
protocol = d['protocol'].upper() if d['protocol'] else 'ANY'
src_ip = 'none specified'
dst_ip = 'none specified'
src_port = '(none specified)'
dst_port = '(none specified)'
if 'source_ip_address' in d and d['source_ip_address']:
src_ip = str(d['source_ip_address']).lower()
if 'source_port' in d and d['source_port']:
src_port = '(' + str(d['source_port']).lower() + ')'
if 'destination_ip_address' in d and d['destination_ip_address']:
dst_ip = str(d['destination_ip_address']).lower()
if 'destination_port' in d and d['destination_port']:
dst_port = '(' + str(d['destination_port']).lower() + ')'
action = d['action'] if d.get('action') else 'no-action'
src = 'source(port): ' + src_ip + src_port
dst = 'dest(port): ' + dst_ip + dst_port
d['summary'] = ',\n '.join([protocol, src, dst, action])
return ext_data
def take_action(self, parsed_args):
client = self.app.client_manager.neutronclient
obj = client.list_fwaas_firewall_rules()[const.FWRS]
obj_extend = self.extend_list(obj, parsed_args)
headers, columns = column_util.get_column_definitions(
_attr_map, long_listing=parsed_args.long)
return (headers, (utils.get_dict_properties(
s, columns, formatters=_formatters) for s in obj_extend))
class SetFirewallRule(command.Command):
_description = _("Set firewall rule properties")
def get_parser(self, prog_name):
parser = super(SetFirewallRule, self).get_parser(prog_name)
_get_common_parser(parser)
parser.add_argument(
const.FWR,
metavar='<firewall-rule>',
help=_('Firewall rule to set (name or ID)'))
return parser
def take_action(self, parsed_args):
client = self.app.client_manager.neutronclient
attrs = _get_common_attrs(self.app.client_manager,
parsed_args, is_create=False)
fwr_id = client.find_resource(
const.FWR, parsed_args.firewall_rule,
cmd_resource=const.CMD_FWR)['id']
try:
client.update_fwaas_firewall_rule(fwr_id, {const.FWR: attrs})
except Exception as e:
msg = (_("Failed to set firewall rule '%(rule)s': %(e)s")
% {'rule': parsed_args.firewall_rule, 'e': e})
raise exceptions.CommandError(msg)
class ShowFirewallRule(command.ShowOne):
_description = _("Display firewall rule details")
def get_parser(self, prog_name):
parser = super(ShowFirewallRule, self).get_parser(prog_name)
parser.add_argument(
const.FWR,
metavar='<firewall-rule>',
help=_('Firewall rule to display (name or ID)'))
return parser
def take_action(self, parsed_args):
client = self.app.client_manager.neutronclient
fwr_id = client.find_resource(
const.FWR, parsed_args.firewall_rule,
cmd_resource=const.CMD_FWR)['id']
obj = client.show_fwaas_firewall_rule(fwr_id)[const.FWR]
columns, display_columns = column_util.get_columns(obj, _attr_map)
data = utils.get_dict_properties(obj, columns, formatters=_formatters)
return (display_columns, data)
class UnsetFirewallRule(command.Command):
    """Reset individual firewall-rule properties to their detached state."""

    _description = _("Unset firewall rule properties")

    def get_parser(self, prog_name):
        parser = super(UnsetFirewallRule, self).get_parser(prog_name)
        parser.add_argument(
            const.FWR,
            metavar='<firewall-rule>',
            help=_('Firewall rule to unset (name or ID)'))
        parser.add_argument(
            '--source-ip-address',
            action='store_true',
            help=_('Source IP address or subnet'))
        parser.add_argument(
            '--destination-ip-address',
            action='store_true',
            help=_('Destination IP address or subnet'))
        parser.add_argument(
            '--source-port',
            action='store_true',
            help=_('Source port number or range'
                   '(integer in [1, 65535] or range like 123:456)'))
        parser.add_argument(
            '--destination-port',
            action='store_true',
            help=_('Destination port number or range'
                   '(integer in [1, 65535] or range like 123:456)'))
        parser.add_argument(
            '--share',
            action='store_true',
            help=_('Restrict use of the firewall rule to the current project'))
        parser.add_argument(
            '--public',
            action='store_true',
            help=_('Restrict use of the firewall rule to the current project. '
                   'This option is deprecated and would be removed in '
                   'R Release.'))
        parser.add_argument(
            '--enable-rule',
            action='store_true',
            help=_('Disable this rule'))
        parser.add_argument(
            '--source-firewall-group',
            action='store_true',
            help=_('Source firewall group (name or ID)'))
        parser.add_argument(
            '--destination-firewall-group',
            action='store_true',
            help=_('Destination firewall group (name or ID)'))
        return parser

    def _get_attrs(self, client_manager, parsed_args):
        """Map each requested unset flag to its explicit reset value."""
        attrs = {}
        if parsed_args.source_ip_address:
            attrs['source_ip_address'] = None
        if parsed_args.source_port:
            attrs['source_port'] = None
        if parsed_args.destination_ip_address:
            attrs['destination_ip_address'] = None
        if parsed_args.destination_port:
            attrs['destination_port'] = None
        if parsed_args.share or parsed_args.public:
            attrs['shared'] = False
        if parsed_args.enable_rule:
            attrs['enabled'] = False
        if parsed_args.source_firewall_group:
            attrs['source_firewall_group_id'] = None
        # BUG FIX: this previously re-tested ``source_firewall_group``
        # (copy-paste error), so ``--destination-firewall-group`` alone
        # never detached anything and ``--source-firewall-group`` detached
        # both associations.
        if parsed_args.destination_firewall_group:
            attrs['destination_firewall_group_id'] = None
        return attrs

    def take_action(self, parsed_args):
        """Resolve the rule to an ID and apply the unset attributes."""
        client = self.app.client_manager.neutronclient
        attrs = self._get_attrs(self.app.client_manager, parsed_args)
        fwr_id = client.find_resource(
            const.FWR, parsed_args.firewall_rule,
            cmd_resource=const.CMD_FWR)['id']
        try:
            client.update_fwaas_firewall_rule(fwr_id, {const.FWR: attrs})
        except Exception as e:
            msg = (_("Failed to unset firewall rule '%(rule)s': %(e)s")
                   % {'rule': parsed_args.firewall_rule, 'e': e})
            raise exceptions.CommandError(msg)
| true | true |
f72bee82c057e8b1b9e1dcbe3c8f34b55a9c0a95 | 3,518 | py | Python | django_libs/test_email_backend.py | Reston/django-libs | 8c44a0851e3be564a100df50d257c1ce5b30dc25 | [
"MIT"
] | null | null | null | django_libs/test_email_backend.py | Reston/django-libs | 8c44a0851e3be564a100df50d257c1ce5b30dc25 | [
"MIT"
] | null | null | null | django_libs/test_email_backend.py | Reston/django-libs | 8c44a0851e3be564a100df50d257c1ce5b30dc25 | [
"MIT"
] | 1 | 2020-01-09T10:23:13.000Z | 2020-01-09T10:23:13.000Z | """Custom email backend for testing the project."""
import re
from django.core.mail.backends.smtp import EmailBackend as SmtpEmailBackend
from django.core.mail.message import sanitize_address
from . import default_settings as settings
class EmailBackend(SmtpEmailBackend):
    """
    Email backend that sends all emails to a defined address, no matter what
    the recipient really is.

    In order to use it, set this in your local_settings.py::

        EMAIL_BACKEND = 'django_libs.test_email_backend.EmailBackend'
        TEST_EMAIL_BACKEND_RECIPIENTS = (
            ('Name', 'email@gmail.com'),
        )

    """
    def _send(self, email_message):
        """A helper method that does the actual sending.

        Returns True when the message was handed to the SMTP connection,
        False when there was nothing to send or sending failed silently.
        """
        # Without real recipients or a configured override list there is
        # nothing sensible to send.
        if not email_message.recipients() or \
                not settings.TEST_EMAIL_BACKEND_RECIPIENTS:
            return False
        from_email = sanitize_address(
            email_message.from_email, email_message.encoding)
        # The message's own recipients are intentionally discarded; mail is
        # rerouted to the configured test addresses only.
        recipients = [sanitize_address(addr, email_message.encoding)
                      for name, addr in settings.TEST_EMAIL_BACKEND_RECIPIENTS]
        try:
            self.connection.sendmail(
                from_email, recipients, email_message.message().as_string())
        except Exception:
            # Was a bare ``except:``, which would also swallow SystemExit and
            # KeyboardInterrupt; only real errors should honor fail_silently.
            if not self.fail_silently:
                raise
            return False
        return True
class WhitelistEmailBackend(SmtpEmailBackend):
    """
    Email backend that sends only these emails, that match the whitelist
    setting.

    In order to use it, set this in your local_settings.py::

        EMAIL_BACKEND = 'django_libs.test_email_backend.WhitelistEmailBackend'
        EMAIL_BACKEND_WHITELIST = [
            r'.*@example\.com',
        ]

    This setting would allow all emails to @example.com to be sent and all
    others are discarded. The setting expects regex, so better test it before
    adding it here to prevent errors.

    If the setting does not exist, no emails are sent at all.

    """
    def _send(self, email_message):
        """A helper method that does the actual sending."""
        from_email = sanitize_address(
            email_message.from_email, email_message.encoding)
        recipients = self.clean_recipients(email_message)
        # All recipients filtered out: silently drop the message.
        if not recipients:
            return False
        try:
            self.connection.sendmail(
                from_email, recipients, email_message.message().as_string())
        except Exception:
            # Was a bare ``except:``; narrowed so SystemExit and
            # KeyboardInterrupt are never swallowed by fail_silently.
            if not self.fail_silently:
                raise
            return False
        return True

    def clean_recipients(self, email_message):
        """Removes all the unallowed recipients.

        Whitelisted addresses are kept; non-whitelisted ones are either
        dropped or, when EMAIL_BACKEND_REROUTE_BLACKLIST is set, rerouted to
        the TEST_EMAIL_BACKEND_RECIPIENTS addresses.
        """
        new_recipients = []
        recipients = [sanitize_address(addr, email_message.encoding)
                      for addr in email_message.recipients()]
        for recipient in recipients:
            if self.matches_whitelist(recipient):
                new_recipients.append(recipient)
            elif settings.EMAIL_BACKEND_REROUTE_BLACKLIST:
                for name, addr in settings.TEST_EMAIL_BACKEND_RECIPIENTS:
                    new_recipients.append(addr)
        # remove duplicates
        new_recipients = list(set(new_recipients))
        return new_recipients

    def matches_whitelist(self, recipient):
        """Checks if the email address matches one of the whitelist entries."""
        for entry in settings.EMAIL_BACKEND_WHITELIST:
            if re.match(entry, recipient):
                # Early return: one match is enough, no need to scan the rest.
                return True
        return False
| 34.490196 | 79 | 0.645253 | import re
from django.core.mail.backends.smtp import EmailBackend as SmtpEmailBackend
from django.core.mail.message import sanitize_address
from . import default_settings as settings
class EmailBackend(SmtpEmailBackend):
    """SMTP backend that reroutes every outgoing mail to the addresses in
    ``settings.TEST_EMAIL_BACKEND_RECIPIENTS`` (for testing deployments)."""
    def _send(self, email_message):
        """Send ``email_message`` to the configured test recipients only."""
        # No real recipients or no configured override list: nothing to send.
        if not email_message.recipients() or \
                not settings.TEST_EMAIL_BACKEND_RECIPIENTS:
            return False
        from_email = sanitize_address(
            email_message.from_email, email_message.encoding)
        # NOTE(review): the message's own recipients are discarded; mail goes
        # only to the configured test addresses.
        recipients = [sanitize_address(addr, email_message.encoding)
                      for name, addr in settings.TEST_EMAIL_BACKEND_RECIPIENTS]
        try:
            self.connection.sendmail(
                from_email, recipients, email_message.message().as_string())
        except:
            # Failures are suppressed unless fail_silently is False.
            if not self.fail_silently:
                raise
            return False
        return True
class WhitelistEmailBackend(SmtpEmailBackend):
    """SMTP backend that only delivers to recipients matching the regexes in
    ``settings.EMAIL_BACKEND_WHITELIST``; others are dropped or rerouted."""
    def _send(self, email_message):
        """Send to the whitelisted subset of the message's recipients."""
        from_email = sanitize_address(
            email_message.from_email, email_message.encoding)
        recipients = self.clean_recipients(email_message)
        # Every recipient was filtered out: drop the message.
        if not recipients:
            return False
        try:
            self.connection.sendmail(
                from_email, recipients, email_message.message().as_string())
        except:
            # Failures are suppressed unless fail_silently is False.
            if not self.fail_silently:
                raise
            return False
        return True
    def clean_recipients(self, email_message):
        """Keep whitelisted addresses; optionally reroute the rest to the
        TEST_EMAIL_BACKEND_RECIPIENTS when EMAIL_BACKEND_REROUTE_BLACKLIST
        is set. Duplicates are removed (order is not preserved)."""
        new_recipients = []
        recipients = [sanitize_address(addr, email_message.encoding)
                      for addr in email_message.recipients()]
        for recipient in recipients:
            if self.matches_whitelist(recipient):
                new_recipients.append(recipient)
            elif settings.EMAIL_BACKEND_REROUTE_BLACKLIST:
                for name, addr in settings.TEST_EMAIL_BACKEND_RECIPIENTS:
                    new_recipients.append(addr)
        new_recipients = list(set(new_recipients))
        return new_recipients
    def matches_whitelist(self, recipient):
        """Return True when ``recipient`` matches any whitelist regex."""
        matches = False
        for entry in settings.EMAIL_BACKEND_WHITELIST:
            if re.match(entry, recipient):
                matches = True
        return matches
| true | true |
f72beeeefd11624a2d91beb7c8d8ca61bb669461 | 14,901 | py | Python | py/redrock/templates.py | echaussidon/redrock | 9a3d4f0aed8c0792f2cc731dbdf04a99018083bf | [
"BSD-3-Clause"
] | 14 | 2017-09-22T23:57:33.000Z | 2022-03-15T10:36:16.000Z | py/redrock/templates.py | echaussidon/redrock | 9a3d4f0aed8c0792f2cc731dbdf04a99018083bf | [
"BSD-3-Clause"
] | 154 | 2017-06-04T22:57:39.000Z | 2022-03-11T23:01:16.000Z | py/redrock/templates.py | echaussidon/redrock | 9a3d4f0aed8c0792f2cc731dbdf04a99018083bf | [
"BSD-3-Clause"
] | 10 | 2017-06-09T15:24:59.000Z | 2021-05-26T13:16:42.000Z | """
Classes and functions for templates.
"""
from __future__ import absolute_import, division, print_function
import sys
from glob import glob
import os
import traceback
import numpy as np
from astropy.io import fits
from .utils import native_endian, elapsed, transmission_Lyman
from .rebin import rebin_template, trapz_rebin
class Template(object):
    """A spectral Template PCA object.

    The template data is read from a redrock-format template file.
    Alternatively, the data can be specified in the constructor.

    Args:
        filename (str): the path to the template file, either absolute or
            relative to the RR_TEMPLATE_DIR environment variable.

    """
    def __init__(self, filename=None, spectype=None, redshifts=None,
            wave=None, flux=None, subtype=None):
        if filename is not None:
            fx = None
            if os.path.exists(filename):
                fx = fits.open(filename, memmap=False)
            else:
                # Fall back to a path relative to $RR_TEMPLATE_DIR.
                xfilename = os.path.join(os.getenv('RR_TEMPLATE_DIR'), filename)
                if os.path.exists(xfilename):
                    fx = fits.open(xfilename, memmap=False)
                else:
                    raise IOError('unable to find '+filename)

            hdr = fx['BASIS_VECTORS'].header
            if 'VERSION' in hdr:
                self._version = hdr['VERSION']
            else:
                self._version = 'unknown'

            # Reconstruct the wavelength grid from the FITS WCS keywords.
            self.wave = np.asarray(hdr['CRVAL1'] + \
                hdr['CDELT1']*np.arange(hdr['NAXIS1']), dtype=np.float64)
            # LOGLAM != 0 means the grid is stored as log10(wavelength).
            if 'LOGLAM' in hdr and hdr['LOGLAM'] != 0:
                self.wave = 10**self.wave

            self.flux = np.asarray(native_endian(fx['BASIS_VECTORS'].data),
                dtype=np.float64)

            self._redshifts = None

            ## find out if redshift info is present in the file
            old_style_templates = True
            try:
                self._redshifts = native_endian(fx['REDSHIFTS'].data)
                old_style_templates = False
            except KeyError:
                pass

            fx.close()

            self._rrtype = hdr['RRTYPE'].strip().upper()
            if old_style_templates:
                # No REDSHIFTS HDU: fall back to a hard-coded per-type grid.
                if self._rrtype == 'GALAXY':
                    # redshifts = 10**np.arange(np.log10(1+0.005),
                    #     np.log10(1+2.0), 1.5e-4) - 1
                    self._redshifts = 10**np.arange(np.log10(1-0.005),
                        np.log10(1+1.7), 3e-4) - 1
                elif self._rrtype == 'STAR':
                    self._redshifts = np.arange(-0.002, 0.00201, 4e-5)
                elif self._rrtype == 'QSO':
                    self._redshifts = 10**np.arange(np.log10(1+0.05),
                        np.log10(1+6.0), 5e-4) - 1
                else:
                    raise ValueError("Unknown redshift range to use for "
                        "template type {}".format(self._rrtype))
                zmin = self._redshifts[0]
                zmax = self._redshifts[-1]
                print("DEBUG: Using default redshift range {:.4f}-{:.4f} for "
                    "{}".format(zmin, zmax, os.path.basename(filename)))
            else:
                zmin = self._redshifts[0]
                zmax = self._redshifts[-1]
                print("DEBUG: Using redshift range {:.4f}-{:.4f} for "
                    "{}".format(zmin, zmax, os.path.basename(filename)))

            self._subtype = None
            if 'RRSUBTYP' in hdr:
                self._subtype = hdr['RRSUBTYP'].strip().upper()
            else:
                self._subtype = ''

        else:
            # Direct-construction path: all data supplied by the caller.
            self._rrtype = spectype
            self._redshifts = redshifts
            self.wave = wave
            self.flux = flux
            self._subtype = subtype

        # flux is (nbasis, nwave): one PCA basis vector per row.
        self._nbasis = self.flux.shape[0]
        self._nwave = self.flux.shape[1]

    @property
    def nbasis(self):
        """Number of PCA basis vectors."""
        return self._nbasis

    @property
    def nwave(self):
        """Number of wavelength bins in the template grid."""
        return self._nwave

    @property
    def template_type(self):
        """The spectral type string (e.g. GALAXY / STAR / QSO)."""
        return self._rrtype

    @property
    def sub_type(self):
        """The spectral subtype string (may be empty)."""
        return self._subtype

    @property
    def full_type(self):
        """Return formatted type:subtype string.
        """
        if self._subtype != '':
            return '{}:::{}'.format(self._rrtype, self._subtype)
        else:
            return self._rrtype

    @property
    def redshifts(self):
        """The redshift grid on which this template is evaluated."""
        return self._redshifts

    def eval(self, coeff, wave, z):
        """Return template for given coefficients, wavelengths, and redshift

        Args:
            coeff : array of coefficients length self.nbasis
            wave : wavelengths at which to evaluate template flux
            z : redshift at which to evaluate template flux

        Returns:
            template flux array

        Notes:
            A single factor of (1+z)^-1 is applied to the resampled flux
            to conserve integrated flux after redshifting.

        """
        assert len(coeff) == self.nbasis
        flux = self.flux.T.dot(coeff).T / (1+z)
        return trapz_rebin(self.wave*(1+z), flux, wave)
def find_templates(template_dir=None):
    """Return list of redrock-\*.fits template files

    Search directories in this order, returning results from first one found:
        - template_dir
        - $RR_TEMPLATE_DIR
        - <redrock_code>/templates/

    Args:
        template_dir (str): optional directory containing the templates.

    Returns:
        list: a list of template files.

    """
    search_dir = template_dir
    if search_dir is None:
        # First fallback: the environment variable.
        if 'RR_TEMPLATE_DIR' in os.environ:
            search_dir = os.environ['RR_TEMPLATE_DIR']
        else:
            # Second fallback: templates/ next to the installed code.
            code_dir = os.path.dirname(__file__)
            candidate = os.path.join(os.path.abspath(code_dir), 'templates')
            if os.path.exists(candidate):
                search_dir = candidate

    if search_dir is None:
        raise IOError("ERROR: can't find template_dir, $RR_TEMPLATE_DIR, or {rrcode}/templates/")

    print('DEBUG: Read templates from {}'.format(search_dir))

    return sorted(glob(os.path.join(search_dir, 'rrtemplate-*.fits')))
class DistTemplatePiece(object):
    """One piece of the distributed template data.

    A plain container used to pass the interpolated templates for a slice of
    the redshift grid between processes.  In the MPI case each process holds
    at most two of these at a time.

    Args:
        index (int): the chunk index of this piece (the rank that computed it).
        redshifts (array): the redshift range contained in this piece.
        data (list): one dict per redshift mapping "wavehash" keys to the 2D
            interpolated template values.
    """
    def __init__(self, index, redshifts, data):
        self.index, self.redshifts, self.data = index, redshifts, data
def _mp_rebin_template(template, dwave, zlist, qout):
"""Function for multiprocessing version of rebinning.
"""
try:
results = dict()
for z in zlist:
binned = rebin_template(template, z, dwave)
results[z] = binned
qout.put(results)
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
lines = [ "MP rebin: {}".format(x) for x in lines ]
print("".join(lines))
sys.stdout.flush()
return
class DistTemplate(object):
    """Distributed template data interpolated to all redshifts.

    For a given template, the redshifts are distributed among the
    processes in the communicator.  Then each process will rebin the
    template to those redshifts for the wavelength grids specified by
    dwave.

    Args:
        template (Template): the template to distribute
        dwave (dict): the keys are the "wavehash" and the values
            are a 1D array containing the wavelength grid.
        mp_procs (int): if not using MPI, restrict the number of
            multiprocesses to this.
        comm (mpi4py.MPI.Comm): (optional) the MPI communicator.

    """
    def __init__(self, template, dwave, mp_procs=1, comm=None):
        self._comm = comm
        self._template = template
        self._dwave = dwave

        self._comm_rank = 0
        self._comm_size = 1
        if self._comm is not None:
            self._comm_rank = self._comm.rank
            self._comm_size = self._comm.size

        # Split the full redshift grid into one contiguous slice per rank.
        self._distredshifts = np.array_split(self._template.redshifts,
            self._comm_size)
        myz = self._distredshifts[self._comm_rank]
        nz = len(myz)

        data = list()

        # In the case of not using MPI (comm == None), one process is rebinning
        # all the templates.  In that scenario, use multiprocessing
        # workers to do the rebinning.

        if self._comm is not None:
            # MPI case- compute our local redshifts
            for z in myz:
                binned = rebin_template(self._template, z, self._dwave)
                data.append(binned)
        else:
            # We don't have MPI, so use multiprocessing
            import multiprocessing as mp

            qout = mp.Queue()
            work = np.array_split(myz, mp_procs)
            procs = list()
            for i in range(mp_procs):
                p = mp.Process(target=_mp_rebin_template,
                    args=(self._template, self._dwave, work[i], qout))
                procs.append(p)
                p.start()

            # Extract the output into a single list
            results = dict()
            for i in range(mp_procs):
                res = qout.get()
                results.update(res)

            # Re-assemble in redshift order (queue arrival order is arbitrary).
            for z in myz:
                data.append(results[z])

        # Correct spectra for Lyman-series
        for i, z in enumerate(myz):
            for k in list(self._dwave.keys()):
                T = transmission_Lyman(z,self._dwave[k])
                for vect in range(data[i][k].shape[1]):
                    data[i][k][:,vect] *= T

        self._piece = DistTemplatePiece(self._comm_rank, myz, data)

    @property
    def comm(self):
        """The MPI communicator (None when running serially)."""
        return self._comm

    @property
    def template(self):
        """The underlying (un-binned) Template object."""
        return self._template

    @property
    def local(self):
        """The DistTemplatePiece currently held by this process."""
        return self._piece

    def cycle(self):
        """Pass our piece of data to the next process.

        If we have returned to our original data, then return True, otherwise
        return False.

        Args:
            Nothing

        Returns (bool):
            Whether we have finished (True) else False.

        """
        # If we are not using MPI, this function is a no-op, so just return.
        if self._comm is None:
            return True

        rank = self._comm_rank
        nproc = self._comm_size

        # Ring topology: send to the next rank, receive from the previous one.
        to_proc = rank + 1
        if to_proc >= nproc:
            to_proc = 0

        from_proc = rank - 1
        if from_proc < 0:
            from_proc = nproc - 1

        # Send our data and get a request handle for later checking.
        req = self._comm.isend(self._piece, to_proc)

        # Receive our data
        incoming = self._comm.recv(source=from_proc)

        # Wait for the send to finish.
        req.wait()

        # Now replace our local piece with the new one
        self._piece = incoming

        # Are we done?  After nproc cycles our original piece comes back.
        done = False
        if self._piece.index == rank:
            done = True

        return done
def load_dist_templates(dwave, templates=None, comm=None, mp_procs=1):
    """Read and distribute templates from disk.

    This reads one or more template files from disk and distributes them among
    an MPI communicator.  Each process will locally store interpolated data
    for a redshift slice of each template.  For a single redshift, the template
    is interpolated to the wavelength grids specified by "dwave".

    As an example, imagine 3 templates with independent redshift ranges.  Also
    imagine that the communicator has 2 processes.  This function would return
    a list of 3 DistTemplate objects.  Within each of those objects, the 2
    processes store the interpolated data for a subset of the redshift range:

    DistTemplate #1:  zmin1 <---- p0 ----> | <---- p1 ----> zmax1
    DistTemplate #2:  zmin2 <-- p0 --> | <-- p1 --> zmax2
    DistTemplate #3:  zmin3 <--- p0 ---> | <--- p1 ---> zmax3

    Args:
        dwave (dict): the dictionary of wavelength grids.  Keys are the
            "wavehash" and values are an array of wavelengths.
        templates (str or None): if None, find all templates from the
            redrock template directory.  If a path to a file is specified,
            load that single template.  If a path to a directory is given,
            load all templates in that directory.
        comm (mpi4py.MPI.Comm): (optional) the MPI communicator.
        mp_procs (int): if not using MPI, restrict the number of
            multiprocesses to this.

    Returns:
        list: a list of DistTemplate objects.

    """
    timer = elapsed(None, "", comm=comm)

    template_files = None
    if (comm is None) or (comm.rank == 0):
        # Only one process needs to do this
        if templates is not None:
            if os.path.isfile(templates):
                # we are using just a single file
                template_files = [ templates ]
            elif os.path.isdir(templates):
                # this is a template dir
                template_files = find_templates(template_dir=templates)
            else:
                print("{} is neither a file nor a directory"\
                    .format(templates))
                sys.stdout.flush()
                if comm is not None:
                    comm.Abort()
        else:
            template_files = find_templates()

    # Share the file list (and then the parsed data) with all ranks.
    if comm is not None:
        template_files = comm.bcast(template_files, root=0)

    template_data = list()
    if (comm is None) or (comm.rank == 0):
        for t in template_files:
            template_data.append(Template(filename=t))

    if comm is not None:
        template_data = comm.bcast(template_data, root=0)

    timer = elapsed(timer, "Read and broadcast of {} templates"\
        .format(len(template_files)), comm=comm)

    # Compute the interpolated templates in a distributed way with every
    # process generating a slice of the redshift range.
    dtemplates = list()
    for t in template_data:
        dtemplates.append(DistTemplate(t, dwave, mp_procs=mp_procs, comm=comm))

    timer = elapsed(timer, "Rebinning templates", comm=comm)

    return dtemplates
| 32.253247 | 97 | 0.587075 |
from __future__ import absolute_import, division, print_function
import sys
from glob import glob
import os
import traceback
import numpy as np
from astropy.io import fits
from .utils import native_endian, elapsed, transmission_Lyman
from .rebin import rebin_template, trapz_rebin
class Template(object):
    """A spectral Template PCA object.

    The template data is read from a redrock-format template file, or can be
    given directly via the keyword arguments.

    Args:
        filename (str): path to the template file, absolute or relative to
            the RR_TEMPLATE_DIR environment variable.
        spectype, redshifts, wave, flux, subtype: direct-construction data
            used when ``filename`` is None.
    """
    def __init__(self, filename=None, spectype=None, redshifts=None,
            wave=None, flux=None, subtype=None):
        if filename is not None:
            fx = None
            if os.path.exists(filename):
                fx = fits.open(filename, memmap=False)
            else:
                # Fall back to a path relative to $RR_TEMPLATE_DIR.
                xfilename = os.path.join(os.getenv('RR_TEMPLATE_DIR'), filename)
                if os.path.exists(xfilename):
                    fx = fits.open(xfilename, memmap=False)
                else:
                    raise IOError('unable to find '+filename)

            hdr = fx['BASIS_VECTORS'].header
            if 'VERSION' in hdr:
                self._version = hdr['VERSION']
            else:
                self._version = 'unknown'

            # Reconstruct the wavelength grid from the FITS WCS keywords.
            self.wave = np.asarray(hdr['CRVAL1'] + \
                hdr['CDELT1']*np.arange(hdr['NAXIS1']), dtype=np.float64)
            if 'LOGLAM' in hdr and hdr['LOGLAM'] != 0:
                self.wave = 10**self.wave

            self.flux = np.asarray(native_endian(fx['BASIS_VECTORS'].data),
                dtype=np.float64)

            self._redshifts = None

            # Bug fix: this initialization was missing, so a file without a
            # REDSHIFTS HDU raised UnboundLocalError below instead of falling
            # back to the default redshift grids.
            old_style_templates = True
            try:
                self._redshifts = native_endian(fx['REDSHIFTS'].data)
                old_style_templates = False
            except KeyError:
                pass

            fx.close()

            self._rrtype = hdr['RRTYPE'].strip().upper()
            if old_style_templates:
                # No REDSHIFTS HDU: fall back to hard-coded per-type grids.
                if self._rrtype == 'GALAXY':
                    self._redshifts = 10**np.arange(np.log10(1-0.005),
                        np.log10(1+1.7), 3e-4) - 1
                elif self._rrtype == 'STAR':
                    self._redshifts = np.arange(-0.002, 0.00201, 4e-5)
                elif self._rrtype == 'QSO':
                    self._redshifts = 10**np.arange(np.log10(1+0.05),
                        np.log10(1+6.0), 5e-4) - 1
                else:
                    raise ValueError("Unknown redshift range to use for "
                        "template type {}".format(self._rrtype))
                zmin = self._redshifts[0]
                zmax = self._redshifts[-1]
                print("DEBUG: Using default redshift range {:.4f}-{:.4f} for "
                    "{}".format(zmin, zmax, os.path.basename(filename)))
            else:
                zmin = self._redshifts[0]
                zmax = self._redshifts[-1]
                print("DEBUG: Using redshift range {:.4f}-{:.4f} for "
                    "{}".format(zmin, zmax, os.path.basename(filename)))

            self._subtype = None
            if 'RRSUBTYP' in hdr:
                self._subtype = hdr['RRSUBTYP'].strip().upper()
            else:
                self._subtype = ''

        else:
            # Direct-construction path: all data supplied by the caller.
            self._rrtype = spectype
            self._redshifts = redshifts
            self.wave = wave
            self.flux = flux
            self._subtype = subtype

        # flux is (nbasis, nwave): one PCA basis vector per row.
        self._nbasis = self.flux.shape[0]
        self._nwave = self.flux.shape[1]

    @property
    def nbasis(self):
        """Number of PCA basis vectors."""
        return self._nbasis

    @property
    def nwave(self):
        """Number of wavelength bins in the template grid."""
        return self._nwave

    @property
    def template_type(self):
        """The spectral type string (e.g. GALAXY / STAR / QSO)."""
        return self._rrtype

    @property
    def sub_type(self):
        """The spectral subtype string (may be empty)."""
        return self._subtype

    @property
    def full_type(self):
        """Return the formatted type:::subtype string (or just the type)."""
        if self._subtype != '':
            return '{}:::{}'.format(self._rrtype, self._subtype)
        else:
            return self._rrtype

    @property
    def redshifts(self):
        """The redshift grid on which this template is evaluated."""
        return self._redshifts

    def eval(self, coeff, wave, z):
        """Return the template flux for coefficients ``coeff`` at redshift
        ``z``, resampled onto ``wave``.  A single (1+z)^-1 factor conserves
        integrated flux after redshifting."""
        assert len(coeff) == self.nbasis
        flux = self.flux.T.dot(coeff).T / (1+z)
        return trapz_rebin(self.wave*(1+z), flux, wave)
def find_templates(template_dir=None):
    """Return the sorted list of rrtemplate-*.fits files.

    Searched in order: ``template_dir`` argument, $RR_TEMPLATE_DIR, then a
    templates/ directory next to the installed code.
    """
    if template_dir is None:
        if 'RR_TEMPLATE_DIR' in os.environ:
            template_dir = os.environ['RR_TEMPLATE_DIR']
        else:
            # Fall back to templates/ next to this module.
            thisdir = os.path.dirname(__file__)
            tempdir = os.path.join(os.path.abspath(thisdir), 'templates')
            if os.path.exists(tempdir):
                template_dir = tempdir
    if template_dir is None:
        raise IOError("ERROR: can't find template_dir, $RR_TEMPLATE_DIR, or {rrcode}/templates/")
    else:
        print('DEBUG: Read templates from {}'.format(template_dir) )
    return sorted(glob(os.path.join(template_dir, 'rrtemplate-*.fits')))
class DistTemplatePiece(object):
    """Container for one rank's slice of interpolated template data:
    ``index`` (originating rank), ``redshifts`` (slice of the z grid) and
    ``data`` (one wavehash->2D-array dict per redshift)."""
    def __init__(self, index, redshifts, data):
        self.index = index
        self.redshifts = redshifts
        self.data = data
def _mp_rebin_template(template, dwave, zlist, qout):
    """Multiprocessing worker: rebin ``template`` at each z in ``zlist`` and
    put the {z: binned} dict on ``qout``.  Errors are printed, not raised,
    because this runs in a child process."""
    try:
        results = dict()
        for z in zlist:
            binned = rebin_template(template, z, dwave)
            results[z] = binned
        qout.put(results)
    except:
        # Format and print the traceback; a worker cannot usefully raise.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        lines = [ "MP rebin: {}".format(x) for x in lines ]
        print("".join(lines))
        sys.stdout.flush()
    return
class DistTemplate(object):
    """Template data interpolated to all redshifts and distributed across the
    processes of an (optional) MPI communicator.  Each process holds the
    rebinned data for its slice of the template's redshift grid, for every
    wavelength grid in ``dwave``."""
    def __init__(self, template, dwave, mp_procs=1, comm=None):
        self._comm = comm
        self._template = template
        self._dwave = dwave
        self._comm_rank = 0
        self._comm_size = 1
        if self._comm is not None:
            self._comm_rank = self._comm.rank
            self._comm_size = self._comm.size
        # Split the full redshift grid into one contiguous slice per rank.
        self._distredshifts = np.array_split(self._template.redshifts,
            self._comm_size)
        myz = self._distredshifts[self._comm_rank]
        nz = len(myz)
        data = list()
        # In the case of not using MPI (comm == None), one process is rebinning
        # all the templates.  In that scenario, use multiprocessing
        # workers to do the rebinning.
        if self._comm is not None:
            # MPI case- compute our local redshifts
            for z in myz:
                binned = rebin_template(self._template, z, self._dwave)
                data.append(binned)
        else:
            # We don't have MPI, so use multiprocessing
            import multiprocessing as mp
            qout = mp.Queue()
            work = np.array_split(myz, mp_procs)
            procs = list()
            for i in range(mp_procs):
                p = mp.Process(target=_mp_rebin_template,
                    args=(self._template, self._dwave, work[i], qout))
                procs.append(p)
                p.start()
            # Collect worker results; re-assemble in redshift order since
            # queue arrival order is arbitrary.
            results = dict()
            for i in range(mp_procs):
                res = qout.get()
                results.update(res)
            for z in myz:
                data.append(results[z])
        # Apply the Lyman-series transmission correction in place.
        for i, z in enumerate(myz):
            for k in list(self._dwave.keys()):
                T = transmission_Lyman(z,self._dwave[k])
                for vect in range(data[i][k].shape[1]):
                    data[i][k][:,vect] *= T
        self._piece = DistTemplatePiece(self._comm_rank, myz, data)
    @property
    def comm(self):
        """The MPI communicator (None when running serially)."""
        return self._comm
    @property
    def template(self):
        """The underlying (un-binned) Template object."""
        return self._template
    @property
    def local(self):
        """The DistTemplatePiece currently held by this process."""
        return self._piece
    def cycle(self):
        """Pass our piece to the next rank (ring topology) and receive the
        previous rank's piece.  Returns True once our original piece has
        come back around; always True in the serial (comm is None) case."""
        if self._comm is None:
            return True
        rank = self._comm_rank
        nproc = self._comm_size
        # Neighbors on the ring, with wrap-around.
        to_proc = rank + 1
        if to_proc >= nproc:
            to_proc = 0
        from_proc = rank - 1
        if from_proc < 0:
            from_proc = nproc - 1
        # Non-blocking send, blocking receive, then wait on the send.
        req = self._comm.isend(self._piece, to_proc)
        incoming = self._comm.recv(source=from_proc)
        req.wait()
        self._piece = incoming
        # Done when the piece we originally created returns to us.
        done = False
        if self._piece.index == rank:
            done = True
        return done
def load_dist_templates(dwave, templates=None, comm=None, mp_procs=1):
    """Read templates from disk and wrap each one in a DistTemplate whose
    redshift grid is distributed over ``comm``.

    ``templates`` may be None (search the default template directory), a
    single file, or a directory of template files.  Returns a list of
    DistTemplate objects.
    """
    timer = elapsed(None, "", comm=comm)
    template_files = None
    if (comm is None) or (comm.rank == 0):
        # Only the root rank touches the filesystem.
        if templates is not None:
            if os.path.isfile(templates):
                template_files = [ templates ]
            elif os.path.isdir(templates):
                template_files = find_templates(template_dir=templates)
            else:
                print("{} is neither a file nor a directory"\
                    .format(templates))
                sys.stdout.flush()
                if comm is not None:
                    comm.Abort()
        else:
            template_files = find_templates()
    # Share the file list (and then the parsed data) with all ranks.
    if comm is not None:
        template_files = comm.bcast(template_files, root=0)
    template_data = list()
    if (comm is None) or (comm.rank == 0):
        for t in template_files:
            template_data.append(Template(filename=t))
    if comm is not None:
        template_data = comm.bcast(template_data, root=0)
    timer = elapsed(timer, "Read and broadcast of {} templates"\
        .format(len(template_files)), comm=comm)
    # Each DistTemplate rebins its redshift slice locally (MPI or mp workers).
    dtemplates = list()
    for t in template_data:
        dtemplates.append(DistTemplate(t, dwave, mp_procs=mp_procs, comm=comm))
    timer = elapsed(timer, "Rebinning templates", comm=comm)
    return dtemplates
| true | true |
f72bf05616bf34cf98330d2cbaf1008bb162279a | 1,420 | py | Python | redbot/message/headers/x_cache.py | thinkbox/redbot | 90744dd971389bbf435d200483309b70b748785a | [
"Unlicense"
] | 1 | 2019-06-27T13:02:52.000Z | 2019-06-27T13:02:52.000Z | redbot/message/headers/x_cache.py | thinkbox/redbot | 90744dd971389bbf435d200483309b70b748785a | [
"Unlicense"
] | null | null | null | redbot/message/headers/x_cache.py | thinkbox/redbot | 90744dd971389bbf435d200483309b70b748785a | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
__author__ = "Mark Nottingham <mnot@mnot.net>"
__copyright__ = """\
Copyright (c) 2008-2013 Mark Nottingham
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import redbot.speak as rs
from redbot.message import headers as rh
from redbot.message import http_syntax as syntax
@rh.GenericHeaderSyntax
def parse(subject, value, red):
    """Parse one X-Cache field value.  The header is vendor-specific, so the
    value is returned verbatim without validation."""
    # see #63
    return value
def join(subject, values, red):
    """Combine multiple X-Cache field values; kept as the raw list since the
    header has no standard combined form."""
    return values
__author__ = "Mark Nottingham <mnot@mnot.net>"
__copyright__ = """\
Copyright (c) 2008-2013 Mark Nottingham
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import redbot.speak as rs
from redbot.message import headers as rh
from redbot.message import http_syntax as syntax
@rh.GenericHeaderSyntax
def parse(subject, value, red):
    """Parse one X-Cache field value; returned verbatim (vendor-specific)."""
    return value
def join(subject, values, red):
    """Combine multiple X-Cache field values; kept as the raw list."""
    return values
f72bf18fa11ed33b50a81cfc1f4e0efdcb4066ad | 8,001 | py | Python | Source/FetchData/Fetch_Data_Stock_CHN_Daily.py | guissy/StockRecommendSystem | 2e8694d0bb2ceaa42585ee7414564d921cc5a854 | [
"MIT"
] | null | null | null | Source/FetchData/Fetch_Data_Stock_CHN_Daily.py | guissy/StockRecommendSystem | 2e8694d0bb2ceaa42585ee7414564d921cc5a854 | [
"MIT"
] | null | null | null | Source/FetchData/Fetch_Data_Stock_CHN_Daily.py | guissy/StockRecommendSystem | 2e8694d0bb2ceaa42585ee7414564d921cc5a854 | [
"MIT"
] | null | null | null | import sys, os, time, datetime, warnings, configparser
import pandas as pd
import numpy as np
import tushare as ts
import concurrent.futures
from tqdm import tqdm
# Walk two directory levels up from this file to locate the project root,
# then make the DataBase package importable.
cur_path = os.path.dirname(os.path.abspath(__file__))
for _ in range(2):
    root_path = cur_path[0:cur_path.rfind('/', 0, len(cur_path))]
    cur_path = root_path
sys.path.append(root_path + "/" + 'Source/DataBase/')
from Source.DataBase.DB_API import queryStock, storeStock, queryStockList, storeStockList, queryStockPublishDay, storePublishDay
def getStocksList(root_path):
    """Return the CHN stock list as a DataFrame indexed by 6-digit symbol.

    Tries the local database first; on a miss it restarts MongoDB, fetches
    the basics from tushare, stores them, and re-reads from the database.
    """
    try:
        df = queryStockList(root_path, "DB_STOCK", "SHEET_CHN_DAILY")
        # Symbols are numeric codes; left-pad to the canonical 6 digits.
        df.index = df.index.astype(str).str.zfill(6)
    except Exception as e:
        df = pd.DataFrame()

    if df.empty == False: return df

    # NOTE(review): macOS-specific side effect -- assumes Homebrew-managed
    # MongoDB; presumably a workaround for a dropped DB connection. Verify.
    import subprocess
    subprocess.Popen('brew services restart mongodb'.split())

    stock_info = ts.get_stock_basics()
    listData = pd.DataFrame(stock_info)
    #listData.index.name = 'symbol'
    #listData.index = listData.index.astype(str).str.zfill(6) #[str(symbol).zfill(6) for symbol in listData.index] #listData.index.astype(str).str.zfill(6)
    #print(listData.index)
    #listData['symbol'] = listData['symbol'].str.strip()
    storeStockList(root_path, "DB_STOCK", "SHEET_CHN_DAILY", listData)

    # Re-read so the returned frame matches the stored representation.
    df = queryStockList(root_path, "DB_STOCK", "SHEET_CHN_DAILY")
    df.index = df.index.astype(str).str.zfill(6)
    return df
def getSingleStock(symbol):
    """Fetch the full daily history for ``symbol`` from tushare.

    Returns ``(DataFrame, "")`` on success, or an empty DataFrame and an
    error message on failure.
    """
    attempts = 1
    error_message = ""
    empty = pd.DataFrame()
    for _ in range(attempts):
        try:
            history = ts.get_hist_data(symbol)
            # Oldest rows first, matching the storage convention.
            history.sort_index(ascending=True, inplace=True)
            return history, ""
        except Exception as e:
            error_message = symbol + " fetch exception: " + str(e)
    return empty, error_message
def getSingleStockByTime(symbol, from_date, till_date):
    """Fetch daily history for ``symbol`` between two 'YYYY-MM-DD' dates.

    Returns ``(DataFrame, "")`` on success, or an empty DataFrame and an
    error message on failure.  (The previously computed year/month/day
    splits of the dates were unused and have been removed.)
    """
    repeat_times = 1
    message = ""
    df = pd.DataFrame()

    for _ in range(repeat_times):
        try:
            data = ts.get_hist_data(symbol, from_date, till_date)
            # Oldest rows first, matching the storage convention.
            data.sort_index(ascending=True, inplace=True)
            return data, ""
        except Exception as e:
            message = symbol + " fetch exception: " + str(e)
            continue
    return df, message
def judgeOpenDaysInRange(from_date, to_date, holidays=None):
    """Return the exchange open days between ``from_date`` and ``to_date``.

    Business days (Mon-Fri) are generated with pandas and any day listed in
    ``holidays`` is removed.

    Args:
        from_date, to_date: date strings/timestamps accepted by pd.bdate_range.
        holidays: optional iterable of 'YYYY-MM-DD' strings to exclude.
            Defaults to the built-in 2017 CHN market holiday list, which
            keeps the original behavior.

    Returns:
        pd.DataFrame with columns 'date' and 'holiday' (always False),
        one row per open day.
    """
    if holidays is None:
        # NOTE(review): only 2017 is covered; later years degrade to plain
        # business days.  Extend this table or pass `holidays` explicitly.
        holidays = ["2017-01-01", "2017-01-02",
            "2017-01-27", "2017-01-28", "2017-01-29", "2017-01-30", "2017-01-31", "2017-02-01", "2017-02-02",
            "2017-04-02", "2017-04-03", "2017-04-04",
            "2017-05-01",
            "2017-05-28", "2017-05-29", "2017-05-30",
            "2017-10-01", "2017-10-02", "2017-10-03", "2017-10-04", "2017-10-05", "2017-10-06", "2017-10-07", "2017-10-08"]

    duedays = pd.bdate_range(from_date, to_date)
    df = pd.DataFrame()
    df['date'] = duedays
    df['holiday'] = duedays.isin(holidays)
    opendays = df[df['holiday'] == False]
    return opendays
def judgeNeedPostDownload(from_date, to_date):
    """Return True when [from_date, to_date] still contains trading days
    worth downloading (the range is clamped to today first)."""
    now = datetime.datetime.now()

    # A range that starts in the future has nothing to download yet.
    if pd.Timestamp(from_date) > now:
        return False
    # Clamp a future end date to today.
    if pd.Timestamp(to_date) > now:
        to_date = now.strftime("%Y-%m-%d")

    open_days = judgeOpenDaysInRange(from_date, to_date)
    return len(open_days) > 0
def updateSingleStockData(root_path, symbol, force_check):
    """Fetch-or-refresh the daily data of one symbol in the database.

    Returns ``(startTime, message)`` where startTime is the wall-clock time
    when the update began and message describes what happened.
    """
    startTime = time.time()
    message = ""

    if len(symbol) == 0: return startTime, message

    till_date = (datetime.datetime.now()).strftime("%Y-%m-%d")
    end_date = pd.Timestamp(till_date)

    stockData, lastUpdateTime = queryStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol)

    if stockData.empty:
        # Nothing stored yet: fetch the full history once.
        stockData, message = getSingleStock(symbol)
        if stockData.empty == False:
            storeStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol, stockData)
        return startTime, message

    modified = False
    # NOTE(review): first_date is computed but never used below -- confirm
    # whether pre-history backfill was intended.
    first_date = pd.Timestamp(stockData.index[0])
    last_date = pd.Timestamp(stockData.index[-1])
    updateOnce = end_date > lastUpdateTime

    # Only fetch the tail if there may be new trading days since last_date.
    if end_date > last_date and (updateOnce or force_check):
        to_date = (last_date + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
        if judgeNeedPostDownload(to_date, till_date):
            message = message + ", download post data from " + to_date + " to " + till_date
            moreStockData, tempMessage = getSingleStockByTime(symbol, to_date, till_date)
            message = message + tempMessage
            if len(moreStockData) > 0:
                # Normalize the index to string dates before concatenating.
                if isinstance(moreStockData.index, pd.DatetimeIndex):
                    moreStockData.index = moreStockData.index.strftime("%Y-%m-%d")
                modified = True
                stockData = pd.concat([stockData, moreStockData])
                stockData.index.name = 'date'

    if modified:
        stockData = stockData[~stockData.index.duplicated(keep='first')]
        storeStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol, stockData)
    elif updateOnce:
        # No new rows, but refresh the stored copy (and its update timestamp).
        stockData = stockData[~stockData.index.duplicated(keep='first')]
        storeStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol, stockData)
        message = message + ", nothing updated"
    else:
        message = ""
    return startTime, message
def updateStockData_CHN(root_path, storeType, force_check = False):
symbols = getStocksList(root_path).index.values.tolist()
pbar = tqdm(total=len(symbols))
if storeType == 2:
for symbol in symbols:
startTime, message = updateSingleStockData(root_path, symbol, force_check)
outMessage = '%-*s fetched in: %.4s seconds' % (6, symbol, (time.time() - startTime))
pbar.set_description(outMessage)
pbar.update(1)
if storeType == 1:
log_errors = []
log_update = []
with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
# Start the load operations and mark each future with its URL
future_to_stock = {executor.submit(updateSingleStockData, root_path, symbol, force_check): symbol for symbol in symbols}
for future in concurrent.futures.as_completed(future_to_stock):
stock = future_to_stock[future]
try:
startTime, message = future.result()
except Exception as exc:
startTime = time.time()
log_errors.append('%r generated an exception: %s' % (stock, exc))
else:
if len(message) > 0: log_update.append(message)
outMessage = '%-*s fetched in: %.4s seconds' % (6, stock, (time.time() - startTime))
pbar.set_description(outMessage)
pbar.update(1)
if len(log_errors) > 0: print(log_errors)
# if len(log_update) > 0: print(log_update)
pbar.close()
return symbols
if __name__ == "__main__":
pd.set_option('precision', 3)
pd.set_option('display.width',1000)
warnings.filterwarnings('ignore', category=pd.io.pytables.PerformanceWarning)
config = configparser.ConfigParser()
config.read(root_path + "/" + "config.ini")
storeType = int(config.get('Setting', 'StoreType'))
if storeType == 1:
from Start_DB_Server import StartServer, ShutdownServer
# start database server (async)
thread = StartServer(root_path)
# wait for db start, the standard procedure should listen to
# the completed event of function "StartServer"
time.sleep(5)
updateStockData_CHN(root_path, storeType)
if storeType == 1:
# stop database server (sync)
time.sleep(5)
ShutdownServer()
| 38.282297 | 155 | 0.632921 | import sys, os, time, datetime, warnings, configparser
import pandas as pd
import numpy as np
import tushare as ts
import concurrent.futures
from tqdm import tqdm
cur_path = os.path.dirname(os.path.abspath(__file__))
for _ in range(2):
root_path = cur_path[0:cur_path.rfind('/', 0, len(cur_path))]
cur_path = root_path
sys.path.append(root_path + "/" + 'Source/DataBase/')
from Source.DataBase.DB_API import queryStock, storeStock, queryStockList, storeStockList, queryStockPublishDay, storePublishDay
def getStocksList(root_path):
try:
df = queryStockList(root_path, "DB_STOCK", "SHEET_CHN_DAILY")
df.index = df.index.astype(str).str.zfill(6)
except Exception as e:
df = pd.DataFrame()
if df.empty == False: return df
import subprocess
subprocess.Popen('brew services restart mongodb'.split())
stock_info = ts.get_stock_basics()
listData = pd.DataFrame(stock_info)
ET_CHN_DAILY")
df.index = df.index.astype(str).str.zfill(6)
return df
def getSingleStock(symbol):
repeat_times = 1
message = ""
df = pd.DataFrame()
for _ in range(repeat_times):
try:
data = ts.get_hist_data(symbol)
data.sort_index(ascending=True, inplace=True)
return data, ""
except Exception as e:
message = symbol + " fetch exception: " + str(e)
continue
return df, message
def getSingleStockByTime(symbol, from_date, till_date):
start = from_date.split('-')
start_y, start_m, start_d = start[0], start[1], start[2]
end = till_date.split('-')
end_y, end_m, end_d = end[0], end[1], end[2]
repeat_times = 1
message = ""
df = pd.DataFrame()
for _ in range(repeat_times):
try:
data = ts.get_hist_data(symbol, from_date, till_date)
data.sort_index(ascending=True, inplace=True)
return data, ""
except Exception as e:
message = symbol + " fetch exception: " + str(e)
continue
return df, message
def judgeOpenDaysInRange(from_date, to_date):
holidays=["2017-01-01", "2017-01-02",
"2017-01-27", "2017-01-28", "2017-01-29", "2017-01-30", "2017-01-31", "2017-02-01", "2017-02-02",
"2017-04-02", "2017-04-03", "2017-04-04",
"2017-05-01",
"2017-05-28", "2017-05-29", "2017-05-30",
"2017-10-01", "2017-10-02", "2017-10-03", "2017-10-04", "2017-10-05","2017-10-06","2017-10-07","2017-10-08"]
duedays = pd.bdate_range(from_date, to_date)
df = pd.DataFrame()
df['date'] = duedays
df['holiday'] = duedays.isin(holidays)
opendays = df[df['holiday'] == False]
return opendays
def judgeNeedPostDownload(from_date, to_date):
today = datetime.datetime.now()
start_date = pd.Timestamp(from_date)
end_date = pd.Timestamp(to_date)
if start_date > today: return False
if end_date > today: to_date = today.strftime("%Y-%m-%d")
dateList = judgeOpenDaysInRange(from_date, to_date)
if len(dateList) > 0: return True
return False
def updateSingleStockData(root_path, symbol, force_check):
startTime = time.time()
message = ""
if len(symbol) == 0: return startTime, message
till_date = (datetime.datetime.now()).strftime("%Y-%m-%d")
end_date = pd.Timestamp(till_date)
stockData, lastUpdateTime = queryStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol)
if stockData.empty:
stockData, message = getSingleStock(symbol)
if stockData.empty == False:
storeStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol, stockData)
return startTime, message
modified = False
first_date = pd.Timestamp(stockData.index[0])
last_date = pd.Timestamp(stockData.index[-1])
updateOnce = end_date > lastUpdateTime
if end_date > last_date and (updateOnce or force_check):
to_date = (last_date + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
if judgeNeedPostDownload(to_date, till_date):
message = message + ", download post data from " + to_date + " to " + till_date
moreStockData, tempMessage = getSingleStockByTime(symbol, to_date, till_date)
message = message + tempMessage
if len(moreStockData) > 0:
if isinstance(moreStockData.index, pd.DatetimeIndex):
moreStockData.index = moreStockData.index.strftime("%Y-%m-%d")
modified = True
stockData = pd.concat([stockData, moreStockData])
stockData.index.name = 'date'
if modified:
stockData = stockData[~stockData.index.duplicated(keep='first')]
storeStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol, stockData)
elif updateOnce:
stockData = stockData[~stockData.index.duplicated(keep='first')]
storeStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol, stockData)
message = message + ", nothing updated"
else:
message = ""
return startTime, message
def updateStockData_CHN(root_path, storeType, force_check = False):
symbols = getStocksList(root_path).index.values.tolist()
pbar = tqdm(total=len(symbols))
if storeType == 2:
for symbol in symbols:
startTime, message = updateSingleStockData(root_path, symbol, force_check)
outMessage = '%-*s fetched in: %.4s seconds' % (6, symbol, (time.time() - startTime))
pbar.set_description(outMessage)
pbar.update(1)
if storeType == 1:
log_errors = []
log_update = []
with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
future_to_stock = {executor.submit(updateSingleStockData, root_path, symbol, force_check): symbol for symbol in symbols}
for future in concurrent.futures.as_completed(future_to_stock):
stock = future_to_stock[future]
try:
startTime, message = future.result()
except Exception as exc:
startTime = time.time()
log_errors.append('%r generated an exception: %s' % (stock, exc))
else:
if len(message) > 0: log_update.append(message)
outMessage = '%-*s fetched in: %.4s seconds' % (6, stock, (time.time() - startTime))
pbar.set_description(outMessage)
pbar.update(1)
if len(log_errors) > 0: print(log_errors)
pbar.close()
return symbols
if __name__ == "__main__":
pd.set_option('precision', 3)
pd.set_option('display.width',1000)
warnings.filterwarnings('ignore', category=pd.io.pytables.PerformanceWarning)
config = configparser.ConfigParser()
config.read(root_path + "/" + "config.ini")
storeType = int(config.get('Setting', 'StoreType'))
if storeType == 1:
from Start_DB_Server import StartServer, ShutdownServer
thread = StartServer(root_path)
time.sleep(5)
updateStockData_CHN(root_path, storeType)
if storeType == 1:
time.sleep(5)
ShutdownServer()
| true | true |
f72bf3adc21681236ec82aae83471ffedf35ef9b | 817 | py | Python | src/10-async-web/acityscape_api/app.py | NissesSenap/async-techniques-python-course | 455c7222b52e7c3aa7f4eb4b03e9b6f0a99adaef | [
"MIT"
] | 380 | 2018-09-26T17:40:52.000Z | 2022-03-29T02:38:17.000Z | src/10-async-web/acityscape_api/app.py | NissesSenap/async-techniques-python-course | 455c7222b52e7c3aa7f4eb4b03e9b6f0a99adaef | [
"MIT"
] | 13 | 2018-09-30T05:55:40.000Z | 2022-01-24T20:40:09.000Z | src/10-async-web/acityscape_api/app.py | NissesSenap/async-techniques-python-course | 455c7222b52e7c3aa7f4eb4b03e9b6f0a99adaef | [
"MIT"
] | 214 | 2018-09-26T18:53:17.000Z | 2021-12-30T16:58:27.000Z | import quart
from views import city_api
from views import home
from config import settings
import services.weather_service
import services.sun_service
import services.location_service
app = quart.Quart(__name__)
is_debug = True
app.register_blueprint(home.blueprint)
app.register_blueprint(city_api.blueprint)
def configure_app():
mode = 'dev' if is_debug else 'prod'
data = settings.load(mode)
services.weather_service.global_init(data.get('weather_key'))
services.sun_service.use_cached_data = data.get('use_cached_data')
services.location_service.use_cached_data = data.get('use_cached_data')
print("Using cached data? {}".format(data.get('use_cached_data')))
def run_web_app():
app.run(debug=is_debug, port=5001)
configure_app()
if __name__ == '__main__':
run_web_app()
| 23.342857 | 75 | 0.767442 | import quart
from views import city_api
from views import home
from config import settings
import services.weather_service
import services.sun_service
import services.location_service
app = quart.Quart(__name__)
is_debug = True
app.register_blueprint(home.blueprint)
app.register_blueprint(city_api.blueprint)
def configure_app():
mode = 'dev' if is_debug else 'prod'
data = settings.load(mode)
services.weather_service.global_init(data.get('weather_key'))
services.sun_service.use_cached_data = data.get('use_cached_data')
services.location_service.use_cached_data = data.get('use_cached_data')
print("Using cached data? {}".format(data.get('use_cached_data')))
def run_web_app():
app.run(debug=is_debug, port=5001)
configure_app()
if __name__ == '__main__':
run_web_app()
| true | true |
f72bf4d9ad27aa607870ea1c9ce5ee5bb7ccd384 | 1,009 | py | Python | plaid_project/contrib/sites/migrations/0003_set_site_domain_and_name.py | reetikaSR/PlaidProject | 904bd7fd3412a4b5149aae899abcf8794bebba81 | [
"MIT"
] | null | null | null | plaid_project/contrib/sites/migrations/0003_set_site_domain_and_name.py | reetikaSR/PlaidProject | 904bd7fd3412a4b5149aae899abcf8794bebba81 | [
"MIT"
] | null | null | null | plaid_project/contrib/sites/migrations/0003_set_site_domain_and_name.py | reetikaSR/PlaidProject | 904bd7fd3412a4b5149aae899abcf8794bebba81 | [
"MIT"
] | null | null | null | """
To understand why this file is here, please read:
http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django
"""
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
"""Set site domain and name."""
Site = apps.get_model("sites", "Site")
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
"domain": "127.0.0.1",
"name": "plaid_project",
},
)
def update_site_backward(apps, schema_editor):
"""Revert site domain and name to default."""
Site = apps.get_model("sites", "Site")
Site.objects.update_or_create(
id=settings.SITE_ID, defaults={"domain": "example.com", "name": "example.com"}
)
class Migration(migrations.Migration):
dependencies = [("sites", "0002_alter_domain_unique")]
operations = [migrations.RunPython(update_site_forward, update_site_backward)]
| 28.828571 | 129 | 0.684836 | from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
Site = apps.get_model("sites", "Site")
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
"domain": "127.0.0.1",
"name": "plaid_project",
},
)
def update_site_backward(apps, schema_editor):
Site = apps.get_model("sites", "Site")
Site.objects.update_or_create(
id=settings.SITE_ID, defaults={"domain": "example.com", "name": "example.com"}
)
class Migration(migrations.Migration):
dependencies = [("sites", "0002_alter_domain_unique")]
operations = [migrations.RunPython(update_site_forward, update_site_backward)]
| true | true |
f72bf52ed5ae857e2f0a8b16ca7cacfc142bfbdb | 3,895 | py | Python | utils.py | ambareeshravi/TrafficSignClassifier_API | 8628057439ee70f6d827abf931071e9b6539bd5b | [
"MIT"
] | null | null | null | utils.py | ambareeshravi/TrafficSignClassifier_API | 8628057439ee70f6d827abf931071e9b6539bd5b | [
"MIT"
] | null | null | null | utils.py | ambareeshravi/TrafficSignClassifier_API | 8628057439ee70f6d827abf931071e9b6539bd5b | [
"MIT"
] | null | null | null | '''
Author: Ambareesh Ravi
Date: Jul 31, 2021
Title: utils.py
Description:
Contains utility and helper functions for the project
'''
# Libraries imports
import numpy as np
import pandas as pd
import os
from tqdm import tqdm
from time import time
from glob import glob
from PIL import Image
import matplotlib.pyplot as plt
import argparse
import cv2
# Global variables
MANUAL_SEED = 42
np.random.seed(42)
def INFO(s):
'''
Prints information in a particular format
Args:
s - string <str> to be printed
Returns:
-
Exception:
-
'''
print("-"*40)
print("INFO:", s)
print("-"*40)
def read_directory_content(path):
'''
Reads all files in a directory given a path
Args:
path - path for the directory as <str>
Returns:
sorted list of files in the directory
Exception:
-
'''
if "*" not in path: path = os.path.join(path, "*")
return sorted(glob(path))
def create_directory(path):
'''
Creates a directory given a path if the path does not exist
Args:
path - path for the directory as <str>
Returns:
-
Exception:
-
'''
# Create a directory
if not os.path.exists(path): os.mkdir(path)
def save_image(array, path, resize = False, extension = ".png"):
'''
Saves an array into an image file
Args:
array - image as a <np.array>
path - path for the image as <str>
resize - [optional] to resize image to given size - <tuple> of <int> (w,h)
extension - [optional] type of image file as <str>
Returns:
-
Exception:
-
'''
# Add image extension
if extension not in path:
path = path.split(".")[0] + extension
# Save image into a file using PIL Image handle
img = Image.fromarray(array)
# Resize image if reaquired
if resize: img = img.resize(resize)
# Save image
img.save(path)
def read_image(image_path):
'''
Reads an image from the given path as a PIL.Image handle
Args:
image_path - path for the image as <str>
Returns:
-
Exception:
-
'''
return Image.open(image_path)
class Visualizer:
def __init__(self,):
'''
Initializes the class to visualize results in comparison with the inputs
Args:
-
Returns:
-
Exception:
-
'''
pass
def gray2color(self, x):
'''
Converts a single channel grayscale image to coloured 3 channel format
Args:
x - input as <np.array>
Returns:
-
Exception:
-
'''
return np.repeat(np.expand_dims(x, axis = -1), 3, axis = -1)
def visualize_composite(self, input_image, label, prediction, margin = 8, save_path = None):
'''
Function to visualize input, label, prediction together in an image
Args:
input_image - input RGB image as <np.array>
label - label binary mask Grayscale image as <np.array>
prediction - predicted binary mask Grayscale image as <np.array>
margin - margin between images in terms of pixels in <int>
save_path - path to save the file <str>
Returns:
-
Exception:
-
'''
rounded_pred = np.round(prediction)
margin = np.ones((label.shape[0], margin, 3))
composite = np.hstack((input_image, margin, self.gray2color(label), margin, self.gray2color(rounded_pred)))
img = Image.fromarray((composite*255).astype(np.uint8))
if save_path: save_image()
return img
| 22.911765 | 115 | 0.558408 |
import numpy as np
import pandas as pd
import os
from tqdm import tqdm
from time import time
from glob import glob
from PIL import Image
import matplotlib.pyplot as plt
import argparse
import cv2
MANUAL_SEED = 42
np.random.seed(42)
def INFO(s):
print("-"*40)
print("INFO:", s)
print("-"*40)
def read_directory_content(path):
if "*" not in path: path = os.path.join(path, "*")
return sorted(glob(path))
def create_directory(path):
if not os.path.exists(path): os.mkdir(path)
def save_image(array, path, resize = False, extension = ".png"):
if extension not in path:
path = path.split(".")[0] + extension
img = Image.fromarray(array)
if resize: img = img.resize(resize)
img.save(path)
def read_image(image_path):
return Image.open(image_path)
class Visualizer:
def __init__(self,):
pass
def gray2color(self, x):
return np.repeat(np.expand_dims(x, axis = -1), 3, axis = -1)
def visualize_composite(self, input_image, label, prediction, margin = 8, save_path = None):
rounded_pred = np.round(prediction)
margin = np.ones((label.shape[0], margin, 3))
composite = np.hstack((input_image, margin, self.gray2color(label), margin, self.gray2color(rounded_pred)))
img = Image.fromarray((composite*255).astype(np.uint8))
if save_path: save_image()
return img
| true | true |
f72bf7299d2697d596b1a5964fe20792a37b1d6a | 14,540 | py | Python | fortiosapi/fortiosapi.py | javcasalc/fortiosapi | 5dd35b59cfa8b87aee2a10f2303595c3da2347df | [
"Apache-2.0"
] | null | null | null | fortiosapi/fortiosapi.py | javcasalc/fortiosapi | 5dd35b59cfa8b87aee2a10f2303595c3da2347df | [
"Apache-2.0"
] | null | null | null | fortiosapi/fortiosapi.py | javcasalc/fortiosapi | 5dd35b59cfa8b87aee2a10f2303595c3da2347df | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright 2015 Fortinet, Inc.
#
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
###################################################################
#
# fortiosapi.py aims at simplyfing the configuration and
# integration of Fortgate configuration using the restapi
#
# A Python module to abstract configuration using FortiOS REST API
#
###################################################################
import json
# Set default logging handler to avoid "No handler found" warnings.
import logging
import subprocess
import time
import paramiko
import requests
try: # Python 2.7+
from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        """No-op handler: fallback for Python < 2.7, where logging.NullHandler does not exist."""
        def emit(self, record):
            # Intentionally discard every record.
            pass
# Silence urllib3's InsecureRequestWarning: certificate verification is off
# by default (self._session.verify = False in FortiOSAPI.__init__).
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# may need to move to specifying the ca or use Verify=false
# cafile = 'cacert.pem'
# r = requests.get(url, verify=cafile)
# Avoid "No handler found" warnings when the embedding app configures no logging.
logging.getLogger(__name__).addHandler(NullHandler())
# Module-level logger shared by every FortiOSAPI method.
LOG = logging.getLogger('fortiosapi')
class FortiOSAPI(object):
    """Thin wrapper around the FortiOS (FortiGate) REST API.

    A single persistent ``requests`` session carries the login cookie and
    the X-CSRFTOKEN header across calls.  CRUD helpers (get/post/put/
    delete/set) target the ``cmdb`` configuration tree, while monitor/
    download/upload target the ``monitor`` endpoints.  An SSH helper is
    provided for CLI-only operations.
    """

    def __init__(self):
        # HTTPS transport by default; toggled with https('on'/'off').
        self._https = True
        # Placeholder until login() fetches the real firmware version.
        self._fortiversion = "Version is set when logged"
        # One session object reused for every request so the cookie and
        # CSRF token persist.
        self._session = requests.session()
        # TLS verification is off by default; assign a CA bundle path to
        # self._session.verify to enable it.
        self._session.verify = False

    def logging(self, response):
        """Debug-log the request/response pair of a completed API call."""
        try:
            LOG.debug("Request : %s on url : %s ", response.request.method,
                      response.request.url)
            LOG.debug("Response : http code %s reason : %s ",
                      response.status_code, response.reason)
            LOG.debug("raw response: %s ", response.content)
        except Exception:
            # Narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed here.
            LOG.warning("method errors in request when global")

    def debug(self, status):
        """Turn module debug logging 'on' or 'off'."""
        if status == 'on':
            LOG.setLevel(logging.DEBUG)
        elif status == 'off':
            # The original offered no way back; NOTSET restores delegation
            # to the parent logger's level.
            LOG.setLevel(logging.NOTSET)

    def formatresponse(self, res, vdom=None):
        """Decode a FortiAPI response into a dict.

        When ``vdom == "global"`` the appliance answers with a list holding
        one result per vdom; only the first entry is kept and tagged with
        ``vdom = "global"``.
        """
        LOG.debug("formating response")
        self.logging(res)
        if vdom == "global":
            resp = json.loads(res.content.decode('utf-8'))[0]
            resp['vdom'] = "global"
        else:
            LOG.debug("content res: %s", res.content)
            resp = json.loads(res.content.decode('utf-8'))
        return resp

    def https(self, status):
        """Select https ('on') or plain http ('off') for subsequent logins."""
        if status == 'on':
            self._https = True
        if status == 'off':
            self._https = False

    def update_cookie(self):
        """Copy the server-issued ccsrftoken cookie into the session headers."""
        LOG.debug("cookies are : %s ", self._session.cookies)
        for cookie in self._session.cookies:
            if cookie.name == 'ccsrftoken':
                csrftoken = cookie.value[1:-1]  # token is stored quoted
                LOG.debug("csrftoken before update : %s ", csrftoken)
                self._session.headers.update({'X-CSRFTOKEN': csrftoken})
                LOG.debug("csrftoken after update : %s ", csrftoken)

    def login(self, host, username, password, https_port=443):
        """Authenticate against *host* and cache the FortiOS version.

        Raises:
            Exception: if the credentials are refused or the version cannot
                be retrieved after login.
        """
        self.host = host
        if self._https is True:
            # str() is required: the default port is an int and the original
            # 'https://' + host + ':' + https_port raised a TypeError.
            self.url_prefix = 'https://' + self.host + ':' + str(https_port)
        else:
            self.url_prefix = 'http://' + self.host
        url = self.url_prefix + '/logincheck'
        res = self._session.post(
            url,
            data='username=' + username + '&secretkey=' + password + "&ajax=1")
        self.logging(res)
        # ajax=1 documented in the 5.6 API ref but already honoured on 5.4:
        # the body starts with '1' on success.
        if res.content.decode('ascii')[0] == '1':
            # Update session's csrftoken from the fresh cookie.
            self.update_cookie()
        else:
            raise Exception('login failed')
        try:
            self._fortiversion = self.monitor('system', 'interface')['version']
        except Exception:
            raise Exception('can not get following login')
        # Might be wise to return the license status here.

    def get_version(self):
        """Return the FortiOS version string cached at login time."""
        return self._fortiversion

    def get_mkey(self, path, name, vdom=None, data=None):
        """Return the value of the table's mkey found in *data*.

        The mkey name is looked up in the table schema; None is returned
        (with a warning) when the table has no mkey or *data* lacks it.
        """
        schema = self.schema(path, name, vdom=None)
        try:
            keyname = schema['mkey']
        except KeyError:
            LOG.warning("there is no mkey for %s/%s", path, name)
            return None
        try:
            mkey = data[keyname]
        except (KeyError, TypeError):
            # TypeError covers data=None.  The original logged the still
            # undefined local ``mkey`` here, which raised a NameError.
            LOG.warning("mkey %s not set in the data", keyname)
            return None
        return mkey

    def logout(self):
        """Terminate the API session and clear the local cookies."""
        url = self.url_prefix + '/logout'
        res = self._session.post(url)
        self._session.close()
        self._session.cookies.clear()
        self.logging(res)

    def cmdb_url(self, path, name, vdom, mkey=None):
        """Build a /api/v2/cmdb URL for *path*/*name*, with optional *mkey*
        and vdom ('global' selects the global scope)."""
        url_postfix = '/api/v2/cmdb/' + path + '/' + name
        if mkey:
            url_postfix = url_postfix + '/' + str(mkey)
        if vdom:
            LOG.debug("vdom is: %s", vdom)
            if vdom == "global":
                url_postfix += '?global=1'
            else:
                url_postfix += '?vdom=' + vdom
        url = self.url_prefix + url_postfix
        LOG.debug("urlbuild is %s with crsf: %s", url, self._session.headers)
        return url

    def mon_url(self, path, name, vdom=None, mkey=None):
        """Build a /api/v2/monitor URL (same vdom/mkey rules as cmdb_url)."""
        url_postfix = '/api/v2/monitor/' + path + '/' + name
        if mkey:
            url_postfix = url_postfix + '/' + str(mkey)
        if vdom:
            LOG.debug("vdom is: %s", vdom)
            if vdom == "global":
                url_postfix += '?global=1'
            else:
                url_postfix += '?vdom=' + vdom
        url = self.url_prefix + url_postfix
        return url

    def monitor(self, path, name, vdom=None, mkey=None, parameters=None):
        """GET a monitor endpoint and return the decoded response."""
        url = self.mon_url(path, name, vdom, mkey)
        res = self._session.get(url, params=parameters)
        LOG.debug("in MONITOR function")
        return self.formatresponse(res, vdom=vdom)

    def download(self, path, name, vdom=None, mkey=None, parameters=None):
        """GET a monitor endpoint and return the raw requests response
        (body may be a file download)."""
        url = self.mon_url(path, name)
        res = self._session.get(url, params=parameters)
        LOG.debug("in DOWNLOAD function")
        return res

    def upload(self, path, name, vdom=None, mkey=None,
               parameters=None, data=None, files=None):
        """POST *data*/*files* to a monitor endpoint; return the raw response."""
        url = self.mon_url(path, name)
        res = self._session.post(url, params=parameters,
                                 data=data, files=files)
        LOG.debug("in UPLOAD function")
        return res

    def get(self, path, name, vdom=None, mkey=None, parameters=None):
        """GET a cmdb table or object and return the decoded response."""
        url = self.cmdb_url(path, name, vdom, mkey)
        res = self._session.get(url, params=parameters)
        LOG.debug("in GET function")
        return self.formatresponse(res, vdom=vdom)

    def schema(self, path, name, vdom=None):
        """Return the schema of a cmdb table.

        On HTTP 200 the 'results' member is returned; otherwise the whole
        decoded body, which carries the error details.
        """
        # vdom or global is managed in cmdb_url: the action flag either
        # starts the query string or extends it.
        if vdom is None:
            url = self.cmdb_url(path, name, vdom) + "?action=schema"
        else:
            url = self.cmdb_url(path, name, vdom) + "&action=schema"
        res = self._session.get(url)
        self.logging(res)
        # '==' instead of 'is': identity comparison against an int literal
        # relies on CPython's small-int caching.
        if res.status_code == 200:
            return json.loads(res.content.decode('utf-8'))['results']
        else:
            return json.loads(res.content.decode('utf-8'))

    def get_name_path_dict(self, vdom=None):
        """Return the list of 'path name' strings of every cmdb table."""
        url_postfix = '/api/v2/cmdb/'
        # The original tested 'vdom is None' before concatenating vdom,
        # which raised a TypeError on the default call; the test was inverted.
        if vdom is not None:
            url_postfix += '?vdom=' + vdom + "&action=schema"
        else:
            url_postfix += "?action=schema"
        url = self.url_prefix + url_postfix
        cmdbschema = self._session.get(url)
        self.logging(cmdbschema)
        j = json.loads(cmdbschema.content.decode('utf-8'))['results']
        names = []  # renamed from 'dict' to stop shadowing the builtin
        for keys in j:
            if "__tree__" not in keys['path']:
                names.append(keys['path'] + " " + keys['name'])
        return names

    def post(self, path, name, vdom=None,
             mkey=None, parameters=None, data=None):
        """POST (create) a cmdb object.

        The mkey is derived from *data* when not given, but is never
        appended to the URL: POSTing to the mkey URL would 404 because the
        child node does not exist yet.
        """
        if not mkey:
            mkey = self.get_mkey(path, name, vdom=vdom, data=data)
        url = self.cmdb_url(path, name, vdom, mkey=None)
        res = self._session.post(
            url, params=parameters, data=json.dumps(data))
        LOG.debug("in POST function")
        return self.formatresponse(res, vdom=vdom)

    def put(self, path, name, vdom=None,
            mkey=None, parameters=None, data=None):
        """PUT (update) an existing cmdb object."""
        if not mkey:
            mkey = self.get_mkey(path, name, vdom=vdom, data=data)
        url = self.cmdb_url(path, name, vdom, mkey)
        res = self._session.put(url, params=parameters,
                                data=json.dumps(data))
        LOG.debug("in PUT function")
        return self.formatresponse(res, vdom=vdom)

    def delete(self, path, name, vdom=None,
               mkey=None, parameters=None, data=None):
        """DELETE a cmdb object identified by *mkey* (derived from *data*
        when not given)."""
        if not mkey:
            mkey = self.get_mkey(path, name, vdom=vdom, data=data)
        url = self.cmdb_url(path, name, vdom, mkey)
        res = self._session.delete(
            url, params=parameters, data=json.dumps(data))
        LOG.debug("in DELETE function")
        return self.formatresponse(res, vdom=vdom)

    def set(self, path, name, vdom=None,
            mkey=None, parameters=None, data=None):
        """Idempotent upsert: PUT first, fall back to POST when the object
        does not exist yet (HTTP 404/405)."""
        # The original computed a first, always-overwritten URL before
        # resolving the mkey; resolve the mkey first and build it once.
        if not mkey:
            mkey = self.get_mkey(path, name, vdom=vdom, data=data)
        url = self.cmdb_url(path, name, mkey=mkey, vdom=vdom)
        res = self._session.put(
            url, params=parameters, data=json.dumps(data))
        LOG.debug("in SET function after PUT")
        r = self.formatresponse(res, vdom=vdom)
        if r['http_status'] == 404 or r['http_status'] == 405:
            LOG.warning(
                "Try to put on %s failed doing a put to force parameters\
                change consider delete if still fails ",
                res.request.url)
            # Reset the url without the mkey when falling back to POST.
            url = self.cmdb_url(path, name, mkey=None, vdom=vdom)
            res = self._session.post(
                url, params=parameters, data=json.dumps(data))
            LOG.debug("in SET function after POST")
            return self.formatresponse(res, vdom=vdom)
        else:
            return r

    def ssh(self, cmds, host, user, password=None, private_key=None, ssh_port=22):
        ''' Send a multi line string via ssh to the fortigate '''
        client = paramiko.SSHClient()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        if private_key is None:
            client.connect(host, port=ssh_port, username=user, password=password,
                           allow_agent=False, timeout=10)
        else:
            k = paramiko.RSAKey.from_private_key_file(private_key)
            client.connect(host, port=ssh_port, username=user, pkey=k,
                           allow_agent=False, timeout=10)
        LOG.debug("ssh login to %s:%s ", host, ssh_port)
        # commands is a multiline string using the ''' string ''' format
        try:
            stdin, stdout, stderr = client.exec_command(cmds)
        except Exception as exc:
            # The original raised CalledProcessError built from the still
            # undefined retcode/output locals (NameError); report the
            # underlying failure instead and release the connection.
            LOG.debug("exec_command failed")
            client.close()
            raise subprocess.CalledProcessError(returncode=255, cmd=cmds,
                                                output=str(exc))
        LOG.debug("ssh command in: %s out: %s err: %s ",
                  stdin, stdout, stderr)
        retcode = stdout.channel.recv_exit_status()
        LOG.debug("Paramiko return code : %s ", retcode)
        client.close()  # @TODO re-use connections
        if retcode > 0:
            output = stderr.read().strip()
            raise subprocess.CalledProcessError(returncode=retcode, cmd=cmds,
                                                output=output)
        results = stdout.read()
        LOG.debug("ssh cmd %s | out: %s | err: %s ", cmds, results, retcode)
        # The FortiGate CLI reports errors on stdout, so check it as well.
        if "Command fail. Return code" in str(results):
            # TODO fill retcode with the output of the FGT
            raise subprocess.CalledProcessError(returncode=retcode, cmd=cmds,
                                                output=results)
        return (''.join(str(results)), ''.join(str(stderr)))

    def license(self):
        """Return the license status, triggering a FortiGuard update (and a
        re-check) when the current status is not 'success'."""
        resp = self.monitor('license', 'status')
        if resp['status'] == 'success':
            return resp
        else:
            # VM license not valid: ask FortiGuard to update, wait, re-check.
            url = self.mon_url('system', 'fortiguard', mkey='update')
            postres = self._session.post(url)
            LOG.debug("Return POST fortiguard %s:", postres)
            postresp = json.loads(postres.content.decode('utf-8'))
            if postresp['status'] == 'success':
                time.sleep(17)
                return self.monitor('license', 'status')
            # The original fell through and returned None here; surface the
            # failed update response instead so callers can inspect it.
            return postresp
# Todo for license check and update
# GET /api/v2/monitor/license/status
# To update FortiGuard license status, you can use the following API
# POST api/v2/monitor/system/fortiguard/update
| 39.297297 | 82 | 0.588308 |
session.headers)
return url
    def mon_url(self, path, name, vdom=None, mkey=None):
        """Build a /api/v2/monitor URL for *path*/*name*.

        An optional *mkey* is appended as a path segment; *vdom* selects the
        scope ('global' maps to the global=1 query flag).
        """
        url_postfix = '/api/v2/monitor/' + path + '/' + name
        if mkey:
            url_postfix = url_postfix + '/' + str(mkey)
        if vdom:
            LOG.debug("vdom is: %s", vdom)
            if vdom == "global":
                url_postfix += '?global=1'
            else:
                url_postfix += '?vdom=' + vdom
        url = self.url_prefix + url_postfix
        return url
    def monitor(self, path, name, vdom=None, mkey=None, parameters=None):
        """GET a monitor endpoint and return the decoded response dict."""
        url = self.mon_url(path, name, vdom, mkey)
        res = self._session.get(url, params=parameters)
        LOG.debug("in MONITOR function")
        return self.formatresponse(res, vdom=vdom)
    def download(self, path, name, vdom=None, mkey=None, parameters=None):
        """GET a monitor endpoint and return the raw requests response
        (the body may be a file download).

        Note: vdom and mkey are accepted but not forwarded to mon_url here.
        """
        url = self.mon_url(path, name)
        res = self._session.get(url, params=parameters)
        LOG.debug("in DOWNLOAD function")
        return res
    def upload(self, path, name, vdom=None, mkey=None,
               parameters=None, data=None, files=None):
        """POST *data*/*files* to a monitor endpoint; return the raw response.

        Note: vdom and mkey are accepted but not forwarded to mon_url here.
        """
        url = self.mon_url(path, name)
        res = self._session.post(url, params=parameters,
                                 data=data, files=files)
        LOG.debug("in UPLOAD function")
        return res
    def get(self, path, name, vdom=None, mkey=None, parameters=None):
        """GET a cmdb table or object and return the decoded response dict."""
        url = self.cmdb_url(path, name, vdom, mkey)
        res = self._session.get(url, params=parameters)
        LOG.debug("in GET function")
        return self.formatresponse(res, vdom=vdom)
def schema(self, path, name, vdom=None):
if vdom is None:
url = self.cmdb_url(path, name, vdom) + "?action=schema"
else:
url = self.cmdb_url(path, name, vdom) + "&action=schema"
res = self._session.get(url)
self.logging(res)
if res.status_code is 200:
return json.loads(res.content.decode('utf-8'))['results']
else:
return json.loads(res.content.decode('utf-8'))
def get_name_path_dict(self, vdom=None):
url_postfix = '/api/v2/cmdb/'
if vdom is None:
url_postfix += '?vdom=' + vdom + "&action=schema"
else:
url_postfix += "?action=schema"
url = self.url_prefix + url_postfix
cmdbschema = self._session.get(url)
self.logging(cmdbschema)
j = json.loads(cmdbschema.content.decode('utf-8'))['results']
dict = []
for keys in j:
if "__tree__" not in keys['path']:
dict.append(keys['path'] + " " + keys['name'])
return dict
    def post(self, path, name, vdom=None,
             mkey=None, parameters=None, data=None):
        """POST (create) a cmdb object; the mkey is derived from *data*
        when not given, but never appended to the URL (the child node does
        not exist yet, so the mkey URL would 404)."""
        if not mkey:
            mkey = self.get_mkey(path, name, vdom=vdom, data=data)
        url = self.cmdb_url(path, name, vdom, mkey=None)
        res = self._session.post(
            url, params=parameters, data=json.dumps(data))
        LOG.debug("in POST function")
        return self.formatresponse(res, vdom=vdom)
    def put(self, path, name, vdom=None,
            mkey=None, parameters=None, data=None):
        """PUT (update) an existing cmdb object identified by *mkey*
        (derived from *data* when not given)."""
        if not mkey:
            mkey = self.get_mkey(path, name, vdom=vdom, data=data)
        url = self.cmdb_url(path, name, vdom, mkey)
        res = self._session.put(url, params=parameters,
                                data=json.dumps(data))
        LOG.debug("in PUT function")
        return self.formatresponse(res, vdom=vdom)
def delete(self, path, name, vdom=None,
           mkey=None, parameters=None, data=None):
    """Delete a CMDB object with an HTTP DELETE."""
    key = mkey if mkey else self.get_mkey(path, name, vdom=vdom, data=data)
    response = self._session.delete(
        self.cmdb_url(path, name, vdom, key),
        params=parameters, data=json.dumps(data))
    LOG.debug("in DELETE function")
    return self.formatresponse(response, vdom=vdom)
def set(self, path, name, vdom=None,
        mkey=None, parameters=None, data=None):
    """Idempotently apply *data*: try a PUT (update) first and fall back
    to a POST (create) when the object does not exist (404/405).

    Returns the format-normalized response of whichever request won.
    """
    if not mkey:
        mkey = self.get_mkey(path, name, vdom=vdom, data=data)
    # Dead-code fix: the original computed `url` once before resolving
    # mkey and then unconditionally overwrote it; only one build is needed.
    url = self.cmdb_url(path, name, mkey=mkey, vdom=vdom)
    res = self._session.put(
        url, params=parameters, data=json.dumps(data))
    LOG.debug("in SET function after PUT")
    r = self.formatresponse(res, vdom=vdom)
    if r['http_status'] == 404 or r['http_status'] == 405:
        LOG.warning(
            "Try to put on %s failed doing a put to force parameters\
 change consider delete if still fails ",
            res.request.url)
        # Object missing (or method not allowed): create it instead.
        url = self.cmdb_url(path, name, mkey=None, vdom=vdom)
        res = self._session.post(
            url, params=parameters, data=json.dumps(data))
        LOG.debug("in SET function after POST")
        return self.formatresponse(res, vdom=vdom)
    else:
        return r
def ssh(self, cmds, host, user, password=None, private_key=None, ssh_port=22):
    """Run *cmds* on *host* over SSH and return ``(stdout, stderr)`` strings.

    Raises :class:`subprocess.CalledProcessError` when the remote command
    exits non-zero or reports a FortiOS "Command fail" error.
    """
    client = paramiko.SSHClient()
    # Lab-style behaviour: trust unknown host keys automatically.
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    if private_key is None:
        client.connect(host, port=ssh_port, username=user, password=password,
                       allow_agent=False, timeout=10)
    else:
        k = paramiko.RSAKey.from_private_key_file(private_key)
        client.connect(host, port=ssh_port, username=user, pkey=k,
                       allow_agent=False, timeout=10)
    LOG.debug("ssh login to %s:%s ", host, ssh_port)
    try:
        stdin, stdout, stderr = client.exec_command(cmds)
    except Exception:
        # Bug fix: the original bare `except:` raised CalledProcessError
        # built from `retcode`/`output`, two names that are undefined at
        # this point, so a NameError masked the real failure. Close the
        # connection and propagate the original paramiko error instead.
        LOG.debug("exec_command failed")
        client.close()
        raise
    LOG.debug("ssh command in: %s out: %s err: %s ",
              stdin, stdout, stderr)
    retcode = stdout.channel.recv_exit_status()
    LOG.debug("Paramiko return code : %s ", retcode)
    client.close()
    if retcode > 0:
        output = stderr.read().strip()
        raise subprocess.CalledProcessError(returncode=retcode, cmd=cmds,
                                            output=output)
    results = stdout.read()
    LOG.debug("ssh cmd %s | out: %s | err: %s ", cmds, results, retcode)
    # FortiOS can report failures in the output while still exiting 0.
    if "Command fail. Return code" in str(results):
        raise subprocess.CalledProcessError(returncode=retcode, cmd=cmds,
                                            output=results)
    return (''.join(str(results)), ''.join(str(stderr)))
def license(self):
    """Return license status, triggering a FortiGuard update and
    re-checking once when the initial status is not 'success'."""
    resp = self.monitor('license', 'status')
    if resp['status'] == 'success':
        return resp
    else:
        url = self.mon_url('system', 'fortiguard', mkey='update')
        postres = self._session.post(url)
        LOG.debug("Return POST fortiguard %s:", postres)
        postresp = json.loads(postres.content.decode('utf-8'))
        if postresp['status'] == 'success':
            # Give FortiGuard a moment to refresh before re-checking.
            time.sleep(17)
            return self.monitor('license', 'status')
        # NOTE(review): falls through to an implicit None when the update
        # request itself fails -- confirm callers handle that.
| true | true |
f72bf76df8d11eb6f121b20434afa79afc78b832 | 5,472 | py | Python | tests/deep_eq.py | textioHQ/PynamoDB | cc00ff616d1ff04793af2a43a68cef7611de4e85 | [
"MIT"
] | 1,586 | 2017-04-11T13:09:30.000Z | 2022-03-30T01:38:48.000Z | tests/deep_eq.py | textioHQ/PynamoDB | cc00ff616d1ff04793af2a43a68cef7611de4e85 | [
"MIT"
] | 606 | 2017-04-11T12:53:27.000Z | 2022-03-29T12:12:51.000Z | tests/deep_eq.py | textioHQ/textio-pynamodb | cc00ff616d1ff04793af2a43a68cef7611de4e85 | [
"MIT"
] | 366 | 2017-05-04T19:36:31.000Z | 2022-03-20T14:05:53.000Z | # Copyright (c) 2010-2013 Samuel Sutch [samuel.sutch@gmail.com]
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import datetime, time, functools, operator
# Zero tolerance by default when comparing datetimes.
default_fudge = datetime.timedelta(seconds=0, microseconds=0, days=0)


def deep_eq(_v1, _v2, datetime_fudge=default_fudge, _assert=False):
    """
    Tests for deep equality between two python data structures recursing
    into sub-structures if necessary. Works with all python types including
    iterators and generators. This function was dreampt up to test API responses
    but could be used for anything. Be careful. With deeply nested structures
    you may blow the stack.

    Options:
              datetime_fudge => this is a datetime.timedelta object which, when
                                comparing dates, will accept values that differ
                                by the number of seconds specified
              _assert        => passing yes for this will raise an assertion error
                                when values do not match, instead of returning
                                false (very useful in combination with pdb)

    Doctests included (updated for Python 3 -- the originals used xrange):

    >>> x1, y1 = ({'a': 'b'}, {'a': 'b'})
    >>> deep_eq(x1, y1)
    True
    >>> x2, y2 = ({'a': 'b'}, {'b': 'a'})
    >>> deep_eq(x2, y2)
    False
    >>> x3, y3 = ({'a': {'b': 'c'}}, {'a': {'b': 'c'}})
    >>> deep_eq(x3, y3)
    True
    >>> x4, y4 = ({'c': 't', 'a': {'b': 'c'}}, {'a': {'b': 'n'}, 'c': 't'})
    >>> deep_eq(x4, y4)
    False
    >>> x5, y5 = ({'a': [1,2,3]}, {'a': [1,2,3]})
    >>> deep_eq(x5, y5)
    True
    >>> x6, y6 = ({'a': [1,'b',8]}, {'a': [2,'b',8]})
    >>> deep_eq(x6, y6)
    False
    >>> x7, y7 = ('a', 'a')
    >>> deep_eq(x7, y7)
    True
    >>> x8, y8 = (['p','n',['asdf']], ['p','n',['asdf']])
    >>> deep_eq(x8, y8)
    True
    >>> x9, y9 = (['p','n',['asdf',['omg']]], ['p', 'n', ['asdf',['nowai']]])
    >>> deep_eq(x9, y9)
    False
    >>> x10, y10 = (1, 2)
    >>> deep_eq(x10, y10)
    False
    >>> deep_eq((str(p) for p in range(10)), (str(p) for p in range(10)))
    True
    >>> str(deep_eq(range(4), range(4)))
    'True'
    >>> deep_eq(range(100), range(100))
    True
    >>> deep_eq(range(2), range(5))
    False
    >>> import datetime
    >>> from datetime import datetime as dt
    >>> d1, d2 = (dt.now(), dt.now() + datetime.timedelta(seconds=4))
    >>> deep_eq(d1, d2)
    False
    >>> deep_eq(d1, d2, datetime_fudge=datetime.timedelta(seconds=5))
    True
    """
    _deep_eq = functools.partial(deep_eq, datetime_fudge=datetime_fudge,
                                 _assert=_assert)

    def _check_assert(R, a, b, reason=''):
        # Optionally turn a mismatch into an AssertionError (for pdb use).
        if _assert and not R:
            assert 0, "an assertion has failed in deep_eq ({}) {} != {}".format(
                reason, str(a), str(b))
        return R

    def _deep_dict_eq(d1, d2):
        k1, k2 = (sorted(d1.keys()), sorted(d2.keys()))
        if k1 != k2:  # keys should be exactly equal
            return _check_assert(False, k1, k2, "keys")
        return _check_assert(operator.eq(sum(_deep_eq(d1[k], d2[k])
                                             for k in k1),
                                         len(k1)), d1, d2, "dictionaries")

    def _deep_iter_eq(l1, l2):
        if len(l1) != len(l2):
            return _check_assert(False, l1, l2, "lengths")
        return _check_assert(operator.eq(sum(_deep_eq(v1, v2)
                                             for v1, v2 in zip(l1, l2)),
                                         len(l1)), l1, l2, "iterables")

    def op(a, b):
        # Scalar comparison, with a fudge window for datetimes.
        _op = operator.eq
        if type(a) == datetime.datetime and type(b) == datetime.datetime:
            s = datetime_fudge.seconds
            t1, t2 = (time.mktime(a.timetuple()), time.mktime(b.timetuple()))
            # `delta` is the non-positive difference in seconds (was `l`,
            # an ambiguous name).
            delta = t1 - t2
            delta = -delta if delta > 0 else delta
            return _check_assert((-s if s > 0 else s) <= delta, a, b, "dates")
        return _check_assert(_op(a, b), a, b, "values")

    c1, c2 = (_v1, _v2)

    # guard against strings because they are iterable and their
    # elements yield iterables infinitely.
    # I N C E P T I O N
    if not isinstance(_v1, str):
        if isinstance(_v1, dict):
            op = _deep_dict_eq
        else:
            try:
                c1, c2 = (list(iter(_v1)), list(iter(_v2)))
            except TypeError:
                c1, c2 = _v1, _v2
            else:
                op = _deep_iter_eq

    return op(c1, c2)
| 38.535211 | 82 | 0.561586 |
import datetime, time, functools, operator
# Zero tolerance by default when comparing datetimes.
default_fudge = datetime.timedelta(seconds=0, microseconds=0, days=0)


def deep_eq(_v1, _v2, datetime_fudge=default_fudge, _assert=False):
    """Deep structural equality for arbitrary Python data.

    Recurses into dicts and iterables (including generators). Datetimes
    are considered equal when they differ by at most *datetime_fudge*
    seconds. With ``_assert=True`` a mismatch raises AssertionError
    instead of returning False.
    """
    _deep_eq = functools.partial(deep_eq, datetime_fudge=datetime_fudge,
                                 _assert=_assert)

    def _check_assert(R, a, b, reason=''):
        # Optionally escalate a mismatch to an AssertionError (pdb-friendly).
        if _assert and not R:
            assert 0, "an assertion has failed in deep_eq ({}) {} != {}".format(
                reason, str(a), str(b))
        return R

    def _deep_dict_eq(d1, d2):
        # Dicts are equal when key sets match and every value deep-matches.
        k1, k2 = (sorted(d1.keys()), sorted(d2.keys()))
        if k1 != k2:
            return _check_assert(False, k1, k2, "keys")
        return _check_assert(operator.eq(sum(_deep_eq(d1[k], d2[k])
                                             for k in k1),
                                         len(k1)), d1, d2, "dictionaries")

    def _deep_iter_eq(l1, l2):
        # Iterables are equal when lengths match and elements deep-match.
        if len(l1) != len(l2):
            return _check_assert(False, l1, l2, "lengths")
        return _check_assert(operator.eq(sum(_deep_eq(v1, v2)
                                             for v1, v2 in zip(l1, l2)),
                                         len(l1)), l1, l2, "iterables")

    def op(a, b):
        # Scalar comparison, with the fudge window applied to datetimes.
        _op = operator.eq
        if type(a) == datetime.datetime and type(b) == datetime.datetime:
            s = datetime_fudge.seconds
            t1, t2 = (time.mktime(a.timetuple()), time.mktime(b.timetuple()))
            l = t1 - t2
            l = -l if l > 0 else l
            return _check_assert((-s if s > 0 else s) <= l, a, b, "dates")
        return _check_assert(_op(a, b), a, b, "values")

    c1, c2 = (_v1, _v2)
    # Strings are iterable and would recurse forever element-by-element,
    # so they are compared as plain scalars.
    if not isinstance(_v1, str):
        if isinstance(_v1, dict):
            op = _deep_dict_eq
        else:
            try:
                c1, c2 = (list(iter(_v1)), list(iter(_v2)))
            except TypeError:
                c1, c2 = _v1, _v2
            else:
                op = _deep_iter_eq

    return op(c1, c2)
| true | true |
f72bf78a0312644a15cd73be8689aef834f595d3 | 437 | py | Python | exe077 - COntando vogais em Tupla.py | carlosbandelli/Exercicios_em_Python | 2cd5bd837fdc51932f9605db32366ad0e3871d87 | [
"MIT"
] | null | null | null | exe077 - COntando vogais em Tupla.py | carlosbandelli/Exercicios_em_Python | 2cd5bd837fdc51932f9605db32366ad0e3871d87 | [
"MIT"
] | null | null | null | exe077 - COntando vogais em Tupla.py | carlosbandelli/Exercicios_em_Python | 2cd5bd837fdc51932f9605db32366ad0e3871d87 | [
"MIT"
] | null | null | null | palavras = ('aprender', 'programar', 'Linguagem', 'python',
'cruso', 'gratis', 'estudar', 'praticar',
'trabalhar', 'mercado', 'programador', 'futuro')
for p in palavras: # para cada palavra dentro do array de palavra
print(f'\nNa palavra {p.upper()} temos', end='')
for letra in p: # para cada letra de palvra tirado do array palavras
if letra.lower() in 'aeiou':
print(letra, end=' ') | 54.625 | 72 | 0.606407 | palavras = ('aprender', 'programar', 'Linguagem', 'python',
'cruso', 'gratis', 'estudar', 'praticar',
'trabalhar', 'mercado', 'programador', 'futuro')
for p in palavras:
print(f'\nNa palavra {p.upper()} temos', end='')
for letra in p:
if letra.lower() in 'aeiou':
print(letra, end=' ') | true | true |
f72bf78b887dc5d6cf67f9a36e0f156cb0c5c2ab | 4,851 | py | Python | profiles_api/views.py | rawaihtun/profiles-rest-api | 2a810cfcf41b3fbf336956a30805492b8249d36e | [
"MIT"
] | null | null | null | profiles_api/views.py | rawaihtun/profiles-rest-api | 2a810cfcf41b3fbf336956a30805492b8249d36e | [
"MIT"
] | 4 | 2021-03-19T12:01:37.000Z | 2022-02-10T09:30:39.000Z | profiles_api/views.py | rawaihtun/profiles-rest-api | 2a810cfcf41b3fbf336956a30805492b8249d36e | [
"MIT"
] | null | null | null | from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import viewsets
from rest_framework.authentication import TokenAuthentication
from rest_framework import filters
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.settings import api_settings
# from rest_framework.permissions import IsAuthenticatedOrReadOnly
from rest_framework.permissions import IsAuthenticated
from profiles_api import serializers
from profiles_api import models
from profiles_api import permissions
class HelloApiView(APIView):
    """Demonstration APIView wiring each HTTP verb to a handler."""

    serializer_class = serializers.HelloSerializer

    def get(self, request, format=None):
        """Return a list of APIView features."""
        an_apiview = [
            'Uses HTTP methods as functions (get, post, patch, put, delete)',
            'Is similar to a traditional Django View',
            'Gives you the most control over your logic',
            'Is mapped manually to URLs',
        ]
        return Response({'message': 'Hello!', 'an_apiview': an_apiview})

    def post(self, request):
        """Create a hello message built from the submitted name."""
        serializer = self.serializer_class(data=request.data)
        # Guard clause: reject invalid payloads up front.
        if not serializer.is_valid():
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
        name = serializer.validated_data.get('name')
        return Response({'message': f'Hello {name}'})

    def put(self, request, pk=None):
        """Handle updating an object."""
        return Response({'method': 'PUT'})

    def patch(self, request, pk=None):
        """Handle partial update of object."""
        return Response({'method': 'PATCH'})

    def delete(self, request, pk=None):
        """Delete an object."""
        return Response({'method': 'DELETE'})
class HelloViewSet(viewsets.ViewSet):
    """Test API ViewSet demonstrating the standard viewset actions.

    (A commented-out duplicate of this class's list/create actions used
    to live here; that dead code has been removed.)
    """

    serializer_class = serializers.HelloSerializer

    def list(self, request):
        """Return a hello message."""
        a_viewset = [
            'Uses actions (list, create, retrieve, update, partial_update)',
            'Automatically maps to URLS using Routers',
            'Provides more functionality with less code',
        ]
        return Response({'message': 'Hello!', 'a_viewset': a_viewset})

    def create(self, request):
        """Create a hello message from the validated name."""
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid():
            name = serializer.validated_data.get('name')
            message = f'Hello {name}'
            # Response key 'Message' (capitalized) is kept unchanged for
            # backward compatibility with existing clients.
            return Response({'Message': message})
        else:
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)

    def retrieve(self, request, pk=None):
        """Handle getting an object by its ID."""
        return Response({'http_method': 'GET'})

    def update(self, request, pk=None):
        """Handle updating an object."""
        return Response({'http_method': 'PUT'})

    def partial_update(self, request, pk=None):
        """Handle updating part of an object."""
        return Response({'http_method': 'PATCH'})

    def destroy(self, request, pk=None):
        """Handle removing an object."""
        return Response({'http_method': 'DELETE'})
class UserProfileViewSet(viewsets.ModelViewSet):
    """Handle creating and updating profiles"""
    serializer_class = serializers.UserProfileSerializer
    queryset = models.UserProfile.objects.all()
    # Token-based auth; the custom permission restricts write access to
    # the profile's own user.
    authentication_classes = (TokenAuthentication,)
    permission_classes = (permissions.UpdateOwnPermissions,)
    # Enable ?search= filtering over name and email.
    filter_backends = (filters.SearchFilter,)
    search_fields = ('name', 'email',)
class UserLoginApiView(ObtainAuthToken):
    """Handle creating user authentication tokens"""
    # Apply DRF's configured default renderers explicitly (ObtainAuthToken
    # does not set renderer_classes itself).
    renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES
class UserProfileFeedViewSet(viewsets.ModelViewSet):
    """Handle creating, reading and updating user profile feed items"""
    authentication_classes = (TokenAuthentication,)
    serializer_class = serializers.ProfileFeedItemSerializer
    queryset = models.ProfileFeedItem.objects.all()
    # Must be logged in; only the owner may update their own status.
    permission_classes = (permissions.UpdateOwnStatus,IsAuthenticated)

    def perform_create(self, serializer):
        """Attach the authenticated user's profile to the new feed item."""
        serializer.save(user_profile=self.request.user)
| 32.777027 | 84 | 0.669553 | from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import viewsets
from rest_framework.authentication import TokenAuthentication
from rest_framework import filters
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.settings import api_settings
from rest_framework.permissions import IsAuthenticated
from profiles_api import serializers
from profiles_api import models
from profiles_api import permissions
class HelloApiView(APIView):
    """Test API View demonstrating each HTTP verb handler."""

    # Serializer used to validate POSTed data.
    serializer_class = serializers.HelloSerializer

    def get(self, request, format=None):
        """Return a list of APIView features."""
        an_apiview = [
            'Uses HTTP methods as functions (get, post, patch, put, delete)',
            'Is similar to a traditional Django View',
            'Gives you the most control over your logic',
            'Is mapped manually to URLs',
        ]
        return Response({'message': 'Hello!', 'an_apiview': an_apiview})

    def post(self, request):
        """Create a hello message with the submitted name."""
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid():
            name = serializer.validated_data.get('name')
            message = f'Hello {name}'
            return Response({'message': message})
        else:
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)

    def put(self, request, pk=None):
        """Handle a full update of an object."""
        return Response({'method': 'PUT'})

    def patch(self, request, pk=None):
        """Handle a partial update of an object."""
        return Response({'method': 'PATCH'})

    def delete(self, request, pk=None):
        """Handle deleting an object."""
        return Response({'method': 'DELETE'})
class HelloViewSet(viewsets.ViewSet):
    """Test API ViewSet demonstrating the standard viewset actions."""

    serializer_class = serializers.HelloSerializer

    def list(self, request):
        """Return a hello message."""
        a_viewset = [
            'Uses actions (list, create, retrieve, update, partial_update)',
            'Automatically maps to URLS using Routers',
            'Provides more functionality with less code',
        ]
        return Response({'message': 'Hello!', 'a_viewset': a_viewset})

    def create(self, request):
        """Create a hello message from the validated name."""
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid():
            name=serializer.validated_data.get('name')
            message = f'Hello {name}'
            # NOTE(review): key is 'Message' (capitalized), unlike the
            # 'message' key used elsewhere -- presumably kept for existing
            # clients; confirm before changing.
            return Response({'Message':message})
        else:
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)

    def retrieve(self, request, pk=None):
        """Handle getting an object by its ID."""
        return Response({'http_method': 'GET'})

    def update(self, request, pk=None):
        """Handle updating an object."""
        return Response({'http_method': 'PUT'})

    def partial_update(self, request, pk=None):
        """Handle updating part of an object."""
        return Response({'http_method': 'PATCH'})

    def destroy(self, request, pk=None):
        """Handle removing an object."""
        return Response({'http_method': 'DELETE'})
class UserProfileViewSet(viewsets.ModelViewSet):
    """Handle creating and updating user profiles."""

    serializer_class = serializers.UserProfileSerializer
    queryset = models.UserProfile.objects.all()
    # Token-based auth; the custom permission restricts write access to
    # the profile's own user.
    authentication_classes = (TokenAuthentication,)
    permission_classes = (permissions.UpdateOwnPermissions,)
    # Enable ?search= filtering over name and email.
    filter_backends = (filters.SearchFilter,)
    search_fields = ('name', 'email',)
class UserLoginApiView(ObtainAuthToken):
    """Handle creating user authentication tokens."""

    # Apply DRF's configured default renderers explicitly (ObtainAuthToken
    # does not set renderer_classes itself).
    renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES
class UserProfileFeedViewSet(viewsets.ModelViewSet):
    """Handle creating, reading and updating profile feed items."""

    authentication_classes = (TokenAuthentication,)
    serializer_class = serializers.ProfileFeedItemSerializer
    queryset = models.ProfileFeedItem.objects.all()
    # Must be logged in; only the owner may update their own status.
    permission_classes = (permissions.UpdateOwnStatus,IsAuthenticated)

    def perform_create(self, serializer):
        """Attach the authenticated user's profile to the new feed item."""
        serializer.save(user_profile=self.request.user)
| true | true |
f72bf7f75d268ffc1761eda7cbca14afc9a54931 | 9,513 | py | Python | tests/test_v2.py | dajiaji/pyseto | 6e3f1259bd1a1671cccd75cb557bb63182f9e01a | [
"MIT"
] | 25 | 2021-09-06T08:53:45.000Z | 2022-02-19T20:17:23.000Z | tests/test_v2.py | dajiaji/pyseto | 6e3f1259bd1a1671cccd75cb557bb63182f9e01a | [
"MIT"
] | 124 | 2021-09-05T05:44:05.000Z | 2022-03-27T05:57:25.000Z | tests/test_v2.py | dajiaji/pyseto | 6e3f1259bd1a1671cccd75cb557bb63182f9e01a | [
"MIT"
] | 3 | 2021-09-11T02:37:09.000Z | 2022-01-06T10:49:14.000Z | from secrets import token_bytes
import pytest
import pyseto
from pyseto import DecryptError, EncryptError, Key, VerifyError
from pyseto.versions.v2 import V2Local, V2Public
from .utils import get_path, load_key
class TestV2Local:
    """
    Tests for v2.local.
    """

    # Keys for v2.local must be exactly 32 bytes.
    @pytest.mark.parametrize(
        "key, msg",
        [
            (b"", "key must be specified."),
            (token_bytes(1), "key must be 32 bytes long."),
            (token_bytes(8), "key must be 32 bytes long."),
            (token_bytes(16), "key must be 32 bytes long."),
            (token_bytes(31), "key must be 32 bytes long."),
            (token_bytes(33), "key must be 32 bytes long."),
        ],
    )
    def test_v2_local_new_with_invalid_arg(self, key, msg):
        with pytest.raises(ValueError) as err:
            Key.new(2, "local", key)
            pytest.fail("Key.new() should fail.")
        assert msg in str(err.value)

    @pytest.mark.parametrize(
        "key",
        [
            None,
            0,
            token_bytes(65),
        ],
    )
    def test_v2_local__generate_hash_with_invalid_arg(self, key):
        with pytest.raises(EncryptError) as err:
            V2Local._generate_hash(key, b"Hello world!", 32)
            pytest.fail("V2Local._generate_hash() should fail.")
        assert "Failed to generate hash." in str(err.value)

    @pytest.mark.parametrize(
        "ptk",
        [
            None,
            0,
        ],
    )
    def test_v2_local__encode_pie_with_invalid_ptk(self, ptk):
        with pytest.raises(EncryptError) as err:
            V2Local._encode_pie("v2.local-wrap.pie.", token_bytes(32), ptk)
            pytest.fail("V2Local._encode_pie() should fail.")
        assert "Failed to encrypt." in str(err.value)

    def test_v2_local_decrypt_via_decode_with_wrong_key(self):
        # A token encrypted under k1 must not decrypt under k2.
        k1 = Key.new(2, "local", token_bytes(32))
        k2 = Key.new(2, "local", token_bytes(32))
        token = pyseto.encode(k1, b"Hello world!")
        with pytest.raises(DecryptError) as err:
            pyseto.decode(k2, token)
            pytest.fail("pyseto.decode() should fail.")
        assert "Failed to decrypt." in str(err.value)

    def test_v2_local_encrypt_with_invalid_arg(self):
        k = Key.new(2, "local", token_bytes(32))
        with pytest.raises(EncryptError) as err:
            k.encrypt(None)
            pytest.fail("pyseto.encrypt() should fail.")
        assert "Failed to generate internal nonce." in str(err.value)

    # The explicit nonce, when provided, must be exactly 24 bytes.
    @pytest.mark.parametrize(
        "nonce",
        [
            token_bytes(1),
            token_bytes(8),
            token_bytes(23),
            token_bytes(25),
            token_bytes(32),
        ],
    )
    def test_v2_local_encrypt_via_encode_with_wrong_nonce(self, nonce):
        k = Key.new(2, "local", token_bytes(32))
        with pytest.raises(ValueError) as err:
            pyseto.encode(k, b"Hello world!", nonce=nonce)
            pytest.fail("pyseto.encode() should fail.")
        assert "nonce must be 24 bytes long." in str(err.value)

    # Malformed PASERK strings must be rejected with a descriptive error.
    @pytest.mark.parametrize(
        "paserk, msg",
        [
            ("xx.local.AAAAAAAAAAAAAAAA", "Invalid PASERK version: xx."),
            ("k3.local.AAAAAAAAAAAAAAAA", "Invalid PASERK version: k3."),
            ("k2.local.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
            ("k2.public.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
            ("k2.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
            ("k2.public.AAAAAAAAAAAAAAAA", "Invalid PASERK type: public."),
            (
                "k2.local-wrap.AAAAAAAAAAAAAAAA",
                "local-wrap needs wrapping_key.",
            ),
            (
                "k2.secret-wrap.AAAAAAAAAAAAAAAA",
                "Invalid PASERK type: secret-wrap.",
            ),
            (
                "k2.local-pw.AAAAAAAAAAAAAAAA",
                "local-pw needs password.",
            ),
            (
                "k2.seal.AAAAAAAAAAAAAAAA",
                "seal needs unsealing_key.",
            ),
        ],
    )
    def test_v2_local_from_paserk_with_invalid_args(self, paserk, msg):
        with pytest.raises(ValueError) as err:
            V2Local.from_paserk(paserk)
            pytest.fail("Key.from_paserk should fail.")
        assert msg in str(err.value)

    @pytest.mark.parametrize(
        "paserk, msg",
        [
            ("xx.local-wrap.AAAAAAAAAAAAAAAA", "Invalid PASERK version: xx."),
            ("k2.local-wrap.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
            ("k2.local-wrap.xxx.AAAAAAAAAAAAAAAA", "Unknown wrapping algorithm: xxx."),
            ("k2.xxx.pie.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
        ],
    )
    def test_v2_local_from_paserk_with_wrapping_key_and_invalid_args(self, paserk, msg):
        with pytest.raises(ValueError) as err:
            V2Local.from_paserk(paserk, wrapping_key=token_bytes(32))
            pytest.fail("Key.from_paserk should fail.")
        assert msg in str(err.value)

    @pytest.mark.parametrize(
        "paserk, msg",
        [
            ("k2.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
            ("k2.seal.AAAAAAAAAAAAAAAA", "Invalid or unsupported PEM format."),
        ],
    )
    def test_v2_local_from_paserk_with_unsealing_key_and_invalid_args(self, paserk, msg):
        with pytest.raises(ValueError) as err:
            V2Local.from_paserk(paserk, unsealing_key=token_bytes(32))
            pytest.fail("Key.from_paserk should fail.")
        assert msg in str(err.value)

    def test_v2_local_to_paserk_with_invalid_sealing_key(self):
        k = Key.new(2, "local", token_bytes(32))
        with pytest.raises(ValueError) as err:
            k.to_paserk(sealing_key=b"not-PEM-formatted-key")
            pytest.fail("Key.from_paserk should fail.")
        assert "Invalid or unsupported PEM format." in str(err.value)

    def test_v2_local_from_paserk_with_wrong_unsealing_key(self):
        # Seal under one X25519 key, then try to unseal with a different
        # private key -- unsealing must fail.
        k = Key.new(2, "local", token_bytes(32))
        with open(get_path("keys/public_key_x25519.pem")) as key_file:
            sealed_key = k.to_paserk(sealing_key=key_file.read())
        with open(get_path("keys/private_key_x25519_2.pem")) as key_file:
            unsealing_key = key_file.read()
        with pytest.raises(DecryptError) as err:
            Key.from_paserk(sealed_key, unsealing_key=unsealing_key)
            pytest.fail("Key.from_paserk should fail.")
        assert "Failed to unseal a key." in str(err.value)
class TestV2Public:
    """
    Tests for v2.public.

    (A commented-out, half-written unsealing test that referenced an
    undefined ``unsealing_key`` variable used to live here; that dead
    code has been removed.)
    """

    def test_v2_public_to_paserk_id(self):
        """A secret key's peer PASERK id must match its public key's id."""
        sk = Key.new(2, "public", load_key("keys/private_key_ed25519.pem"))
        pk = Key.new(2, "public", load_key("keys/public_key_ed25519.pem"))
        assert sk.to_peer_paserk_id() == pk.to_paserk_id()
        assert pk.to_peer_paserk_id() == ""

    def test_v2_public_verify_via_encode_with_wrong_key(self):
        """Verification with a mismatched public key must fail."""
        sk = Key.new(2, "public", load_key("keys/private_key_ed25519.pem"))
        pk = Key.new(2, "public", load_key("keys/public_key_ed25519_2.pem"))
        token = pyseto.encode(sk, b"Hello world!")
        with pytest.raises(VerifyError) as err:
            pyseto.decode(pk, token)
            pytest.fail("pyseto.decode() should fail.")
        assert "Failed to verify." in str(err.value)

    def test_v2_public_to_paserk_with_sealing_key(self):
        """Key sealing is only defined for local keys."""
        k = Key.new(2, "public", load_key("keys/private_key_ed25519.pem"))
        with pytest.raises(ValueError) as err:
            k.to_paserk(sealing_key=b"xxx")
            pytest.fail("pyseto.to_paserk() should fail.")
        assert "Key sealing can only be used for local key." in str(err.value)

    # Malformed PASERK strings must be rejected with a descriptive error.
    @pytest.mark.parametrize(
        "paserk, msg",
        [
            ("xx.public.AAAAAAAAAAAAAAAA", "Invalid PASERK version: xx."),
            ("k3.public.AAAAAAAAAAAAAAAA", "Invalid PASERK version: k3."),
            ("k2.public.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
            ("k2.local.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
            ("k2.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
            ("k2.local.AAAAAAAAAAAAAAAA", "Invalid PASERK type: local."),
            (
                "k2.local-wrap.AAAAAAAAAAAAAAAA",
                "Invalid PASERK type: local-wrap.",
            ),
            (
                "k2.secret-wrap.AAAAAAAAAAAAAAAA",
                "secret-wrap needs wrapping_key.",
            ),
            (
                "k2.secret-pw.AAAAAAAAAAAAAAAA",
                "secret-pw needs password.",
            ),
        ],
    )
    def test_v2_public_from_paserk_with_invalid_args(self, paserk, msg):
        with pytest.raises(ValueError) as err:
            V2Public.from_paserk(paserk)
            pytest.fail("Key.from_paserk should fail.")
        assert msg in str(err.value)
| 38.358871 | 89 | 0.599495 | from secrets import token_bytes
import pytest
import pyseto
from pyseto import DecryptError, EncryptError, Key, VerifyError
from pyseto.versions.v2 import V2Local, V2Public
from .utils import get_path, load_key
class TestV2Local:
    """Tests for v2.local."""

    # Keys for v2.local must be exactly 32 bytes.
    @pytest.mark.parametrize(
        "key, msg",
        [
            (b"", "key must be specified."),
            (token_bytes(1), "key must be 32 bytes long."),
            (token_bytes(8), "key must be 32 bytes long."),
            (token_bytes(16), "key must be 32 bytes long."),
            (token_bytes(31), "key must be 32 bytes long."),
            (token_bytes(33), "key must be 32 bytes long."),
        ],
    )
    def test_v2_local_new_with_invalid_arg(self, key, msg):
        with pytest.raises(ValueError) as err:
            Key.new(2, "local", key)
            pytest.fail("Key.new() should fail.")
        assert msg in str(err.value)

    @pytest.mark.parametrize(
        "key",
        [
            None,
            0,
            token_bytes(65),
        ],
    )
    def test_v2_local__generate_hash_with_invalid_arg(self, key):
        with pytest.raises(EncryptError) as err:
            V2Local._generate_hash(key, b"Hello world!", 32)
            pytest.fail("V2Local._generate_hash() should fail.")
        assert "Failed to generate hash." in str(err.value)

    @pytest.mark.parametrize(
        "ptk",
        [
            None,
            0,
        ],
    )
    def test_v2_local__encode_pie_with_invalid_ptk(self, ptk):
        with pytest.raises(EncryptError) as err:
            V2Local._encode_pie("v2.local-wrap.pie.", token_bytes(32), ptk)
            pytest.fail("V2Local._encode_pie() should fail.")
        assert "Failed to encrypt." in str(err.value)

    def test_v2_local_decrypt_via_decode_with_wrong_key(self):
        # A token encrypted under k1 must not decrypt under k2.
        k1 = Key.new(2, "local", token_bytes(32))
        k2 = Key.new(2, "local", token_bytes(32))
        token = pyseto.encode(k1, b"Hello world!")
        with pytest.raises(DecryptError) as err:
            pyseto.decode(k2, token)
            pytest.fail("pyseto.decode() should fail.")
        assert "Failed to decrypt." in str(err.value)

    def test_v2_local_encrypt_with_invalid_arg(self):
        k = Key.new(2, "local", token_bytes(32))
        with pytest.raises(EncryptError) as err:
            k.encrypt(None)
            pytest.fail("pyseto.encrypt() should fail.")
        assert "Failed to generate internal nonce." in str(err.value)

    # The explicit nonce, when provided, must be exactly 24 bytes.
    @pytest.mark.parametrize(
        "nonce",
        [
            token_bytes(1),
            token_bytes(8),
            token_bytes(23),
            token_bytes(25),
            token_bytes(32),
        ],
    )
    def test_v2_local_encrypt_via_encode_with_wrong_nonce(self, nonce):
        k = Key.new(2, "local", token_bytes(32))
        with pytest.raises(ValueError) as err:
            pyseto.encode(k, b"Hello world!", nonce=nonce)
            pytest.fail("pyseto.encode() should fail.")
        assert "nonce must be 24 bytes long." in str(err.value)

    # Malformed PASERK strings must be rejected with a descriptive error.
    @pytest.mark.parametrize(
        "paserk, msg",
        [
            ("xx.local.AAAAAAAAAAAAAAAA", "Invalid PASERK version: xx."),
            ("k3.local.AAAAAAAAAAAAAAAA", "Invalid PASERK version: k3."),
            ("k2.local.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
            ("k2.public.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
            ("k2.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
            ("k2.public.AAAAAAAAAAAAAAAA", "Invalid PASERK type: public."),
            (
                "k2.local-wrap.AAAAAAAAAAAAAAAA",
                "local-wrap needs wrapping_key.",
            ),
            (
                "k2.secret-wrap.AAAAAAAAAAAAAAAA",
                "Invalid PASERK type: secret-wrap.",
            ),
            (
                "k2.local-pw.AAAAAAAAAAAAAAAA",
                "local-pw needs password.",
            ),
            (
                "k2.seal.AAAAAAAAAAAAAAAA",
                "seal needs unsealing_key.",
            ),
        ],
    )
    def test_v2_local_from_paserk_with_invalid_args(self, paserk, msg):
        with pytest.raises(ValueError) as err:
            V2Local.from_paserk(paserk)
            pytest.fail("Key.from_paserk should fail.")
        assert msg in str(err.value)

    @pytest.mark.parametrize(
        "paserk, msg",
        [
            ("xx.local-wrap.AAAAAAAAAAAAAAAA", "Invalid PASERK version: xx."),
            ("k2.local-wrap.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
            ("k2.local-wrap.xxx.AAAAAAAAAAAAAAAA", "Unknown wrapping algorithm: xxx."),
            ("k2.xxx.pie.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
        ],
    )
    def test_v2_local_from_paserk_with_wrapping_key_and_invalid_args(self, paserk, msg):
        with pytest.raises(ValueError) as err:
            V2Local.from_paserk(paserk, wrapping_key=token_bytes(32))
            pytest.fail("Key.from_paserk should fail.")
        assert msg in str(err.value)

    @pytest.mark.parametrize(
        "paserk, msg",
        [
            ("k2.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
            ("k2.seal.AAAAAAAAAAAAAAAA", "Invalid or unsupported PEM format."),
        ],
    )
    def test_v2_local_from_paserk_with_unsealing_key_and_invalid_args(self, paserk, msg):
        with pytest.raises(ValueError) as err:
            V2Local.from_paserk(paserk, unsealing_key=token_bytes(32))
            pytest.fail("Key.from_paserk should fail.")
        assert msg in str(err.value)

    def test_v2_local_to_paserk_with_invalid_sealing_key(self):
        k = Key.new(2, "local", token_bytes(32))
        with pytest.raises(ValueError) as err:
            k.to_paserk(sealing_key=b"not-PEM-formatted-key")
            pytest.fail("Key.from_paserk should fail.")
        assert "Invalid or unsupported PEM format." in str(err.value)

    def test_v2_local_from_paserk_with_wrong_unsealing_key(self):
        # Seal under one X25519 key, then try to unseal with a different
        # private key -- unsealing must fail.
        k = Key.new(2, "local", token_bytes(32))
        with open(get_path("keys/public_key_x25519.pem")) as key_file:
            sealed_key = k.to_paserk(sealing_key=key_file.read())
        with open(get_path("keys/private_key_x25519_2.pem")) as key_file:
            unsealing_key = key_file.read()
        with pytest.raises(DecryptError) as err:
            Key.from_paserk(sealed_key, unsealing_key=unsealing_key)
            pytest.fail("Key.from_paserk should fail.")
        assert "Failed to unseal a key." in str(err.value)
class TestV2Public:
    """Tests for v2.public."""

    def test_v2_public_to_paserk_id(self):
        # A secret key's peer PASERK id must match its public key's id.
        sk = Key.new(2, "public", load_key("keys/private_key_ed25519.pem"))
        pk = Key.new(2, "public", load_key("keys/public_key_ed25519.pem"))
        assert sk.to_peer_paserk_id() == pk.to_paserk_id()
        assert pk.to_peer_paserk_id() == ""

    def test_v2_public_verify_via_encode_with_wrong_key(self):
        # Verification with a mismatched public key must fail.
        sk = Key.new(2, "public", load_key("keys/private_key_ed25519.pem"))
        pk = Key.new(2, "public", load_key("keys/public_key_ed25519_2.pem"))
        token = pyseto.encode(sk, b"Hello world!")
        with pytest.raises(VerifyError) as err:
            pyseto.decode(pk, token)
            pytest.fail("pyseto.decode() should fail.")
        assert "Failed to verify." in str(err.value)

    def test_v2_public_to_paserk_with_sealing_key(self):
        # Key sealing is only defined for local keys.
        k = Key.new(2, "public", load_key("keys/private_key_ed25519.pem"))
        with pytest.raises(ValueError) as err:
            k.to_paserk(sealing_key=b"xxx")
            pytest.fail("pyseto.to_paserk() should fail.")
        assert "Key sealing can only be used for local key." in str(err.value)

    # Malformed PASERK strings must be rejected with a descriptive error.
    @pytest.mark.parametrize(
        "paserk, msg",
        [
            ("xx.public.AAAAAAAAAAAAAAAA", "Invalid PASERK version: xx."),
            ("k3.public.AAAAAAAAAAAAAAAA", "Invalid PASERK version: k3."),
            ("k2.public.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
            ("k2.local.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK format."),
            ("k2.xxx.AAAAAAAAAAAAAAAA", "Invalid PASERK type: xxx."),
            ("k2.local.AAAAAAAAAAAAAAAA", "Invalid PASERK type: local."),
            (
                "k2.local-wrap.AAAAAAAAAAAAAAAA",
                "Invalid PASERK type: local-wrap.",
            ),
            (
                "k2.secret-wrap.AAAAAAAAAAAAAAAA",
                "secret-wrap needs wrapping_key.",
            ),
            (
                "k2.secret-pw.AAAAAAAAAAAAAAAA",
                "secret-pw needs password.",
            ),
        ],
    )
    def test_v2_public_from_paserk_with_invalid_args(self, paserk, msg):
        with pytest.raises(ValueError) as err:
            V2Public.from_paserk(paserk)
            pytest.fail("Key.from_paserk should fail.")
        assert msg in str(err.value)
| true | true |
f72bfa63f1e88f5fb0116c005cf7d58d6612502d | 1,487 | py | Python | import_pictures.py | RikEnde/image-indexer | ffade3d618bb8a59d3cca676e46576d05d2bc122 | [
"Apache-2.0"
] | 2 | 2015-08-05T12:19:49.000Z | 2015-12-17T09:27:16.000Z | import_pictures.py | RikEnde/image-indexer | ffade3d618bb8a59d3cca676e46576d05d2bc122 | [
"Apache-2.0"
] | null | null | null | import_pictures.py | RikEnde/image-indexer | ffade3d618bb8a59d3cca676e46576d05d2bc122 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
Generate a document containing exif and file stat info of image files and persist this document
Usage:
media_indexer.py --path=/root [-v | --verbose=true] [-u | --upsert=true] [-d | --debug=true] [-h | --hashing=true]
-v, --verbose if true, print to stdout what file the indexer is currently working on
-u, --upsert if true, update files that have already been indexed. If false, skip.
-d, --debug print extremely verbose debugging info to stdout
-h, --hashing if true, include an md5 hash in the document that is persisted
"""
import sys
from options import get_options
from settings import collection, connect_string, database
from image_indexer.imageIndexer import ImageIndexer
from image_indexer.imageDAO import ImageDAO
if __name__ == '__main__':
    # Parse the command-line flags described in the module docstring.
    opt = get_options(sys.argv[1:])
    if opt.debug:
        # Echo each parsed option together with its type, for troubleshooting.
        # NOTE: Python 2 print-statement syntax throughout this script.
        print "path=%s(%s), verbose=%s(%s), upsert=%s(%s), debug=%s(%s), hashing=%s(%s)" % \
            (opt.path, type(opt.path), opt.verbose, type(opt.verbose),
             opt.upsert, type(opt.upsert), opt.debug, type(opt.debug), opt.hashing,
             type(opt.hashing))
    # The DAO persists image metadata documents; connection details come from settings.py.
    dao = ImageDAO(connection_string=connect_string, database=database, collection=collection, upsert=opt.upsert)
    indexer = ImageIndexer(opt.path, dao, verbose=opt.verbose, debug=opt.debug, hashing=opt.hashing)
    try:
        indexer.index()
    except KeyboardInterrupt as e:
        # Allow Ctrl-C to stop a long directory walk cleanly.
        print "Aborted. ", e
| 38.128205 | 118 | 0.67115 |
"""
Generate a document containing exif and file stat info of image files and persist this document
Usage:
media_indexer.py --path=/root [-v | --verbose=true] [-u | --upsert=true] [-d | --debug=true] [-h | --hashing=true]
-v, --verbose if true, print to stdout what file the indexer is currently working on
-u, --upsert if true, update files that have already been indexed. If false, skip.
-d, --debug print extremely verbose debugging info to stdout
-h, --hashing if true, include an md5 hash in the document that is persisted
"""
import sys
from options import get_options
from settings import collection, connect_string, database
from image_indexer.imageIndexer import ImageIndexer
from image_indexer.imageDAO import ImageDAO
if __name__ == '__main__':
opt = get_options(sys.argv[1:])
if opt.debug:
print "path=%s(%s), verbose=%s(%s), upsert=%s(%s), debug=%s(%s), hashing=%s(%s)" % \
(opt.path, type(opt.path), opt.verbose, type(opt.verbose),
opt.upsert, type(opt.upsert), opt.debug, type(opt.debug), opt.hashing,
type(opt.hashing))
dao = ImageDAO(connection_string=connect_string, database=database, collection=collection, upsert=opt.upsert)
indexer = ImageIndexer(opt.path, dao, verbose=opt.verbose, debug=opt.debug, hashing=opt.hashing)
try:
indexer.index()
except KeyboardInterrupt as e:
print "Aborted. ", e
| false | true |
f72bfa6d085487f6f133c3d61b0833f55d54597a | 136 | py | Python | oandapy/entities/positions.py | gustavooferreira/oandaApi | 3c22c088e090e726cccebd201efb4254503246a0 | [
"MIT"
] | 4 | 2016-07-17T15:39:50.000Z | 2016-10-06T23:41:28.000Z | oandapy/entities/positions.py | gustavooferreira/oandaApi | 3c22c088e090e726cccebd201efb4254503246a0 | [
"MIT"
] | 1 | 2018-12-09T21:20:57.000Z | 2018-12-09T21:20:57.000Z | oandapy/entities/positions.py | gustavooferreira/oandaApi | 3c22c088e090e726cccebd201efb4254503246a0 | [
"MIT"
] | 1 | 2018-12-06T18:39:36.000Z | 2018-12-06T18:39:36.000Z | # -*- coding: utf-8 -*-
# vim:fenc=utf-8
"""
Entities for Positions
"""
def main():
    # Placeholder entry point: position entities are not implemented yet.
    pass
if __name__ == '__main__':
    main()
| 9.066667 | 26 | 0.544118 |
def main():
pass
if __name__ == '__main__':
main()
| true | true |
f72bfdbd824f7c5caeb25c8512edc95565455d2e | 13,091 | py | Python | naucse/views.py | befeleme/naucse.python.cz | dee2c8cce8db90108b01b40c0981053943352d11 | [
"MIT"
] | 4 | 2019-02-14T08:02:41.000Z | 2020-10-20T10:35:55.000Z | naucse/views.py | befeleme/naucse.python.cz | dee2c8cce8db90108b01b40c0981053943352d11 | [
"MIT"
] | 71 | 2018-08-26T22:31:39.000Z | 2022-01-20T10:29:23.000Z | naucse/views.py | befeleme/naucse.python.cz | dee2c8cce8db90108b01b40c0981053943352d11 | [
"MIT"
] | 40 | 2018-08-22T14:44:59.000Z | 2021-09-20T16:11:27.000Z | import datetime
from pathlib import Path
import functools
import calendar
import os
from flask import Flask, render_template, jsonify, url_for, Response, abort, g, redirect
from flask import send_from_directory
import ics
from arca import Arca
from naucse import models
from naucse.urlconverters import register_url_converters
from naucse.templates import setup_jinja_env
app = Flask('naucse')
# JSON responses may contain non-ASCII (Czech) text; emit it raw instead of \u escapes.
app.config['JSON_AS_ASCII'] = False
@app.before_request
def _get_model():
    """Set `g.model` to the root of the naucse model

    A single model is used (and stored in app config).

    In debug mode (elsa serve), the model is re-initialized for each request,
    so changes are picked up.

    In non-debug mode (elsa freeze), the model is initialized once, and
    frozen (so all course data is requested and rendered upfront).
    """
    freezing = os.environ.get('NAUCSE_FREEZE', not app.config['DEBUG'])

    try:
        g.model = app.config['NAUCSE_MODEL']
    except KeyError:
        # First request: build the model and cache it on the app.
        g.model = init_model()
        app.config['NAUCSE_MODEL'] = g.model
    else:
        if freezing:
            # Model already initialized; don't look for changes
            return

    # (Re-)initialize model
    g.model.load_licenses(Path(app.root_path).parent / 'licenses')
    g.model.load_local_courses(Path(app.root_path).parent)

    if freezing:
        g.model.freeze()
def init_model():
    """Build and return the root of the naucse model.

    Configures URL factories for both API and web endpoints, the JSON-schema
    URL factory, the Arca runner used to build forked courses, and the set of
    repository patterns that are trusted to run without sandbox restrictions
    (taken from the NAUCSE_TRUSTED_REPOS environment variable).
    """
    trusted = os.environ.get('NAUCSE_TRUSTED_REPOS', None)
    if trusted is None:
        trusted_repo_patterns = ()
    else:
        # Whitespace-separated list of repo URL patterns; skip empty entries.
        trusted_repo_patterns = tuple(
            line for line in trusted.split() if line
        )
    return models.Root(
        url_factories={
            'api': {
                models.Root: lambda **kw: url_for('api', **kw),
                models.Course: lambda **kw: url_for('course_api', **kw),
                models.RunYear: lambda **kw: url_for('run_year_api', **kw),
            },
            'web': {
                # A Lesson's canonical web URL is its 'index' page.
                models.Lesson: lambda **kw: url_for('page',
                                                    page_slug='index', **kw),
                models.Page: lambda **kw: url_for('page', **kw),
                models.Solution: lambda **kw: url_for('solution', **kw),
                models.Course: lambda **kw: url_for('course', **kw),
                models.Session: lambda **kw: url_for('session', **kw),
                models.SessionPage: lambda **kw: url_for(
                    'session', **kw),
                models.StaticFile: lambda **kw: url_for('page_static', **kw),
                models.Root: lambda **kw: url_for('index', **kw)
            },
        },
        schema_url_factory=lambda m, is_input, **kw: url_for(
            'schema', model_slug=m.model_slug,
            is_input=is_input, **kw),
        # Arca executes forked course repositories in the current environment;
        # results are cached in a dbm file under .arca/.
        arca=Arca(settings={
            "ARCA_BACKEND": "arca.backend.CurrentEnvironmentBackend",
            "ARCA_BACKEND_CURRENT_ENVIRONMENT_REQUIREMENTS": "requirements.txt",
            "ARCA_BACKEND_VERBOSITY": 2,
            "ARCA_BACKEND_KEEP_CONTAINER_RUNNING": True,
            "ARCA_BACKEND_USE_REGISTRY_NAME": "docker.io/naucse/naucse.python.cz",
            "ARCA_SINGLE_PULL": True,
            "ARCA_IGNORE_CACHE_ERRORS": True,
            "ARCA_CACHE_BACKEND": "dogpile.cache.dbm",
            "ARCA_CACHE_BACKEND_ARGUMENTS": {
                "filename": ".arca/cache/naucse.dbm"
            },
            "ARCA_BASE_DIR": str(Path('.arca').resolve()),
        }),
        trusted_repo_patterns=trusted_repo_patterns,
    )
# Module-level wiring: custom URL converters and Jinja environment setup.
register_url_converters(app)
setup_jinja_env(app.jinja_env)
@app.route('/')
def index():
    """Render the site landing page."""
    context = {'edit_info': g.model.edit_info}
    return render_template("index.html", **context)
@app.route('/courses/')
def courses():
    """Render the list of featured (self-paced) courses."""
    model = g.model
    context = {
        'featured_courses': model.featured_courses,
        'edit_info': model.course_edit_info,
    }
    return render_template("course_list.html", **context)
@app.route('/runs/')
@app.route('/<int:year>/')
@app.route('/runs/<any(all):all>/')
def runs(year=None, all=None):
    """List course runs: the "current" view (default), one year, or all years."""
    # XXX: Simplify?
    today = datetime.date.today()

    # List of years to show in the pagination
    # If the current year is not there (no runs that start in the current year
    # yet), add it manually
    all_years = sorted(g.model.explicit_run_years)
    if today.year not in all_years:
        all_years.append(today.year)
    first_year, last_year = min(all_years), max(all_years)

    if year is not None:
        if year > last_year:
            # Instead of showing a future year, redirect to the 'Current' page
            return redirect(url_for('runs'))
        if year not in all_years:
            # Otherwise, if there are no runs in requested year, return 404.
            abort(404)

    if all is not None:
        # "All" view: group every dated course by its starting year.
        run_data = {}
        courses = g.model.courses
        for slug, course in g.model.courses.items():
            if course.start_date:
                run_data.setdefault(course.start_date.year, {})[slug] = course

        paginate_prev = {'year': first_year}
        paginate_next = {'all': 'all'}
    elif year is None:
        # Show runs that are either ongoing or ended in the last 3 months
        runs = {**g.model.run_years.get(today.year, {}),
                **g.model.run_years.get(today.year - 1, {})}
        ongoing = {slug: run for slug, run in runs.items()
                   if run.end_date >= today}
        cutoff = today - datetime.timedelta(days=3*31)
        recent = {slug: run for slug, run in runs.items()
                  if today > run.end_date > cutoff}
        run_data = {"ongoing": ongoing, "recent": recent}

        paginate_prev = {'year': None}
        paginate_next = {'year': last_year}
    else:
        # Single-year view; pagination steps to the nearest adjacent years
        # that have runs, or to the "all"/"current" views at the edges.
        run_data = {year: g.model.run_years.get(year, {})}

        past_years = [y for y in all_years if y < year]
        if past_years:
            paginate_next = {'year': max(past_years)}
        else:
            paginate_next = {'all': 'all'}
        future_years = [y for y in all_years if y > year]
        if future_years:
            paginate_prev = {'year': min(future_years)}
        else:
            paginate_prev = {'year': None}

    return render_template(
        "run_list.html",
        run_data=run_data,
        today=datetime.date.today(),
        year=year,
        all=all,
        all_years=all_years,
        paginate_next=paginate_next,
        paginate_prev=paginate_prev,
        edit_info=g.model.runs_edit_info,
    )
@app.route('/<course:course_slug>/')
def course(course_slug, year=None):
    """Render a course's landing page; 404 for an unknown slug.

    The unused `year` parameter is kept for backward compatibility with
    existing URL builders that may pass it.
    """
    try:
        course = g.model.courses[course_slug]
    except KeyError:
        # Fixed: a leftover debug print here dumped the entire course mapping
        # to stdout on every 404.
        abort(404)
    recent_runs = course.get_recent_derived_runs()
    return render_template(
        "course.html",
        course=course,
        recent_runs=recent_runs,
        edit_info=course.edit_info,
    )
@app.route('/<course:course_slug>/sessions/<session_slug>/',
           defaults={'page_slug': 'front'})
@app.route('/<course:course_slug>/sessions/<session_slug>/<page_slug>/')
def session(course_slug, session_slug, page_slug):
    """Render a session's 'front' (cover) or 'back' page; 404 for unknown slugs."""
    try:
        course = g.model.courses[course_slug]
        session = course.sessions[session_slug]
        page = session.pages[page_slug]
    except KeyError:
        abort(404)
    # Each of the two session page slugs has its own template; any other
    # slug raises KeyError here (resulting in a 500 -- slugs are validated
    # by the model above).
    template = {
        'front': 'coverpage.html',
        'back': 'backpage.html',
    }[page.slug]
    # Group materials (links, pages, cheatsheets, ...) by their type for display.
    materials_by_type = {}
    for material in session.materials:
        materials_by_type.setdefault(material.type, []).append(material)
    return render_template(
        template,
        session=session,
        course=session.course,
        edit_info=session.edit_info,
        materials_by_type=materials_by_type,
        page=page,
    )
def _get_canonicality_info(lesson):
    """Get canonical URL -- i.e., a lesson from 'lessons' with the same slug"""
    # XXX: This could be made much more fancy
    canonical_course = g.model.get_course('lessons')
    is_canonical_lesson = canonical_course == lesson.course
    canonical_url = None
    if not is_canonical_lesson and canonical_course._has_lesson(lesson.slug):
        twin = canonical_course.lessons[lesson.slug]
        canonical_url = twin.get_url(external=True)
    return is_canonical_lesson, canonical_url
@app.route('/<course:course_slug>/<lesson:lesson_slug>/',
           defaults={'page_slug': 'index'})
@app.route('/<course:course_slug>/<lesson:lesson_slug>/<page_slug>/')
def page(course_slug, lesson_slug, page_slug='index'):
    """Render one page of a lesson within a course; 404 for unknown slugs."""
    try:
        course = g.model.courses[course_slug]
        lesson = course.lessons[lesson_slug]
        page = lesson.pages[page_slug]
    except KeyError:
        # `abort` raises HTTPException itself; the former `raise abort(404)`
        # never reached its `raise`.
        abort(404)
    is_canonical_lesson, canonical_url = _get_canonicality_info(lesson)
    return render_template(
        "lesson.html",
        page=page,
        content=page.content,
        course=course,
        canonical_url=canonical_url,
        is_canonical_lesson=is_canonical_lesson,
        page_attribution=page.attribution,
        edit_info=page.edit_info,
    )
@app.route('/<course:course_slug>/<lesson:lesson_slug>/<page_slug>'
           + '/solutions/<int:solution_index>/')
def solution(course_slug, lesson_slug, page_slug, solution_index):
    """Render one solution of a lesson page; 404 for unknown slugs/indices."""
    try:
        course = g.model.courses[course_slug]
        lesson = course.lessons[lesson_slug]
        page = lesson.pages[page_slug]
        solution = page.solutions[solution_index]
    except KeyError:
        # `abort` raises itself; no `raise` needed (was `raise abort(404)`).
        abort(404)
    is_canonical_lesson, canonical_url = _get_canonicality_info(lesson)
    # Same template as the lesson page, but with the solution's content
    # swapped in and the solution object passed for navigation.
    return render_template(
        "lesson.html",
        page=page,
        content=solution.content,
        course=course,
        canonical_url=canonical_url,
        is_canonical_lesson=is_canonical_lesson,
        page_attribution=page.attribution,
        edit_info=page.edit_info,
        solution=solution,
    )
@app.route('/<course:course_slug>/<lesson:lesson_slug>/static/<path:filename>')
def page_static(course_slug, lesson_slug, filename):
    """Serve a static file belonging to a lesson; 404 for unknown slugs/files.

    Fixed: removed a leftover debug print that logged every served file,
    and the unreachable `raise` before `abort(404)`.
    """
    try:
        course = g.model.courses[course_slug]
        lesson = course.lessons[lesson_slug]
        static = lesson.static_files[filename]
    except KeyError:
        abort(404)
    return send_from_directory(static.base_path, static.path)
def list_months(start_date, end_date):
    """Return a span of months as a list of (year, month) tuples

    The months of start_date and end_date are both included.
    """
    # Work with a linear month index (year * 12 + month - 1) so the span
    # is a simple integer range; convert back with divmod.
    first = start_date.year * 12 + start_date.month - 1
    last = end_date.year * 12 + end_date.month - 1
    return [(m // 12, m % 12 + 1) for m in range(first, last + 1)]
@app.route('/<course:course_slug>/calendar/')
def course_calendar(course_slug):
    """Render the HTML month-grid calendar for a course with scheduled dates."""
    try:
        course = g.model.courses[course_slug]
    except KeyError:
        abort(404)
    if not course.start_date:
        # Courses without a schedule (self-paced) have no calendar.
        abort(404)
    # Index scheduled sessions by date; sessions without a date are skipped.
    sessions_by_date = {
        s.date: s for s in course.sessions.values()
        if hasattr(s, 'date')
    }
    return render_template(
        'course_calendar.html',
        course=course,
        sessions_by_date=sessions_by_date,
        months=list_months(course.start_date, course.end_date),
        calendar=calendar.Calendar(),
        edit_info=course.edit_info,
    )
@app.route('/<course:course_slug>/calendar.ics')
def course_calendar_ics(course_slug):
    """Serve the course schedule as an iCalendar (.ics) file."""
    try:
        course = g.model.courses[course_slug]
    except KeyError:
        abort(404)
    if not course.start_date:
        # No schedule -> no calendar feed.
        abort(404)
    events = []
    for session in course.sessions.values():
        time = getattr(session, 'time', None)
        if time is None:
            # Sessions without times don't show up in the calendar
            continue
        # Allow freezes to pin DTSTAMP for reproducible output.
        created = os.environ.get('NAUCSE_CALENDAR_DTSTAMP', None)
        cal_event = ics.Event(
            name=session.title,
            begin=time['start'],
            end=time['end'],
            # The session's canonical URL doubles as a stable event UID.
            uid=session.get_url(external=True),
            created=created,
        )
        events.append(cal_event)
    cal = ics.Calendar(events=events)
    return Response(str(cal), mimetype="text/calendar")
@app.route('/v0/schema/<is_input:is_input>.json', defaults={'model_slug': 'root'})
@app.route('/v0/schema/<is_input:is_input>/<model_slug>.json')
def schema(model_slug, is_input):
    """Serve the JSON schema for one model class; 404 for unknown slugs."""
    if model_slug not in models.models:
        abort(404)
    cls = models.models[model_slug]
    return jsonify(models.get_schema(cls, is_input=is_input))
@app.route('/v0/naucse.json')
def api():
    """Serve the full model dump as JSON."""
    dumped = models.dump(g.model)
    return jsonify(dumped)
@app.route('/v0/years/<int:year>.json')
def run_year_api(year):
    """Serve the JSON dump of a single run year; 404 if absent."""
    if year not in g.model.run_years:
        abort(404)
    return jsonify(models.dump(g.model.run_years[year]))
@app.route('/v0/<course:course_slug>.json')
def course_api(course_slug):
    """Serve the JSON dump of a single course; 404 if absent."""
    if course_slug not in g.model.courses:
        abort(404)
    return jsonify(models.dump(g.model.courses[course_slug]))
| 30.730047 | 88 | 0.625544 | import datetime
from pathlib import Path
import functools
import calendar
import os
from flask import Flask, render_template, jsonify, url_for, Response, abort, g, redirect
from flask import send_from_directory
import ics
from arca import Arca
from naucse import models
from naucse.urlconverters import register_url_converters
from naucse.templates import setup_jinja_env
app = Flask('naucse')
app.config['JSON_AS_ASCII'] = False
@app.before_request
def _get_model():
freezing = os.environ.get('NAUCSE_FREEZE', not app.config['DEBUG'])
initialize = True
try:
g.model = app.config['NAUCSE_MODEL']
except KeyError:
g.model = init_model()
app.config['NAUCSE_MODEL'] = g.model
else:
if freezing:
return
# (Re-)initialize model
g.model.load_licenses(Path(app.root_path).parent / 'licenses')
g.model.load_local_courses(Path(app.root_path).parent)
if freezing:
g.model.freeze()
def init_model():
trusted = os.environ.get('NAUCSE_TRUSTED_REPOS', None)
if trusted is None:
trusted_repo_patterns = ()
else:
trusted_repo_patterns = tuple(
line for line in trusted.split() if line
)
return models.Root(
url_factories={
'api': {
models.Root: lambda **kw: url_for('api', **kw),
models.Course: lambda **kw: url_for('course_api', **kw),
models.RunYear: lambda **kw: url_for('run_year_api', **kw),
},
'web': {
models.Lesson: lambda **kw: url_for('page',
page_slug='index', **kw),
models.Page: lambda **kw: url_for('page', **kw),
models.Solution: lambda **kw: url_for('solution', **kw),
models.Course: lambda **kw: url_for('course', **kw),
models.Session: lambda **kw: url_for('session', **kw),
models.SessionPage: lambda **kw: url_for(
'session', **kw),
models.StaticFile: lambda **kw: url_for('page_static', **kw),
models.Root: lambda **kw: url_for('index', **kw)
},
},
schema_url_factory=lambda m, is_input, **kw: url_for(
'schema', model_slug=m.model_slug,
is_input=is_input, **kw),
arca=Arca(settings={
"ARCA_BACKEND": "arca.backend.CurrentEnvironmentBackend",
"ARCA_BACKEND_CURRENT_ENVIRONMENT_REQUIREMENTS": "requirements.txt",
"ARCA_BACKEND_VERBOSITY": 2,
"ARCA_BACKEND_KEEP_CONTAINER_RUNNING": True,
"ARCA_BACKEND_USE_REGISTRY_NAME": "docker.io/naucse/naucse.python.cz",
"ARCA_SINGLE_PULL": True,
"ARCA_IGNORE_CACHE_ERRORS": True,
"ARCA_CACHE_BACKEND": "dogpile.cache.dbm",
"ARCA_CACHE_BACKEND_ARGUMENTS": {
"filename": ".arca/cache/naucse.dbm"
},
"ARCA_BASE_DIR": str(Path('.arca').resolve()),
}),
trusted_repo_patterns=trusted_repo_patterns,
)
register_url_converters(app)
setup_jinja_env(app.jinja_env)
@app.route('/')
def index():
return render_template("index.html", edit_info=g.model.edit_info)
@app.route('/courses/')
def courses():
return render_template(
"course_list.html",
featured_courses=g.model.featured_courses,
edit_info=g.model.course_edit_info,
)
@app.route('/runs/')
@app.route('/<int:year>/')
@app.route('/runs/<any(all):all>/')
def runs(year=None, all=None):
# XXX: Simplify?
today = datetime.date.today()
# List of years to show in the pagination
# If the current year is not there (no runs that start in the current year
# yet), add it manually
all_years = sorted(g.model.explicit_run_years)
if today.year not in all_years:
all_years.append(today.year)
first_year, last_year = min(all_years), max(all_years)
if year is not None:
if year > last_year:
# Instead of showing a future year, redirect to the 'Current' page
return redirect(url_for('runs'))
if year not in all_years:
# Otherwise, if there are no runs in requested year, return 404.
abort(404)
if all is not None:
run_data = {}
courses = g.model.courses
for slug, course in g.model.courses.items():
if course.start_date:
run_data.setdefault(course.start_date.year, {})[slug] = course
paginate_prev = {'year': first_year}
paginate_next = {'all': 'all'}
elif year is None:
# Show runs that are either ongoing or ended in the last 3 months
runs = {**g.model.run_years.get(today.year, {}),
**g.model.run_years.get(today.year - 1, {})}
ongoing = {slug: run for slug, run in runs.items()
if run.end_date >= today}
cutoff = today - datetime.timedelta(days=3*31)
recent = {slug: run for slug, run in runs.items()
if today > run.end_date > cutoff}
run_data = {"ongoing": ongoing, "recent": recent}
paginate_prev = {'year': None}
paginate_next = {'year': last_year}
else:
run_data = {year: g.model.run_years.get(year, {})}
past_years = [y for y in all_years if y < year]
if past_years:
paginate_next = {'year': max(past_years)}
else:
paginate_next = {'all': 'all'}
future_years = [y for y in all_years if y > year]
if future_years:
paginate_prev = {'year': min(future_years)}
else:
paginate_prev = {'year': None}
return render_template(
"run_list.html",
run_data=run_data,
today=datetime.date.today(),
year=year,
all=all,
all_years=all_years,
paginate_next=paginate_next,
paginate_prev=paginate_prev,
edit_info=g.model.runs_edit_info,
)
@app.route('/<course:course_slug>/')
def course(course_slug, year=None):
try:
course = g.model.courses[course_slug]
except KeyError:
print(g.model.courses)
abort(404)
recent_runs = course.get_recent_derived_runs()
return render_template(
"course.html",
course=course,
recent_runs=recent_runs,
edit_info=course.edit_info,
)
@app.route('/<course:course_slug>/sessions/<session_slug>/',
defaults={'page_slug': 'front'})
@app.route('/<course:course_slug>/sessions/<session_slug>/<page_slug>/')
def session(course_slug, session_slug, page_slug):
try:
course = g.model.courses[course_slug]
session = course.sessions[session_slug]
page = session.pages[page_slug]
except KeyError:
abort(404)
template = {
'front': 'coverpage.html',
'back': 'backpage.html',
}[page.slug]
materials_by_type = {}
for material in session.materials:
materials_by_type.setdefault(material.type, []).append(material)
return render_template(
template,
session=session,
course=session.course,
edit_info=session.edit_info,
materials_by_type=materials_by_type,
page=page,
)
def _get_canonicality_info(lesson):
# XXX: This could be made much more fancy
lessons_course = g.model.get_course('lessons')
is_canonical_lesson = (lessons_course == lesson.course)
if is_canonical_lesson:
canonical_url = None
else:
if lessons_course._has_lesson(lesson.slug):
canonical = lessons_course.lessons[lesson.slug]
canonical_url = canonical.get_url(external=True)
else:
canonical_url = None
return is_canonical_lesson, canonical_url
@app.route('/<course:course_slug>/<lesson:lesson_slug>/',
defaults={'page_slug': 'index'})
@app.route('/<course:course_slug>/<lesson:lesson_slug>/<page_slug>/')
def page(course_slug, lesson_slug, page_slug='index'):
try:
course = g.model.courses[course_slug]
lesson = course.lessons[lesson_slug]
page = lesson.pages[page_slug]
except KeyError:
raise abort(404)
is_canonical_lesson, canonical_url = _get_canonicality_info(lesson)
return render_template(
"lesson.html",
page=page,
content=page.content,
course=course,
canonical_url=canonical_url,
is_canonical_lesson=is_canonical_lesson,
page_attribution=page.attribution,
edit_info=page.edit_info,
)
@app.route('/<course:course_slug>/<lesson:lesson_slug>/<page_slug>'
+ '/solutions/<int:solution_index>/')
def solution(course_slug, lesson_slug, page_slug, solution_index):
try:
course = g.model.courses[course_slug]
lesson = course.lessons[lesson_slug]
page = lesson.pages[page_slug]
solution = page.solutions[solution_index]
except KeyError:
raise abort(404)
is_canonical_lesson, canonical_url = _get_canonicality_info(lesson)
return render_template(
"lesson.html",
page=page,
content=solution.content,
course=course,
canonical_url=canonical_url,
is_canonical_lesson=is_canonical_lesson,
page_attribution=page.attribution,
edit_info=page.edit_info,
solution=solution,
)
@app.route('/<course:course_slug>/<lesson:lesson_slug>/static/<path:filename>')
def page_static(course_slug, lesson_slug, filename):
try:
course = g.model.courses[course_slug]
lesson = course.lessons[lesson_slug]
static = lesson.static_files[filename]
except KeyError:
raise abort(404)
print('sending', static.base_path, static.filename)
return send_from_directory(static.base_path, static.path)
def list_months(start_date, end_date):
months = []
year = start_date.year
month = start_date.month
while (year, month) <= (end_date.year, end_date.month):
months.append((year, month))
month += 1
if month > 12:
month = 1
year += 1
return months
@app.route('/<course:course_slug>/calendar/')
def course_calendar(course_slug):
try:
course = g.model.courses[course_slug]
except KeyError:
abort(404)
if not course.start_date:
abort(404)
sessions_by_date = {
s.date: s for s in course.sessions.values()
if hasattr(s, 'date')
}
return render_template(
'course_calendar.html',
course=course,
sessions_by_date=sessions_by_date,
months=list_months(course.start_date, course.end_date),
calendar=calendar.Calendar(),
edit_info=course.edit_info,
)
@app.route('/<course:course_slug>/calendar.ics')
def course_calendar_ics(course_slug):
try:
course = g.model.courses[course_slug]
except KeyError:
abort(404)
if not course.start_date:
abort(404)
events = []
for session in course.sessions.values():
time = getattr(session, 'time', None)
if time is None:
# Sessions without times don't show up in the calendar
continue
created = os.environ.get('NAUCSE_CALENDAR_DTSTAMP', None)
cal_event = ics.Event(
name=session.title,
begin=time['start'],
end=time['end'],
uid=session.get_url(external=True),
created=created,
)
events.append(cal_event)
cal = ics.Calendar(events=events)
return Response(str(cal), mimetype="text/calendar")
@app.route('/v0/schema/<is_input:is_input>.json', defaults={'model_slug': 'root'})
@app.route('/v0/schema/<is_input:is_input>/<model_slug>.json')
def schema(model_slug, is_input):
try:
cls = models.models[model_slug]
except KeyError:
abort(404)
return jsonify(models.get_schema(cls, is_input=is_input))
@app.route('/v0/naucse.json')
def api():
return jsonify(models.dump(g.model))
@app.route('/v0/years/<int:year>.json')
def run_year_api(year):
try:
run_year = g.model.run_years[year]
except KeyError:
abort(404)
return jsonify(models.dump(run_year))
@app.route('/v0/<course:course_slug>.json')
def course_api(course_slug):
try:
course = g.model.courses[course_slug]
except KeyError:
abort(404)
return jsonify(models.dump(course))
| true | true |
f72bfe29fee50157b229530a1755cc94f9b37546 | 23,048 | py | Python | lib/keystore.py | GetAywa/electrum-aywa | 07a548bd14cdf563da49c1f1e52644b833ca972e | [
"MIT"
] | null | null | null | lib/keystore.py | GetAywa/electrum-aywa | 07a548bd14cdf563da49c1f1e52644b833ca972e | [
"MIT"
] | null | null | null | lib/keystore.py | GetAywa/electrum-aywa | 07a548bd14cdf563da49c1f1e52644b833ca972e | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
# -*- mode: python -*-
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2016 The Electrum developers
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from unicodedata import normalize
from . import bitcoin
from .bitcoin import *
from .util import PrintError, InvalidPassword, hfu
from .mnemonic import Mnemonic, load_wordlist
from .plugins import run_hook
class KeyStore(PrintError):
    """Abstract base class for keystores.

    A keystore holds key material (or, for hardware wallets, a reference to
    it) and knows which transaction inputs it can sign.
    """
    def has_seed(self):
        # Overridden by seed-based (deterministic) keystores.
        return False
    def is_watching_only(self):
        # True when the keystore cannot produce signatures at all.
        return False
    def can_import(self):
        # True when individual private keys can be imported.
        return False
    def get_tx_derivations(self, tx):
        """Map each still-unsigned x_pubkey of *tx* that belongs to this
        keystore to the derivation needed to obtain its private key."""
        keypairs = {}
        for txin in tx.inputs():
            num_sig = txin.get('num_sig')
            if num_sig is None:
                continue
            x_signatures = txin['signatures']
            signatures = [sig for sig in x_signatures if sig]
            if len(signatures) == num_sig:
                # input is complete
                continue
            for k, x_pubkey in enumerate(txin['x_pubkeys']):
                if x_signatures[k] is not None:
                    # this pubkey already signed
                    continue
                derivation = self.get_pubkey_derivation(x_pubkey)
                if not derivation:
                    # x_pubkey does not belong to this keystore
                    continue
                keypairs[x_pubkey] = derivation
        return keypairs
    def can_sign(self, tx):
        # We can sign iff we are not watching-only and at least one missing
        # signature can be derived from our keys.
        if self.is_watching_only():
            return False
        return bool(self.get_tx_derivations(tx))
class Software_KeyStore(KeyStore):
    """Base class for keystores whose private keys live in the wallet file
    (as opposed to a hardware device)."""
    def __init__(self):
        KeyStore.__init__(self)
    def may_have_password(self):
        """Returns whether the keystore can be encrypted with a password."""
        return not self.is_watching_only()
    def sign_message(self, sequence, message, password):
        # `sequence` identifies the key (a derivation or a pubkey, depending
        # on the subclass); the password decrypts the stored private key.
        privkey, compressed = self.get_private_key(sequence, password)
        key = regenerate_key(privkey)
        return key.sign_message(message, compressed)
    def decrypt_message(self, sequence, message, password):
        privkey, compressed = self.get_private_key(sequence, password)
        ec = regenerate_key(privkey)
        decrypted = ec.decrypt_message(message)
        return decrypted
    def sign_transaction(self, tx, password):
        if self.is_watching_only():
            return
        # Raise if password is not correct.
        self.check_password(password)
        # Add private keys
        keypairs = self.get_tx_derivations(tx)
        for k, v in keypairs.items():
            keypairs[k] = self.get_private_key(v, password)
        # Sign
        if keypairs:
            tx.sign(keypairs)
class Imported_KeyStore(Software_KeyStore):
    # keystore for imported private keys
    def __init__(self, d):
        Software_KeyStore.__init__(self)
        # Maps hex pubkey -> password-encrypted WIF private key.
        self.keypairs = d.get('keypairs', {})
    def is_deterministic(self):
        return False
    def can_change_password(self):
        return True
    def get_master_public_key(self):
        # Imported keys have no master key.
        return None
    def dump(self):
        """Serialize the keystore for storage in the wallet file."""
        return {
            'type': 'imported',
            'keypairs': self.keypairs,
        }
    def can_import(self):
        return True
    def check_password(self, password):
        # Decrypting any one key verifies the password for all of them
        # (they are encrypted with the same password).
        pubkey = list(self.keypairs.keys())[0]
        self.get_private_key(pubkey, password)
    def import_privkey(self, sec, password):
        """Add a WIF private key; returns (txin_type, hex pubkey)."""
        txin_type, privkey, compressed = deserialize_privkey(sec)
        pubkey = public_key_from_private_key(privkey, compressed)
        self.keypairs[pubkey] = pw_encode(sec, password)
        return txin_type, pubkey
    def delete_imported_key(self, key):
        self.keypairs.pop(key)
    def get_private_key(self, pubkey, password):
        """Decrypt and return (privkey bytes, compressed flag); raises
        InvalidPassword if the password is wrong."""
        sec = pw_decode(self.keypairs[pubkey], password)
        txin_type, privkey, compressed = deserialize_privkey(sec)
        # this checks the password
        if pubkey != public_key_from_private_key(privkey, compressed):
            raise InvalidPassword()
        return privkey, compressed
    def get_pubkey_derivation(self, x_pubkey):
        # A plain hex pubkey (02/03 compressed, 04 uncompressed) is its own
        # "derivation": get_private_key() is keyed directly on it.
        if x_pubkey[0:2] in ['02', '03', '04']:
            if x_pubkey in self.keypairs.keys():
                return x_pubkey
        elif x_pubkey[0:2] == 'fd':
            # 'fd'-prefixed x_pubkeys encode a script instead of a pubkey.
            # NOTE(review): self.addresses is never defined on this class
            # (only self.keypairs is); this branch looks like broken legacy
            # code that would raise AttributeError -- confirm before relying
            # on it.
            addr = bitcoin.script_to_address(x_pubkey[2:])
            if addr in self.addresses:
                return self.addresses[addr].get('pubkey')
    def update_password(self, old_password, new_password):
        """Re-encrypt every stored key under *new_password*."""
        self.check_password(old_password)
        if new_password == '':
            new_password = None
        for k, v in self.keypairs.items():
            b = pw_decode(v, old_password)
            c = pw_encode(b, new_password)
            self.keypairs[k] = c
class Deterministic_KeyStore(Software_KeyStore):
    """Common base for seed-derived keystores (old-style and BIP32)."""

    def __init__(self, d):
        Software_KeyStore.__init__(self)
        # Both fields are stored password-encrypted ('' when absent).
        self.seed = d.get('seed', '')
        self.passphrase = d.get('passphrase', '')

    def is_deterministic(self):
        return True

    def dump(self):
        """Serialize the non-empty fields for wallet storage."""
        out = {}
        if self.seed:
            out['seed'] = self.seed
        if self.passphrase:
            out['passphrase'] = self.passphrase
        return out

    def has_seed(self):
        return bool(self.seed)

    def is_watching_only(self):
        return not self.has_seed()

    def can_change_password(self):
        return not self.is_watching_only()

    def add_seed(self, seed):
        """Install *seed*; refuses to overwrite an existing one."""
        if self.seed:
            raise Exception("a seed exists")
        self.seed = self.format_seed(seed)

    def get_seed(self, password):
        return pw_decode(self.seed, password)

    def get_passphrase(self, password):
        if not self.passphrase:
            return ''
        return pw_decode(self.passphrase, password)
class Xpub:
    """Mixin holding a BIP32 extended public key plus derivation helpers."""

    def __init__(self):
        self.xpub = None
        # Cached first-level children for the receive (0) and change (1)
        # branches, so repeated derivations skip one CKD step.
        self.xpub_receive = None
        self.xpub_change = None

    def get_master_public_key(self):
        return self.xpub

    def derive_pubkey(self, for_change, n):
        """Return the hex pubkey at branch *for_change*, index *n*."""
        if for_change:
            branch_xpub = self.xpub_change
        else:
            branch_xpub = self.xpub_receive
        if branch_xpub is None:
            branch_xpub = bip32_public_derivation(self.xpub, "", "/%d" % for_change)
            if for_change:
                self.xpub_change = branch_xpub
            else:
                self.xpub_receive = branch_xpub
        return self.get_pubkey_from_xpub(branch_xpub, (n,))

    @classmethod
    def get_pubkey_from_xpub(self, xpub, sequence):
        """CKD-derive along *sequence* from *xpub*; return hex pubkey."""
        _, _, _, _, c, cK = deserialize_xpub(xpub)
        for index in sequence:
            cK, c = CKD_pub(cK, c, index)
        return bh2u(cK)

    def get_xpubkey(self, c, i):
        # 'ff' tag + base58-decoded xpub + two little-endian uint16s.
        suffix = ''.join(bitcoin.int_to_hex(value, 2) for value in (c, i))
        return 'ff' + bh2u(bitcoin.DecodeBase58Check(self.xpub)) + suffix

    @classmethod
    def parse_xpubkey(self, pubkey):
        """Inverse of get_xpubkey: return (xpub, [branch, index])."""
        assert pubkey[0:2] == 'ff'
        raw = bfh(pubkey)[1:]
        xkey = bitcoin.EncodeBase58Check(raw[0:78])
        tail = raw[78:]
        sequence = []
        while tail:
            sequence.append(int(bitcoin.rev_hex(bh2u(tail[0:2])), 16))
            tail = tail[2:]
        assert len(sequence) == 2
        return xkey, sequence

    def get_pubkey_derivation(self, x_pubkey):
        """Return [branch, index] when *x_pubkey* derives from our xpub."""
        if x_pubkey[0:2] != 'ff':
            return
        xpub, derivation = self.parse_xpubkey(x_pubkey)
        if self.xpub != xpub:
            return
        return derivation
class BIP32_KeyStore(Deterministic_KeyStore, Xpub):
    """Deterministic keystore backed by a BIP32 xprv/xpub pair.

    The xpub is stored in clear; the xprv (and optional seed/passphrase
    inherited from Deterministic_KeyStore) are stored password-encrypted.
    """
    def __init__(self, d):
        Xpub.__init__(self)
        Deterministic_KeyStore.__init__(self, d)
        self.xpub = d.get('xpub')
        self.xprv = d.get('xprv')
    def format_seed(self, seed):
        # Normalize inter-word whitespace to single spaces.
        return ' '.join(seed.split())
    def dump(self):
        """Serialize for wallet storage (extends the base dump)."""
        d = Deterministic_KeyStore.dump(self)
        d['type'] = 'bip32'
        d['xpub'] = self.xpub
        d['xprv'] = self.xprv
        return d
    def get_master_private_key(self, password):
        return pw_decode(self.xprv, password)
    def check_password(self, password):
        """Raise InvalidPassword unless *password* decrypts the xprv.

        A wrong password yields garbage xprv; the check compares the
        chain code (field 4 of the deserialized key) against the xpub's.
        """
        xprv = pw_decode(self.xprv, password)
        if deserialize_xprv(xprv)[4] != deserialize_xpub(self.xpub)[4]:
            raise InvalidPassword()
    def update_password(self, old_password, new_password):
        """Re-encrypt seed, passphrase and xprv under *new_password*.

        An empty new password means "no password" (stored unencrypted).
        """
        self.check_password(old_password)
        if new_password == '':
            new_password = None
        if self.has_seed():
            decoded = self.get_seed(old_password)
            self.seed = pw_encode(decoded, new_password)
        if self.passphrase:
            decoded = self.get_passphrase(old_password)
            self.passphrase = pw_encode(decoded, new_password)
        if self.xprv is not None:
            b = pw_decode(self.xprv, old_password)
            self.xprv = pw_encode(b, new_password)
    def is_watching_only(self):
        # No xprv means we can only watch, not spend.
        return self.xprv is None
    def add_xprv(self, xprv):
        """Install *xprv* (unencrypted here) and derive its xpub."""
        self.xprv = xprv
        self.xpub = bitcoin.xpub_from_xprv(xprv)
    def add_xprv_from_seed(self, bip32_seed, xtype, derivation):
        """Derive the account xprv at *derivation* from a BIP32 root seed."""
        xprv, xpub = bip32_root(bip32_seed, xtype)
        xprv, xpub = bip32_private_derivation(xprv, "m/", derivation)
        self.add_xprv(xprv)
    def get_private_key(self, sequence, password):
        """Return (privkey_bytes, True) for derivation path *sequence*.

        The True flag indicates BIP32 keys are always compressed.
        """
        xprv = self.get_master_private_key(password)
        _, _, _, _, c, k = deserialize_xprv(xprv)
        pk = bip32_private_key(sequence, k, c)
        return pk, True
class Old_KeyStore(Deterministic_KeyStore):
    """Legacy (pre-BIP32) Electrum-style deterministic keystore.

    The seed is a hex string (stored password-encrypted); the master
    public key (mpk) is 128 hex chars: an uncompressed pubkey without
    its '04' prefix.
    """
    def __init__(self, d):
        Deterministic_KeyStore.__init__(self, d)
        self.mpk = d.get('mpk')
    def get_hex_seed(self, password):
        # Decrypted seed as ASCII-hex bytes (the form hashing expects).
        return pw_decode(self.seed, password).encode('utf8')
    def dump(self):
        """Serialize for wallet storage (extends the base dump)."""
        d = Deterministic_KeyStore.dump(self)
        d['mpk'] = self.mpk
        d['type'] = 'old'
        return d
    def add_seed(self, seedphrase):
        """Install the seed and derive the matching master public key."""
        Deterministic_KeyStore.add_seed(self, seedphrase)
        s = self.get_hex_seed(None)
        self.mpk = self.mpk_from_seed(s)
    def add_master_public_key(self, mpk):
        self.mpk = mpk
    def format_seed(self, seed):
        """Normalize *seed* to its canonical hex form.

        Accepts either raw hex or an old-style mnemonic phrase.
        """
        from . import old_mnemonic, mnemonic
        seed = mnemonic.normalize_text(seed)
        # see if seed was entered as hex
        if seed:
            try:
                bfh(seed)
                return str(seed)
            except Exception:
                pass
        words = seed.split()
        seed = old_mnemonic.mn_decode(words)
        if not seed:
            raise Exception("Invalid seed")
        return seed
    def get_seed(self, password):
        """Return the seed re-encoded as an old-style mnemonic phrase."""
        from . import old_mnemonic
        s = self.get_hex_seed(password)
        return ' '.join(old_mnemonic.mn_encode(s))
    @classmethod
    def mpk_from_seed(klass, seed):
        """Derive the hex master public key from a hex seed."""
        secexp = klass.stretch_key(seed)
        master_private_key = ecdsa.SigningKey.from_secret_exponent(secexp, curve = SECP256k1)
        master_public_key = master_private_key.get_verifying_key().to_string()
        return bh2u(master_public_key)
    @classmethod
    def stretch_key(self, seed):
        """Key stretching: 100000 chained SHA256 rounds over the seed."""
        x = seed
        for i in range(100000):
            x = hashlib.sha256(x + seed).digest()
        return string_to_number(x)
    @classmethod
    def get_sequence(self, mpk, for_change, n):
        # Scalar offset for index n on branch for_change:
        # double-SHA256 of "n:for_change:" + raw mpk bytes.
        return string_to_number(Hash(("%d:%d:"%(n, for_change)).encode('ascii') + bfh(mpk)))
    @classmethod
    def get_pubkey_from_mpk(self, mpk, for_change, n):
        """Derive child pubkey as mpk_point + sequence*G (uncompressed hex)."""
        z = self.get_sequence(mpk, for_change, n)
        master_public_key = ecdsa.VerifyingKey.from_string(bfh(mpk), curve = SECP256k1)
        pubkey_point = master_public_key.pubkey.point + z*SECP256k1.generator
        public_key2 = ecdsa.VerifyingKey.from_public_point(pubkey_point, curve = SECP256k1)
        return '04' + bh2u(public_key2.to_string())
    def derive_pubkey(self, for_change, n):
        return self.get_pubkey_from_mpk(self.mpk, for_change, n)
    def get_private_key_from_stretched_exponent(self, for_change, n, secexp):
        # Child secret exponent: (master + sequence) mod curve order.
        order = generator_secp256k1.order()
        secexp = (secexp + self.get_sequence(self.mpk, for_change, n)) % order
        pk = number_to_string(secexp, generator_secp256k1.order())
        return pk
    def get_private_key(self, sequence, password):
        """Return (privkey_bytes, False) for (for_change, n) in *sequence*.

        The False flag indicates old-style keys are uncompressed.
        """
        seed = self.get_hex_seed(password)
        self.check_seed(seed)
        for_change, n = sequence
        secexp = self.stretch_key(seed)
        pk = self.get_private_key_from_stretched_exponent(for_change, n, secexp)
        return pk, False
    def check_seed(self, seed):
        """Raise InvalidPassword unless *seed* reproduces the stored mpk."""
        secexp = self.stretch_key(seed)
        master_private_key = ecdsa.SigningKey.from_secret_exponent( secexp, curve = SECP256k1 )
        master_public_key = master_private_key.get_verifying_key().to_string()
        if master_public_key != bfh(self.mpk):
            print_error('invalid password (mpk)', self.mpk, bh2u(master_public_key))
            raise InvalidPassword()
    def check_password(self, password):
        # Decrypting + re-deriving the mpk doubles as the password check.
        seed = self.get_hex_seed(password)
        self.check_seed(seed)
    def get_master_public_key(self):
        return self.mpk
    def get_xpubkey(self, for_change, n):
        # 'fe' tag + mpk + two little-endian uint16s (branch, index).
        s = ''.join(map(lambda x: bitcoin.int_to_hex(x,2), (for_change, n)))
        return 'fe' + self.mpk + s
    @classmethod
    def parse_xpubkey(self, x_pubkey):
        """Inverse of get_xpubkey: return (mpk, [for_change, n])."""
        assert x_pubkey[0:2] == 'fe'
        pk = x_pubkey[2:]
        mpk = pk[0:128]
        dd = pk[128:]
        s = []
        while dd:
            n = int(bitcoin.rev_hex(dd[0:4]), 16)
            dd = dd[4:]
            s.append(n)
        assert len(s) == 2
        return mpk, s
    def get_pubkey_derivation(self, x_pubkey):
        """Return [for_change, n] when *x_pubkey* derives from our mpk."""
        if x_pubkey[0:2] != 'fe':
            return
        mpk, derivation = self.parse_xpubkey(x_pubkey)
        if self.mpk != mpk:
            return
        return derivation
    def update_password(self, old_password, new_password):
        """Re-encrypt the seed under *new_password* ('' means none)."""
        self.check_password(old_password)
        if new_password == '':
            new_password = None
        if self.has_seed():
            decoded = pw_decode(self.seed, old_password)
            self.seed = pw_encode(decoded, new_password)
class Hardware_KeyStore(KeyStore, Xpub):
    """Keystore whose private keys live on an external hardware device.

    Concrete subclasses must define: device, DEVICE_IDS and wallet_type.
    """
    max_change_outputs = 1

    def __init__(self, d):
        Xpub.__init__(self)
        KeyStore.__init__(self)
        self.xpub = d.get('xpub')
        self.label = d.get('label')
        self.derivation = d.get('derivation')
        # Errors and other user interaction go through this per-window
        # handler, which is preserved across device reconnects.
        self.handler = None
        run_hook('init_keystore', self)

    def set_label(self, label):
        self.label = label

    def may_have_password(self):
        return False

    def is_deterministic(self):
        return True

    def dump(self):
        """Serialize for wallet storage; no secrets are stored."""
        return {
            'type': 'hardware',
            'hw_type': self.hw_type,
            'xpub': self.xpub,
            'derivation': self.derivation,
            'label': self.label,
        }

    def unpaired(self):
        """Called (possibly from any thread) when the paired device
        disconnects."""
        self.print_error("unpaired")

    def paired(self):
        """Called (possibly from any thread) when the paired device is
        (re-)connected."""
        self.print_error("paired")

    def can_export(self):
        return False

    def is_watching_only(self):
        """Never watching-only: the device itself prompts for pin and
        passphrase when needed."""
        assert not self.has_seed()
        return False

    def can_change_password(self):
        return False
def bip39_normalize_passphrase(passphrase):
    """NFKD-normalize *passphrase*; None and '' both map to ''."""
    if not passphrase:
        return ''
    return normalize('NFKD', passphrase)
def bip39_to_seed(mnemonic, passphrase):
    """Derive the 64-byte BIP39 seed from a mnemonic and passphrase.

    Implements the BIP39 derivation: PBKDF2-HMAC-SHA512 over the
    NFKD-normalized mnemonic, salted with 'mnemonic' + passphrase,
    2048 iterations.  Uses hashlib.pbkdf2_hmac from the standard
    library instead of the third-party `pbkdf2` package; the output
    is byte-identical.
    """
    import hashlib
    from unicodedata import normalize
    PBKDF2_ROUNDS = 2048
    # Collapse whitespace and NFKD-normalize both inputs, per BIP39.
    mnemonic = normalize('NFKD', ' '.join(mnemonic.split()))
    passphrase = normalize('NFKD', passphrase or '')
    return hashlib.pbkdf2_hmac('sha512', mnemonic.encode('utf-8'),
                               ('mnemonic' + passphrase).encode('utf-8'),
                               PBKDF2_ROUNDS, dklen=64)
# returns tuple (is_checksum_valid, is_wordlist_valid)
def bip39_is_checksum_valid(mnemonic):
    """Validate a BIP39 mnemonic against the English wordlist.

    Returns (is_checksum_valid, is_wordlist_valid).  An unknown word
    yields (False, False); a bad word count yields (False, True).
    """
    words = [ normalize('NFKD', word) for word in mnemonic.split() ]
    words_len = len(words)
    wordlist = load_wordlist("english.txt")
    n = len(wordlist)
    # Per BIP39: each word encodes 11 bits; the checksum is 1 bit per
    # 3 entropy bytes, so checksum_bits = 11 * words / 33 and
    # entropy_bits = 32 * checksum_bits.
    checksum_length = 11*words_len//33
    entropy_length = 32*checksum_length
    i = 0
    # Accumulate the words as one big base-n integer, left to right.
    words.reverse()
    while words:
        w = words.pop()
        try:
            k = wordlist.index(w)
        except ValueError:
            return False, False
        i = i*n + k
    if words_len not in [12, 15, 18, 21, 24]:
        return False, True
    # Split the integer into entropy (high bits) and checksum (low bits).
    entropy = i >> checksum_length
    checksum = i % 2**checksum_length
    # Left-pad the hex entropy to the full entropy width before hashing.
    h = '{:x}'.format(entropy)
    while len(h) < entropy_length/4:
        h = '0'+h
    b = bytearray.fromhex(h)
    # Checksum is the leading checksum_length bits of SHA256(entropy).
    hashed = int(hfu(hashlib.sha256(b).digest()), 16)
    calculated_checksum = hashed >> (256 - checksum_length)
    return checksum == calculated_checksum, True
def from_bip39_seed(seed, passphrase, derivation):
    """Build a BIP32 keystore from a BIP39 mnemonic and passphrase."""
    keystore = BIP32_KeyStore({})
    master_seed = bip39_to_seed(seed, passphrase)
    # bip43: plain (standard) xprv/xpub serialization.
    keystore.add_xprv_from_seed(master_seed, 'standard', derivation)
    return keystore
# extended pubkeys
def is_xpubkey(x_pubkey):
    """True when *x_pubkey* carries the 'ff' extended-pubkey tag."""
    return x_pubkey.startswith('ff')
def parse_xpubkey(x_pubkey):
    """Module-level convenience wrapper: parse an 'ff'-tagged x_pubkey
    into (xpub, [branch, index])."""
    assert x_pubkey[0:2] == 'ff'
    return BIP32_KeyStore.parse_xpubkey(x_pubkey)
def xpubkey_to_address(x_pubkey):
    """Resolve an extended pubkey string to (pubkey, address).

    Supported tags: 'fd' (raw script; returns the x_pubkey itself as
    the "pubkey"), plain pubkeys ('02'/'03'/'04'), 'ff' (BIP32
    xpub + derivation) and 'fe' (old-style mpk + derivation).
    Raises BaseException on any other prefix.
    """
    if x_pubkey[0:2] == 'fd':
        address = bitcoin.script_to_address(x_pubkey[2:])
        return x_pubkey, address
    if x_pubkey[0:2] in ['02', '03', '04']:
        pubkey = x_pubkey
    elif x_pubkey[0:2] == 'ff':
        xpub, s = BIP32_KeyStore.parse_xpubkey(x_pubkey)
        pubkey = BIP32_KeyStore.get_pubkey_from_xpub(xpub, s)
    elif x_pubkey[0:2] == 'fe':
        mpk, s = Old_KeyStore.parse_xpubkey(x_pubkey)
        pubkey = Old_KeyStore.get_pubkey_from_mpk(mpk, s[0], s[1])
    else:
        raise BaseException("Cannot parse pubkey")
    # NOTE(review): every branch above yields a non-empty pubkey, so the
    # guard always fires; were pubkey ever falsy, `address` would be
    # unbound at the return below.
    if pubkey:
        address = public_key_to_p2pkh(bfh(pubkey))
    return pubkey, address
def xpubkey_to_pubkey(x_pubkey):
    """Return only the pubkey component of xpubkey_to_address()."""
    pubkey, _address = xpubkey_to_address(x_pubkey)
    return pubkey
# Registry mapping hardware type name -> keystore constructor.
hw_keystores = {}


def register_keystore(hw_type, constructor):
    """Register *constructor* as the factory for *hw_type* devices."""
    hw_keystores[hw_type] = constructor
def hardware_keystore(d):
    """Instantiate the registered keystore class for d['hw_type']."""
    hw_type = d['hw_type']
    constructor = hw_keystores.get(hw_type)
    if constructor is None:
        raise BaseException('unknown hardware type', hw_type)
    return constructor(d)
def load_keystore(storage, name):
    """Instantiate the keystore serialized under *name* in *storage*.

    Raises BaseException when the entry has no type (old wallet format)
    or an unrecognized type.  The previous version also read an unused
    local `w = storage.get('wallet_type', ...)`, removed here.
    """
    d = storage.get(name, {})
    t = d.get('type')
    if not t:
        raise BaseException('wallet format requires update')
    # Dispatch on the serialized type.
    loaders = {
        'old': Old_KeyStore,
        'imported': Imported_KeyStore,
        'bip32': BIP32_KeyStore,
        'hardware': hardware_keystore,
    }
    if t not in loaders:
        raise BaseException('unknown wallet type', t)
    return loaders[t](d)
def is_old_mpk(mpk):
    """True when *mpk* looks like an old-style master public key:
    exactly 128 hex characters (an uncompressed pubkey minus the '04'
    prefix).

    Bug fix: the bare `except:` also swallowed KeyboardInterrupt and
    SystemExit; narrowed to the exceptions int() actually raises.
    """
    try:
        int(mpk, 16)
    except (ValueError, TypeError):
        # Not valid hex (or not a string at all).
        return False
    return len(mpk) == 128
def is_address_list(text):
    """True when *text* is a non-empty whitespace-separated list of
    valid addresses."""
    addresses = text.split()
    if not addresses:
        return False
    return all(bitcoin.is_address(a) for a in addresses)
def get_private_keys(text):
    """Parse *text* (one key per line, inner whitespace stripped) into a
    list of private keys; return None when empty or any line is invalid."""
    stripped = [''.join(line.split()) for line in text.split('\n')]
    keys = [line for line in stripped if line]
    if keys and all(bitcoin.is_private_key(k) for k in keys):
        return keys
def is_private_key_list(text):
    """True when every non-blank line of *text* is a valid private key."""
    return get_private_keys(text) is not None
# Input classifiers.  Converted from lambda assignments to defs
# (PEP 8 E731) so they carry real names in tracebacks; behavior and the
# callable interface are unchanged.
def is_mpk(x):
    """Master public key: old-style mpk or a BIP32 xpub."""
    return is_old_mpk(x) or is_xpub(x)


def is_private(x):
    """Any private material: seed phrase, xprv, or private-key list."""
    return is_seed(x) or is_xprv(x) or is_private_key_list(x)


def is_master_key(x):
    """Any master key serialization: old mpk, xprv or xpub."""
    return is_old_mpk(x) or is_xprv(x) or is_xpub(x)


def is_master_key_plus_drk(x):
    """Like is_master_key, also accepting drkp/drkv serializations."""
    return is_drkp(x) or is_drkv(x) or is_master_key(x)


def is_private_key(x):
    """Spending-capable input: xprv or private-key list."""
    return is_xprv(x) or is_private_key_list(x)


def is_bip32_key(x):
    """Either half of a BIP32 pair: xprv or xpub."""
    return is_xprv(x) or is_xpub(x)
def bip44_derivation(account_id):
    """Return the BIP44 path m/44'/coin'/account' for *account_id*
    (coin type 1 on testnet, 5 otherwise)."""
    coin = 1 if bitcoin.NetworkConstants.TESTNET else 5
    return "m/%d'/%d'/%d'" % (44, coin, int(account_id))
def from_seed(seed, passphrase, is_p2sh):
    """Build a keystore from a seed phrase.

    Dispatches on seed_type(): 'old' seeds yield an Old_KeyStore,
    'standard' seeds a BIP32_KeyStore rooted at m/.  Raises
    BaseException for any other seed type.  Note: is_p2sh is accepted
    for interface compatibility but unused here.
    """
    t = seed_type(seed)
    if t == 'old':
        keystore = Old_KeyStore({})
        keystore.add_seed(seed)
    elif t in ['standard']:
        keystore = BIP32_KeyStore({})
        keystore.add_seed(seed)
        # Passphrase is kept (encrypted later via update_password) so the
        # seed can be re-displayed with it.
        keystore.passphrase = passphrase
        bip32_seed = Mnemonic.mnemonic_to_seed(seed, passphrase)
        der = "m/"
        xtype = 'standard'
        keystore.add_xprv_from_seed(bip32_seed, xtype, der)
    else:
        raise BaseException(t)
    return keystore
def from_private_key_list(text):
    """Build an Imported_KeyStore holding every private key in *text*.

    Bug fix: the keystore method is import_privkey() (see
    Imported_KeyStore); the previous import_key() call raised
    AttributeError on first use.  Keys are stored unencrypted
    (password None) until the wallet sets one.
    """
    keystore = Imported_KeyStore({})
    for key in get_private_keys(text):
        keystore.import_privkey(key, None)
    return keystore
def from_old_mpk(mpk):
    """Watching-only old-style keystore from a 128-hex master public key."""
    ks = Old_KeyStore({})
    ks.add_master_public_key(mpk)
    return ks
def from_xpub(xpub):
    """Watching-only BIP32 keystore from an extended public key."""
    return BIP32_KeyStore({'xpub': xpub})
def from_xprv(xprv):
    """Spending-capable BIP32 keystore from an extended private key."""
    ks = BIP32_KeyStore({})
    ks.xprv = xprv
    ks.xpub = bitcoin.xpub_from_xprv(xprv)
    return ks
def from_drkp(drkp):
    """Watching-only keystore from a 'drkp' extended public key,
    re-serialized as a standard xpub."""
    xtype, depth, fingerprint, child_number, c, cK = deserialize_drkp(drkp)
    ks = BIP32_KeyStore({})
    ks.xpub = serialize_xpub(xtype, c, cK, depth, fingerprint, child_number)
    return ks
def from_drkv(drkv):
    """Spending-capable keystore from a 'drkv' extended private key,
    re-serialized as a standard xprv (the original shadowed the key
    bytes with the keystore variable; renamed here for clarity)."""
    xtype, depth, fingerprint, child_number, c, key_bytes = deserialize_drkv(drkv)
    xprv = serialize_xprv(xtype, c, key_bytes, depth, fingerprint, child_number)
    ks = BIP32_KeyStore({})
    ks.xprv = xprv
    ks.xpub = bitcoin.xpub_from_xprv(xprv)
    return ks
def from_master_key(text):
    """Build the right keystore for whatever serialization *text* is.

    Tries, in order: xprv, old mpk, xpub, drkv, drkp; raises
    BaseException('Invalid key') when nothing matches.
    """
    factories = (
        (is_xprv, from_xprv),
        (is_old_mpk, from_old_mpk),
        (is_xpub, from_xpub),
        (is_drkv, from_drkv),
        (is_drkp, from_drkp),
    )
    for predicate, factory in factories:
        if predicate(text):
            return factory(text)
    raise BaseException('Invalid key')
| 30.730667 | 95 | 0.63025 |
from unicodedata import normalize
from . import bitcoin
from .bitcoin import *
from .util import PrintError, InvalidPassword, hfu
from .mnemonic import Mnemonic, load_wordlist
from .plugins import run_hook
class KeyStore(PrintError):
def has_seed(self):
return False
def is_watching_only(self):
return False
def can_import(self):
return False
def get_tx_derivations(self, tx):
keypairs = {}
for txin in tx.inputs():
num_sig = txin.get('num_sig')
if num_sig is None:
continue
x_signatures = txin['signatures']
signatures = [sig for sig in x_signatures if sig]
if len(signatures) == num_sig:
continue
for k, x_pubkey in enumerate(txin['x_pubkeys']):
if x_signatures[k] is not None:
continue
derivation = self.get_pubkey_derivation(x_pubkey)
if not derivation:
continue
keypairs[x_pubkey] = derivation
return keypairs
def can_sign(self, tx):
if self.is_watching_only():
return False
return bool(self.get_tx_derivations(tx))
class Software_KeyStore(KeyStore):
def __init__(self):
KeyStore.__init__(self)
def may_have_password(self):
return not self.is_watching_only()
def sign_message(self, sequence, message, password):
privkey, compressed = self.get_private_key(sequence, password)
key = regenerate_key(privkey)
return key.sign_message(message, compressed)
def decrypt_message(self, sequence, message, password):
privkey, compressed = self.get_private_key(sequence, password)
ec = regenerate_key(privkey)
decrypted = ec.decrypt_message(message)
return decrypted
def sign_transaction(self, tx, password):
if self.is_watching_only():
return
self.check_password(password)
keypairs = self.get_tx_derivations(tx)
for k, v in keypairs.items():
keypairs[k] = self.get_private_key(v, password)
if keypairs:
tx.sign(keypairs)
class Imported_KeyStore(Software_KeyStore):
def __init__(self, d):
Software_KeyStore.__init__(self)
self.keypairs = d.get('keypairs', {})
def is_deterministic(self):
return False
def can_change_password(self):
return True
def get_master_public_key(self):
return None
def dump(self):
return {
'type': 'imported',
'keypairs': self.keypairs,
}
def can_import(self):
return True
def check_password(self, password):
pubkey = list(self.keypairs.keys())[0]
self.get_private_key(pubkey, password)
def import_privkey(self, sec, password):
txin_type, privkey, compressed = deserialize_privkey(sec)
pubkey = public_key_from_private_key(privkey, compressed)
self.keypairs[pubkey] = pw_encode(sec, password)
return txin_type, pubkey
def delete_imported_key(self, key):
self.keypairs.pop(key)
def get_private_key(self, pubkey, password):
sec = pw_decode(self.keypairs[pubkey], password)
txin_type, privkey, compressed = deserialize_privkey(sec)
if pubkey != public_key_from_private_key(privkey, compressed):
raise InvalidPassword()
return privkey, compressed
def get_pubkey_derivation(self, x_pubkey):
if x_pubkey[0:2] in ['02', '03', '04']:
if x_pubkey in self.keypairs.keys():
return x_pubkey
elif x_pubkey[0:2] == 'fd':
addr = bitcoin.script_to_address(x_pubkey[2:])
if addr in self.addresses:
return self.addresses[addr].get('pubkey')
def update_password(self, old_password, new_password):
self.check_password(old_password)
if new_password == '':
new_password = None
for k, v in self.keypairs.items():
b = pw_decode(v, old_password)
c = pw_encode(b, new_password)
self.keypairs[k] = c
class Deterministic_KeyStore(Software_KeyStore):
def __init__(self, d):
Software_KeyStore.__init__(self)
self.seed = d.get('seed', '')
self.passphrase = d.get('passphrase', '')
def is_deterministic(self):
return True
def dump(self):
d = {}
if self.seed:
d['seed'] = self.seed
if self.passphrase:
d['passphrase'] = self.passphrase
return d
def has_seed(self):
return bool(self.seed)
def is_watching_only(self):
return not self.has_seed()
def can_change_password(self):
return not self.is_watching_only()
def add_seed(self, seed):
if self.seed:
raise Exception("a seed exists")
self.seed = self.format_seed(seed)
def get_seed(self, password):
return pw_decode(self.seed, password)
def get_passphrase(self, password):
return pw_decode(self.passphrase, password) if self.passphrase else ''
class Xpub:
def __init__(self):
self.xpub = None
self.xpub_receive = None
self.xpub_change = None
def get_master_public_key(self):
return self.xpub
def derive_pubkey(self, for_change, n):
xpub = self.xpub_change if for_change else self.xpub_receive
if xpub is None:
xpub = bip32_public_derivation(self.xpub, "", "/%d"%for_change)
if for_change:
self.xpub_change = xpub
else:
self.xpub_receive = xpub
return self.get_pubkey_from_xpub(xpub, (n,))
@classmethod
def get_pubkey_from_xpub(self, xpub, sequence):
_, _, _, _, c, cK = deserialize_xpub(xpub)
for i in sequence:
cK, c = CKD_pub(cK, c, i)
return bh2u(cK)
def get_xpubkey(self, c, i):
s = ''.join(map(lambda x: bitcoin.int_to_hex(x,2), (c, i)))
return 'ff' + bh2u(bitcoin.DecodeBase58Check(self.xpub)) + s
@classmethod
def parse_xpubkey(self, pubkey):
assert pubkey[0:2] == 'ff'
pk = bfh(pubkey)
pk = pk[1:]
xkey = bitcoin.EncodeBase58Check(pk[0:78])
dd = pk[78:]
s = []
while dd:
n = int(bitcoin.rev_hex(bh2u(dd[0:2])), 16)
dd = dd[2:]
s.append(n)
assert len(s) == 2
return xkey, s
def get_pubkey_derivation(self, x_pubkey):
if x_pubkey[0:2] != 'ff':
return
xpub, derivation = self.parse_xpubkey(x_pubkey)
if self.xpub != xpub:
return
return derivation
class BIP32_KeyStore(Deterministic_KeyStore, Xpub):
def __init__(self, d):
Xpub.__init__(self)
Deterministic_KeyStore.__init__(self, d)
self.xpub = d.get('xpub')
self.xprv = d.get('xprv')
def format_seed(self, seed):
return ' '.join(seed.split())
def dump(self):
d = Deterministic_KeyStore.dump(self)
d['type'] = 'bip32'
d['xpub'] = self.xpub
d['xprv'] = self.xprv
return d
def get_master_private_key(self, password):
return pw_decode(self.xprv, password)
def check_password(self, password):
xprv = pw_decode(self.xprv, password)
if deserialize_xprv(xprv)[4] != deserialize_xpub(self.xpub)[4]:
raise InvalidPassword()
def update_password(self, old_password, new_password):
self.check_password(old_password)
if new_password == '':
new_password = None
if self.has_seed():
decoded = self.get_seed(old_password)
self.seed = pw_encode(decoded, new_password)
if self.passphrase:
decoded = self.get_passphrase(old_password)
self.passphrase = pw_encode(decoded, new_password)
if self.xprv is not None:
b = pw_decode(self.xprv, old_password)
self.xprv = pw_encode(b, new_password)
def is_watching_only(self):
return self.xprv is None
def add_xprv(self, xprv):
self.xprv = xprv
self.xpub = bitcoin.xpub_from_xprv(xprv)
def add_xprv_from_seed(self, bip32_seed, xtype, derivation):
xprv, xpub = bip32_root(bip32_seed, xtype)
xprv, xpub = bip32_private_derivation(xprv, "m/", derivation)
self.add_xprv(xprv)
def get_private_key(self, sequence, password):
xprv = self.get_master_private_key(password)
_, _, _, _, c, k = deserialize_xprv(xprv)
pk = bip32_private_key(sequence, k, c)
return pk, True
class Old_KeyStore(Deterministic_KeyStore):
def __init__(self, d):
Deterministic_KeyStore.__init__(self, d)
self.mpk = d.get('mpk')
def get_hex_seed(self, password):
return pw_decode(self.seed, password).encode('utf8')
def dump(self):
d = Deterministic_KeyStore.dump(self)
d['mpk'] = self.mpk
d['type'] = 'old'
return d
def add_seed(self, seedphrase):
Deterministic_KeyStore.add_seed(self, seedphrase)
s = self.get_hex_seed(None)
self.mpk = self.mpk_from_seed(s)
def add_master_public_key(self, mpk):
self.mpk = mpk
def format_seed(self, seed):
from . import old_mnemonic, mnemonic
seed = mnemonic.normalize_text(seed)
if seed:
try:
bfh(seed)
return str(seed)
except Exception:
pass
words = seed.split()
seed = old_mnemonic.mn_decode(words)
if not seed:
raise Exception("Invalid seed")
return seed
def get_seed(self, password):
from . import old_mnemonic
s = self.get_hex_seed(password)
return ' '.join(old_mnemonic.mn_encode(s))
@classmethod
def mpk_from_seed(klass, seed):
secexp = klass.stretch_key(seed)
master_private_key = ecdsa.SigningKey.from_secret_exponent(secexp, curve = SECP256k1)
master_public_key = master_private_key.get_verifying_key().to_string()
return bh2u(master_public_key)
@classmethod
def stretch_key(self, seed):
x = seed
for i in range(100000):
x = hashlib.sha256(x + seed).digest()
return string_to_number(x)
@classmethod
def get_sequence(self, mpk, for_change, n):
return string_to_number(Hash(("%d:%d:"%(n, for_change)).encode('ascii') + bfh(mpk)))
@classmethod
def get_pubkey_from_mpk(self, mpk, for_change, n):
z = self.get_sequence(mpk, for_change, n)
master_public_key = ecdsa.VerifyingKey.from_string(bfh(mpk), curve = SECP256k1)
pubkey_point = master_public_key.pubkey.point + z*SECP256k1.generator
public_key2 = ecdsa.VerifyingKey.from_public_point(pubkey_point, curve = SECP256k1)
return '04' + bh2u(public_key2.to_string())
def derive_pubkey(self, for_change, n):
return self.get_pubkey_from_mpk(self.mpk, for_change, n)
def get_private_key_from_stretched_exponent(self, for_change, n, secexp):
order = generator_secp256k1.order()
secexp = (secexp + self.get_sequence(self.mpk, for_change, n)) % order
pk = number_to_string(secexp, generator_secp256k1.order())
return pk
def get_private_key(self, sequence, password):
seed = self.get_hex_seed(password)
self.check_seed(seed)
for_change, n = sequence
secexp = self.stretch_key(seed)
pk = self.get_private_key_from_stretched_exponent(for_change, n, secexp)
return pk, False
def check_seed(self, seed):
secexp = self.stretch_key(seed)
master_private_key = ecdsa.SigningKey.from_secret_exponent( secexp, curve = SECP256k1 )
master_public_key = master_private_key.get_verifying_key().to_string()
if master_public_key != bfh(self.mpk):
print_error('invalid password (mpk)', self.mpk, bh2u(master_public_key))
raise InvalidPassword()
def check_password(self, password):
seed = self.get_hex_seed(password)
self.check_seed(seed)
def get_master_public_key(self):
return self.mpk
def get_xpubkey(self, for_change, n):
s = ''.join(map(lambda x: bitcoin.int_to_hex(x,2), (for_change, n)))
return 'fe' + self.mpk + s
@classmethod
def parse_xpubkey(self, x_pubkey):
assert x_pubkey[0:2] == 'fe'
pk = x_pubkey[2:]
mpk = pk[0:128]
dd = pk[128:]
s = []
while dd:
n = int(bitcoin.rev_hex(dd[0:4]), 16)
dd = dd[4:]
s.append(n)
assert len(s) == 2
return mpk, s
def get_pubkey_derivation(self, x_pubkey):
if x_pubkey[0:2] != 'fe':
return
mpk, derivation = self.parse_xpubkey(x_pubkey)
if self.mpk != mpk:
return
return derivation
def update_password(self, old_password, new_password):
self.check_password(old_password)
if new_password == '':
new_password = None
if self.has_seed():
decoded = pw_decode(self.seed, old_password)
self.seed = pw_encode(decoded, new_password)
class Hardware_KeyStore(KeyStore, Xpub):
max_change_outputs = 1
def __init__(self, d):
Xpub.__init__(self)
KeyStore.__init__(self)
# handler. The handler is per-window and preserved across
# device reconnects
self.xpub = d.get('xpub')
self.label = d.get('label')
self.derivation = d.get('derivation')
self.handler = None
run_hook('init_keystore', self)
def set_label(self, label):
self.label = label
def may_have_password(self):
return False
def is_deterministic(self):
return True
def dump(self):
return {
'type': 'hardware',
'hw_type': self.hw_type,
'xpub': self.xpub,
'derivation':self.derivation,
'label':self.label,
}
def unpaired(self):
self.print_error("unpaired")
def paired(self):
self.print_error("paired")
def can_export(self):
return False
def is_watching_only(self):
assert not self.has_seed()
return False
def can_change_password(self):
return False
def bip39_normalize_passphrase(passphrase):
return normalize('NFKD', passphrase or '')
def bip39_to_seed(mnemonic, passphrase):
import pbkdf2, hashlib, hmac
PBKDF2_ROUNDS = 2048
mnemonic = normalize('NFKD', ' '.join(mnemonic.split()))
passphrase = bip39_normalize_passphrase(passphrase)
return pbkdf2.PBKDF2(mnemonic, 'mnemonic' + passphrase,
iterations = PBKDF2_ROUNDS, macmodule = hmac,
digestmodule = hashlib.sha512).read(64)
# returns tuple (is_checksum_valid, is_wordlist_valid)
def bip39_is_checksum_valid(mnemonic):
words = [ normalize('NFKD', word) for word in mnemonic.split() ]
words_len = len(words)
wordlist = load_wordlist("english.txt")
n = len(wordlist)
checksum_length = 11*words_len//33
entropy_length = 32*checksum_length
i = 0
words.reverse()
while words:
w = words.pop()
try:
k = wordlist.index(w)
except ValueError:
return False, False
i = i*n + k
if words_len not in [12, 15, 18, 21, 24]:
return False, True
entropy = i >> checksum_length
checksum = i % 2**checksum_length
h = '{:x}'.format(entropy)
while len(h) < entropy_length/4:
h = '0'+h
b = bytearray.fromhex(h)
hashed = int(hfu(hashlib.sha256(b).digest()), 16)
calculated_checksum = hashed >> (256 - checksum_length)
return checksum == calculated_checksum, True
def from_bip39_seed(seed, passphrase, derivation):
k = BIP32_KeyStore({})
bip32_seed = bip39_to_seed(seed, passphrase)
t = 'standard' # bip43
k.add_xprv_from_seed(bip32_seed, t, derivation)
return k
# extended pubkeys
def is_xpubkey(x_pubkey):
return x_pubkey[0:2] == 'ff'
def parse_xpubkey(x_pubkey):
assert x_pubkey[0:2] == 'ff'
return BIP32_KeyStore.parse_xpubkey(x_pubkey)
def xpubkey_to_address(x_pubkey):
if x_pubkey[0:2] == 'fd':
address = bitcoin.script_to_address(x_pubkey[2:])
return x_pubkey, address
if x_pubkey[0:2] in ['02', '03', '04']:
pubkey = x_pubkey
elif x_pubkey[0:2] == 'ff':
xpub, s = BIP32_KeyStore.parse_xpubkey(x_pubkey)
pubkey = BIP32_KeyStore.get_pubkey_from_xpub(xpub, s)
elif x_pubkey[0:2] == 'fe':
mpk, s = Old_KeyStore.parse_xpubkey(x_pubkey)
pubkey = Old_KeyStore.get_pubkey_from_mpk(mpk, s[0], s[1])
else:
raise BaseException("Cannot parse pubkey")
if pubkey:
address = public_key_to_p2pkh(bfh(pubkey))
return pubkey, address
def xpubkey_to_pubkey(x_pubkey):
pubkey, address = xpubkey_to_address(x_pubkey)
return pubkey
hw_keystores = {}
def register_keystore(hw_type, constructor):
hw_keystores[hw_type] = constructor
def hardware_keystore(d):
hw_type = d['hw_type']
if hw_type in hw_keystores:
constructor = hw_keystores[hw_type]
return constructor(d)
raise BaseException('unknown hardware type', hw_type)
def load_keystore(storage, name):
w = storage.get('wallet_type', 'standard')
d = storage.get(name, {})
t = d.get('type')
if not t:
raise BaseException('wallet format requires update')
if t == 'old':
k = Old_KeyStore(d)
elif t == 'imported':
k = Imported_KeyStore(d)
elif t == 'bip32':
k = BIP32_KeyStore(d)
elif t == 'hardware':
k = hardware_keystore(d)
else:
raise BaseException('unknown wallet type', t)
return k
def is_old_mpk(mpk):
try:
int(mpk, 16)
except:
return False
return len(mpk) == 128
def is_address_list(text):
parts = text.split()
return bool(parts) and all(bitcoin.is_address(x) for x in parts)
def get_private_keys(text):
parts = text.split('\n')
parts = map(lambda x: ''.join(x.split()), parts)
parts = list(filter(bool, parts))
if bool(parts) and all(bitcoin.is_private_key(x) for x in parts):
return parts
def is_private_key_list(text):
return bool(get_private_keys(text))
is_mpk = lambda x: is_old_mpk(x) or is_xpub(x)
is_private = lambda x: is_seed(x) or is_xprv(x) or is_private_key_list(x)
is_master_key = lambda x: is_old_mpk(x) or is_xprv(x) or is_xpub(x)
is_master_key_plus_drk = lambda x: is_drkp(x) or is_drkv(x) or is_master_key(x)
is_private_key = lambda x: is_xprv(x) or is_private_key_list(x)
is_bip32_key = lambda x: is_xprv(x) or is_xpub(x)
def bip44_derivation(account_id):
bip = 44
coin = 1 if bitcoin.NetworkConstants.TESTNET else 5
return "m/%d'/%d'/%d'" % (bip, coin, int(account_id))
def from_seed(seed, passphrase, is_p2sh):
t = seed_type(seed)
if t == 'old':
keystore = Old_KeyStore({})
keystore.add_seed(seed)
elif t in ['standard']:
keystore = BIP32_KeyStore({})
keystore.add_seed(seed)
keystore.passphrase = passphrase
bip32_seed = Mnemonic.mnemonic_to_seed(seed, passphrase)
der = "m/"
xtype = 'standard'
keystore.add_xprv_from_seed(bip32_seed, xtype, der)
else:
raise BaseException(t)
return keystore
def from_private_key_list(text):
keystore = Imported_KeyStore({})
for x in get_private_keys(text):
keystore.import_key(x, None)
return keystore
def from_old_mpk(mpk):
keystore = Old_KeyStore({})
keystore.add_master_public_key(mpk)
return keystore
def from_xpub(xpub):
k = BIP32_KeyStore({})
k.xpub = xpub
return k
def from_xprv(xprv):
xpub = bitcoin.xpub_from_xprv(xprv)
k = BIP32_KeyStore({})
k.xprv = xprv
k.xpub = xpub
return k
def from_drkp(drkp):
xtype, depth, fingerprint, child_number, c, cK = deserialize_drkp(drkp)
xpub = serialize_xpub(xtype, c, cK, depth, fingerprint, child_number)
k = BIP32_KeyStore({})
k.xpub = xpub
return k
def from_drkv(drkv):
xtype, depth, fingerprint, child_number, c, k = deserialize_drkv(drkv)
xprv = serialize_xprv(xtype, c, k, depth, fingerprint, child_number)
xpub = bitcoin.xpub_from_xprv(xprv)
k = BIP32_KeyStore({})
k.xprv = xprv
k.xpub = xpub
return k
def from_master_key(text):
if is_xprv(text):
k = from_xprv(text)
elif is_old_mpk(text):
k = from_old_mpk(text)
elif is_xpub(text):
k = from_xpub(text)
elif is_drkv(text):
k = from_drkv(text)
elif is_drkp(text):
k = from_drkp(text)
else:
raise BaseException('Invalid key')
return k
| true | true |
f72bffcf3e618c50ae473c4085888ea423e1b4c8 | 6,863 | py | Python | tests/system_tests_console.py | kishorkunal-raj/qpid-dispatch | f629b448dc1ae92d46c31f3c8d7bf317412b9e22 | [
"Apache-2.0"
] | 1 | 2019-07-16T10:24:40.000Z | 2019-07-16T10:24:40.000Z | tests/system_tests_console.py | kishorkunal-raj/qpid-dispatch | f629b448dc1ae92d46c31f3c8d7bf317412b9e22 | [
"Apache-2.0"
] | 121 | 2020-09-16T06:03:53.000Z | 2022-03-30T13:03:23.000Z | tests/system_tests_console.py | irinabov/debian-qpid-dispatch | 42fb2ffb65f8e8c8d616633c0b4308d6531a281d | [
"Apache-2.0"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import os
import errno
import re
import time
import unittest
from subprocess import PIPE
import subprocess
from system_test import main_module, SkipIfNeeded, TestCase
from system_test import Qdrouterd, TIMEOUT, AsyncTestSender, AsyncTestReceiver
try:
import queue as Queue # 3.x
except ImportError:
import Queue as Queue # 2.7
from threading import Thread
from threading import Event
import uuid
from proton import Message
from proton.handlers import MessagingHandler
from proton.reactor import Container
class ConsolePreReq(object):
    """Checks for external tools required by the npm console tests."""

    @staticmethod
    def is_cmd(name):
        """Return True iff `name` is an executable command on this system.

        Executes the command with its output discarded; a missing executable
        raises OSError with errno == ENOENT.
        """
        try:
            # `with` closes devnull; the original leaked the file handle.
            with open(os.devnull, "w") as devnull:
                subprocess.Popen([name], stdout=devnull,
                                 stderr=devnull).communicate()
        except OSError as e:
            # BUG FIX: the original tested `errno == errno.ENOENT`, comparing
            # the errno *module* to the constant (always False), so a missing
            # command was reported as present.
            if e.errno == errno.ENOENT:
                return False
        return True

    @staticmethod
    def should_skip():
        """Return True if the console tests should be skipped (npm missing)."""
        try:
            found_npm = ConsolePreReq.is_cmd('npm')
            return not found_npm
        except OSError:
            return True
class ConsoleTest(TestCase):
    """Run the npm-based console test suite against a live 2-router network.

    Sets up two interconnected interior routers (A and B); router A also
    exposes an HTTP listener that the console tests connect to.
    """
    @classmethod
    def setUpClass(cls):
        super(ConsoleTest, cls).setUpClass()
        # Helper: build and start one router with the given mode and extra
        # config entries, recording it in cls.routers.
        def router(name, mode, extra):
            config = [
                ('router', {'mode': mode, 'id': name}),
                ('listener', {'role': 'normal', 'port': cls.tester.get_port()})
            ]
            if extra:
                config.extend(extra)
            config = Qdrouterd.Config(config)
            cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
            return cls.routers[-1]
        cls.routers = []
        interrouter_port = cls.tester.get_port()
        cls.http_port = cls.tester.get_port()
        cls.sender_port = cls.tester.get_port()
        cls.receiver_port = cls.tester.get_port()
        # Router A: inter-router link plus the HTTP listener used by the
        # console under test.
        router('A', 'interior',
               [('listener', {'role': 'inter-router', 'port': interrouter_port}),
                ('listener', {'role': 'normal', 'port': cls.sender_port}),
                ('listener', {'role': 'normal', 'port': cls.http_port, 'http': True})])
        cls.INT_A = cls.routers[0]
        cls.INT_A.listener = cls.INT_A.addresses[0]
        # Router B connects back to A over the inter-router port.
        router('B', 'interior',
               [('connector', {'name': 'connectorToA', 'role': 'inter-router',
                               'port': interrouter_port}),
                ('listener', {'role': 'normal', 'port': cls.receiver_port})])
        cls.INT_B = cls.routers[1]
        cls.INT_B.listener = cls.INT_B.addresses[0]
        # Block until the two routers see each other before any test runs.
        cls.INT_A.wait_router_connected('B')
        cls.INT_B.wait_router_connected('A')
    def run_console_test(self):
        """Generate live traffic (A -> B), run `npm test`, and assert it passes."""
        address = "toB"
        # create a slow receiver so that we get delayedDeliveries
        receiver = AsyncSlowReceiver(self.INT_B.listener, address)
        sender = AsyncStopableSender(self.INT_A.listener, address)
        pret = 0
        out = ''
        prg = ['npm', 'test', '--', '--watchAll=false']
        # The console sources live under $BUILD_DIR/console; TEST_PORT tells
        # the JS tests which HTTP listener to contact.
        p = self.popen(prg,
                       cwd=os.path.join(os.environ.get('BUILD_DIR'), 'console'),
                       env=dict(os.environ, TEST_PORT="%d" % self.http_port),
                       stdout=PIPE,
                       expect=None)
        out = p.communicate()[0]
        pret = p.returncode
        # write the output
        with open('run_console_test.out', 'w') as popenfile:
            popenfile.write('returncode was %s\n' % p.returncode)
            popenfile.write('out was:\n')
            popenfile.writelines(str(out))
        sender.stop()
        receiver.stop()
        time.sleep(1)
        assert pret == 0, \
            "console test exit status %d, output:\n%s" % (pret, out)
        return out
    # If we are unable to run the npm command. Skip the test
    @SkipIfNeeded(ConsolePreReq.should_skip(), 'Test skipped: npm command not found')
    def test_console(self):
        self.run_console_test()
class AsyncStopableSender(AsyncTestSender):
    """Test sender that streams numbered messages until explicitly stopped.

    The very large count passed to the base class effectively means
    "send forever"; stop() ends the container loop and closes the link.
    """

    def __init__(self, hostport, address):
        super(AsyncStopableSender, self).__init__(hostport, address, 999999999)
        self._stop_thread = False
        self.sent = 0  # number of messages sent so far

    def _main(self):
        # Pump the proton container until stop() is requested, then close
        # the connection so process() can wind down.
        self._container.start()
        while self._container.process():
            if self._stop_thread:
                if self._conn:
                    self._conn.close()
                    self._conn = None

    def on_sendable(self, event):
        self._sender.send(Message(body="message %d" % self.sent))
        self.sent += 1

    def stop(self, timeout=TIMEOUT):
        """Signal the sender thread to exit and wait for it to finish.

        Raises:
            Exception: if the thread does not exit within `timeout` seconds.
        """
        self._stop_thread = True
        self._container.wakeup()
        # BUG FIX: honour the caller-supplied timeout; the original always
        # joined with the module-level TIMEOUT constant.
        self._thread.join(timeout=timeout)
        if self._thread.is_alive():
            raise Exception("AsyncStopableSender did not exit")
# Based on gsim's slow_recv.py
class TimedFlow(MessagingHandler):
    """One-shot timer handler that grants message credit to a receiver link."""

    def __init__(self, receiver, credit):
        super(TimedFlow, self).__init__()
        self.receiver = receiver  # link whose credit window is replenished
        self.credit = credit      # amount of credit to grant when fired

    def on_timer_task(self, event):
        """Scheduled callback from the container: issue the credit grant."""
        target = self.receiver
        target.flow(self.credit)
class AsyncSlowReceiver(AsyncTestReceiver):
    """Receiver that throttles itself by granting credit in small delayed batches.

    Prefetch is disabled, so credit only arrives via the scheduled TimedFlow
    grants; this makes deliveries back up on the router, exercising its
    delayed-delivery accounting.
    """

    def __init__(self, hostport, target):
        super(AsyncSlowReceiver, self).__init__(
            hostport, target, msg_args={"prefetch": 0})

    def on_link_opened(self, event):
        super(AsyncSlowReceiver, self).on_link_opened(event)
        self.request_batch(event)

    def request_batch(self, event):
        # Grant 10 credits, but only after a one-second pause.
        event.container.schedule(1, TimedFlow(event.receiver, 10))

    def check_empty(self, receiver):
        # True once all granted credit is consumed and nothing is queued.
        return not (receiver.credit or receiver.queued)

    def on_link_flow(self, event):
        if self.check_empty(event.receiver):
            self.request_batch(event)

    def on_message(self, event):
        print (event.message.body)
        if self.check_empty(event.receiver):
            self.request_batch(event)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main(main_module())
| 32.837321 | 91 | 0.635582 |
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import os
import errno
import re
import time
import unittest
from subprocess import PIPE
import subprocess
from system_test import main_module, SkipIfNeeded, TestCase
from system_test import Qdrouterd, TIMEOUT, AsyncTestSender, AsyncTestReceiver
try:
import queue as Queue
except ImportError:
import Queue as Queue
from threading import Thread
from threading import Event
import uuid
from proton import Message
from proton.handlers import MessagingHandler
from proton.reactor import Container
class ConsolePreReq(object):
@staticmethod
def is_cmd(name):
try:
devnull = open(os.devnull, "w")
subprocess.Popen([name], stdout=devnull, stderr=devnull).communicate()
except OSError as e:
if errno == errno.ENOENT:
return False
return True
@staticmethod
def should_skip():
try:
found_npm = ConsolePreReq.is_cmd('npm')
return not found_npm
except OSError:
return True
class ConsoleTest(TestCase):
@classmethod
def setUpClass(cls):
super(ConsoleTest, cls).setUpClass()
def router(name, mode, extra):
config = [
('router', {'mode': mode, 'id': name}),
('listener', {'role': 'normal', 'port': cls.tester.get_port()})
]
if extra:
config.extend(extra)
config = Qdrouterd.Config(config)
cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
return cls.routers[-1]
cls.routers = []
interrouter_port = cls.tester.get_port()
cls.http_port = cls.tester.get_port()
cls.sender_port = cls.tester.get_port()
cls.receiver_port = cls.tester.get_port()
router('A', 'interior',
[('listener', {'role': 'inter-router', 'port': interrouter_port}),
('listener', {'role': 'normal', 'port': cls.sender_port}),
('listener', {'role': 'normal', 'port': cls.http_port, 'http': True})])
cls.INT_A = cls.routers[0]
cls.INT_A.listener = cls.INT_A.addresses[0]
router('B', 'interior',
[('connector', {'name': 'connectorToA', 'role': 'inter-router',
'port': interrouter_port}),
('listener', {'role': 'normal', 'port': cls.receiver_port})])
cls.INT_B = cls.routers[1]
cls.INT_B.listener = cls.INT_B.addresses[0]
cls.INT_A.wait_router_connected('B')
cls.INT_B.wait_router_connected('A')
def run_console_test(self):
address = "toB"
receiver = AsyncSlowReceiver(self.INT_B.listener, address)
sender = AsyncStopableSender(self.INT_A.listener, address)
pret = 0
out = ''
prg = ['npm', 'test', '--', '--watchAll=false']
p = self.popen(prg,
cwd=os.path.join(os.environ.get('BUILD_DIR'), 'console'),
env=dict(os.environ, TEST_PORT="%d" % self.http_port),
stdout=PIPE,
expect=None)
out = p.communicate()[0]
pret = p.returncode
with open('run_console_test.out', 'w') as popenfile:
popenfile.write('returncode was %s\n' % p.returncode)
popenfile.write('out was:\n')
popenfile.writelines(str(out))
sender.stop()
receiver.stop()
time.sleep(1)
assert pret == 0, \
"console test exit status %d, output:\n%s" % (pret, out)
return out
@SkipIfNeeded(ConsolePreReq.should_skip(), 'Test skipped: npm command not found')
def test_console(self):
self.run_console_test()
class AsyncStopableSender(AsyncTestSender):
def __init__(self, hostport, address):
super(AsyncStopableSender, self).__init__(hostport, address, 999999999)
self._stop_thread = False
self.sent = 0
def _main(self):
self._container.start()
while self._container.process():
if self._stop_thread:
if self._conn:
self._conn.close()
self._conn = None
def on_sendable(self, event):
self._sender.send(Message(body="message %d" % self.sent))
self.sent += 1
def stop(self, timeout=TIMEOUT):
self._stop_thread = True
self._container.wakeup()
self._thread.join(timeout=TIMEOUT)
if self._thread.is_alive():
raise Exception("AsyncStopableSender did not exit")
class TimedFlow(MessagingHandler):
def __init__(self, receiver, credit):
super(TimedFlow, self).__init__()
self.receiver = receiver
self.credit = credit
def on_timer_task(self, event):
self.receiver.flow(self.credit)
class AsyncSlowReceiver(AsyncTestReceiver):
def __init__(self, hostport, target):
super(AsyncSlowReceiver, self).__init__(hostport, target, msg_args={"prefetch": 0})
def on_link_opened(self, event):
super(AsyncSlowReceiver, self).on_link_opened(event)
self.request_batch(event)
def request_batch(self, event):
event.container.schedule(1, TimedFlow(event.receiver, 10))
def check_empty(self, receiver):
return not receiver.credit and not receiver.queued
def on_link_flow(self, event):
if self.check_empty(event.receiver):
self.request_batch(event)
def on_message(self, event):
print (event.message.body)
if self.check_empty(event.receiver):
self.request_batch(event)
if __name__ == '__main__':
unittest.main(main_module())
| true | true |
f72bfffe1663a5030918dbba8147d3457fb5b2fd | 3,095 | py | Python | examples/basic_2.py | yoavfreund/DAPPER | c2fa5cc446a2b22a1efc174afc7e091363c9375d | [
"MIT"
] | null | null | null | examples/basic_2.py | yoavfreund/DAPPER | c2fa5cc446a2b22a1efc174afc7e091363c9375d | [
"MIT"
] | null | null | null | examples/basic_2.py | yoavfreund/DAPPER | c2fa5cc446a2b22a1efc174afc7e091363c9375d | [
"MIT"
] | null | null | null | # ## Illustrate usage of DAPPER to benchmark multiple DA methods.
# #### Imports
# <b>NB:</b> If you're on <mark><b>Google Colab</b></mark>,
# then replace `%matplotlib notebook` below by
# `!python -m pip install git+https://github.com/nansencenter/DAPPER.git` .
# Also note that liveplotting does not work on Colab.
# %matplotlib notebook
import dapper as dpr
import dapper.da_methods as da

# #### DA method configurations
# The trailing comment on each line is the approximate analysis RMSE
# ("rmse.a") the method is expected to score on this Lorenz-63 benchmark.

from dapper.mods.Lorenz63.sakov2012 import HMM  # Expected rmse.a:

xps = dpr.xpList()
xps += da.Climatology()                                      # 7.6
xps += da.OptInterp()                                        # 1.25
xps += da.Persistence()                                      # 10.7
xps += da.PreProg(lambda k, xx, yy: xx[k])                   # 0
xps += da.Var3D(xB=0.1)                                      # 1.03
xps += da.ExtKF(infl=90)                                     # 0.87
xps += da.EnKF('Sqrt'   , N=3   , infl=1.30)                 # 0.82
xps += da.EnKF('Sqrt'   , N=10  , infl=1.02, rot=True)       # 0.63
xps += da.EnKF('PertObs', N=500 , infl=0.95, rot=False)      # 0.56
xps += da.EnKF_N(         N=10            , rot=True)        # 0.54
xps += da.iEnKS('Sqrt'  , N=10  , infl=1.02, rot=True)       # 0.31
xps += da.PartFilt(       N=100 , reg=2.4 , NER=0.3)         # 0.38
xps += da.PartFilt(       N=800 , reg=0.9 , NER=0.2)         # 0.28
# xps += da.PartFilt(      N=4000, reg=0.7 , NER=0.05)        # 0.27
# xps += da.PFxN(xN=1000,  N=30  , Qs=2    , NER=0.2)         # 0.56

# #### With Lorenz-96 instead
# +
# from dapper.mods.Lorenz96.sakov2008 import HMM   # Expected rmse.a:
# xps = dpr.xpList()
# xps += da.Climatology()                                     # 3.6
# xps += da.OptInterp()                                       # 0.95
# xps += da.Var3D(xB=0.02)                                    # 0.41
# xps += da.ExtKF(infl=6)                                     # 0.24
# xps += da.EnKF('PertObs', N=40, infl=1.06)                  # 0.22
# xps += da.EnKF('Sqrt'   , N=28, infl=1.02, rot=True)        # 0.18
# # More sophisticated:
# xps += da.EnKF_N( N=24, rot=True)                           # 0.21
# xps += da.EnKF_N( N=24, rot=True, xN=2)                     # 0.18
# xps += da.iEnKS('Sqrt' , N=40, infl=1.01, rot=True)         # 0.17
# # With localisation:
# xps += da.LETKF( N=7 , infl=1.04, rot=True, loc_rad=4)      # 0.22
# xps += da.SL_EAKF( N=7 , infl=1.07, rot=True, loc_rad=6)    # 0.23
# -

# #### Other models (suitable xp's listed in HMM files):
# +
# from dapper.mods.LA           .evensen2009 import HMM
# from dapper.mods.KS           .bocquet2019 import HMM
# from dapper.mods.LotkaVolterra.settings101 import HMM
# -

# #### Launch
# Write some more non-arg parameters to the `xps`. In this case we set the seed,
# so that repeat experiments produce exactly the same result.
for xp in xps:
    xp.seed = 3000
# Shorten the experiment duration so the benchmark runs quickly.
HMM.tseq.T = 50
# Run/assimilate (for each `xp` in `xps`); results are saved under `save_as`.
save_as = xps.launch(HMM, liveplots=False)

# #### Print results
print(xps.tabulate_avrgs())
| 38.6875 | 80 | 0.51567 | ork on Colab.
# %matplotlib notebook
import dapper as dpr
import dapper.da_methods as da
# #### DA method configurations
from dapper.mods.Lorenz63.sakov2012 import HMM # Expected rmse.a:
xps = dpr.xpList()
xps += da.Climatology() # 7.6
xps += da.OptInterp() # 1.25
xps += da.Persistence() # 10.7
xps += da.PreProg(lambda k, xx, yy: xx[k]) # 0
xps += da.Var3D(xB=0.1) # 1.03
xps += da.ExtKF(infl=90) # 0.87
xps += da.EnKF('Sqrt' , N=3 , infl=1.30) # 0.82
xps += da.EnKF('Sqrt' , N=10 , infl=1.02, rot=True) # 0.63
xps += da.EnKF('PertObs', N=500 , infl=0.95, rot=False) # 0.56
xps += da.EnKF_N( N=10 , rot=True) # 0.54
xps += da.iEnKS('Sqrt' , N=10 , infl=1.02, rot=True) # 0.31
xps += da.PartFilt( N=100 , reg=2.4 , NER=0.3) # 0.38
xps += da.PartFilt( N=800 , reg=0.9 , NER=0.2) # 0.28
# xps += da.PartFilt( N=4000, reg=0.7 , NER=0.05) # 0.27
# xps += da.PFxN(xN=1000, N=30 , Qs=2 , NER=0.2) # 0.56
# #### With Lorenz-96 instead
# +
# from dapper.mods.Lorenz96.sakov2008 import HMM # Expected rmse.a:
# xps = dpr.xpList()
# xps += da.Climatology() # 3.6
# xps += da.OptInterp() # 0.95
# xps += da.Var3D(xB=0.02) # 0.41
# xps += da.ExtKF(infl=6) # 0.24
# xps += da.EnKF('PertObs', N=40, infl=1.06) # 0.22
# xps += da.EnKF('Sqrt' , N=28, infl=1.02, rot=True) # 0.18
# # More sophisticated:
# xps += da.EnKF_N( N=24, rot=True) # 0.21
# xps += da.EnKF_N( N=24, rot=True, xN=2) # 0.18
# xps += da.iEnKS('Sqrt' , N=40, infl=1.01, rot=True) # 0.17
# # With localisation:
# xps += da.LETKF( N=7 , infl=1.04, rot=True, loc_rad=4) # 0.22
# xps += da.SL_EAKF( N=7 , infl=1.07, rot=True, loc_rad=6) # 0.23
# -
# #### Other models (suitable xp's listed in HMM files):
HMM.tseq.T = 50
save_as = xps.launch(HMM, liveplots=False)
| true | true |
f72c023c3e475c0e66d6e44d0f104ade3ca2bea0 | 7,867 | py | Python | accelbyte_py_sdk/api/ugc/operations/public_group/get_group.py | AccelByte/accelbyte-python-sdk | dcd311fad111c59da828278975340fb92e0f26f7 | [
"MIT"
] | null | null | null | accelbyte_py_sdk/api/ugc/operations/public_group/get_group.py | AccelByte/accelbyte-python-sdk | dcd311fad111c59da828278975340fb92e0f26f7 | [
"MIT"
] | 1 | 2021-10-13T03:46:58.000Z | 2021-10-13T03:46:58.000Z | accelbyte_py_sdk/api/ugc/operations/public_group/get_group.py | AccelByte/accelbyte-python-sdk | dcd311fad111c59da828278975340fb92e0f26f7 | [
"MIT"
] | null | null | null | # Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
# justice-ugc-service (2.1.0)
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from .....core import Operation
from .....core import HeaderStr
from .....core import HttpResponse
from ...models import ModelsCreateGroupResponse
from ...models import ResponseError
class GetGroup(Operation):
    """Get user's groups (GetGroup)
    Required permission NAMESPACE:{namespace}:USER:{userId}:CONTENTGROUP [READ].
    Required Permission(s):
        - NAMESPACE:{namespace}:USER:{userId}:CONTENTGROUP [READ]
    Properties:
        url: /ugc/v1/public/namespaces/{namespace}/users/{userId}/groups/{groupId}
        method: GET
        tags: ["Public Group"]
        consumes: ["application/json", "application/octet-stream"]
        produces: ["application/json"]
        securities: [BEARER_AUTH]
        group_id: (groupId) REQUIRED str in path
        namespace: (namespace) REQUIRED str in path
        user_id: (userId) REQUIRED str in path
    Responses:
        200: OK - ModelsCreateGroupResponse (OK)
        401: Unauthorized - ResponseError (Unauthorized)
        404: Not Found - ResponseError (Not Found)
        500: Internal Server Error - ResponseError (Internal Server Error)
    """
    # region fields
    _url: str = "/ugc/v1/public/namespaces/{namespace}/users/{userId}/groups/{groupId}"
    _method: str = "GET"
    _consumes: List[str] = ["application/json", "application/octet-stream"]
    _produces: List[str] = ["application/json"]
    _securities: List[List[str]] = [["BEARER_AUTH"]]
    _location_query: str = None
    # Path parameters (all required); set via create()/with_x()/create_from_dict().
    group_id: str  # REQUIRED in [path]
    namespace: str  # REQUIRED in [path]
    user_id: str  # REQUIRED in [path]
    # endregion fields
    # region properties
    @property
    def url(self) -> str:
        return self._url
    @property
    def method(self) -> str:
        return self._method
    @property
    def consumes(self) -> List[str]:
        return self._consumes
    @property
    def produces(self) -> List[str]:
        return self._produces
    @property
    def securities(self) -> List[List[str]]:
        return self._securities
    @property
    def location_query(self) -> str:
        return self._location_query
    # endregion properties
    # region get methods
    # endregion get methods
    # region get_x_params methods
    def get_all_params(self) -> dict:
        """Return all request parameters grouped by location (path only)."""
        return {
            "path": self.get_path_params(),
        }
    def get_path_params(self) -> dict:
        """Collect the path parameters that have been set on this operation."""
        result = {}
        if hasattr(self, "group_id"):
            result["groupId"] = self.group_id
        if hasattr(self, "namespace"):
            result["namespace"] = self.namespace
        if hasattr(self, "user_id"):
            result["userId"] = self.user_id
        return result
    # endregion get_x_params methods
    # region is/has methods
    # endregion is/has methods
    # region with_x methods
    # Fluent setters: each assigns one path parameter and returns self.
    def with_group_id(self, value: str) -> GetGroup:
        self.group_id = value
        return self
    def with_namespace(self, value: str) -> GetGroup:
        self.namespace = value
        return self
    def with_user_id(self, value: str) -> GetGroup:
        self.user_id = value
        return self
    # endregion with_x methods
    # region to methods
    def to_dict(self, include_empty: bool = False) -> dict:
        """Serialize the set parameters to a dict of wire-named strings.

        With include_empty=True, unset/falsy parameters appear as "".
        """
        result: dict = {}
        if hasattr(self, "group_id") and self.group_id:
            result["groupId"] = str(self.group_id)
        elif include_empty:
            result["groupId"] = ""
        if hasattr(self, "namespace") and self.namespace:
            result["namespace"] = str(self.namespace)
        elif include_empty:
            result["namespace"] = ""
        if hasattr(self, "user_id") and self.user_id:
            result["userId"] = str(self.user_id)
        elif include_empty:
            result["userId"] = ""
        return result
    # endregion to methods
    # region response methods
    # noinspection PyMethodMayBeStatic
    def parse_response(self, code: int, content_type: str, content: Any) -> Tuple[Union[None, ModelsCreateGroupResponse], Union[None, HttpResponse, ResponseError]]:
        """Parse the given response.
        200: OK - ModelsCreateGroupResponse (OK)
        401: Unauthorized - ResponseError (Unauthorized)
        404: Not Found - ResponseError (Not Found)
        500: Internal Server Error - ResponseError (Internal Server Error)
        ---: HttpResponse (Undocumented Response)
        ---: HttpResponse (Unexpected Content-Type Error)
        ---: HttpResponse (Unhandled Error)
        """
        pre_processed_response, error = self.pre_process_response(code=code, content_type=content_type, content=content)
        if error is not None:
            # A "no content" error means success with an empty body, not a failure.
            return None, None if error.is_no_content() else error
        code, content_type, content = pre_processed_response
        if code == 200:
            return ModelsCreateGroupResponse.create_from_dict(content), None
        if code == 401:
            return None, ResponseError.create_from_dict(content)
        if code == 404:
            return None, ResponseError.create_from_dict(content)
        if code == 500:
            return None, ResponseError.create_from_dict(content)
        return None, self.handle_undocumented_response(code=code, content_type=content_type, content=content)
    # endregion response methods
    # region static methods
    @classmethod
    def create(
        cls,
        group_id: str,
        namespace: str,
        user_id: str,
    ) -> GetGroup:
        """Construct a fully-parameterized GetGroup operation."""
        instance = cls()
        instance.group_id = group_id
        instance.namespace = namespace
        instance.user_id = user_id
        return instance
    @classmethod
    def create_from_dict(cls, dict_: dict, include_empty: bool = False) -> GetGroup:
        """Rebuild a GetGroup from a dict produced by to_dict()."""
        instance = cls()
        if "groupId" in dict_ and dict_["groupId"] is not None:
            instance.group_id = str(dict_["groupId"])
        elif include_empty:
            instance.group_id = ""
        if "namespace" in dict_ and dict_["namespace"] is not None:
            instance.namespace = str(dict_["namespace"])
        elif include_empty:
            instance.namespace = ""
        if "userId" in dict_ and dict_["userId"] is not None:
            instance.user_id = str(dict_["userId"])
        elif include_empty:
            instance.user_id = ""
        return instance
    @staticmethod
    def get_field_info() -> Dict[str, str]:
        """Map wire (JSON) field names to python attribute names."""
        return {
            "groupId": "group_id",
            "namespace": "namespace",
            "userId": "user_id",
        }
    @staticmethod
    def get_required_map() -> Dict[str, bool]:
        """Flag each wire field as required."""
        return {
            "groupId": True,
            "namespace": True,
            "userId": True,
        }
    # endregion static methods
| 29.575188 | 164 | 0.616245 |
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from .....core import Operation
from .....core import HeaderStr
from .....core import HttpResponse
from ...models import ModelsCreateGroupResponse
from ...models import ResponseError
class GetGroup(Operation):
_url: str = "/ugc/v1/public/namespaces/{namespace}/users/{userId}/groups/{groupId}"
_method: str = "GET"
_consumes: List[str] = ["application/json", "application/octet-stream"]
_produces: List[str] = ["application/json"]
_securities: List[List[str]] = [["BEARER_AUTH"]]
_location_query: str = None
group_id: str
namespace: str
user_id: str
@property
def url(self) -> str:
return self._url
@property
def method(self) -> str:
return self._method
@property
def consumes(self) -> List[str]:
return self._consumes
@property
def produces(self) -> List[str]:
return self._produces
@property
def securities(self) -> List[List[str]]:
return self._securities
@property
def location_query(self) -> str:
return self._location_query
def get_all_params(self) -> dict:
return {
"path": self.get_path_params(),
}
def get_path_params(self) -> dict:
result = {}
if hasattr(self, "group_id"):
result["groupId"] = self.group_id
if hasattr(self, "namespace"):
result["namespace"] = self.namespace
if hasattr(self, "user_id"):
result["userId"] = self.user_id
return result
def with_group_id(self, value: str) -> GetGroup:
self.group_id = value
return self
def with_namespace(self, value: str) -> GetGroup:
self.namespace = value
return self
def with_user_id(self, value: str) -> GetGroup:
self.user_id = value
return self
def to_dict(self, include_empty: bool = False) -> dict:
result: dict = {}
if hasattr(self, "group_id") and self.group_id:
result["groupId"] = str(self.group_id)
elif include_empty:
result["groupId"] = ""
if hasattr(self, "namespace") and self.namespace:
result["namespace"] = str(self.namespace)
elif include_empty:
result["namespace"] = ""
if hasattr(self, "user_id") and self.user_id:
result["userId"] = str(self.user_id)
elif include_empty:
result["userId"] = ""
return result
def parse_response(self, code: int, content_type: str, content: Any) -> Tuple[Union[None, ModelsCreateGroupResponse], Union[None, HttpResponse, ResponseError]]:
pre_processed_response, error = self.pre_process_response(code=code, content_type=content_type, content=content)
if error is not None:
return None, None if error.is_no_content() else error
code, content_type, content = pre_processed_response
if code == 200:
return ModelsCreateGroupResponse.create_from_dict(content), None
if code == 401:
return None, ResponseError.create_from_dict(content)
if code == 404:
return None, ResponseError.create_from_dict(content)
if code == 500:
return None, ResponseError.create_from_dict(content)
return None, self.handle_undocumented_response(code=code, content_type=content_type, content=content)
@classmethod
def create(
cls,
group_id: str,
namespace: str,
user_id: str,
) -> GetGroup:
instance = cls()
instance.group_id = group_id
instance.namespace = namespace
instance.user_id = user_id
return instance
@classmethod
def create_from_dict(cls, dict_: dict, include_empty: bool = False) -> GetGroup:
instance = cls()
if "groupId" in dict_ and dict_["groupId"] is not None:
instance.group_id = str(dict_["groupId"])
elif include_empty:
instance.group_id = ""
if "namespace" in dict_ and dict_["namespace"] is not None:
instance.namespace = str(dict_["namespace"])
elif include_empty:
instance.namespace = ""
if "userId" in dict_ and dict_["userId"] is not None:
instance.user_id = str(dict_["userId"])
elif include_empty:
instance.user_id = ""
return instance
@staticmethod
def get_field_info() -> Dict[str, str]:
return {
"groupId": "group_id",
"namespace": "namespace",
"userId": "user_id",
}
@staticmethod
def get_required_map() -> Dict[str, bool]:
return {
"groupId": True,
"namespace": True,
"userId": True,
}
| true | true |
f72c036616f99189d65b39be5f6f951c9fd19357 | 30,886 | py | Python | tensorflow/contrib/learn/python/learn/estimators/dnn.py | kadeng/tensorflow_project_workspace | dee284fb2d1796329895130a075cd57a62ea873f | [
"Apache-2.0"
] | null | null | null | tensorflow/contrib/learn/python/learn/estimators/dnn.py | kadeng/tensorflow_project_workspace | dee284fb2d1796329895130a075cd57a62ea873f | [
"Apache-2.0"
] | null | null | null | tensorflow/contrib/learn/python/learn/estimators/dnn.py | kadeng/tensorflow_project_workspace | dee284fb2d1796329895130a075cd57a62ea873f | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Deep Neural Network estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib import layers
from tensorflow.contrib.framework import deprecated
from tensorflow.contrib.framework import deprecated_arg_values
from tensorflow.contrib.framework.python.framework import experimental
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn import evaluable
from tensorflow.contrib.learn.python.learn import metric_spec
from tensorflow.contrib.learn.python.learn import monitors as monitor_lib
from tensorflow.contrib.learn.python.learn import trainable
from tensorflow.contrib.learn.python.learn.estimators import dnn_linear_combined
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import model_fn
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.contrib.learn.python.learn.utils import export
from tensorflow.python.ops import nn
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.summary import summary
# Variable/collection name used for the centered-bias weight.
_CENTERED_BIAS_WEIGHT = "centered_bias_weight"
# The default learning rate of 0.05 is a historical artifact of the initial
# implementation, but seems a reasonable choice.
_LEARNING_RATE = 0.05
def _get_feature_dict(features):
if isinstance(features, dict):
return features
return {"": features}
def _get_optimizer(optimizer):
if callable(optimizer):
return optimizer()
else:
return optimizer
def _add_hidden_layer_summary(value, tag):
  """Emit sparsity (fraction-of-zeros) and activation-histogram summaries."""
  summary.scalar("%s_fraction_of_zero_values" % tag, nn.zero_fraction(value))
  summary.histogram("%s_activation" % tag, value)
def _dnn_model_fn(features, labels, mode, params, config=None):
  """Deep Neural Net model_fn.
  Args:
    features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of
      dtype `int32` or `int64` in the range `[0, n_classes)`.
    mode: Defines whether this is training, evaluation or prediction.
      See `ModeKeys`.
    params: A dict of hyperparameters.
      The following hyperparameters are expected:
      * head: A `_Head` instance.
      * hidden_units: List of hidden units per layer.
      * feature_columns: An iterable containing all the feature columns used by
        the model.
      * optimizer: string, `Optimizer` object, or callable that defines the
        optimizer to use for training. If `None`, will use the Adagrad
        optimizer with a default learning rate of 0.05.
      * activation_fn: Activation function applied to each layer. If `None`,
        will use `tf.nn.relu`.
      * dropout: When not `None`, the probability we will drop out a given
        coordinate.
      * gradient_clip_norm: A float > 0. If provided, gradients are
        clipped to their global norm with this clipping ratio.
      * embedding_lr_multipliers: Optional. A dictionary from
        `EmbeddingColumn` to a `float` multiplier. Multiplier will be used to
        multiply with learning rate for the embedding variables.
    config: `RunConfig` object to configure the runtime settings.
  Returns:
    predictions: A dict of `Tensor` objects.
    loss: A scalar containing the loss of the step.
    train_op: The op for training.
  """
  head = params["head"]
  hidden_units = params["hidden_units"]
  feature_columns = params["feature_columns"]
  optimizer = params.get("optimizer") or "Adagrad"
  activation_fn = params.get("activation_fn")
  dropout = params.get("dropout")
  gradient_clip_norm = params.get("gradient_clip_norm")
  num_ps_replicas = config.num_ps_replicas if config else 0
  embedding_lr_multipliers = params.get("embedding_lr_multipliers", {})
  features = _get_feature_dict(features)
  parent_scope = "dnn"
  # Partition input-layer variables across parameter servers, with a minimum
  # slice size of 64MB (64 << 20 bytes).
  input_layer_partitioner = (partitioned_variables.min_max_variable_partitioner(
      max_partitions=num_ps_replicas, min_slice_size=64 << 20))
  input_layer_scope = parent_scope + "/input_from_feature_columns"
  with variable_scope.variable_scope(
      input_layer_scope,
      values=list(six.itervalues(features)),
      partitioner=input_layer_partitioner) as scope:
    net = layers.input_from_feature_columns(
        columns_to_tensors=features,
        feature_columns=feature_columns,
        weight_collections=[parent_scope],
        scope=scope)
  hidden_layer_partitioner = (
      partitioned_variables.min_max_variable_partitioner(
          max_partitions=num_ps_replicas))
  # Stack the hidden layers; dropout is applied only during training.
  for layer_id, num_hidden_units in enumerate(hidden_units):
    with variable_scope.variable_scope(
        parent_scope + "/hiddenlayer_%d" % layer_id,
        values=[net],
        partitioner=hidden_layer_partitioner) as scope:
      net = layers.fully_connected(
          net,
          num_hidden_units,
          activation_fn=activation_fn,
          variables_collections=[parent_scope],
          scope=scope)
      if dropout is not None and mode == model_fn.ModeKeys.TRAIN:
        net = layers.dropout(net, keep_prob=(1.0 - dropout))
    _add_hidden_layer_summary(net, scope.name)
  # Final linear projection to the head's logits dimension (no activation).
  with variable_scope.variable_scope(
      parent_scope + "/logits",
      values=[net],
      partitioner=hidden_layer_partitioner) as scope:
    logits = layers.fully_connected(
        net,
        head.logits_dimension,
        activation_fn=None,
        variables_collections=[parent_scope],
        scope=scope)
  _add_hidden_layer_summary(logits, scope.name)
  def _train_op_fn(loss):
    """Returns the op to optimize the loss."""
    return optimizers.optimize_loss(
        loss=loss,
        global_step=contrib_variables.get_global_step(),
        learning_rate=_LEARNING_RATE,
        optimizer=_get_optimizer(optimizer),
        gradient_multipliers=(
            dnn_linear_combined._extract_embedding_lr_multipliers(  # pylint: disable=protected-access
                embedding_lr_multipliers, parent_scope, input_layer_scope)),
        clip_gradients=gradient_clip_norm,
        name=parent_scope,
        # Empty summaries to prevent optimizers from logging the training_loss.
        summaries=[])
  return head.head_ops(features, labels, mode, _train_op_fn, logits)
class DNNClassifier(evaluable.Evaluable, trainable.Trainable):
  """A classifier for TensorFlow DNN models.
  Example:
  ```python
  sparse_feature_a = sparse_column_with_hash_bucket(...)
  sparse_feature_b = sparse_column_with_hash_bucket(...)
  sparse_feature_a_emb = embedding_column(sparse_id_column=sparse_feature_a,
                                          ...)
  sparse_feature_b_emb = embedding_column(sparse_id_column=sparse_feature_b,
                                          ...)
  estimator = DNNClassifier(
      feature_columns=[sparse_feature_a_emb, sparse_feature_b_emb],
      hidden_units=[1024, 512, 256])
  # Or estimator using the ProximalAdagradOptimizer optimizer with
  # regularization.
  estimator = DNNClassifier(
      feature_columns=[sparse_feature_a_emb, sparse_feature_b_emb],
      hidden_units=[1024, 512, 256],
      optimizer=tf.train.ProximalAdagradOptimizer(
        learning_rate=0.1,
        l1_regularization_strength=0.001
      ))
  # Input builders
  def input_fn_train():  # returns x, y (where y represents label's class index).
    pass
  estimator.fit(input_fn=input_fn_train)
  def input_fn_eval():  # returns x, y (where y represents label's class index).
    pass
  estimator.evaluate(input_fn=input_fn_eval)
  estimator.predict(x=x)  # returns predicted labels (i.e. label's class index).
  ```
  Input of `fit` and `evaluate` should have following features,
  otherwise there will be a `KeyError`:
  * if `weight_column_name` is not `None`, a feature with
    `key=weight_column_name` whose value is a `Tensor`.
  * for each `column` in `feature_columns`:
    - if `column` is a `SparseColumn`, a feature with `key=column.name`
      whose `value` is a `SparseTensor`.
    - if `column` is a `WeightedSparseColumn`, two features: the first with
      `key` the id column name, the second with `key` the weight column name.
      Both features' `value` must be a `SparseTensor`.
    - if `column` is a `RealValuedColumn`, a feature with `key=column.name`
      whose `value` is a `Tensor`.
  """
  def __init__(self,
               hidden_units,
               feature_columns,
               model_dir=None,
               n_classes=2,
               weight_column_name=None,
               optimizer=None,
               activation_fn=nn.relu,
               dropout=None,
               gradient_clip_norm=None,
               enable_centered_bias=False,
               config=None,
               feature_engineering_fn=None,
               embedding_lr_multipliers=None):
    """Initializes a DNNClassifier instance.
    Args:
      hidden_units: List of hidden units per layer. All layers are fully
        connected. Ex. `[64, 32]` means first layer has 64 nodes and second one
        has 32.
      feature_columns: An iterable containing all the feature columns used by
        the model. All items in the set should be instances of classes derived
        from `FeatureColumn`.
      model_dir: Directory to save model parameters, graph and etc. This can
        also be used to load checkpoints from the directory into a estimator to
        continue training a previously saved model.
      n_classes: number of label classes. Default is binary classification.
        It must be greater than 1. Note: Class labels are integers representing
        the class index (i.e. values from 0 to n_classes-1). For arbitrary
        label values (e.g. string labels), convert to class indices first.
      weight_column_name: A string defining feature column name representing
        weights. It is used to down weight or boost examples during training. It
        will be multiplied by the loss of the example.
      optimizer: An instance of `tf.Optimizer` used to train the model. If
        `None`, will use an Adagrad optimizer.
      activation_fn: Activation function applied to each layer. If `None`, will
        use `tf.nn.relu`.
      dropout: When not `None`, the probability we will drop out a given
        coordinate.
      gradient_clip_norm: A float > 0. If provided, gradients are
        clipped to their global norm with this clipping ratio. See
        `tf.clip_by_global_norm` for more details.
      enable_centered_bias: A bool. If True, estimator will learn a centered
        bias variable for each class. Rest of the model structure learns the
        residual after centered bias.
      config: `RunConfig` object to configure the runtime settings.
      feature_engineering_fn: Feature engineering function. Takes features and
                        labels which are the output of `input_fn` and
                        returns features and labels which will be fed
                        into the model.
      embedding_lr_multipliers: Optional. A dictionary from `EmbeddingColumn` to
          a `float` multiplier. Multiplier will be used to multiply with
          learning rate for the embedding variables.
    Returns:
      A `DNNClassifier` estimator.
    Raises:
      ValueError: If `n_classes` < 2.
    """
    self._hidden_units = hidden_units
    # Normalize to an immutable tuple so the same columns are reused by the
    # default `export` input_fn and by the model params below.
    self._feature_columns = tuple(feature_columns or [])
    self._enable_centered_bias = enable_centered_bias
    # All DNN hyperparameters travel to `_dnn_model_fn` through `params`.
    self._estimator = estimator.Estimator(
        model_fn=_dnn_model_fn,
        model_dir=model_dir,
        config=config,
        params={
            "head":
                head_lib._multi_class_head(  # pylint: disable=protected-access
                    n_classes,
                    weight_column_name=weight_column_name,
                    enable_centered_bias=enable_centered_bias),
            "hidden_units":
                hidden_units,
            "feature_columns":
                self._feature_columns,
            "optimizer":
                optimizer,
            "activation_fn":
                activation_fn,
            "dropout":
                dropout,
            "gradient_clip_norm":
                gradient_clip_norm,
            "embedding_lr_multipliers":
                embedding_lr_multipliers,
        },
        feature_engineering_fn=feature_engineering_fn)
  def fit(self,
          x=None,
          y=None,
          input_fn=None,
          steps=None,
          batch_size=None,
          monitors=None,
          max_steps=None):
    """See trainable.Trainable. Note: Labels must be integer class indices."""
    # TODO(roumposg): Remove when deprecated monitors are removed.
    hooks = monitor_lib.replace_monitors_with_hooks(monitors, self)
    self._estimator.fit(x=x,
                        y=y,
                        input_fn=input_fn,
                        steps=steps,
                        batch_size=batch_size,
                        monitors=hooks,
                        max_steps=max_steps)
    # Return self to allow fluent chaining, e.g. clf.fit(...).evaluate(...).
    return self
  def evaluate(self,
               x=None,
               y=None,
               input_fn=None,
               feed_fn=None,
               batch_size=None,
               steps=None,
               metrics=None,
               name=None,
               checkpoint_path=None,
               hooks=None):
    """See evaluable.Evaluable. Note: Labels must be integer class indices."""
    return self._estimator.evaluate(
        x=x,
        y=y,
        input_fn=input_fn,
        feed_fn=feed_fn,
        batch_size=batch_size,
        steps=steps,
        metrics=metrics,
        name=name,
        checkpoint_path=checkpoint_path,
        hooks=hooks)
  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE,
      estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  def predict(self, x=None, input_fn=None, batch_size=None, as_iterable=True):
    """Returns predicted classes for given features.
    Args:
      x: features.
      input_fn: Input function. If set, x must be None.
      batch_size: Override default batch size.
      as_iterable: If True, return an iterable which keeps yielding predictions
        for each example until inputs are exhausted. Note: The inputs must
        terminate if you want the iterable to terminate (e.g. be sure to pass
        num_epochs=1 if you are using something like read_batch_features).
    Returns:
      Numpy array of predicted classes with shape [batch_size] (or an iterable
      of predicted classes if as_iterable is True). Each predicted class is
      represented by its class index (i.e. integer from 0 to n_classes-1).
    """
    key = prediction_key.PredictionKey.CLASSES
    preds = self._estimator.predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=[key],
        as_iterable=as_iterable)
    if as_iterable:
      return (pred[key] for pred in preds)
    # Flatten to shape [batch_size] for the non-iterable (in-memory) path.
    return preds[key].reshape(-1)
  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE,
      estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  def predict_proba(self,
                    x=None,
                    input_fn=None,
                    batch_size=None,
                    as_iterable=True):
    """Returns prediction probabilities for given features.
    Args:
      x: features.
      input_fn: Input function. If set, x and y must be None.
      batch_size: Override default batch size.
      as_iterable: If True, return an iterable which keeps yielding predictions
        for each example until inputs are exhausted. Note: The inputs must
        terminate if you want the iterable to terminate (e.g. be sure to pass
        num_epochs=1 if you are using something like read_batch_features).
    Returns:
      Numpy array of predicted probabilities with shape [batch_size, n_classes]
      (or an iterable of predicted probabilities if as_iterable is True).
    """
    key = prediction_key.PredictionKey.PROBABILITIES
    preds = self._estimator.predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=[key],
        as_iterable=as_iterable)
    if as_iterable:
      return (pred[key] for pred in preds)
    return preds[key]
  def _get_predict_ops(self, features):
    """See `Estimator` class."""
    # This method exists to support some models that use the legacy interface.
    # pylint: disable=protected-access
    return self._estimator._get_predict_ops(features)
  def get_variable_names(self):
    """Returns list of all variable names in this model.
    Returns:
      List of names.
    """
    return self._estimator.get_variable_names()
  def get_variable_value(self, name):
    """Returns value of the variable given by name.
    Args:
      name: string, name of the tensor.
    Returns:
      `Tensor` object.
    """
    return self._estimator.get_variable_value(name)
  def export(self,
             export_dir,
             input_fn=None,
             input_feature_key=None,
             use_deprecated_input_fn=True,
             signature_fn=None,
             default_batch_size=1,
             exports_to_keep=None):
    """See BaseEstimator.export."""
    # Default: parse serialized tf.Examples using this model's feature columns.
    def default_input_fn(unused_estimator, examples):
      return layers.parse_feature_columns_from_examples(examples,
                                                        self._feature_columns)
    return self._estimator.export(
        export_dir=export_dir,
        input_fn=input_fn or default_input_fn,
        input_feature_key=input_feature_key,
        use_deprecated_input_fn=use_deprecated_input_fn,
        signature_fn=(signature_fn or
                      export.classification_signature_fn_with_prob),
        prediction_key=prediction_key.PredictionKey.PROBABILITIES,
        default_batch_size=default_batch_size,
        exports_to_keep=exports_to_keep)
  @experimental
  def export_savedmodel(self,
                        export_dir_base,
                        input_fn,
                        default_output_alternative_key=None,
                        assets_extra=None,
                        as_text=False,
                        exports_to_keep=None):
    """Exports the model as a SavedModel; delegates to the wrapped Estimator."""
    return self._estimator.export_savedmodel(
        export_dir_base,
        input_fn,
        default_output_alternative_key=default_output_alternative_key,
        assets_extra=assets_extra,
        as_text=as_text,
        exports_to_keep=exports_to_keep)
  @property
  def model_dir(self):
    """Directory where model parameters and checkpoints are saved."""
    return self._estimator.model_dir
  @property
  @deprecated("2016-10-30",
              "This method will be removed after the deprecation date. "
              "To inspect variables, use get_variable_names() and "
              "get_variable_value().")
  def weights_(self):
    # Relies on the "dnn/hiddenlayer_%d" / "dnn/logits" scopes created by
    # _dnn_model_fn.
    hiddenlayer_weights = [
        self.get_variable_value("dnn/hiddenlayer_%d/weights" % i)
        for i, _ in enumerate(self._hidden_units)
    ]
    logits_weights = [self.get_variable_value("dnn/logits/weights")]
    return hiddenlayer_weights + logits_weights
  @property
  @deprecated("2016-10-30",
              "This method will be removed after the deprecation date. "
              "To inspect variables, use get_variable_names() and "
              "get_variable_value().")
  def bias_(self):
    hiddenlayer_bias = [
        self.get_variable_value("dnn/hiddenlayer_%d/biases" % i)
        for i, _ in enumerate(self._hidden_units)
    ]
    logits_bias = [self.get_variable_value("dnn/logits/biases")]
    # The centered-bias variable only exists when enabled in __init__.
    if self._enable_centered_bias:
      centered_bias = [self.get_variable_value(_CENTERED_BIAS_WEIGHT)]
    else:
      centered_bias = []
    return hiddenlayer_bias + logits_bias + centered_bias
  @property
  def config(self):
    """The `RunConfig` of the wrapped Estimator."""
    return self._estimator.config
class DNNRegressor(evaluable.Evaluable, trainable.Trainable):
  """A regressor for TensorFlow DNN models.
  Example:
  ```python
  sparse_feature_a = sparse_column_with_hash_bucket(...)
  sparse_feature_b = sparse_column_with_hash_bucket(...)
  sparse_feature_a_emb = embedding_column(sparse_id_column=sparse_feature_a,
                                          ...)
  sparse_feature_b_emb = embedding_column(sparse_id_column=sparse_feature_b,
                                          ...)
  estimator = DNNRegressor(
      feature_columns=[sparse_feature_a, sparse_feature_b],
      hidden_units=[1024, 512, 256])
  # Or estimator using the ProximalAdagradOptimizer optimizer with
  # regularization.
  estimator = DNNRegressor(
      feature_columns=[sparse_feature_a, sparse_feature_b],
      hidden_units=[1024, 512, 256],
      optimizer=tf.train.ProximalAdagradOptimizer(
        learning_rate=0.1,
        l1_regularization_strength=0.001
      ))
  # Input builders
  def input_fn_train():  # returns x, y
    pass
  estimator.fit(input_fn=input_fn_train)
  def input_fn_eval():  # returns x, y
    pass
  estimator.evaluate(input_fn=input_fn_eval)
  estimator.predict(x=x)
  ```
  Input of `fit` and `evaluate` should have following features,
  otherwise there will be a `KeyError`:
  * if `weight_column_name` is not `None`, a feature with
    `key=weight_column_name` whose value is a `Tensor`.
  * for each `column` in `feature_columns`:
    - if `column` is a `SparseColumn`, a feature with `key=column.name`
      whose `value` is a `SparseTensor`.
    - if `column` is a `WeightedSparseColumn`, two features: the first with
      `key` the id column name, the second with `key` the weight column name.
      Both features' `value` must be a `SparseTensor`.
    - if `column` is a `RealValuedColumn`, a feature with `key=column.name`
      whose `value` is a `Tensor`.
  """
  def __init__(self,
               hidden_units,
               feature_columns,
               model_dir=None,
               weight_column_name=None,
               optimizer=None,
               activation_fn=nn.relu,
               dropout=None,
               gradient_clip_norm=None,
               enable_centered_bias=False,
               config=None,
               feature_engineering_fn=None,
               label_dimension=1,
               embedding_lr_multipliers=None):
    """Initializes a `DNNRegressor` instance.
    Args:
      hidden_units: List of hidden units per layer. All layers are fully
        connected. Ex. `[64, 32]` means first layer has 64 nodes and second one
        has 32.
      feature_columns: An iterable containing all the feature columns used by
        the model. All items in the set should be instances of classes derived
        from `FeatureColumn`.
      model_dir: Directory to save model parameters, graph and etc. This can
        also be used to load checkpoints from the directory into a estimator to
        continue training a previously saved model.
      weight_column_name: A string defining feature column name representing
        weights. It is used to down weight or boost examples during training. It
        will be multiplied by the loss of the example.
      optimizer: An instance of `tf.Optimizer` used to train the model. If
        `None`, will use an Adagrad optimizer.
      activation_fn: Activation function applied to each layer. If `None`, will
        use `tf.nn.relu`.
      dropout: When not `None`, the probability we will drop out a given
        coordinate.
      gradient_clip_norm: A `float` > 0. If provided, gradients are clipped
        to their global norm with this clipping ratio. See
        `tf.clip_by_global_norm` for more details.
      enable_centered_bias: A bool. If True, estimator will learn a centered
        bias variable for each class. Rest of the model structure learns the
        residual after centered bias.
      config: `RunConfig` object to configure the runtime settings.
      feature_engineering_fn: Feature engineering function. Takes features and
                        labels which are the output of `input_fn` and
                        returns features and labels which will be fed
                        into the model.
      label_dimension: Dimension of the label for multilabels. Defaults to 1.
      embedding_lr_multipliers: Optional. A dictionary from `EmbeddingColumn` to
          a `float` multiplier. Multiplier will be used to multiply with
          learning rate for the embedding variables.
    Returns:
      A `DNNRegressor` estimator.
    """
    # Same model_fn as DNNClassifier; only the head differs (regression).
    self._feature_columns = tuple(feature_columns or [])
    self._estimator = estimator.Estimator(
        model_fn=_dnn_model_fn,
        model_dir=model_dir,
        config=config,
        params={
            "head":
                head_lib._regression_head(  # pylint: disable=protected-access
                    label_dimension=label_dimension,
                    weight_column_name=weight_column_name,
                    enable_centered_bias=enable_centered_bias),
            "hidden_units":
                hidden_units,
            "feature_columns":
                self._feature_columns,
            "optimizer":
                optimizer,
            "activation_fn":
                activation_fn,
            "dropout":
                dropout,
            "gradient_clip_norm":
                gradient_clip_norm,
            "embedding_lr_multipliers":
                embedding_lr_multipliers,
        },
        feature_engineering_fn=feature_engineering_fn)
  def fit(self,
          x=None,
          y=None,
          input_fn=None,
          steps=None,
          batch_size=None,
          monitors=None,
          max_steps=None):
    """See trainable.Trainable."""
    # TODO(roumposg): Remove when deprecated monitors are removed.
    hooks = monitor_lib.replace_monitors_with_hooks(monitors, self)
    self._estimator.fit(x=x,
                        y=y,
                        input_fn=input_fn,
                        steps=steps,
                        batch_size=batch_size,
                        monitors=hooks,
                        max_steps=max_steps)
    # Return self to allow fluent chaining.
    return self
  def evaluate(self,
               x=None,
               y=None,
               input_fn=None,
               feed_fn=None,
               batch_size=None,
               steps=None,
               metrics=None,
               name=None,
               checkpoint_path=None,
               hooks=None):
    """See evaluable.Evaluable."""
    # TODO(zakaria): remove once deprecation is finished (b/31229024)
    # Legacy plain-string metric keys are rebound to the SCORES prediction so
    # they keep working alongside MetricSpec/tuple-keyed metrics.
    custom_metrics = {}
    if metrics:
      for key, metric in six.iteritems(metrics):
        if (not isinstance(metric, metric_spec.MetricSpec) and
            not isinstance(key, tuple)):
          custom_metrics[(key, prediction_key.PredictionKey.SCORES)] = metric
        else:
          custom_metrics[key] = metric
    return self._estimator.evaluate(
        x=x,
        y=y,
        input_fn=input_fn,
        feed_fn=feed_fn,
        batch_size=batch_size,
        steps=steps,
        metrics=custom_metrics,
        name=name,
        checkpoint_path=checkpoint_path,
        hooks=hooks)
  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE,
      estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  def predict(self, x=None, input_fn=None, batch_size=None, as_iterable=True):
    """Returns predicted scores for given features.
    Args:
      x: features.
      input_fn: Input function. If set, x must be None.
      batch_size: Override default batch size.
      as_iterable: If True, return an iterable which keeps yielding predictions
        for each example until inputs are exhausted. Note: The inputs must
        terminate if you want the iterable to terminate (e.g. be sure to pass
        num_epochs=1 if you are using something like read_batch_features).
    Returns:
      Numpy array of predicted scores (or an iterable of predicted scores if
      as_iterable is True). If `label_dimension == 1`, the shape of the output
      is `[batch_size]`, otherwise the shape is `[batch_size, label_dimension]`.
    """
    key = prediction_key.PredictionKey.SCORES
    preds = self._estimator.predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=[key],
        as_iterable=as_iterable)
    if as_iterable:
      return (pred[key] for pred in preds)
    return preds[key]
  def _get_predict_ops(self, features):
    """See `Estimator` class."""
    # This method exists to support some models that use the legacy interface.
    # pylint: disable=protected-access
    return self._estimator._get_predict_ops(features)
  def get_variable_names(self):
    """Returns list of all variable names in this model.
    Returns:
      List of names.
    """
    return self._estimator.get_variable_names()
  def get_variable_value(self, name):
    """Returns value of the variable given by name.
    Args:
      name: string, name of the tensor.
    Returns:
      `Tensor` object.
    """
    return self._estimator.get_variable_value(name)
  def export(self,
             export_dir,
             input_fn=None,
             input_feature_key=None,
             use_deprecated_input_fn=True,
             signature_fn=None,
             default_batch_size=1,
             exports_to_keep=None):
    """See BaseEstimator.export."""
    # Default: parse serialized tf.Examples using this model's feature columns.
    def default_input_fn(unused_estimator, examples):
      return layers.parse_feature_columns_from_examples(examples,
                                                        self._feature_columns)
    return self._estimator.export(
        export_dir=export_dir,
        input_fn=input_fn or default_input_fn,
        input_feature_key=input_feature_key,
        use_deprecated_input_fn=use_deprecated_input_fn,
        signature_fn=signature_fn or export.regression_signature_fn,
        prediction_key=prediction_key.PredictionKey.SCORES,
        default_batch_size=default_batch_size,
        exports_to_keep=exports_to_keep)
  @property
  def model_dir(self):
    """Directory where model parameters and checkpoints are saved."""
    return self._estimator.model_dir
  @property
  def config(self):
    """The `RunConfig` of the wrapped Estimator."""
    return self._estimator.config
| 37.896933 | 102 | 0.662015 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib import layers
from tensorflow.contrib.framework import deprecated
from tensorflow.contrib.framework import deprecated_arg_values
from tensorflow.contrib.framework.python.framework import experimental
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn import evaluable
from tensorflow.contrib.learn.python.learn import metric_spec
from tensorflow.contrib.learn.python.learn import monitors as monitor_lib
from tensorflow.contrib.learn.python.learn import trainable
from tensorflow.contrib.learn.python.learn.estimators import dnn_linear_combined
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import model_fn
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.contrib.learn.python.learn.utils import export
from tensorflow.python.ops import nn
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.summary import summary
# Name of the optional centered-bias variable (read back by `bias_`).
_CENTERED_BIAS_WEIGHT = "centered_bias_weight"
# Learning rate passed to optimize_loss inside _dnn_model_fn.
_LEARNING_RATE = 0.05
def _get_feature_dict(features):
if isinstance(features, dict):
return features
return {"": features}
def _get_optimizer(optimizer):
if callable(optimizer):
return optimizer()
else:
return optimizer
def _add_hidden_layer_summary(value, tag):
  """Records zero-fraction and activation-histogram summaries for one layer."""
  summary.scalar("%s_fraction_of_zero_values" % tag, nn.zero_fraction(value))
  summary.histogram("%s_activation" % tag, value)
def _dnn_model_fn(features, labels, mode, params, config=None):
  """Deep Neural Net model_fn shared by DNNClassifier and DNNRegressor.
  Args:
    features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
    labels: `Tensor` of labels; shape/dtype is defined by the `head` in params.
    mode: Defines whether this is training, evaluation or prediction.
    params: A dict of hyperparameters: "head", "hidden_units",
      "feature_columns", and optionally "optimizer", "activation_fn",
      "dropout", "gradient_clip_norm", "embedding_lr_multipliers".
    config: `RunConfig` object (used for the number of ps replicas).
  Returns:
    The ops created by `head.head_ops` (predictions, loss, train_op).
  """
  head = params["head"]
  hidden_units = params["hidden_units"]
  feature_columns = params["feature_columns"]
  optimizer = params.get("optimizer") or "Adagrad"
  activation_fn = params.get("activation_fn")
  dropout = params.get("dropout")
  gradient_clip_norm = params.get("gradient_clip_norm")
  num_ps_replicas = config.num_ps_replicas if config else 0
  embedding_lr_multipliers = params.get("embedding_lr_multipliers", {})
  features = _get_feature_dict(features)
  parent_scope = "dnn"
  # Input layer gets its own partitioner with a minimum slice size, since
  # embedding variables can be large.
  input_layer_partitioner = (partitioned_variables.min_max_variable_partitioner(
      max_partitions=num_ps_replicas, min_slice_size=64 << 20))
  input_layer_scope = parent_scope + "/input_from_feature_columns"
  with variable_scope.variable_scope(
      input_layer_scope,
      values=list(six.itervalues(features)),
      partitioner=input_layer_partitioner) as scope:
    net = layers.input_from_feature_columns(
        columns_to_tensors=features,
        feature_columns=feature_columns,
        weight_collections=[parent_scope],
        scope=scope)
  hidden_layer_partitioner = (
      partitioned_variables.min_max_variable_partitioner(
          max_partitions=num_ps_replicas))
  # Stack the fully-connected hidden layers; dropout is applied only while
  # training.
  for layer_id, num_hidden_units in enumerate(hidden_units):
    with variable_scope.variable_scope(
        parent_scope + "/hiddenlayer_%d" % layer_id,
        values=[net],
        partitioner=hidden_layer_partitioner) as scope:
      net = layers.fully_connected(
          net,
          num_hidden_units,
          activation_fn=activation_fn,
          variables_collections=[parent_scope],
          scope=scope)
      if dropout is not None and mode == model_fn.ModeKeys.TRAIN:
        net = layers.dropout(net, keep_prob=(1.0 - dropout))
    _add_hidden_layer_summary(net, scope.name)
  # Final linear projection to the head's logits dimension (no activation).
  with variable_scope.variable_scope(
      parent_scope + "/logits",
      values=[net],
      partitioner=hidden_layer_partitioner) as scope:
    logits = layers.fully_connected(
        net,
        head.logits_dimension,
        activation_fn=None,
        variables_collections=[parent_scope],
        scope=scope)
  _add_hidden_layer_summary(logits, scope.name)
  def _train_op_fn(loss):
    """Returns the op to optimize the loss."""
    return optimizers.optimize_loss(
        loss=loss,
        global_step=contrib_variables.get_global_step(),
        learning_rate=_LEARNING_RATE,
        optimizer=_get_optimizer(optimizer),
        gradient_multipliers=(
            dnn_linear_combined._extract_embedding_lr_multipliers(  # pylint: disable=protected-access
                embedding_lr_multipliers, parent_scope, input_layer_scope)),
        clip_gradients=gradient_clip_norm,
        name=parent_scope,
        # Empty summaries to prevent optimizers from logging the training_loss.
        summaries=[])
  return head.head_ops(features, labels, mode, _train_op_fn, logits)
class DNNClassifier(evaluable.Evaluable, trainable.Trainable):
  """A classifier for TensorFlow DNN models.
  Labels are integer class indices in [0, n_classes). This class is a thin
  wrapper that delegates all work to an internal `Estimator` built with
  `_dnn_model_fn` and a multi-class head.
  """
  def __init__(self,
               hidden_units,
               feature_columns,
               model_dir=None,
               n_classes=2,
               weight_column_name=None,
               optimizer=None,
               activation_fn=nn.relu,
               dropout=None,
               gradient_clip_norm=None,
               enable_centered_bias=False,
               config=None,
               feature_engineering_fn=None,
               embedding_lr_multipliers=None):
    """Builds the wrapped `Estimator`; all DNN hyperparameters go in params."""
    self._hidden_units = hidden_units
    self._feature_columns = tuple(feature_columns or [])
    self._enable_centered_bias = enable_centered_bias
    self._estimator = estimator.Estimator(
        model_fn=_dnn_model_fn,
        model_dir=model_dir,
        config=config,
        params={
            "head":
                head_lib._multi_class_head(
                    n_classes,
                    weight_column_name=weight_column_name,
                    enable_centered_bias=enable_centered_bias),
            "hidden_units":
                hidden_units,
            "feature_columns":
                self._feature_columns,
            "optimizer":
                optimizer,
            "activation_fn":
                activation_fn,
            "dropout":
                dropout,
            "gradient_clip_norm":
                gradient_clip_norm,
            "embedding_lr_multipliers":
                embedding_lr_multipliers,
        },
        feature_engineering_fn=feature_engineering_fn)
  def fit(self,
          x=None,
          y=None,
          input_fn=None,
          steps=None,
          batch_size=None,
          monitors=None,
          max_steps=None):
    """See trainable.Trainable. Labels must be integer class indices."""
    # Convert deprecated monitors into session hooks before delegating.
    hooks = monitor_lib.replace_monitors_with_hooks(monitors, self)
    self._estimator.fit(x=x,
                        y=y,
                        input_fn=input_fn,
                        steps=steps,
                        batch_size=batch_size,
                        monitors=hooks,
                        max_steps=max_steps)
    return self
  def evaluate(self,
               x=None,
               y=None,
               input_fn=None,
               feed_fn=None,
               batch_size=None,
               steps=None,
               metrics=None,
               name=None,
               checkpoint_path=None,
               hooks=None):
    """See evaluable.Evaluable. Labels must be integer class indices."""
    return self._estimator.evaluate(
        x=x,
        y=y,
        input_fn=input_fn,
        feed_fn=feed_fn,
        batch_size=batch_size,
        steps=steps,
        metrics=metrics,
        name=name,
        checkpoint_path=checkpoint_path,
        hooks=hooks)
  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE,
      estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  def predict(self, x=None, input_fn=None, batch_size=None, as_iterable=True):
    """Returns predicted class indices for the given features."""
    key = prediction_key.PredictionKey.CLASSES
    preds = self._estimator.predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=[key],
        as_iterable=as_iterable)
    if as_iterable:
      return (pred[key] for pred in preds)
    # Flatten to shape [batch_size] for the in-memory path.
    return preds[key].reshape(-1)
  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE,
      estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  def predict_proba(self,
                    x=None,
                    input_fn=None,
                    batch_size=None,
                    as_iterable=True):
    """Returns class probabilities for the given features."""
    key = prediction_key.PredictionKey.PROBABILITIES
    preds = self._estimator.predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=[key],
        as_iterable=as_iterable)
    if as_iterable:
      return (pred[key] for pred in preds)
    return preds[key]
  def _get_predict_ops(self, features):
    """See `Estimator`; exists to support legacy-interface models."""
    return self._estimator._get_predict_ops(features)
  def get_variable_names(self):
    """Returns list of all variable names in this model."""
    return self._estimator.get_variable_names()
  def get_variable_value(self, name):
    """Returns value of the variable given by name."""
    return self._estimator.get_variable_value(name)
  def export(self,
             export_dir,
             input_fn=None,
             input_feature_key=None,
             use_deprecated_input_fn=True,
             signature_fn=None,
             default_batch_size=1,
             exports_to_keep=None):
    """See BaseEstimator.export."""
    # Default: parse serialized tf.Examples with this model's feature columns.
    def default_input_fn(unused_estimator, examples):
      return layers.parse_feature_columns_from_examples(examples,
                                                        self._feature_columns)
    return self._estimator.export(
        export_dir=export_dir,
        input_fn=input_fn or default_input_fn,
        input_feature_key=input_feature_key,
        use_deprecated_input_fn=use_deprecated_input_fn,
        signature_fn=(signature_fn or
                      export.classification_signature_fn_with_prob),
        prediction_key=prediction_key.PredictionKey.PROBABILITIES,
        default_batch_size=default_batch_size,
        exports_to_keep=exports_to_keep)
  @experimental
  def export_savedmodel(self,
                        export_dir_base,
                        input_fn,
                        default_output_alternative_key=None,
                        assets_extra=None,
                        as_text=False,
                        exports_to_keep=None):
    """Exports the model as a SavedModel via the wrapped Estimator."""
    return self._estimator.export_savedmodel(
        export_dir_base,
        input_fn,
        default_output_alternative_key=default_output_alternative_key,
        assets_extra=assets_extra,
        as_text=as_text,
        exports_to_keep=exports_to_keep)
  @property
  def model_dir(self):
    """Directory where model parameters and checkpoints are saved."""
    return self._estimator.model_dir
  @property
  @deprecated("2016-10-30",
              "This method will be removed after the deprecation date. "
              "To inspect variables, use get_variable_names() and "
              "get_variable_value().")
  def weights_(self):
    """Hidden-layer weight values followed by the logits weights."""
    hiddenlayer_weights = [
        self.get_variable_value("dnn/hiddenlayer_%d/weights" % i)
        for i, _ in enumerate(self._hidden_units)
    ]
    logits_weights = [self.get_variable_value("dnn/logits/weights")]
    return hiddenlayer_weights + logits_weights
  @property
  @deprecated("2016-10-30",
              "This method will be removed after the deprecation date. "
              "To inspect variables, use get_variable_names() and "
              "get_variable_value().")
  def bias_(self):
    """Hidden-layer biases, logits biases, and optional centered bias."""
    hiddenlayer_bias = [
        self.get_variable_value("dnn/hiddenlayer_%d/biases" % i)
        for i, _ in enumerate(self._hidden_units)
    ]
    logits_bias = [self.get_variable_value("dnn/logits/biases")]
    if self._enable_centered_bias:
      centered_bias = [self.get_variable_value(_CENTERED_BIAS_WEIGHT)]
    else:
      centered_bias = []
    return hiddenlayer_bias + logits_bias + centered_bias
  @property
  def config(self):
    """The `RunConfig` of the wrapped Estimator."""
    return self._estimator.config
class DNNRegressor(evaluable.Evaluable, trainable.Trainable):
  """A regressor for TensorFlow DNN models.
  Thin wrapper delegating to an internal `Estimator` built with
  `_dnn_model_fn` and a regression head.
  """
  def __init__(self,
               hidden_units,
               feature_columns,
               model_dir=None,
               weight_column_name=None,
               optimizer=None,
               activation_fn=nn.relu,
               dropout=None,
               gradient_clip_norm=None,
               enable_centered_bias=False,
               config=None,
               feature_engineering_fn=None,
               label_dimension=1,
               embedding_lr_multipliers=None):
    """Builds the wrapped `Estimator` with a regression head."""
    self._feature_columns = tuple(feature_columns or [])
    self._estimator = estimator.Estimator(
        model_fn=_dnn_model_fn,
        model_dir=model_dir,
        config=config,
        params={
            "head":
                head_lib._regression_head(
                    label_dimension=label_dimension,
                    weight_column_name=weight_column_name,
                    enable_centered_bias=enable_centered_bias),
            "hidden_units":
                hidden_units,
            "feature_columns":
                self._feature_columns,
            "optimizer":
                optimizer,
            "activation_fn":
                activation_fn,
            "dropout":
                dropout,
            "gradient_clip_norm":
                gradient_clip_norm,
            "embedding_lr_multipliers":
                embedding_lr_multipliers,
        },
        feature_engineering_fn=feature_engineering_fn)
  def fit(self,
          x=None,
          y=None,
          input_fn=None,
          steps=None,
          batch_size=None,
          monitors=None,
          max_steps=None):
    """See trainable.Trainable."""
    # Convert deprecated monitors into session hooks before delegating.
    hooks = monitor_lib.replace_monitors_with_hooks(monitors, self)
    self._estimator.fit(x=x,
                        y=y,
                        input_fn=input_fn,
                        steps=steps,
                        batch_size=batch_size,
                        monitors=hooks,
                        max_steps=max_steps)
    return self
  def evaluate(self,
               x=None,
               y=None,
               input_fn=None,
               feed_fn=None,
               batch_size=None,
               steps=None,
               metrics=None,
               name=None,
               checkpoint_path=None,
               hooks=None):
    """See evaluable.Evaluable."""
    # Legacy plain-string metric keys are rebound to the SCORES prediction so
    # they keep working alongside MetricSpec/tuple-keyed metrics.
    custom_metrics = {}
    if metrics:
      for key, metric in six.iteritems(metrics):
        if (not isinstance(metric, metric_spec.MetricSpec) and
            not isinstance(key, tuple)):
          custom_metrics[(key, prediction_key.PredictionKey.SCORES)] = metric
        else:
          custom_metrics[key] = metric
    return self._estimator.evaluate(
        x=x,
        y=y,
        input_fn=input_fn,
        feed_fn=feed_fn,
        batch_size=batch_size,
        steps=steps,
        metrics=custom_metrics,
        name=name,
        checkpoint_path=checkpoint_path,
        hooks=hooks)
  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE,
      estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  def predict(self, x=None, input_fn=None, batch_size=None, as_iterable=True):
    """Returns predicted scores for the given features."""
    key = prediction_key.PredictionKey.SCORES
    preds = self._estimator.predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=[key],
        as_iterable=as_iterable)
    if as_iterable:
      return (pred[key] for pred in preds)
    return preds[key]
  def _get_predict_ops(self, features):
    """See `Estimator`; exists to support legacy-interface models."""
    return self._estimator._get_predict_ops(features)
  def get_variable_names(self):
    """Returns list of all variable names in this model."""
    return self._estimator.get_variable_names()
  def get_variable_value(self, name):
    """Returns value of the variable given by name."""
    return self._estimator.get_variable_value(name)
  def export(self,
             export_dir,
             input_fn=None,
             input_feature_key=None,
             use_deprecated_input_fn=True,
             signature_fn=None,
             default_batch_size=1,
             exports_to_keep=None):
    """See BaseEstimator.export."""
    # Default: parse serialized tf.Examples with this model's feature columns.
    def default_input_fn(unused_estimator, examples):
      return layers.parse_feature_columns_from_examples(examples,
                                                        self._feature_columns)
    return self._estimator.export(
        export_dir=export_dir,
        input_fn=input_fn or default_input_fn,
        input_feature_key=input_feature_key,
        use_deprecated_input_fn=use_deprecated_input_fn,
        signature_fn=signature_fn or export.regression_signature_fn,
        prediction_key=prediction_key.PredictionKey.SCORES,
        default_batch_size=default_batch_size,
        exports_to_keep=exports_to_keep)
  @property
  def model_dir(self):
    """Directory where model parameters and checkpoints are saved."""
    return self._estimator.model_dir
  @property
  def config(self):
    """The `RunConfig` of the wrapped Estimator."""
    return self._estimator.config
| true | true |
f72c0438b220afbd9e2368f18d00c300ac52ab61 | 8,977 | py | Python | precision_search/model/TEMPONet_float.py | EmbeddedML-EDAGroup/Q-PPG | ed42829d0a456db4f0b31d63ba8b22ba483c7b08 | [
"Apache-2.0"
] | 1 | 2021-12-18T21:04:29.000Z | 2021-12-18T21:04:29.000Z | precision_search/model/TEMPONet_float.py | EmbeddedML-EDAGroup/Q-PPG | ed42829d0a456db4f0b31d63ba8b22ba483c7b08 | [
"Apache-2.0"
] | null | null | null | precision_search/model/TEMPONet_float.py | EmbeddedML-EDAGroup/Q-PPG | ed42829d0a456db4f0b31d63ba8b22ba483c7b08 | [
"Apache-2.0"
] | null | null | null | #*----------------------------------------------------------------------------*
#* Copyright (C) 2021 Politecnico di Torino, Italy *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Alessio Burrello *
#*----------------------------------------------------------------------------*
import torch.nn as nn
import torch.nn.functional as F
from base import BaseModel
from math import ceil
import sys
sys.path.append("..")
from models import quant_module_1d as qm
__all__ = ['TempoNetfloat']
def TempoNetfloat(**kwargs):
    """Factory returning a full-precision (float) TEMPONet instance."""
    model = TEMPONet(**kwargs)
    return model
class TEMPONet(BaseModel):
    """
    TEMPONet architecture:
    Three repeated instances of TemporalConvBlock and ConvBlock organized as follows:
    - TemporalConvBlock
    - ConvBlock
    Two instances of Regressor followed by a final Linear layer with a single neuron.
    """
    # NOTE(review): `dataset_args={}` is a mutable default argument; it is
    # never read inside this class, but prefer `dataset_args=None` if it
    # ever gets used.
    def __init__(self, dataset_name='PPG_Dalia', dataset_args={}):
        super(TEMPONet, self).__init__()
        # Dilation factor of each of the seven convolutional layers.
        self.dil = [
            2, 2, 1,
            4, 4,
            8, 8
        ]
        # Target receptive field of each layer; the kernel sizes below are
        # derived as ceil(rf / dil).
        self.rf = [
            5, 5, 5,
            9, 9,
            17, 17
        ]
        # Output channel count of every conv/linear stage, in order.
        self.ch = [
            32, 32, 64,
            64, 64, 128,
            128, 128, 128,
            256, 128
        ]
        # 1st instance of two TempConvBlocks and ConvBlock
        # Padding keeps the temporal length unchanged for the dilated convs.
        k_tcb00 = ceil(self.rf[0] / self.dil[0])
        self.tcb00 = TempConvBlock(
            ch_in=4,
            ch_out=self.ch[0],
            k_size=k_tcb00,
            dil=self.dil[0],
            pad=((k_tcb00 - 1) * self.dil[0] + 1) // 2
        )
        k_tcb01 = ceil(self.rf[1] / self.dil[1])
        self.tcb01 = TempConvBlock(
            ch_in=self.ch[0],
            ch_out=self.ch[1],
            k_size=k_tcb01,
            dil=self.dil[1],
            pad=((k_tcb01 - 1) * self.dil[1] + 1) // 2
        )
        k_cb0 = ceil(self.rf[2] / self.dil[2])
        self.cb0 = ConvBlock(
            ch_in=self.ch[1],
            ch_out=self.ch[2],
            k_size=k_cb0,
            strd=1,
            pad=((k_cb0 - 1) * self.dil[2] + 1) // 2,
            dilation=self.dil[2]
        )
        # 2nd instance of two TempConvBlocks and ConvBlock
        k_tcb10 = ceil(self.rf[3] / self.dil[3])
        self.tcb10 = TempConvBlock(
            ch_in=self.ch[2],
            ch_out=self.ch[3],
            k_size=k_tcb10,
            dil=self.dil[3],
            pad=((k_tcb10 - 1) * self.dil[3] + 1) // 2
        )
        k_tcb11 = ceil(self.rf[4] / self.dil[4])
        self.tcb11 = TempConvBlock(
            ch_in=self.ch[3],
            ch_out=self.ch[4],
            k_size=k_tcb11,
            dil=self.dil[4],
            pad=((k_tcb11 - 1) * self.dil[4] + 1) // 2
        )
        self.cb1 = ConvBlock(
            ch_in=self.ch[4],
            ch_out=self.ch[5],
            k_size=5,
            strd=2,
            pad=2
        )
        # 3td instance of TempConvBlock and ConvBlock
        k_tcb20 = ceil(self.rf[5] / self.dil[5])
        self.tcb20 = TempConvBlock(
            ch_in=self.ch[5],
            ch_out=self.ch[6],
            k_size=k_tcb20,
            dil=self.dil[5],
            pad=((k_tcb20 - 1) * self.dil[5] + 1) // 2
        )
        k_tcb21 = ceil(self.rf[6] / self.dil[6])
        self.tcb21 = TempConvBlock(
            ch_in=self.ch[6],
            ch_out=self.ch[7],
            k_size=k_tcb21,
            dil=self.dil[6],
            pad=((k_tcb21 - 1) * self.dil[6] + 1) // 2
        )
        self.cb2 = ConvBlock(
            ch_in=self.ch[7],
            ch_out=self.ch[8],
            k_size=5,
            strd=4,
            pad=4
        )
        # 1st instance of regressor
        # The `* 4` accounts for the temporal length remaining after the
        # strided/pooled conv stages -- assumes a fixed input window length;
        # TODO confirm against the dataloader.
        self.regr0 = Regressor(
            ft_in=self.ch[8] * 4,
            ft_out=self.ch[9]
        )
        # 2nd instance of regressor
        self.regr1 = Regressor(
            ft_in=self.ch[9],
            ft_out=self.ch[10]
        )
        # Final single-neuron head producing the scalar regression output.
        self.out_neuron = nn.Linear(
            in_features=self.ch[10],
            out_features=1
        )
    def forward(self, x):
        """Run the three conv stages, flatten, and regress to one scalar."""
        # Stage 1: two dilated temporal convs, then a conv block.
        x = self.cb0(
            self.tcb01(
                self.tcb00(
                    x
                )
            )
        )
        # Stage 2
        x = self.cb1(
            self.tcb11(
                self.tcb10(
                    x
                )
            )
        )
        # Stage 3
        x = self.cb2(
            self.tcb21(
                self.tcb20(
                    x
                )
            )
        )
        # Flatten channel/time features into one vector per sample.
        x = x.flatten(1)
        x = self.regr0(
            x
        )
        x = self.regr1(
            x
        )
        x = self.out_neuron(
            x
        )
        return x
class TempConvBlock(BaseModel):
    """
    Temporal Convolutional Block composed of a single dilated temporal
    convolution. The actual data flow through the block is:
    - Conv1d layer (dilated, bias-free)
    - BatchNorm1d layer
    - ReLU6 layer
    (Note: there is no Chomp1d layer here; padding is symmetric, not causal.)
    :param ch_in: Number of input channels
    :param ch_out: Number of output channels
    :param k_size: Kernel size
    :param dil: Amount of dilation
    :param pad: Amount of padding
    """
    def __init__(self, ch_in, ch_out, k_size, dil, pad):
        super(TempConvBlock, self).__init__()
        self.tcn0 = nn.Conv1d(
            in_channels=ch_in,
            out_channels=ch_out,
            kernel_size=k_size,
            dilation=dil,
            bias = False,
            padding=pad
        )
        self.relu0 = nn.ReLU6()
        self.bn0 = nn.BatchNorm1d(
            num_features=ch_out
        )
    def forward(self, x):
        # Order is conv -> batch-norm -> ReLU6 (normalization before the
        # activation).
        x = self.relu0(self.bn0(self.tcn0(x)))
        return x
class ConvBlock(BaseModel):
    """
    Convolutional Block. The actual data flow through the block is:
    - Conv1d layer (bias-free)
    - AvgPool1d layer (kernel 2, stride 2 -> halves the temporal length)
    - BatchNorm1d layer
    - ReLU6 layer
    :param ch_in: Number of input channels
    :param ch_out: Number of output channels
    :param k_size: Kernel size
    :param strd: Amount of stride
    :param pad: Amount of padding
    :param dilation: Dilation factor of the convolution (default 1)
    """
    def __init__(self, ch_in, ch_out, k_size, strd, pad, dilation=1):
        super(ConvBlock, self).__init__()
        self.conv0 = nn.Conv1d(
            in_channels=ch_in,
            out_channels=ch_out,
            kernel_size=k_size,
            stride=strd,
            dilation=dilation,
            bias = False,
            padding=pad
        )
        self.pool0 = nn.AvgPool1d(
            kernel_size=2,
            stride=2,
            padding=0
        )
        self.relu0 = nn.ReLU6()
        self.bn0 = nn.BatchNorm1d(ch_out)
    def forward(self, x):
        # conv -> average-pool -> batch-norm -> ReLU6
        x = self.relu0(self.bn0(self.pool0(self.conv0(x))))
        return x
class Regressor(BaseModel):
    """
    Regressor block. The actual data flow through the block is:
    - Linear layer (bias-free)
    - BatchNorm1d layer
    - ReLU6 layer
    :param ft_in: Number of input features
    :param ft_out: Number of output features
    """
    def __init__(self, ft_in, ft_out):
        super(Regressor, self).__init__()
        # Feature sizes are kept as attributes for introspection elsewhere.
        self.ft_in = ft_in
        self.ft_out = ft_out
        self.fc0 = nn.Linear(
            in_features=ft_in,
            out_features=ft_out,
            bias = False
        )
        self.relu0 = nn.ReLU6()
        self.bn0 = nn.BatchNorm1d(
            num_features=ft_out
        )
    def forward(self, x):
        # linear -> batch-norm -> ReLU6
        x = self.relu0(self.bn0(self.fc0(x)))
        return x
class Chomp1d(BaseModel):
    """
    Module that removes the last `chomp_size` time steps from its input.
    It is used to chomp the zero-padding added on the right of the input
    tensor, which is necessary to compute causal convolutions.
    :param chomp_size: amount of trailing padding steps to be removed
    """
    def __init__(self, chomp_size):
        super(Chomp1d, self).__init__()
        self.chomp_size = chomp_size
    def forward(self, x):
        # Bug fix: with chomp_size == 0 the original slice `x[:, :, :-0]`
        # evaluated to `x[:, :, :0]` and returned an empty tensor.
        if self.chomp_size == 0:
            return x.contiguous()
        return x[:, :, :-self.chomp_size].contiguous()
| 28.22956 | 150 | 0.479782 |
import torch.nn as nn
import torch.nn.functional as F
from base import BaseModel
from math import ceil
import sys
sys.path.append("..")
from models import quant_module_1d as qm
__all__ = ['TempoNetfloat']
def TempoNetfloat(**kwargs):
return TEMPONet(**kwargs)
class TEMPONet(BaseModel):
def __init__(self, dataset_name='PPG_Dalia', dataset_args={}):
super(TEMPONet, self).__init__()
self.dil = [
2, 2, 1,
4, 4,
8, 8
]
self.rf = [
5, 5, 5,
9, 9,
17, 17
]
self.ch = [
32, 32, 64,
64, 64, 128,
128, 128, 128,
256, 128
]
k_tcb00 = ceil(self.rf[0] / self.dil[0])
self.tcb00 = TempConvBlock(
ch_in=4,
ch_out=self.ch[0],
k_size=k_tcb00,
dil=self.dil[0],
pad=((k_tcb00 - 1) * self.dil[0] + 1) // 2
)
k_tcb01 = ceil(self.rf[1] / self.dil[1])
self.tcb01 = TempConvBlock(
ch_in=self.ch[0],
ch_out=self.ch[1],
k_size=k_tcb01,
dil=self.dil[1],
pad=((k_tcb01 - 1) * self.dil[1] + 1) // 2
)
k_cb0 = ceil(self.rf[2] / self.dil[2])
self.cb0 = ConvBlock(
ch_in=self.ch[1],
ch_out=self.ch[2],
k_size=k_cb0,
strd=1,
pad=((k_cb0 - 1) * self.dil[2] + 1) // 2,
dilation=self.dil[2]
)
k_tcb10 = ceil(self.rf[3] / self.dil[3])
self.tcb10 = TempConvBlock(
ch_in=self.ch[2],
ch_out=self.ch[3],
k_size=k_tcb10,
dil=self.dil[3],
pad=((k_tcb10 - 1) * self.dil[3] + 1) // 2
)
k_tcb11 = ceil(self.rf[4] / self.dil[4])
self.tcb11 = TempConvBlock(
ch_in=self.ch[3],
ch_out=self.ch[4],
k_size=k_tcb11,
dil=self.dil[4],
pad=((k_tcb11 - 1) * self.dil[4] + 1) // 2
)
self.cb1 = ConvBlock(
ch_in=self.ch[4],
ch_out=self.ch[5],
k_size=5,
strd=2,
pad=2
)
k_tcb20 = ceil(self.rf[5] / self.dil[5])
self.tcb20 = TempConvBlock(
ch_in=self.ch[5],
ch_out=self.ch[6],
k_size=k_tcb20,
dil=self.dil[5],
pad=((k_tcb20 - 1) * self.dil[5] + 1) // 2
)
k_tcb21 = ceil(self.rf[6] / self.dil[6])
self.tcb21 = TempConvBlock(
ch_in=self.ch[6],
ch_out=self.ch[7],
k_size=k_tcb21,
dil=self.dil[6],
pad=((k_tcb21 - 1) * self.dil[6] + 1) // 2
)
self.cb2 = ConvBlock(
ch_in=self.ch[7],
ch_out=self.ch[8],
k_size=5,
strd=4,
pad=4
)
self.regr0 = Regressor(
ft_in=self.ch[8] * 4,
ft_out=self.ch[9]
)
self.regr1 = Regressor(
ft_in=self.ch[9],
ft_out=self.ch[10]
)
self.out_neuron = nn.Linear(
in_features=self.ch[10],
out_features=1
)
def forward(self, x):
x = self.cb0(
self.tcb01(
self.tcb00(
x
)
)
)
x = self.cb1(
self.tcb11(
self.tcb10(
x
)
)
)
x = self.cb2(
self.tcb21(
self.tcb20(
x
)
)
)
x = x.flatten(1)
x = self.regr0(
x
)
x = self.regr1(
x
)
x = self.out_neuron(
x
)
return x
class TempConvBlock(BaseModel):
def __init__(self, ch_in, ch_out, k_size, dil, pad):
super(TempConvBlock, self).__init__()
self.tcn0 = nn.Conv1d(
in_channels=ch_in,
out_channels=ch_out,
kernel_size=k_size,
dilation=dil,
bias = False,
padding=pad
)
self.relu0 = nn.ReLU6()
self.bn0 = nn.BatchNorm1d(
num_features=ch_out
)
def forward(self, x):
x = self.relu0(self.bn0(self.tcn0(x)))
return x
class ConvBlock(BaseModel):
def __init__(self, ch_in, ch_out, k_size, strd, pad, dilation=1):
super(ConvBlock, self).__init__()
self.conv0 = nn.Conv1d(
in_channels=ch_in,
out_channels=ch_out,
kernel_size=k_size,
stride=strd,
dilation=dilation,
bias = False,
padding=pad
)
self.pool0 = nn.AvgPool1d(
kernel_size=2,
stride=2,
padding=0
)
self.relu0 = nn.ReLU6()
self.bn0 = nn.BatchNorm1d(ch_out)
def forward(self, x):
x = self.relu0(self.bn0(self.pool0(self.conv0(x))))
return x
class Regressor(BaseModel):
def __init__(self, ft_in, ft_out):
super(Regressor, self).__init__()
self.ft_in = ft_in
self.ft_out = ft_out
self.fc0 = nn.Linear(
in_features=ft_in,
out_features=ft_out,
bias = False
)
self.relu0 = nn.ReLU6()
self.bn0 = nn.BatchNorm1d(
num_features=ft_out
)
def forward(self, x):
x = self.relu0(self.bn0(self.fc0(x)))
return x
class Chomp1d(BaseModel):
def __init__(self, chomp_size):
super(Chomp1d, self).__init__()
self.chomp_size = chomp_size
def forward(self, x):
return x[:, :, :-self.chomp_size].contiguous()
| true | true |
f72c0485c62be64f09e5a685e3cf499a790a4ccf | 13,340 | py | Python | crossmodal_embedding/tasks/crossmodal/training_star_task.py | ai-systems/crossmodal_embedding | 5c61775531fd350c48a965450ab5e99b28deec5e | [
"MIT"
] | null | null | null | crossmodal_embedding/tasks/crossmodal/training_star_task.py | ai-systems/crossmodal_embedding | 5c61775531fd350c48a965450ab5e99b28deec5e | [
"MIT"
] | null | null | null | crossmodal_embedding/tasks/crossmodal/training_star_task.py | ai-systems/crossmodal_embedding | 5c61775531fd350c48a965450ab5e99b28deec5e | [
"MIT"
] | null | null | null | from prefect import Task
from loguru import logger
from tqdm import tqdm
from crossmodal_embedding.models import CrossModalEmbedding, SiameseNet
from crossmodal_embedding.models import InputData, InputDataTest
from sklearn.metrics import precision_recall_fscore_support, f1_score
import torch.optim as optim
import torch.nn as nn
import torch
import torch.nn as nn
from crossmodal_embedding.util.evaluation import (
compute_map_basic,
compute_map_with_unification,
)
from torch.utils.data import WeightedRandomSampler
import sys
import json
from torch.utils.tensorboard import SummaryWriter
class TrainingTaskStar(Task):
    """Prefect task that trains and evaluates the cross-modal Siamese embedding.

    Trains with class-balanced sampling, keeps the checkpoint with the best
    dev F1 under ./models/{output_model}, then evaluates on the test set and
    writes result logs under ./logs/.
    """

    def create_weights(self, df):
        """Return per-row sampling weights balancing positive/negative scores.

        Each class gets weight 1/count so a WeightedRandomSampler draws both
        classes with equal probability.

        :param df: dataframe with a binary ``score`` column (0 = negative)
        :return: 1-D float tensor, one weight per dataframe row
        """
        positives = 0
        negatives = 0
        for _, row in df.iterrows():
            if row["score"] == 0:
                negatives = negatives + 1
            else:
                positives = positives + 1
        # Robustness fix: guard against a dataframe with a missing class,
        # which previously raised ZeroDivisionError.
        weight_positive = 1.0 / float(max(positives, 1))
        weight_negative = 1.0 / float(max(negatives, 1))
        weights = [
            weight_negative if row["score"] == 0 else weight_positive
            for _, row in df.iterrows()
        ]
        return torch.tensor(weights)

    def run(
        self,
        train,
        test,
        dev,
        num_negatives,
        output_log,
        output_model,
        vocab_size,
        batch_size=10,
        num_epochs=5,
        learning_rate=0.0001,
        max_sequence_len=100,
        hidden_size=10,
        out_embedding=128,
        attention_heads=5,
        word_embedding=50,
        decay=0.01,
    ):
        """Train the SiameseNet on `train`, model-select on `dev`, report on `test`.

        :param train/dev/test: dataframes consumed by InputData/InputDataTest
        :param output_log: filename (under ./logs/) for the per-epoch results
        :param output_model: filename (under ./models/) for the best checkpoint
        """
        logger.info(f" Negative Examples: {num_negatives}")
        logger.info("Let's train the Cross-Modal Embedding ! (^・ω・^ )")
        # Device configuration
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        # Check for multi_GPUS
        multiple_gpus = 0
        train_class_weight = self.create_weights(train)
        train_dataset = InputData(train)
        logger.info(f"TRAIN: {len(train_dataset)}")
        dev_dataset = InputData(dev)
        logger.info(f"DEV: {len(dev_dataset)}")
        test_dataset = InputDataTest(test, vocab_size)
        logger.info(f"TEST: {len(test_dataset)}")
        sampler_train = WeightedRandomSampler(
            train_class_weight, len(train_class_weight)
        )
        # Data loaders: training is class-balanced via the weighted sampler.
        train_loader = torch.utils.data.DataLoader(
            dataset=train_dataset, batch_size=batch_size, sampler=sampler_train,
        )
        dev_loader = torch.utils.data.DataLoader(
            dataset=dev_dataset, batch_size=batch_size, shuffle=False
        )
        test_loader = torch.utils.data.DataLoader(
            dataset=test_dataset, batch_size=batch_size, shuffle=False
        )
        model = SiameseNet(
            out_embedding,
            batch_size,
            vocab_size,
            max_len=max_sequence_len,
            hidden_size=hidden_size,
            out_embedding=out_embedding,
            device=device,
            attention_heads=attention_heads,
            word_embedding=word_embedding,
        )
        if torch.cuda.device_count() > 1:
            logger.info(
                f"**********Let's use {torch.cuda.device_count()} GPUs!********"
            )
            multiple_gpus = 1
            model = nn.DataParallel(model)
        else:
            logger.info("********* Only one GPU *******")
        model = model.to(device)
        # Loss and optimizer (the model outputs log-probabilities, hence NLLLoss).
        criterion = nn.NLLLoss()
        optimizer = torch.optim.AdamW(
            model.parameters(), lr=learning_rate, weight_decay=decay
        )
        scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
            optimizer, "min", verbose=True, patience=1, cooldown=3
        )
        # Train the model, keeping the checkpoint with the best dev F1.
        best_value = 0
        all_best = dict()
        result_dict = dict()
        for epoch in tqdm(range(num_epochs), desc=f"Epoch"):
            epoch_loss = 0.0
            running_loss = 0.0
            model.train()
            t = tqdm(iter(train_loader), leave=False, total=len(train_loader))
            for (
                i,
                (statement1, st1_mask, st1_len, statement2, st2_mask, st2_len, score),
            ) in enumerate(t):
                # Move tensors to the configured device
                statement1 = statement1.to(device)
                st1_mask = st1_mask.to(device)
                st1_len = st1_len.to(device)
                statement2 = statement2.to(device)
                st2_mask = st2_mask.to(device)
                st2_len = st2_len.to(device)
                score = score.to(device)
                optimizer.zero_grad()
                sim = model(
                    statement1, st1_mask, st1_len, statement2, st2_mask, st2_len
                )
                loss = criterion(sim, score)
                loss.backward()
                optimizer.step()
                epoch_loss += loss.item()
                # Running loss only feeds the progress-bar description.
                running_loss += loss.item()
                if i % 10 == 0:
                    t.set_description("loss: {:.4f}".format(running_loss / 10))
                    running_loss = 0
            logger.info(
                f"********Epoch: {epoch+1} *****Loss: {epoch_loss / len(train_loader)}"
            )
            result_dict[epoch] = dict()
            result_dict[epoch]["train_loss"] = epoch_loss / len(train_loader)
            scheduler.step(epoch_loss / len(train_loader))
            if (epoch + 1) % 1 == 0:
                model.eval()
                with torch.no_grad():
                    logger.info("Evaluating on Train set!")
                    t = tqdm(iter(train_loader), leave=False, total=len(train_loader))
                    y_pred_list = []
                    y_real_list = []
                    for (
                        i,
                        (
                            statement1,
                            st1_mask,
                            st1_len,
                            statement2,
                            st2_mask,
                            st2_len,
                            score,
                        ),
                    ) in enumerate(t):
                        # Move tensors to the configured device
                        statement1 = statement1.to(device)
                        st1_mask = st1_mask.to(device)
                        st1_len = st1_len.to(device)
                        statement2 = statement2.to(device)
                        st2_mask = st2_mask.to(device)
                        st2_len = st2_len.to(device)
                        y_real_list.extend(score.cpu().tolist())
                        score = score.to(device)
                        sim = model(
                            statement1, st1_mask, st1_len, statement2, st2_mask, st2_len
                        )
                        y_dev_pred = torch.argmax(sim, dim=1)
                        y_pred_list.extend(y_dev_pred.cpu().tolist())
                    f1_value = f1_score(y_real_list, y_pred_list)
                    (precision, recall, _, _,) = precision_recall_fscore_support(
                        y_real_list, y_pred_list, average="binary"
                    )
                    logger.info("Evaluating on Dev set!")
                    t = tqdm(iter(dev_loader), leave=False, total=len(dev_loader))
                    y_pred_list = []
                    y_real_list = []
                    epoch_test_loss = 0.0
                    for (
                        i,
                        (
                            statement1,
                            st1_mask,
                            st1_len,
                            statement2,
                            st2_mask,
                            st2_len,
                            score,
                        ),
                    ) in enumerate(t):
                        statement1 = statement1.to(device)
                        st1_mask = st1_mask.to(device)
                        st1_len = st1_len.to(device)
                        statement2 = statement2.to(device)
                        st2_mask = st2_mask.to(device)
                        st2_len = st2_len.to(device)
                        y_real_list.extend(score.cpu().tolist())
                        score = score.to(device)
                        # Bug fix: the first statement's lengths are st1_len;
                        # the original passed st2_len here.
                        sim = model(
                            statement1, st1_mask, st1_len, statement2, st2_mask, st2_len
                        )
                        loss_test = criterion(sim, score)
                        epoch_test_loss += loss_test.item()
                        y_dev_pred = torch.argmax(sim, dim=1)
                        y_pred_list.extend(y_dev_pred.cpu().tolist())
                    logger.info(f"DEV LOSS: {epoch_test_loss / len(dev_loader)}")
                    f1_value = f1_score(y_real_list, y_pred_list)
                    (precision, recall, _, _,) = precision_recall_fscore_support(
                        y_real_list, y_pred_list, average="binary"
                    )
                    result_dict[epoch]["f1"] = f1_value
                    result_dict[epoch]["precision"] = precision.tolist()
                    result_dict[epoch]["recall"] = recall.tolist()
                    if f1_value > best_value:
                        best_value = f1_value
                        # Save on CPU so the checkpoint loads on any device.
                        model = model.to("cpu")
                        if multiple_gpus:
                            torch.save(
                                model.module.state_dict(), f"./models/{output_model}",
                            )
                        else:
                            torch.save(
                                model.state_dict(), f"./models/{output_model}",
                            )
                        all_best["f1"] = f1_value
                        all_best["precision"] = precision.tolist()
                        all_best["recall"] = recall.tolist()
                        model = model.to(device)
        best_model = model
        with torch.no_grad():
            best_model.eval()
            logger.info("Evaluating on Test set!")
            t = tqdm(iter(test_loader), leave=False, total=len(test_loader))
            y_pred_list = []
            y_real_list = []
            for (
                i,
                (statement1, st1_mask, st1_len, statement2, st2_mask, st2_len, score),
            ) in enumerate(t):
                # Move tensors to the configured device
                statement1 = statement1.to(device)
                st1_mask = st1_mask.to(device)
                st1_len = st1_len.to(device)
                statement2 = statement2.to(device)
                st2_mask = st2_mask.to(device)
                st2_len = st2_len.to(device)
                y_real_list.extend(score.cpu().tolist())
                score = score.to(device)
                sim = best_model(
                    statement1, st1_mask, st1_len, statement2, st2_mask, st2_len
                )
                y_dev_pred = torch.argmax(sim, dim=1)
                y_pred_list.extend(y_dev_pred.cpu().tolist())
            f1_value = f1_score(y_real_list, y_pred_list)
            (precision, recall, _, _,) = precision_recall_fscore_support(
                y_real_list, y_pred_list, average="binary"
            )
            logger.info("****** PARAMETERS ********")
            logger.info(f"Num negatives: {num_negatives}")
            logger.info(f"Batch_size: {batch_size}")
            logger.info(f"Max len: {max_sequence_len}")
            logger.info(f"Word embedding: {word_embedding}")
            logger.info(f"Out embedding: {out_embedding}")
            logger.info(f"Hidden Size: {hidden_size}")
            logger.info(f"Decay: {decay}")
            logger.info(f"ATT heads: {attention_heads}")
            logger.info(f"Learning rate: {learning_rate}")
            logger.info("****** BEST RESULTS TEST******")
            logger.info(f"F1 SCORE {f1_value}")
            logger.info(f"PRECISION: {precision}")
            logger.info(f"RECALL: {recall}")
            all_best["f1_test"] = f1_value
            all_best["precision_test"] = precision.tolist()
            all_best["recall_test"] = recall.tolist()
        logger.info("******** BEST RESULTS DEV **********")
        logger.info(all_best)
        with open(f"./logs/{output_log}", "w") as f:
            json.dump(result_dict, f)
        # Bug fix: the "best_" log previously re-dumped result_dict; write
        # the best-model metrics (all_best) instead.
        with open(f"./logs/best_{output_log}", "w") as f:
            json.dump(all_best, f)
| 37.366947 | 88 | 0.496027 | from prefect import Task
from loguru import logger
from tqdm import tqdm
from crossmodal_embedding.models import CrossModalEmbedding, SiameseNet
from crossmodal_embedding.models import InputData, InputDataTest
from sklearn.metrics import precision_recall_fscore_support, f1_score
import torch.optim as optim
import torch.nn as nn
import torch
import torch.nn as nn
from crossmodal_embedding.util.evaluation import (
compute_map_basic,
compute_map_with_unification,
)
from torch.utils.data import WeightedRandomSampler
import sys
import json
from torch.utils.tensorboard import SummaryWriter
class TrainingTaskStar(Task):
def create_weights(self, df):
positives = 0
negatives = 0
weights = list()
for index, row in df.iterrows():
if row["score"] == 0:
negatives = negatives + 1
else:
positives = positives + 1
weight_positive = 1.0 / float(positives)
weight_negative = 1.0 / float(negatives)
for index, row in df.iterrows():
if row["score"] == 0:
weights.append(weight_negative)
else:
weights.append(weight_positive)
return torch.tensor(weights)
def run(
self,
train,
test,
dev,
num_negatives,
output_log,
output_model,
vocab_size,
batch_size=10,
num_epochs=5,
learning_rate=0.0001,
max_sequence_len=100,
hidden_size=10,
out_embedding=128,
attention_heads=5,
word_embedding=50,
decay=0.01,
):
logger.info(f" Negative Examples: {num_negatives}")
logger.info("Let's train the Cross-Modal Embedding ! (^・ω・^ )")
# Device configuration
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Check for multi_GPUS
multiple_gpus = 0
train_class_weight = self.create_weights(train)
train_dataset = InputData(train)
logger.info(f"TRAIN: {len(train_dataset)}")
dev_dataset = InputData(dev)
logger.info(f"DEV: {len(dev_dataset)}")
test_dataset = InputDataTest(test, vocab_size)
logger.info(f"TEST: {len(test_dataset)}")
sampler_train = WeightedRandomSampler(
train_class_weight, len(train_class_weight)
)
# Data loader
train_loader = torch.utils.data.DataLoader(
dataset=train_dataset, batch_size=batch_size, sampler=sampler_train,
)
dev_loader = torch.utils.data.DataLoader(
dataset=dev_dataset, batch_size=batch_size, shuffle=False
)
test_loader = torch.utils.data.DataLoader(
dataset=test_dataset, batch_size=batch_size, shuffle=False
)
model = SiameseNet(
out_embedding,
batch_size,
vocab_size,
max_len=max_sequence_len,
hidden_size=hidden_size,
out_embedding=out_embedding,
device=device,
attention_heads=attention_heads,
word_embedding=word_embedding,
)
if torch.cuda.device_count() > 1:
logger.info(
f"**********Let's use {torch.cuda.device_count()} GPUs!********"
)
multiple_gpus = 1
model = nn.DataParallel(model)
else:
logger.info("********* Only one GPU *******")
model = model.to(device)
criterion = nn.NLLLoss()
optimizer = torch.optim.AdamW(
model.parameters(), lr=learning_rate, weight_decay=decay
)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
optimizer, "min", verbose=True, patience=1, cooldown=3
)
best_value = 0
all_best = dict()
result_dict = dict()
total_step = len(train_loader)
for epoch in tqdm(range(num_epochs), desc=f"Epoch"):
epoch_loss = 0.0
running_loss = 0.0
model.train()
t = tqdm(iter(train_loader), leave=False, total=len(train_loader))
for (
i,
(statement1, st1_mask, st1_len, statement2, st2_mask, st2_len, score),
) in enumerate(t):
statement1 = statement1.to(device)
st1_mask = st1_mask.to(device)
st1_len = st1_len.to(device)
statement2 = statement2.to(device)
st2_mask = st2_mask.to(device)
st2_len = st2_len.to(device)
score = score.to(device)
optimizer.zero_grad()
sim = model(
statement1, st1_mask, st1_len, statement2, st2_mask, st2_len
)
loss = criterion(sim, score)
loss.backward()
optimizer.step()
epoch_loss += loss.item()
running_loss += loss.item()
if i % 10 == 0:
t.set_description("loss: {:.4f}".format(running_loss / 10))
running_loss = 0
logger.info(
f"********Epoch: {epoch+1} *****Loss: {epoch_loss / len(train_loader)}"
)
result_dict[epoch] = dict()
result_dict[epoch]["train_loss"] = epoch_loss / len(train_loader)
scheduler.step(epoch_loss / len(train_loader))
if (epoch + 1) % 1 == 0:
model.eval()
with torch.no_grad():
logger.info("Evaluating on Train set!")
t = tqdm(iter(train_loader), leave=False, total=len(train_loader))
y_pred_list = []
y_real_list = []
for (
i,
(
statement1,
st1_mask,
st1_len,
statement2,
st2_mask,
st2_len,
score,
),
) in enumerate(t):
statement1 = statement1.to(device)
st1_mask = st1_mask.to(device)
st1_len = st1_len.to(device)
statement2 = statement2.to(device)
st2_mask = st2_mask.to(device)
st2_len = st2_len.to(device)
y_real_list.extend(score.cpu().tolist())
score = score.to(device)
sim = model(
statement1, st1_mask, st1_len, statement2, st2_mask, st2_len
)
y_dev_pred = torch.argmax(sim, dim=1)
y_pred_list.extend(y_dev_pred.cpu().tolist())
f1_value = f1_score(y_real_list, y_pred_list)
(precision, recall, _, _,) = precision_recall_fscore_support(
y_real_list, y_pred_list, average="binary"
)
logger.info("Evaluating on Dev set!")
t = tqdm(iter(dev_loader), leave=False, total=len(dev_loader))
y_pred_list = []
y_real_list = []
epoch_test_loss = 0.0
for (
i,
(
statement1,
st1_mask,
st1_len,
statement2,
st2_mask,
st2_len,
score,
),
) in enumerate(t):
statement1 = statement1.to(device)
st1_mask = st1_mask.to(device)
st1_len = st1_len.to(device)
statement2 = statement2.to(device)
st2_mask = st2_mask.to(device)
st2_len = st2_len.to(device)
y_real_list.extend(score.cpu().tolist())
score = score.to(device)
sim = model(
statement1, st1_mask, st2_len, statement2, st2_mask, st2_len
)
loss_test = criterion(sim, score)
epoch_test_loss += loss_test.item()
y_dev_pred = torch.argmax(sim, dim=1)
y_pred_list.extend(y_dev_pred.cpu().tolist())
logger.info(f"DEV LOSS: {epoch_test_loss / len(dev_loader)}")
f1_value = f1_score(y_real_list, y_pred_list)
(precision, recall, _, _,) = precision_recall_fscore_support(
y_real_list, y_pred_list, average="binary"
)
result_dict[epoch]["f1"] = f1_value
result_dict[epoch]["precision"] = precision.tolist()
result_dict[epoch]["recall"] = recall.tolist()
if f1_value > best_value:
best_value = f1_value
model = model.to("cpu")
if multiple_gpus:
torch.save(
model.module.state_dict(), f"./models/{output_model}",
)
else:
torch.save(
model.state_dict(), f"./models/{output_model}",
)
all_best["f1"] = f1_value
all_best["precision"] = precision.tolist()
all_best["recall"] = recall.tolist()
model = model.to(device)
best_model = model
with torch.no_grad():
best_model.eval()
logger.info("Evaluating on Test set!")
all_embeddings = dict()
t = tqdm(iter(test_loader), leave=False, total=len(test_loader))
y_pred_list = []
y_real_list = []
for (
i,
(statement1, st1_mask, st1_len, statement2, st2_mask, st2_len, score),
) in enumerate(t):
statement1 = statement1.to(device)
st1_mask = st1_mask.to(device)
st1_len = st1_len.to(device)
statement2 = statement2.to(device)
st2_mask = st2_mask.to(device)
st2_len = st2_len.to(device)
y_real_list.extend(score.cpu().tolist())
score = score.to(device)
sim = best_model(
statement1, st1_mask, st1_len, statement2, st2_mask, st2_len
)
y_dev_pred = torch.argmax(sim, dim=1)
y_pred_list.extend(y_dev_pred.cpu().tolist())
f1_value = f1_score(y_real_list, y_pred_list)
(precision, recall, _, _,) = precision_recall_fscore_support(
y_real_list, y_pred_list, average="binary"
)
logger.info("****** PARAMETERS ********")
logger.info(f"Num negatives: {num_negatives}")
logger.info(f"Batch_size: {batch_size}")
logger.info(f"Max len: {max_sequence_len}")
logger.info(f"Word embedding: {word_embedding}")
logger.info(f"Out embedding: {out_embedding}")
logger.info(f"Hidden Size: {hidden_size}")
logger.info(f"Decay: {decay}")
logger.info(f"ATT heads: {attention_heads}")
logger.info(f"Learning rate: {learning_rate}")
logger.info("****** BEST RESULTS TEST******")
logger.info(f"F1 SCORE {f1_value}")
logger.info(f"PRECISION: {precision}")
logger.info(f"RECALL: {recall}")
all_best["f1_test"] = f1_value
all_best["precision_test"] = precision.tolist()
all_best["recall_test"] = recall.tolist()
logger.info("******** BEST RESULTS DEV **********")
logger.info(all_best)
with open(f"./logs/{output_log}", "w") as f:
json.dump(result_dict, f)
with open(f"./logs/best_{output_log}", "w") as f:
json.dump(result_dict, f)
| true | true |
f72c04c1115d6d2253cc4ff13cc515e322a0dd87 | 6,763 | py | Python | config.py | YetheYe/Mask_RCNN | 6895c617af13ecbf0bb27790e29a6271725cb34f | [
"MIT"
] | null | null | null | config.py | YetheYe/Mask_RCNN | 6895c617af13ecbf0bb27790e29a6271725cb34f | [
"MIT"
] | null | null | null | config.py | YetheYe/Mask_RCNN | 6895c617af13ecbf0bb27790e29a6271725cb34f | [
"MIT"
] | null | null | null | """
Mask R-CNN
Base Configurations class.
Copyright (c) 2017 Matterport, Inc.
Licensed under the MIT License (see LICENSE for details)
Written by Waleed Abdulla
"""
import math
import numpy as np
# Base Configuration Class
# Don't use this class directly. Instead, sub-class it and override
# the configurations you need to change.
class Config(object):
    """Base Mask R-CNN configuration.

    Holds the default hyper-parameters. Create a sub-class that inherits
    from this one and override the attributes you need to change; derived
    values (batch size, image shape, meta vector size) are computed in
    __init__.
    """

    # Configuration name, e.g. 'COCO' or 'Experiment 3'. Set in sub-classes
    # so code can tell experiments apart.
    NAME = None  # Override in sub-classes

    # GPUs to train on; keep at 1 for CPU training.
    GPU_COUNT = 1

    # Images per GPU per step. A 12GB card typically fits two 1024x1024
    # images; raise as far as memory allows.
    IMAGES_PER_GPU = 2

    # Gradient steps per epoch. Independent of dataset size; smaller values
    # give more frequent TensorBoard updates but more time in validation.
    STEPS_PER_EPOCH = 1000

    # Validation batches run at the end of every epoch; more is more
    # accurate but slower.
    VALIDATION_STEPS = 50

    # Feature-extraction backbone: resnet50 or resnet101.
    BACKBONE = "resnet101"

    # Downsampling factor of each FPN pyramid level (ResNet101 values).
    BACKBONE_STRIDES = [4, 8, 16, 32, 64]

    # Total classes, background included.
    NUM_CLASSES = 1  # Override in sub-classes

    # Anchor side lengths in pixels.
    RPN_ANCHOR_SCALES = (128, 256, 512)

    # Anchor aspect ratios (width/height): 1 is square, 0.5 is wide.
    RPN_ANCHOR_RATIOS = [0.5, 1, 2]

    # Cell stride for anchor placement on the backbone feature map
    # (1 = every cell, 2 = every other cell, ...).
    RPN_ANCHOR_STRIDE = 1

    # NMS threshold for RPN proposals; raise during training for more
    # proposals.
    RPN_NMS_THRESHOLD = 0.7

    # Anchors sampled per image when training the RPN.
    RPN_TRAIN_ANCHORS_PER_IMAGE = 256

    # Proposals kept after NMS, for training and for inference.
    POST_NMS_ROIS_TRAINING = 2000
    POST_NMS_ROIS_INFERENCE = 1000

    # Shrink instance masks to cut memory use with high-resolution images.
    USE_MINI_MASK = True
    MINI_MASK_SHAPE = (56, 56)  # (height, width) of the mini-mask

    # Input resizing: short side >= IMAGE_MIN_DIM, long side <= IMAGE_MAX_DIM
    # (the max wins when both cannot hold). Only 'square' mode — zero-padding
    # to (MAX_DIM, MAX_DIM) — is currently supported.
    IMAGE_RESIZE_MODE = "square"
    IMAGE_MIN_DIM = 800
    IMAGE_MAX_DIM = 1024

    # Per-channel RGB mean subtracted from input images.
    MEAN_PIXEL = np.array([123.7, 116.8, 103.9])

    # ROIs fed to the classifier/mask heads per image. The paper uses 512,
    # but the RPN rarely yields enough positives to keep a 1:3 ratio at that
    # size; adjust RPN_NMS_THRESHOLD to get more proposals.
    TRAIN_ROIS_PER_IMAGE = 200

    # Fraction of those ROIs that are positives.
    ROI_POSITIVE_RATIO = 0.33

    # ROI-pooling output sizes and target mask shape.
    POOL_SIZE = 7
    MASK_POOL_SIZE = 14
    MASK_SHAPE = [28, 28]

    # Cap on ground-truth instances per image.
    MAX_GT_INSTANCES = 100

    # Std-dev used to normalize box refinements (RPN and final head).
    RPN_BBOX_STD_DEV = np.array([0.1, 0.1, 0.2, 0.2])
    BBOX_STD_DEV = np.array([0.1, 0.1, 0.2, 0.2])

    # Cap on final detections per image.
    DETECTION_MAX_INSTANCES = 100

    # Detections below this confidence are dropped.
    DETECTION_MIN_CONFIDENCE = 0.5

    # NMS threshold applied to final detections.
    DETECTION_NMS_THRESHOLD = 0.3

    # Optimizer settings. The paper's lr=0.02 makes weights explode on
    # TensorFlow, likely due to optimizer implementation differences.
    LEARNING_RATE = 0.001
    LEARNING_MOMENTUM = 0.9

    # L2 weight-decay regularization.
    WEIGHT_DECAY = 0.0001

    # Train heads on RPN proposals (True, the normal case) or on externally
    # generated ROIs (False, e.g. to debug the heads without the RPN).
    USE_RPN_ROIS = True

    # BatchNorm mode: None trains BN layers; False freezes them (good for
    # small batches); True forces training mode even at inference (avoid).
    TRAIN_BN = False  # Defaulting to False since batch size is often small

    # Gradient clipping norm.
    GRADIENT_CLIP_NORM = 5.0

    def __init__(self):
        """Compute the attributes derived from the settings above."""
        # Effective batch size across all GPUs.
        self.BATCH_SIZE = self.IMAGES_PER_GPU * self.GPU_COUNT
        # Input image shape depends on the resize mode.
        if self.IMAGE_RESIZE_MODE == "crop":
            self.IMAGE_SHAPE = np.array([self.IMAGE_MIN_DIM, self.IMAGE_MIN_DIM, 3])
        else:
            self.IMAGE_SHAPE = np.array([self.IMAGE_MAX_DIM, self.IMAGE_MAX_DIM, 3])
        # Length of the image meta-data vector; see compose_image_meta().
        self.IMAGE_META_SIZE = 1 + 3 + 3 + 4 + 1 + self.NUM_CLASSES

    def display(self):
        """Print every non-callable, non-dunder configuration value."""
        print("\nConfigurations:")
        for a in dir(self):
            if not a.startswith("__") and not callable(getattr(self, a)):
                print("{:30} {}".format(a, getattr(self, a)))
        print("\n")
| 36.556757 | 84 | 0.69067 |
import math
import numpy as np
# the configurations you need to change.
class Config(object):
# Name the configurations. For example, 'COCO', 'Experiment 3', ...etc.
# Useful if your code needs to do things differently depending on which
# experiment is running.
NAME = None # Override in sub-classes
# NUMBER OF GPUs to use. For CPU training, use 1
GPU_COUNT = 1
# Number of images to train with on each GPU. A 12GB GPU can typically
# handle 2 images of 1024x1024px.
# Adjust based on your GPU memory and image sizes. Use the highest
# number that your GPU can handle for best performance.
IMAGES_PER_GPU = 2
# Number of training steps per epoch
# This doesn't need to match the size of the training set. Tensorboard
# a lot of time on validation stats.
STEPS_PER_EPOCH = 1000
# Number of validation steps to run at the end of every training epoch.
# A bigger number improves accuracy of validation stats, but slows
# down the training.
VALIDATION_STEPS = 50
# Backbone network architecture
# Supported values are: resnet50, resnet101
BACKBONE = "resnet101"
# The strides of each layer of the FPN Pyramid. These values
# are based on a Resnet101 backbone.
BACKBONE_STRIDES = [4, 8, 16, 32, 64]
# Number of classification classes (including background)
NUM_CLASSES = 1 # Override in sub-classes
# Length of square anchor side in pixels
RPN_ANCHOR_SCALES = (128, 256, 512)
# Ratios of anchors at each cell (width/height)
# A value of 1 represents a square anchor, and 0.5 is a wide anchor
RPN_ANCHOR_RATIOS = [0.5, 1, 2]
# Anchor stride
# If 1 then anchors are created for each cell in the backbone feature map.
# If 2, then anchors are created for every other cell, and so on.
RPN_ANCHOR_STRIDE = 1
# Non-max suppression threshold to filter RPN proposals.
# You can increase this during training to generate more propsals.
RPN_NMS_THRESHOLD = 0.7
# How many anchors per image to use for RPN training
RPN_TRAIN_ANCHORS_PER_IMAGE = 256
# ROIs kept after non-maximum supression (training and inference)
POST_NMS_ROIS_TRAINING = 2000
POST_NMS_ROIS_INFERENCE = 1000
# If enabled, resizes instance masks to a smaller size to reduce
# memory load. Recommended when using high-resolution images.
USE_MINI_MASK = True
MINI_MASK_SHAPE = (56, 56) # (height, width) of the mini-mask
# Input image resizing
# Images are resized such that the small side is IMAGE_MIN_DIM and
# the long side is <= IMAGE_MAX_DIM. If both conditions can't be
IMAGE_RESIZE_MODE = "square"
IMAGE_MIN_DIM = 800
IMAGE_MAX_DIM = 1024
MEAN_PIXEL = np.array([123.7, 116.8, 103.9])
# enough positive proposals to fill this and keep a positive:negative
# ratio of 1:3. You can increase the number of proposals by adjusting
# the RPN NMS threshold.
TRAIN_ROIS_PER_IMAGE = 200
# Percent of positive ROIs used to train classifier/mask heads
ROI_POSITIVE_RATIO = 0.33
# Pooled ROIs
POOL_SIZE = 7
MASK_POOL_SIZE = 14
MASK_SHAPE = [28, 28]
# Maximum number of ground truth instances to use in one image
MAX_GT_INSTANCES = 100
# Bounding box refinement standard deviation for RPN and final detections.
RPN_BBOX_STD_DEV = np.array([0.1, 0.1, 0.2, 0.2])
BBOX_STD_DEV = np.array([0.1, 0.1, 0.2, 0.2])
# Max number of final detections
DETECTION_MAX_INSTANCES = 100
# Minimum probability value to accept a detected instance
# ROIs below this threshold are skipped
DETECTION_MIN_CONFIDENCE = 0.5
# Non-maximum suppression threshold for detection
DETECTION_NMS_THRESHOLD = 0.3
# Learning rate and momentum
# The Mask RCNN paper uses lr=0.02, but on TensorFlow it causes
# weights to explode. Likely due to differences in optimzer
# implementation.
LEARNING_RATE = 0.001
LEARNING_MOMENTUM = 0.9
# Weight decay regularization
WEIGHT_DECAY = 0.0001
# Use RPN ROIs or externally generated ROIs for training
# Keep this True for most situations. Set to False if you want to train
# the head branches on ROI generated by code rather than the ROIs from
# the RPN. For example, to debug the classifier head without having to
# train the RPN.
USE_RPN_ROIS = True
# Train or freeze batch normalization layers
# None: Train BN layers. This is the normal mode
# False: Freeze BN layers. Good when using a small batch size
# True: (don't use). Set layer in training mode even when inferencing
TRAIN_BN = False
GRADIENT_CLIP_NORM = 5.0
def __init__(self):
self.BATCH_SIZE = self.IMAGES_PER_GPU * self.GPU_COUNT
if self.IMAGE_RESIZE_MODE == "crop":
self.IMAGE_SHAPE = np.array([self.IMAGE_MIN_DIM, self.IMAGE_MIN_DIM, 3])
else:
self.IMAGE_SHAPE = np.array([self.IMAGE_MAX_DIM, self.IMAGE_MAX_DIM, 3])
self.IMAGE_META_SIZE = 1 + 3 + 3 + 4 + 1 + self.NUM_CLASSES
def display(self):
print("\nConfigurations:")
for a in dir(self):
if not a.startswith("__") and not callable(getattr(self, a)):
print("{:30} {}".format(a, getattr(self, a)))
print("\n")
| true | true |
f72c04c84917b4d25698a444e88719304b1f71e7 | 17,176 | py | Python | test/test_utils/vcfutils/test_parser.py | dylex/wecall | 35d24cefa4fba549e737cd99329ae1b17dd0156b | [
"MIT"
] | 8 | 2018-10-08T15:47:21.000Z | 2021-11-09T07:13:05.000Z | test/test_utils/vcfutils/test_parser.py | dylex/wecall | 35d24cefa4fba549e737cd99329ae1b17dd0156b | [
"MIT"
] | 4 | 2018-11-05T09:16:27.000Z | 2020-04-09T12:32:56.000Z | test/test_utils/vcfutils/test_parser.py | dylex/wecall | 35d24cefa4fba549e737cd99329ae1b17dd0156b | [
"MIT"
] | 4 | 2019-09-03T15:46:39.000Z | 2021-06-04T07:28:33.000Z | # All content Copyright (C) 2018 Genomics plc
import os
import re
import unittest
from wecall.genomics.variant import Variant
from wecall.vcfutils.genotype_call import GenotypeCall
from wecall.vcfutils.parser import VCFReader, VCFReaderContextManager, decode_VCF_string, \
parse_VCF_comma_separated_pair_value
from wecall.vcfutils.schema import Schema
from wecall.vcfutils.writer import VCFWriterContextManager
from wecall_test_drivers.base_test import BaseTest
class ParserTest(BaseTest):
def setUp(self):
BaseTest.setUp(self)
self.data_dir = os.path.join(os.path.dirname(__file__), "example_data")
def variant_is_equal(self, var1, var2):
self.assertEqual(var1.chrom, var2[0])
self.assertEqual(var1.pos_from, var2[1])
self.assertEqual(var1.ids, var2[2])
self.assertEqual(var1.ref, var2[3])
self.assertEqual(var1.alt, var2[4])
def test_read_VCF_line(self):
with open(os.path.join(self.data_dir, "vcf_example.vcf"), "r") as vcf_file:
vcf_handler = VCFReader(vcf_file)
vcf_handler.read_header()
self.assertEqual(len(vcf_handler.header.file_metadata), 7)
self.assertEqual(len(vcf_handler.header.samples), 2)
records = list(vcf_handler.read_records())
self.assertEqual(len(records), 2)
# test first record fully
self.variant_is_equal(records[0], ("20", 9, set(), "CT", "C")) # zero=based representation
self.assertEqual(records[0].filters, set())
self.assertEqual(records[0].passes_filter, True)
self.assertEqual(len(records[0].info), 12)
self.assertEqual(records[0].info["PP"], [3000])
self.assertEqual(records[0].info["DP"], [250])
self.assertEqual(records[0].info["DPR"], [140])
self.assertEqual(records[0].info["DPF"], [110])
self.assertEqual(records[0].info["VC"], [100])
self.assertEqual(records[0].info["VCR"], [49])
self.assertEqual(records[0].info["VCF"], [51])
self.assertEqual(records[0].info["ABPV"], [0.2])
self.assertEqual(records[0].info["SBPV"], [0.3])
self.assertEqual(records[0].info["MQ"], [70])
self.assertEqual(records[0].info["BR"], [31])
self.assertEqual(records[0].info["QD"], [None])
self.assertEqual(records[0].samples, ['sample1', 'sample2'])
self.assertEqual(records[0].sample_info.get_field('sample1', "GT"), GenotypeCall("0/1"))
self.assertEqual(records[0].sample_info.get_field('sample2', "GT"), GenotypeCall("1/1"))
self.assertEqual(records[0].sample_info.get_field('sample1', 'PL'), [3000, 0, 3000])
self.assertEqual(records[0].sample_info.get_field('sample2', 'PL'), [114, 0, 0])
self.assertEqual(records[0].sample_info.get_field('sample1', 'GQ'), [1000])
self.assertEqual(records[0].sample_info.get_field('sample2', 'GQ'), [None])
# check that ordering in the dictionaries is preserved
expected_keys = ["PP", "DP", "DPR", "DPF", "VC", "VCR",
"VCF", "ABPV", "SBPV", "MQ", "BR", "QD"]
self.assertEqual(list(records[0].info.keys()), expected_keys)
# ensure last record is still being read correctly
self.variant_is_equal(records[-1], ("20", 10, set(), "T", "G"))
def test_reads_simple_file(self):
filename = os.path.join(self.work_dir, "test.vcf")
with VCFWriterContextManager(filename) as left_vcf:
left_vcf.write_variant(Variant("1", 1, "A", "T"))
left_vcf.write_variant(Variant("2", 1, "A", "T"))
left_vcf.write_variant(Variant("10", 1, "A", "T"))
expected_variants = [
Variant("1", 1, "A", "T"),
Variant("2", 1, "A", "T"),
Variant("10", 1, "A", "T"),
]
with VCFReaderContextManager(filename) as vcf_reader:
actual_variants = [record.variant for record in vcf_reader.read_records()]
self.assertEqual(expected_variants, actual_variants)
class TestVCFStringParsing(unittest.TestCase):
def test_should_decode_empty_VCF_string(self):
self.assertEqual('', decode_VCF_string('""'))
def test_should_decode_simple_VCF_string(self):
self.assertEqual('foo', decode_VCF_string('"foo"'))
def test_should_decode_VCF_string_with_single_double_quote(self):
self.assertEqual('"', decode_VCF_string('"\\""'))
def test_should_decode_VCF_string_with_single_backslash(self):
self.assertEqual('\\', decode_VCF_string('"\\\\"'))
def test_should_decode_complex_VCF_string(self):
self.assertEqual(
'abc\\def"ghi',
decode_VCF_string('"abc\\\\def\\\"ghi"'))
def test_should_fail_to_decode_unquoted_string(self):
with self.assertRaisesRegex(Exception, 'expected a VCF encoded string: \'foo\''):
print(decode_VCF_string('foo'))
def test_should_fail_to_decode_string_with_stray_backslash(self):
with self.assertRaisesRegex(Exception, re.escape('expected a VCF encoded string: \'"\\\\"\'')):
print(decode_VCF_string('"\\"'))
def test_should_fail_to_decode_string_with_unencoded_double_quote(self):
with self.assertRaisesRegex(Exception, 'expected a VCF encoded string: \'"\""\''):
print(decode_VCF_string('"\""'))
class TestCommaSeparatedPairParser(unittest.TestCase):
def test_should_parse_simple_comma_separated_pairs(self):
parsed = parse_VCF_comma_separated_pair_value('<first=foo,second=bar>')
expected = {'first': 'foo', 'second': 'bar'}
self.assertEqual(expected, parsed)
def test_should_parse_empty_simple_value(self):
parsed = parse_VCF_comma_separated_pair_value('<first=,second=bar>')
expected = {'first': '', 'second': 'bar'}
self.assertEqual(expected, parsed)
def test_should_fail_to_parse_non_bracketed_string(self):
with self.assertRaisesRegex(Exception, 'expected braced key-value pairs: \'first=foo\''):
print(parse_VCF_comma_separated_pair_value('first=foo'))
def test_should_parse_quoted_comma_separated_pairs(self):
parsed = parse_VCF_comma_separated_pair_value(
'<first="foo",second="bar">')
expected = {'first': '"foo"', 'second': '"bar"'}
self.assertEqual(expected, parsed)
def test_should_parse_empty_quoted_value(self):
parsed = parse_VCF_comma_separated_pair_value('<first="">')
expected = {'first': '""'}
self.assertEqual(expected, parsed)
def test_should_parse_values_with_quoted_commas(self):
parsed = parse_VCF_comma_separated_pair_value('<first="foo,bar">')
expected = {'first': '"foo,bar"'}
self.assertEqual(expected, parsed)
def test_should_parse_values_with_quoted_double_quote(self):
parsed = parse_VCF_comma_separated_pair_value('<first="foo\\\"bar">')
expected = {'first': '"foo\\\"bar"'}
self.assertEqual(expected, parsed)
def test_should_fail_with_badly_quoted_double_quote(self):
with self.assertRaisesRegex(Exception, 'failed to parse key-value pairs from \'<first="foo\"bar">\''):
print(parse_VCF_comma_separated_pair_value('<first="foo\"bar">'))
class TestHeaderParsing(unittest.TestCase):
# version parsing
def test_should_parse_well_formatted_version(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
self.assertEqual(expected, header)
def test_should_store_header_as_attribute_of_parser(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
self.assertEqual(header, reader.header)
def test_should_fail_with_unexpected_version(self):
lines = [
'##fileformat=VCFv0.0\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(Exception, 'unexpected version: \'0.0\''):
print(reader.read_header())
def test_should_fail_to_parse_malformed_header_line(self):
lines = [
'##fileformat=VCFv4.2\n',
'##malformed line!\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(Exception, 'failed to parse header line: \'##malformed line!\''):
print(reader.read_header())
def test_should_fail_if_version_is_not_defined(self):
lines = [
'##notFileformat=foo\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(Exception, 'unrecognised file format line: \'##notFileformat=foo\''):
print(reader.read_header())
# file metadata parsing
def test_should_parse_well_formatted_file_metadata(self):
lines = [
'##fileformat=VCFv4.2\n',
'##fileDate=2013-07-08\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.file_metadata['fileDate'] = '2013-07-08'
self.assertEqual(expected, header)
# info data parsing
def test_should_parse_minimal_info_header_fields(self):
lines = [
'##fileformat=VCFv4.2\n',
'##INFO=<ID=key,Number=1,Type=String,Description="description">\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.set_info_data('key', '1', 'String', 'description')
self.assertEqual(expected, header)
def test_should_parse_all_info_header_fields(self):
lines = [
'##fileformat=VCFv4.2\n',
'##INFO=<ID=key,Number=1,Type=String,Description="description",Source="foo",Version="bar">\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.set_info_data(
'key',
'1',
'String',
'description',
'foo',
'bar')
self.assertEqual(expected, header)
# sample data parsing
def test_should_parse_valid_sample_header_fields(self):
lines = [
'##fileformat=VCFv4.2\n',
'##FORMAT=<ID=key,Number=1,Type=String,Description="description">\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.set_sample_data('key', '1', 'String', 'description')
self.assertEqual(expected, header)
# filter parsing
def test_should_parse_valid_filter_header_fields(self):
lines = [
'##fileformat=VCFv4.2\n',
'##FILTER=<ID=key,Description="description">\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.set_filter('key', 'description')
self.assertEqual(expected, header)
# contig parsing
def test_should_parse_valid_contig_header_fields(self):
lines = [
'##fileformat=VCFv4.2\n',
'##contig=<ID=key,length=666>\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.set_contig('key', 666)
self.assertEqual(expected, header)
# column headers + sample names
def test_should_parse_required_column_headers(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
self.assertEqual(expected, header)
def test_should_fail_without_required_column_headers(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(
Exception,
re.escape("expected column header line: '#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER'")
):
print(reader.read_header())
def test_should_parse_column_headers_with_format_but_no_samples(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
self.assertEqual(expected, header)
def test_should_parse_column_headers_with_complex_sample_names(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tOWEN_TOBY-RHYS.JONES\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.samples = ['OWEN_TOBY-RHYS.JONES']
self.assertEqual(expected, header)
def test_should_not_parse_column_headers_with_sample_names_containing_white_space(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tOWEN JONES\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(
Exception,
re.escape(
'expected column header line: '
'\'#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFORMAT\\tOWEN JONES\''
)
):
print(reader.read_header())
def test_should_fail_with_malformed_format_column_header(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFOO\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(
Exception,
re.escape('expected column header line: \'#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFOO\'')
):
print(reader.read_header())
def test_should_parse_column_headers_with_samples(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tFOO\tBAR\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.samples.append('FOO')
expected.samples.append('BAR')
self.assertEqual(expected, header)
def test_should_fail_if_column_header_line_is_missing(self):
lines = [
'##fileformat=VCFv4.2\n',
'the line after the header\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(Exception, 'expected column header line: \'the line after the header\''):
print(reader.read_header())
def test_should_fail_on_unexpected_EOF(self):
lines = [
'##fileformat=VCFv4.2\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(Exception, 'unexpected EOF'):
print(reader.read_header())
class TestRecordParsing(unittest.TestCase):
# version parsing
def test_should_parse_single_record(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
'chr0\t0\t.\tP\tQ\t0\tPASS\t\n',
]
reader = VCFReader(iter(lines))
record_count = len(list(reader.read_records()))
self.assertEqual(1, record_count)
def test_should_parse_header_when_parsing_records(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
'chr0\t0\t.\tP\tQ\t0\tPASS\t\n',
]
reader = VCFReader(iter(lines))
self.assertIsNone(reader.header)
list(reader.read_records())
self.assertIsNotNone(reader.header)
def test_should_parse_empty_file(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
record_count = len(list(reader.read_records()))
self.assertEqual(0, record_count)
| 35.783333 | 120 | 0.615859 |
import os
import re
import unittest
from wecall.genomics.variant import Variant
from wecall.vcfutils.genotype_call import GenotypeCall
from wecall.vcfutils.parser import VCFReader, VCFReaderContextManager, decode_VCF_string, \
parse_VCF_comma_separated_pair_value
from wecall.vcfutils.schema import Schema
from wecall.vcfutils.writer import VCFWriterContextManager
from wecall_test_drivers.base_test import BaseTest
class ParserTest(BaseTest):
def setUp(self):
BaseTest.setUp(self)
self.data_dir = os.path.join(os.path.dirname(__file__), "example_data")
def variant_is_equal(self, var1, var2):
self.assertEqual(var1.chrom, var2[0])
self.assertEqual(var1.pos_from, var2[1])
self.assertEqual(var1.ids, var2[2])
self.assertEqual(var1.ref, var2[3])
self.assertEqual(var1.alt, var2[4])
def test_read_VCF_line(self):
with open(os.path.join(self.data_dir, "vcf_example.vcf"), "r") as vcf_file:
vcf_handler = VCFReader(vcf_file)
vcf_handler.read_header()
self.assertEqual(len(vcf_handler.header.file_metadata), 7)
self.assertEqual(len(vcf_handler.header.samples), 2)
records = list(vcf_handler.read_records())
self.assertEqual(len(records), 2)
self.variant_is_equal(records[0], ("20", 9, set(), "CT", "C"))
self.assertEqual(records[0].filters, set())
self.assertEqual(records[0].passes_filter, True)
self.assertEqual(len(records[0].info), 12)
self.assertEqual(records[0].info["PP"], [3000])
self.assertEqual(records[0].info["DP"], [250])
self.assertEqual(records[0].info["DPR"], [140])
self.assertEqual(records[0].info["DPF"], [110])
self.assertEqual(records[0].info["VC"], [100])
self.assertEqual(records[0].info["VCR"], [49])
self.assertEqual(records[0].info["VCF"], [51])
self.assertEqual(records[0].info["ABPV"], [0.2])
self.assertEqual(records[0].info["SBPV"], [0.3])
self.assertEqual(records[0].info["MQ"], [70])
self.assertEqual(records[0].info["BR"], [31])
self.assertEqual(records[0].info["QD"], [None])
self.assertEqual(records[0].samples, ['sample1', 'sample2'])
self.assertEqual(records[0].sample_info.get_field('sample1', "GT"), GenotypeCall("0/1"))
self.assertEqual(records[0].sample_info.get_field('sample2', "GT"), GenotypeCall("1/1"))
self.assertEqual(records[0].sample_info.get_field('sample1', 'PL'), [3000, 0, 3000])
self.assertEqual(records[0].sample_info.get_field('sample2', 'PL'), [114, 0, 0])
self.assertEqual(records[0].sample_info.get_field('sample1', 'GQ'), [1000])
self.assertEqual(records[0].sample_info.get_field('sample2', 'GQ'), [None])
expected_keys = ["PP", "DP", "DPR", "DPF", "VC", "VCR",
"VCF", "ABPV", "SBPV", "MQ", "BR", "QD"]
self.assertEqual(list(records[0].info.keys()), expected_keys)
self.variant_is_equal(records[-1], ("20", 10, set(), "T", "G"))
def test_reads_simple_file(self):
filename = os.path.join(self.work_dir, "test.vcf")
with VCFWriterContextManager(filename) as left_vcf:
left_vcf.write_variant(Variant("1", 1, "A", "T"))
left_vcf.write_variant(Variant("2", 1, "A", "T"))
left_vcf.write_variant(Variant("10", 1, "A", "T"))
expected_variants = [
Variant("1", 1, "A", "T"),
Variant("2", 1, "A", "T"),
Variant("10", 1, "A", "T"),
]
with VCFReaderContextManager(filename) as vcf_reader:
actual_variants = [record.variant for record in vcf_reader.read_records()]
self.assertEqual(expected_variants, actual_variants)
class TestVCFStringParsing(unittest.TestCase):
def test_should_decode_empty_VCF_string(self):
self.assertEqual('', decode_VCF_string('""'))
def test_should_decode_simple_VCF_string(self):
self.assertEqual('foo', decode_VCF_string('"foo"'))
def test_should_decode_VCF_string_with_single_double_quote(self):
self.assertEqual('"', decode_VCF_string('"\\""'))
def test_should_decode_VCF_string_with_single_backslash(self):
self.assertEqual('\\', decode_VCF_string('"\\\\"'))
def test_should_decode_complex_VCF_string(self):
self.assertEqual(
'abc\\def"ghi',
decode_VCF_string('"abc\\\\def\\\"ghi"'))
def test_should_fail_to_decode_unquoted_string(self):
with self.assertRaisesRegex(Exception, 'expected a VCF encoded string: \'foo\''):
print(decode_VCF_string('foo'))
def test_should_fail_to_decode_string_with_stray_backslash(self):
with self.assertRaisesRegex(Exception, re.escape('expected a VCF encoded string: \'"\\\\"\'')):
print(decode_VCF_string('"\\"'))
def test_should_fail_to_decode_string_with_unencoded_double_quote(self):
with self.assertRaisesRegex(Exception, 'expected a VCF encoded string: \'"\""\''):
print(decode_VCF_string('"\""'))
class TestCommaSeparatedPairParser(unittest.TestCase):
def test_should_parse_simple_comma_separated_pairs(self):
parsed = parse_VCF_comma_separated_pair_value('<first=foo,second=bar>')
expected = {'first': 'foo', 'second': 'bar'}
self.assertEqual(expected, parsed)
def test_should_parse_empty_simple_value(self):
parsed = parse_VCF_comma_separated_pair_value('<first=,second=bar>')
expected = {'first': '', 'second': 'bar'}
self.assertEqual(expected, parsed)
def test_should_fail_to_parse_non_bracketed_string(self):
with self.assertRaisesRegex(Exception, 'expected braced key-value pairs: \'first=foo\''):
print(parse_VCF_comma_separated_pair_value('first=foo'))
def test_should_parse_quoted_comma_separated_pairs(self):
parsed = parse_VCF_comma_separated_pair_value(
'<first="foo",second="bar">')
expected = {'first': '"foo"', 'second': '"bar"'}
self.assertEqual(expected, parsed)
def test_should_parse_empty_quoted_value(self):
parsed = parse_VCF_comma_separated_pair_value('<first="">')
expected = {'first': '""'}
self.assertEqual(expected, parsed)
def test_should_parse_values_with_quoted_commas(self):
parsed = parse_VCF_comma_separated_pair_value('<first="foo,bar">')
expected = {'first': '"foo,bar"'}
self.assertEqual(expected, parsed)
def test_should_parse_values_with_quoted_double_quote(self):
parsed = parse_VCF_comma_separated_pair_value('<first="foo\\\"bar">')
expected = {'first': '"foo\\\"bar"'}
self.assertEqual(expected, parsed)
def test_should_fail_with_badly_quoted_double_quote(self):
with self.assertRaisesRegex(Exception, 'failed to parse key-value pairs from \'<first="foo\"bar">\''):
print(parse_VCF_comma_separated_pair_value('<first="foo\"bar">'))
class TestHeaderParsing(unittest.TestCase):
def test_should_parse_well_formatted_version(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
self.assertEqual(expected, header)
def test_should_store_header_as_attribute_of_parser(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
self.assertEqual(header, reader.header)
def test_should_fail_with_unexpected_version(self):
lines = [
'##fileformat=VCFv0.0\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(Exception, 'unexpected version: \'0.0\''):
print(reader.read_header())
def test_should_fail_to_parse_malformed_header_line(self):
lines = [
'##fileformat=VCFv4.2\n',
'##malformed line!\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(Exception, 'failed to parse header line: \'ader.read_header())
def test_should_fail_if_version_is_not_defined(self):
lines = [
'##notFileformat=foo\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(Exception, 'unrecognised file format line: \'er.read_header())
def test_should_parse_well_formatted_file_metadata(self):
lines = [
'##fileformat=VCFv4.2\n',
'##fileDate=2013-07-08\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.file_metadata['fileDate'] = '2013-07-08'
self.assertEqual(expected, header)
def test_should_parse_minimal_info_header_fields(self):
lines = [
'##fileformat=VCFv4.2\n',
'##INFO=<ID=key,Number=1,Type=String,Description="description">\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.set_info_data('key', '1', 'String', 'description')
self.assertEqual(expected, header)
def test_should_parse_all_info_header_fields(self):
lines = [
'##fileformat=VCFv4.2\n',
'##INFO=<ID=key,Number=1,Type=String,Description="description",Source="foo",Version="bar">\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.set_info_data(
'key',
'1',
'String',
'description',
'foo',
'bar')
self.assertEqual(expected, header)
def test_should_parse_valid_sample_header_fields(self):
lines = [
'##fileformat=VCFv4.2\n',
'##FORMAT=<ID=key,Number=1,Type=String,Description="description">\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.set_sample_data('key', '1', 'String', 'description')
self.assertEqual(expected, header)
def test_should_parse_valid_filter_header_fields(self):
lines = [
'##fileformat=VCFv4.2\n',
'##FILTER=<ID=key,Description="description">\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.set_filter('key', 'description')
self.assertEqual(expected, header)
def test_should_parse_valid_contig_header_fields(self):
lines = [
'##fileformat=VCFv4.2\n',
'##contig=<ID=key,length=666>\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.set_contig('key', 666)
self.assertEqual(expected, header)
def test_should_parse_required_column_headers(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
self.assertEqual(expected, header)
def test_should_fail_without_required_column_headers(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(
Exception,
re.escape("expected column header line: '#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER'")
):
print(reader.read_header())
def test_should_parse_column_headers_with_format_but_no_samples(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
self.assertEqual(expected, header)
def test_should_parse_column_headers_with_complex_sample_names(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tOWEN_TOBY-RHYS.JONES\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.samples = ['OWEN_TOBY-RHYS.JONES']
self.assertEqual(expected, header)
def test_should_not_parse_column_headers_with_sample_names_containing_white_space(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tOWEN JONES\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(
Exception,
re.escape(
'expected column header line: '
'\'
)
):
print(reader.read_header())
def test_should_fail_with_malformed_format_column_header(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFOO\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(
Exception,
re.escape('expected column header line: \'
):
print(reader.read_header())
def test_should_parse_column_headers_with_samples(self):
lines = [
'##fileformat=VCFv4.2\n',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tFOO\tBAR\n',
]
reader = VCFReader(iter(lines))
header = reader.read_header()
expected = Schema()
expected.samples.append('FOO')
expected.samples.append('BAR')
self.assertEqual(expected, header)
def test_should_fail_if_column_header_line_is_missing(self):
lines = [
'##fileformat=VCFv4.2\n',
'the line after the header\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(Exception, 'expected column header line: \'the line after the header\''):
print(reader.read_header())
def test_should_fail_on_unexpected_EOF(self):
lines = [
'##fileformat=VCFv4.2\n',
]
reader = VCFReader(iter(lines))
with self.assertRaisesRegex(Exception, 'unexpected EOF'):
print(reader.read_header())
class TestRecordParsing(unittest.TestCase):
    """Behaviour of VCFReader.read_records on minimal well-formed inputs."""

    @staticmethod
    def _reader(body_lines):
        # Build a reader over the standard two-line header plus the given
        # record lines.
        header = [
            '##fileformat=VCFv4.2\n',
            '#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n',
        ]
        return VCFReader(iter(header + body_lines))

    def test_should_parse_single_record(self):
        reader = self._reader(['chr0\t0\t.\tP\tQ\t0\tPASS\t\n'])
        self.assertEqual(1, len(list(reader.read_records())))

    def test_should_parse_header_when_parsing_records(self):
        # read_records() must lazily parse the header as a side effect.
        reader = self._reader(['chr0\t0\t.\tP\tQ\t0\tPASS\t\n'])
        self.assertIsNone(reader.header)
        list(reader.read_records())
        self.assertIsNotNone(reader.header)

    def test_should_parse_empty_file(self):
        # A header with no record lines yields zero records.
        reader = self._reader([])
        self.assertEqual(0, len(list(reader.read_records())))
| true | true |
f72c05338ce0e13464f88b0d16b63a74fd9ad1e2 | 6,580 | py | Python | recognition/alexnet_PD_finetuning.py | ogrenenmakine/VCL-PL-Semi-Supervised-Learning-from-Noisy-Web-Data-with-Variational-Contrastive-Learning | baef25837ce7e073d03f69a095d1992aa18dd2d5 | [
"MIT"
] | null | null | null | recognition/alexnet_PD_finetuning.py | ogrenenmakine/VCL-PL-Semi-Supervised-Learning-from-Noisy-Web-Data-with-Variational-Contrastive-Learning | baef25837ce7e073d03f69a095d1992aa18dd2d5 | [
"MIT"
] | null | null | null | recognition/alexnet_PD_finetuning.py | ogrenenmakine/VCL-PL-Semi-Supervised-Learning-from-Noisy-Web-Data-with-Variational-Contrastive-Learning | baef25837ce7e073d03f69a095d1992aa18dd2d5 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
import math
from torch import nn
from torch.autograd import Variable
import torch
import torch.nn.functional as F
import torchvision
import torch.utils.data as data
import torchvision.transforms as transforms
import torchvision.utils as vutils
import numpy as np
from PIL import Image
import os
import matplotlib.pyplot as plt
import time
from torchsummary import summary
import config
from facenet_pytorch import training
from torch.utils.data import DataLoader, SubsetRandomSampler
from torch import optim
from torch.optim.lr_scheduler import MultiStepLR
from torch.utils.tensorboard import SummaryWriter
from torchvision import datasets, transforms
from PIL import Image
import glob
from utils.collate import collate_custom
import torchvision.models as models
from util import AverageMeter, learning_rate_decay, Logger
import collections
# In[ ]:
# NOTE(review): this pipeline is defined but never referenced below — the
# CelebA dataset class builds its own transforms internally; confirm whether
# this can be removed.
transform = transforms.Compose([
    transforms.RandomHorizontalFlip(),
    transforms.RandomApply([
        transforms.ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1)
    ], p=0.8),
    transforms.RandomGrayscale(0.2),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
])

# Root directory for the dataset (path name suggests pre-resized 128x128
# CelebA crops).
data_root = "/home/mehmetyavuz/datasets/CelebA128/"
# Standard CelebA attribute annotation file.
attr_root = "/home/mehmetyavuz/datasets/list_attr_celeba.txt"
# Number of workers for each dataloader.
workers = 8
# Batch size during training.
batch_size = 64
# Spatial size of training images. All images will be resized to this
# size using a transformer.
image_size = (128,128)
# Number of fine-tuning epochs.
epochs = 100
class CelebA(data.Dataset):
    """CelebA face-attribute dataset.

    Reads image file names and binary attribute labels from the standard
    ``list_attr_celeba.txt`` annotation file and serves ``(image, attributes)``
    pairs, with labels remapped from {-1, +1} to {0.0, 1.0} floats.

    Args:
        data_path: directory containing the (pre-resized) image files.
        attr_path: path to ``list_attr_celeba.txt``.
        image_size: accepted for interface compatibility; not used here.
        mode: 'train', 'valid' or 'test'; selects the standard CelebA split
            boundaries (162770 / 182637).
        selected_attrs: attribute names to load, in the desired column order.
    """

    def __init__(self, data_path, attr_path, image_size, mode, selected_attrs):
        super(CelebA, self).__init__()
        self.data_path = data_path
        # Close the annotation file deterministically instead of leaking the
        # handle (the original used a bare open()).
        with open(attr_path, 'r', encoding='utf-8') as attr_file:
            att_list = attr_file.readlines()[1].split()
        atts = [att_list.index(att) + 1 for att in selected_attrs]
        # np.str / np.int were deprecated aliases of the builtins (NumPy 1.20)
        # and were removed in NumPy 1.24; use the builtins directly.
        images = np.loadtxt(attr_path, skiprows=2, usecols=[0], dtype=str)
        labels = np.loadtxt(attr_path, skiprows=2, usecols=atts, dtype=int)
        # Deterministic preprocessing applied to every image.
        self.tf = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
        ])
        # Light augmentation, applied only to indices below 16277 in
        # __getitem__.
        self.tf_a = transforms.Compose([
            transforms.RandomHorizontalFlip(),
            transforms.RandomApply([
                transforms.ColorJitter(hue=.05, saturation=.05),
            ], p=0.8),
            transforms.RandomGrayscale(0.2),
        ])
        if mode == 'train':
            # NOTE(review): only the first 1627 rows (~1% of the train split)
            # are used, while __getitem__ augments indices < 16277 — confirm
            # the intended labelled-subset size.
            self.images = images[:1627]
            self.labels = labels[:1627]
        if mode == 'valid':
            self.images = images[162770:182637]
            self.labels = labels[162770:182637]
        if mode == 'test':
            self.images = images[182637:]
            self.labels = labels[182637:]
        self.length = len(self.images)

    def __getitem__(self, index):
        # NOTE(review): this threshold also augments the first 16277 indices
        # of the *validation and test* splits, since their indices start at 0
        # too — confirm that augmentation at eval time is intended.
        if index < 16277:
            img = self.tf(self.tf_a(Image.open(os.path.join(self.data_path, self.images[index]))))
        else:
            img = self.tf(Image.open(os.path.join(self.data_path, self.images[index])))
        # Map labels from {-1, +1} to {0, 1}; float32 targets are required by
        # BCEWithLogitsLoss.
        att = torch.tensor((self.labels[index] + 1) // 2)
        return img, att.to(torch.float32)

    def __len__(self):
        return self.length
# In[ ]:
# The 40 standard CelebA binary attributes, in annotation-file order.
attrs_default = ["5_o_Clock_Shadow", "Arched_Eyebrows", "Attractive", "Bags_Under_Eyes", "Bald", "Bangs", "Big_Lips", "Big_Nose", "Black_Hair", "Blond_Hair", "Blurry", "Brown_Hair", "Bushy_Eyebrows", "Chubby", "Double_Chin", "Eyeglasses", "Goatee", "Gray_Hair", "Heavy_Makeup", "High_Cheekbones", "Male", "Mouth_Slightly_Open", "Mustache", "Narrow_Eyes", "No_Beard", "Oval_Face", "Pale_Skin", "Pointy_Nose", "Receding_Hairline", "Rosy_Cheeks", "Sideburns", "Smiling", "Straight_Hair", "Wavy_Hair", "Wearing_Earrings", "Wearing_Hat", "Wearing_Lipstick", "Wearing_Necklace", "Wearing_Necktie", "Young"]

# Labelled training subset; shuffled, batched with the custom collate
# function, incomplete final batch dropped.
dataset = CelebA(data_root, attr_root, image_size, 'train', attrs_default)
train_loader = torch.utils.data.DataLoader(dataset, num_workers=workers,
            batch_size=batch_size, pin_memory=True, collate_fn=collate_custom,
            drop_last=True, shuffle=True)
# Validation split, evaluated in file order.
dataset = CelebA(data_root, attr_root, image_size, 'valid', attrs_default)
val_loader = torch.utils.data.DataLoader(dataset,
                                         batch_size=batch_size,
                                         shuffle=False,
                                         num_workers=workers)
# Held-out test split.
dataset = CelebA(data_root, attr_root, image_size, 'test', attrs_default)
test_loader = torch.utils.data.DataLoader(dataset,
                                          batch_size=batch_size,
                                          shuffle=False,
                                          num_workers=workers)
# In[ ]:
# Decide which device we want to run on (assumes at least one CUDA GPU).
device = torch.device("cuda:0")

# Backbone: despite the variable name, this is an ImageNet-pretrained AlexNet
# whose final classifier layer is replaced with a 40-way attribute head.
resnet = models.__dict__['alexnet'](pretrained=True)
resnet.classifier[6] = nn.Linear(4096,40,bias=True)
resnet = torch.nn.DataParallel(resnet)
resnet.cuda()
# Warm-start from the pseudo-labelling checkpoint before fine-tuning.
resnet.load_state_dict(torch.load('alexnet_pseudolabeling_001_0_normal.pth'))

optimizer = optim.Adam(resnet.parameters(), lr=0.00001)
scheduler = None  # no learning-rate schedule during fine-tuning

# Multi-label objective: one independent logistic loss per attribute.
loss_fn = torch.nn.BCEWithLogitsLoss()
metrics = {
    'acc': training.accuracy_ml
}
# In[ ]:
print('\n\nInitial')
print('-' * 10)
# Lowest validation loss seen so far.
# NOTE(review): initialised to 1 — if the validation loss starts above 1, no
# checkpoint is written until it drops below that; confirm this is intended.
val_loss = 1
for epoch in range(epochs):
    print('\nEpoch {}/{}'.format(epoch + 1, epochs))
    print('-' * 10)

    # One training pass over the labelled subset.
    resnet.train()
    training.pass_epoch(
        resnet, loss_fn, train_loader, optimizer, scheduler,
        batch_metrics=metrics, show_running=True, device=device,
        #writer=writer
    )

    # Evaluate on the validation split; val_metrics[0] is treated below as
    # the epoch validation loss (a tensor, hence .item()).
    resnet.eval()
    val_metrics = training.pass_epoch(
        resnet, loss_fn, val_loader,
        batch_metrics=metrics, show_running=True, device=device,
        #writer=writer
    )
    # On improvement: record the new best, report test-set accuracy, and
    # checkpoint the model weights.
    if val_metrics[0].item() < val_loss:
        val_loss = val_metrics[0].item()
        print('Test set Accuracy Lowest Validation Loss:')
        training.pass_epoch(
            resnet, loss_fn, test_loader,
            batch_metrics=metrics, show_running=True, device=device,
            #writer=writer
        )
        torch.save(resnet.state_dict(), "alexnet_PD_001_0_normal.pth")
#writer.close()
| 29.375 | 600 | 0.632827 |
import math
from torch import nn
from torch.autograd import Variable
import torch
import torch.nn.functional as F
import torchvision
import torch.utils.data as data
import torchvision.transforms as transforms
import torchvision.utils as vutils
import numpy as np
from PIL import Image
import os
import matplotlib.pyplot as plt
import time
from torchsummary import summary
import config
from facenet_pytorch import training
from torch.utils.data import DataLoader, SubsetRandomSampler
from torch import optim
from torch.optim.lr_scheduler import MultiStepLR
from torch.utils.tensorboard import SummaryWriter
from torchvision import datasets, transforms
from PIL import Image
import glob
from utils.collate import collate_custom
import torchvision.models as models
from util import AverageMeter, learning_rate_decay, Logger
import collections
transform = transforms.Compose([
transforms.RandomHorizontalFlip(),
transforms.RandomApply([
transforms.ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1)
], p=0.8),
transforms.RandomGrayscale(0.2),
transforms.ToTensor(),
transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
])
data_root = "/home/mehmetyavuz/datasets/CelebA128/"
attr_root = "/home/mehmetyavuz/datasets/list_attr_celeba.txt"
workers = 8
batch_size = 64
image_size = (128,128)
epochs = 100
class CelebA(data.Dataset):
def __init__(self, data_path, attr_path, image_size, mode, selected_attrs):
super(CelebA, self).__init__()
self.data_path = data_path
att_list = open(attr_path, 'r', encoding='utf-8').readlines()[1].split()
atts = [att_list.index(att) + 1 for att in selected_attrs]
images = np.loadtxt(attr_path, skiprows=2, usecols=[0], dtype=np.str)
labels = np.loadtxt(attr_path, skiprows=2, usecols=atts, dtype=np.int)
self.tf = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
])
self.tf_a = transforms.Compose([
transforms.RandomHorizontalFlip(),
transforms.RandomApply([
transforms.ColorJitter(hue=.05, saturation=.05),
], p=0.8),
transforms.RandomGrayscale(0.2),
])
if mode == 'train':
self.images = images[:1627]
self.labels = labels[:1627]
if mode == 'valid':
self.images = images[162770:182637]
self.labels = labels[162770:182637]
if mode == 'test':
self.images = images[182637:]
self.labels = labels[182637:]
self.length = len(self.images)
def __getitem__(self, index):
if index < 16277:
img = self.tf(self.tf_a(Image.open(os.path.join(self.data_path, self.images[index]))))
else:
img = self.tf(Image.open(os.path.join(self.data_path, self.images[index])))
att = torch.tensor((self.labels[index] + 1) // 2)
return img, att.to(torch.float32)
def __len__(self):
return self.length
attrs_default = ["5_o_Clock_Shadow", "Arched_Eyebrows", "Attractive", "Bags_Under_Eyes", "Bald", "Bangs", "Big_Lips", "Big_Nose", "Black_Hair", "Blond_Hair", "Blurry", "Brown_Hair", "Bushy_Eyebrows", "Chubby", "Double_Chin", "Eyeglasses", "Goatee", "Gray_Hair", "Heavy_Makeup", "High_Cheekbones", "Male", "Mouth_Slightly_Open", "Mustache", "Narrow_Eyes", "No_Beard", "Oval_Face", "Pale_Skin", "Pointy_Nose", "Receding_Hairline", "Rosy_Cheeks", "Sideburns", "Smiling", "Straight_Hair", "Wavy_Hair", "Wearing_Earrings", "Wearing_Hat", "Wearing_Lipstick", "Wearing_Necklace", "Wearing_Necktie", "Young"]
dataset = CelebA(data_root, attr_root, image_size, 'train', attrs_default)
train_loader = torch.utils.data.DataLoader(dataset, num_workers=workers,
batch_size=batch_size, pin_memory=True, collate_fn=collate_custom,
drop_last=True, shuffle=True)
dataset = CelebA(data_root, attr_root, image_size, 'valid', attrs_default)
val_loader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=False,
num_workers=workers)
dataset = CelebA(data_root, attr_root, image_size, 'test', attrs_default)
test_loader = torch.utils.data.DataLoader(dataset,
batch_size=batch_size,
shuffle=False,
num_workers=workers)
device = torch.device("cuda:0")
resnet = models.__dict__['alexnet'](pretrained=True)
resnet.classifier[6] = nn.Linear(4096,40,bias=True)
resnet = torch.nn.DataParallel(resnet)
resnet.cuda()
resnet.load_state_dict(torch.load('alexnet_pseudolabeling_001_0_normal.pth'))
optimizer = optim.Adam(resnet.parameters(), lr=0.00001)
scheduler = None
loss_fn = torch.nn.BCEWithLogitsLoss()
metrics = {
'acc': training.accuracy_ml
}
print('\n\nInitial')
print('-' * 10)
val_loss = 1
for epoch in range(epochs):
print('\nEpoch {}/{}'.format(epoch + 1, epochs))
print('-' * 10)
resnet.train()
training.pass_epoch(
resnet, loss_fn, train_loader, optimizer, scheduler,
batch_metrics=metrics, show_running=True, device=device,
)
resnet.eval()
val_metrics = training.pass_epoch(
resnet, loss_fn, val_loader,
batch_metrics=metrics, show_running=True, device=device,
)
if val_metrics[0].item() < val_loss:
val_loss = val_metrics[0].item()
print('Test set Accuracy Lowest Validation Loss:')
training.pass_epoch(
resnet, loss_fn, test_loader,
batch_metrics=metrics, show_running=True, device=device,
)
torch.save(resnet.state_dict(), "alexnet_PD_001_0_normal.pth")
| true | true |
f72c055d3838a4636e2e7b02c3f27e7b13404fdf | 6,398 | py | Python | skyscrapers/skyscrapers.py | Adeon18/skyscrapers.py | 8dbd6e9d648a56f8dbab7de50ef6c606e4aed18e | [
"MIT"
] | null | null | null | skyscrapers/skyscrapers.py | Adeon18/skyscrapers.py | 8dbd6e9d648a56f8dbab7de50ef6c606e4aed18e | [
"MIT"
] | null | null | null | skyscrapers/skyscrapers.py | Adeon18/skyscrapers.py | 8dbd6e9d648a56f8dbab7de50ef6c606e4aed18e | [
"MIT"
] | null | null | null | """
https://github.com/Adeon18/skyscrapers
"""
def read_input(path: str) -> list:
    """
    Read game board file from path.

    Return list of str (one entry per board row, newlines stripped).

    Uses splitlines() so the final row is preserved even when the file does
    not end with a newline; the previous implementation split on '\\n' and
    unconditionally discarded the last element, silently dropping the last
    board row for files without a trailing newline.
    """
    with open(path, "r") as file:
        return file.read().splitlines()
def left_to_right_check(input_line: str, pivot: int) -> bool:
    """
    Check row-wise visibility from left to right.

    Return True if the number of buildings visible from the left-most hint,
    looking to the right, equals exactly ``pivot``; False otherwise.

    input_line - representing board row.
    pivot - number on the left-most hint of the input_line.

    The previous version returned True as soon as the running count *reached*
    the pivot, so rows with MORE visible buildings than the hint incorrectly
    passed; the count is now compared only after the whole row is scanned.

    >>> left_to_right_check("412453*", 4)
    True
    >>> left_to_right_check("452453*", 5)
    False
    >>> left_to_right_check("512345*", 5)
    True
    >>> left_to_right_check("4124531", 4)
    True
    >>> left_to_right_check("312453*", 3)
    False
    """
    tallest = 0
    visible = 0
    # Scan interior cells only; hints occupy the first and last positions.
    for cell in input_line[1:-1]:
        if cell == "*":
            continue
        # A building is visible iff it is taller than everything before it.
        if int(cell) > tallest:
            tallest = int(cell)
            visible += 1
    return visible == pivot
def check_not_finished_board(board: list) -> bool:
    """
    Check if skyscraper board is not finished, i.e.,
    '?' present on the game board.

    Return True if finished, False otherwise.

    >>> check_not_finished_board(['***21**', '4?????*',\
 '4?????*', '*?????5', '*?????*', '*?????*', '*2*1***'])
    False
    >>> check_not_finished_board(['***21**', '412453*',\
 '423145*', '*543215', '*35214*', '*41532*', '*2*1***'])
    True
    >>> check_not_finished_board(['***21**', '412453*',\
 '423145*', '*5?3215', '*35214*', '*41532*', '*2*1***'])
    False
    """
    # The board is finished exactly when no row contains a '?' placeholder.
    return all("?" not in row for row in board)
def check_uniqueness_in_rows(board: list) -> bool:
    """
    Check buildings of unique height in each row.

    Return True if buildings in a row have unique length,
    False otherwise.

    Non-digit cells (hints '*', unfilled '?') are ignored.  The previous
    version skipped them with a bare ``except:``, which also swallowed
    unrelated exceptions such as KeyboardInterrupt; an explicit digit test
    is used instead.

    >>> check_uniqueness_in_rows(['***21**', '412453*', '423145*',\
 '*543215', '*35214*', '*41532*', '*2*1***'])
    True
    >>> check_uniqueness_in_rows(['***21**', '452453*', '423145*',\
 '*543215', '*35214*', '*41532*', '*2*1***'])
    False
    >>> check_uniqueness_in_rows(['***21**', '412453*', '423145*',\
 '*553215', '*35214*', '*41532*', '*2*1***'])
    False
    """
    # Hint rows (first and last) are excluded from the uniqueness check.
    for row in board[1:-1]:
        seen = set()
        # Interior cells only; the edge positions hold hints.
        for cell in row[1:-1]:
            if not cell.isdigit():
                continue
            if cell in seen:
                return False
            seen.add(cell)
    return True
def check_horizontal_visibility(board: list) -> bool:
    """
    Check row-wise visibility (left-right and vice versa)

    Return True if all horizontal hints are satisfiable,
    i.e., for line 412453* , hint is 4, and 1245 are the four buildings
    that could be observed from the hint looking to the right.

    >>> check_horizontal_visibility(['***21**', '412453*', '423145*',\
 '*543215', '*35214*', '*41532*', '*2*1***'])
    True
    >>> check_horizontal_visibility(['***21**', '452453*', '423145*',\
 '*543215', '*35214*', '*41532*', '*2*1***'])
    False
    >>> check_horizontal_visibility(['***21**', '452413*', '423145*',\
 '*543215', '*35214*', '*41532*', '*2*1***'])
    False
    """
    def visible_count(cells):
        # Number of buildings visible walking along `cells`: each new
        # running maximum is one more visible building.
        tallest = 0
        seen = 0
        for cell in cells:
            height = int(cell)
            if height > tallest:
                tallest = height
                seen += 1
        return seen

    # Hint rows (first and last) carry no horizontal hints of their own.
    for row in board[1:-1]:
        left_hint = row[0]
        right_hint = row[-1]
        interior = row[1:-1]
        # '*' means "no hint on this side".
        if left_hint != "*" and visible_count(interior) != int(left_hint):
            return False
        if right_hint != "*" and visible_count(interior[::-1]) != int(right_hint):
            return False
    return True
def check_columns(board: list) -> bool:
    """
    Check column-wise compliance of the board for
    uniqueness (buildings of unique height)
    and visibility (top-bottom and vice versa).

    Same as for horizontal cases, but aggregated in one
    function for vertical case, i.e. columns.

    >>> check_columns(['***21**', '412453*', '423145*',\
 '*543215', '*35214*', '*41532*', '*2*1***'])
    True
    >>> check_columns(['***21**', '412453*', '423145*',\
 '*543215', '*35214*', '*41232*', '*2*1***'])
    False
    >>> check_columns(['***21**', '412553*', '423145*',\
 '*543215', '*35214*', '*41532*', '*2*1***'])
    False
    """
    # Transpose the (square) board so columns become rows, then reuse the
    # row-wise checks unchanged.
    transposed = ["".join(column) for column in zip(*board)]
    if not check_uniqueness_in_rows(transposed):
        return False
    if not check_not_finished_board(transposed):
        return False
    return check_horizontal_visibility(transposed)
def check_skyscrapers(input_path: str) -> bool:
    """
    Main function to check the status of skyscraper game board.

    Return True if the board status is compliant with the rules,
    False otherwise.
    """
    board = read_input(input_path)
    # All four checks must hold; evaluation short-circuits on the first
    # failure, in the same order as before.
    return (check_horizontal_visibility(board)
            and check_columns(board)
            and check_uniqueness_in_rows(board)
            and check_not_finished_board(board))
if __name__ == "__main__":
    import doctest
    # Run the module doctests and print the (failed, attempted) summary.
    print(doctest.testmod())
| 29.081818 | 79 | 0.557205 |
def read_input(path: str) -> list:
with open(path, "r") as file:
output_lst = file.read().split("\n")
output_lst = output_lst[:-1]
return output_lst
def left_to_right_check(input_line: str, pivot: int) -> bool:
row = input_line
max_num = 0
count = 0
for _, num in enumerate(row[1:-1]):
if num == "*":
continue
if int(num) > max_num:
max_num = int(num)
count += 1
if count == pivot:
return True
return False
def check_not_finished_board(board: list) -> bool:
for row in board:
if "?" in row:
return False
return True
def check_uniqueness_in_rows(board: list) -> bool:
for row in board[1:-1]:
elements_int = []
for elem in row[1:-1]:
try:
if int(elem) in elements_int:
return False
else:
elements_int.append(int(elem))
except:
continue
return True
def check_horizontal_visibility(board: list) -> bool:
# Our right hint(default=*)
right_req = "*"
for row in board[1:-1]:
# We keep track of the max element and seen buildings
right_flag = 0
max_elem_right = 0
# We skip if there's no hint
if row[0] == "*":
continue
else:
right_req = int(row[0])
for elem in row[1:-1]:
if int(elem) > max_elem_right:
max_elem_right = int(elem)
right_flag += 1
if right_flag != right_req:
return False
# Same code, another direction, rewritten for better readability
left_req = "*"
for row in board[1:-1]:
left_flag = 0
max_elem_left = 0
if row[-1] == "*":
continue
else:
left_req = int(row[-1])
for elem in row[1:-1][::-1]:
if int(elem) > max_elem_left:
max_elem_left = int(elem)
left_flag += 1
# print('left ', right_flag, right_req)
if left_flag != left_req:
return False
return True
def check_columns(board: list) -> bool:
new_lst = []
# Flip and check horisontally
for i, row in enumerate(board):
new_elem = ""
for j, _ in enumerate(row):
new_elem += board[j][i]
new_lst.append(new_elem)
if check_uniqueness_in_rows(new_lst) and check_not_finished_board(new_lst):
return check_horizontal_visibility(new_lst)
return False
def check_skyscrapers(input_path: str) -> bool:
board = read_input(input_path)
# If everything is met return True
if (
check_horizontal_visibility(board)
and check_columns(board)
and check_uniqueness_in_rows(board)
and check_not_finished_board(board)
):
return True
return False
if __name__ == "__main__":
import doctest
print(doctest.testmod())
| true | true |
f72c07bdf61ae06bff3821b388dc1226bbabef19 | 5,846 | py | Python | src/sage/categories/schemes.py | bopopescu/sagesmc | e8d1d31f6f598dba2d763baa2d2e804338f9e89e | [
"BSL-1.0"
] | 5 | 2015-01-04T07:15:06.000Z | 2022-03-04T15:15:18.000Z | src/sage/categories/schemes.py | bopopescu/sagesmc | e8d1d31f6f598dba2d763baa2d2e804338f9e89e | [
"BSL-1.0"
] | null | null | null | src/sage/categories/schemes.py | bopopescu/sagesmc | e8d1d31f6f598dba2d763baa2d2e804338f9e89e | [
"BSL-1.0"
] | 10 | 2016-09-28T13:12:40.000Z | 2022-02-12T09:28:34.000Z | r"""
Schemes
"""
#*****************************************************************************
# Copyright (C) 2005 David Kohel <kohel@maths.usyd.edu>
# William Stein <wstein@math.ucsd.edu>
# 2008-2009 Nicolas M. Thiery <nthiery at users.sf.net>
#
# Distributed under the terms of the GNU General Public License (GPL)
# http://www.gnu.org/licenses/
#******************************************************************************
from sage.categories.category import Category, HomCategory
from sage.categories.category_types import Category_over_base
from sage.misc.cachefunc import cached_method
from sets_cat import Sets
def Schemes(X=None):
    """
    Construct a category of schemes.

    With no argument, return the absolute category of schemes; given a
    scheme (or anything coercible into one, such as a ring), return the
    category of schemes over that base.

    EXAMPLES::

        sage: Schemes()
        Category of Schemes

        sage: Schemes(Spec(ZZ))
        Category of schemes over Integer Ring

        sage: Schemes(ZZ)
        Category of schemes over Integer Ring
    """
    if X is None:
        return Schemes_abstract()
    from sage.schemes.generic.scheme import is_Scheme
    # Coerce non-scheme input (e.g. a commutative ring) into a scheme via
    # the absolute category's _call_ before building the over-base category.
    if not is_Scheme(X):
        X = Schemes()(X)
    return Schemes_over_base(X)
# TODO: rename into AbstractSchemes ???
class Schemes_abstract(Category):
    """
    The category of all abstract schemes.

    EXAMPLES::

        sage: Schemes()
        Category of Schemes
    """
    def __init__(self):
        """
        TESTS::

            sage: C = Schemes()
            sage: C
            Category of Schemes
            sage: TestSuite(C).run()
        """
        Category.__init__(self, "Schemes")

    def super_categories(self):
        """
        EXAMPLES::

            sage: Schemes().super_categories()
            [Category of sets]
        """
        return [Sets()]

    def _call_(self, x):
        """
        Construct a scheme from the data in ``x``

        EXAMPLES:

        Let us first construct the category of schemes::

            sage: S = Schemes(); S
            Category of Schemes

        We create a scheme from a ring::

            sage: X = S(ZZ); X  # indirect doctest
            Spectrum of Integer Ring

        We create a scheme from a scheme (do nothing)::

            sage: S(X)
            Spectrum of Integer Ring

        We create a scheme morphism from a ring homomorphism.x::

            sage: phi = ZZ.hom(QQ); phi
            Ring Coercion morphism:
              From: Integer Ring
              To:   Rational Field
            sage: f = S(phi); f  # indirect doctest
            Affine Scheme morphism:
              From: Spectrum of Rational Field
              To:   Spectrum of Integer Ring
              Defn: Ring Coercion morphism:
                      From: Integer Ring
                      To:   Rational Field

            sage: f.domain()
            Spectrum of Rational Field
            sage: f.codomain()
            Spectrum of Integer Ring
            sage: S(f)  # indirect doctest
            Affine Scheme morphism:
              From: Spectrum of Rational Field
              To:   Spectrum of Integer Ring
              Defn: Ring Coercion morphism:
                      From: Integer Ring
                      To:   Rational Field
        """
        from sage.schemes.generic.scheme import is_Scheme
        if is_Scheme(x):
            return x
        from sage.schemes.generic.morphism import is_SchemeMorphism
        if is_SchemeMorphism(x):
            return x
        from sage.rings.morphism import is_RingHomomorphism
        from sage.rings.commutative_ring import is_CommutativeRing
        from sage.schemes.generic.spec import Spec
        if is_CommutativeRing(x):
            return Spec(x)
        elif is_RingHomomorphism(x):
            # A ring map R -> S contravariantly induces Spec(S) -> Spec(R).
            A = Spec(x.codomain())
            return A.hom(x)
        else:
            # Use the call form of raise: the old ``raise TypeError, msg``
            # statement is Python-2-only and a syntax error on Python 3.
            raise TypeError("No way to create an object or morphism in %s from %s" % (self, x))

    class HomCategory(HomCategory):
        def extra_super_categories(self):
            """
            EXAMPLES::

                sage: Schemes().hom_category().extra_super_categories()
                []
                sage: Schemes().hom_category().super_categories()
                [Category of hom sets in Category of sets]

            FIXME: what category structure is there on Homsets of schemes?
            The result above is wrong, and should be fixed during the next
            homsets overhaul.
            """
            return []
#############################################################
# Schemes over a given base scheme.
#############################################################
class Schemes_over_base(Category_over_base):
    """
    The category of schemes over a given base scheme.

    EXAMPLES::

        sage: Schemes(Spec(ZZ))
        Category of schemes over Integer Ring

    TESTS::

        sage: C = Schemes(ZZ)
        sage: TestSuite(C).run()
    """
    def base_scheme(self):
        """
        Return the base scheme of this category.

        EXAMPLES::

            sage: Schemes(Spec(ZZ)).base_scheme()
            Spectrum of Integer Ring
        """
        return self.base()

    def super_categories(self):
        """
        EXAMPLES::

            sage: Schemes(Spec(ZZ)).super_categories()
            [Category of Schemes]
        """
        return [Schemes_abstract()]

    def _repr_object_names(self):
        """
        EXAMPLES::

            sage: Schemes(Spec(ZZ))  # indirect doctest
            Category of schemes over Integer Ring
        """
        # To work around the name of the class (schemes_over_base):
        # for an affine base, print the coordinate ring instead of the
        # Spec(...) object for a friendlier category name.
        from sage.schemes.generic.spec import is_Spec
        if is_Spec(self.base_scheme()):
            return "schemes over %s" % self.base_scheme().coordinate_ring()
        else:
            return "schemes over %s" % self.base_scheme()
| 28.79803 | 93 | 0.529422 | r"""
Schemes
"""
from sage.categories.category import Category, HomCategory
from sage.categories.category_types import Category_over_base
from sage.misc.cachefunc import cached_method
from sets_cat import Sets
def Schemes(X=None):
"""
Construct a category of schemes.
EXAMPLES::
sage: Schemes()
Category of Schemes
sage: Schemes(Spec(ZZ))
Category of schemes over Integer Ring
sage: Schemes(ZZ)
Category of schemes over Integer Ring
"""
if X is None:
return Schemes_abstract()
from sage.schemes.generic.scheme import is_Scheme
if not is_Scheme(X):
X = Schemes()(X)
return Schemes_over_base(X)
class Schemes_abstract(Category):
"""
The category of all abstract schemes.
EXAMPLES::
sage: Schemes()
Category of Schemes
"""
def __init__(self):
"""
TESTS::
sage: C = Schemes()
sage: C
Category of Schemes
sage: TestSuite(C).run()
"""
Category.__init__(self, "Schemes")
def super_categories(self):
"""
EXAMPLES::
sage: Schemes().super_categories()
[Category of sets]
"""
return [Sets()]
def _call_(self, x):
"""
Construct a scheme from the data in ``x``
EXAMPLES:
Let us first construct the category of schemes::
sage: S = Schemes(); S
Category of Schemes
We create a scheme from a ring::
sage: X = S(ZZ); X # indirect doctest
Spectrum of Integer Ring
We create a scheme from a scheme (do nothing)::
sage: S(X)
Spectrum of Integer Ring
We create a scheme morphism from a ring homomorphism.x::
sage: phi = ZZ.hom(QQ); phi
Ring Coercion morphism:
From: Integer Ring
To: Rational Field
sage: f = S(phi); f # indirect doctest
Affine Scheme morphism:
From: Spectrum of Rational Field
To: Spectrum of Integer Ring
Defn: Ring Coercion morphism:
From: Integer Ring
To: Rational Field
sage: f.domain()
Spectrum of Rational Field
sage: f.codomain()
Spectrum of Integer Ring
sage: S(f) # indirect doctest
Affine Scheme morphism:
From: Spectrum of Rational Field
To: Spectrum of Integer Ring
Defn: Ring Coercion morphism:
From: Integer Ring
To: Rational Field
"""
from sage.schemes.generic.scheme import is_Scheme
if is_Scheme(x):
return x
from sage.schemes.generic.morphism import is_SchemeMorphism
if is_SchemeMorphism(x):
return x
from sage.rings.morphism import is_RingHomomorphism
from sage.rings.commutative_ring import is_CommutativeRing
from sage.schemes.generic.spec import Spec
if is_CommutativeRing(x):
return Spec(x)
elif is_RingHomomorphism(x):
A = Spec(x.codomain())
return A.hom(x)
else:
raise TypeError, "No way to create an object or morphism in %s from %s"%(self, x)
class HomCategory(HomCategory):
def extra_super_categories(self):
"""
EXAMPLES::
sage: Schemes().hom_category().extra_super_categories()
[]
sage: Schemes().hom_category().super_categories()
[Category of hom sets in Category of sets]
FIXME: what category structure is there on Homsets of schemes?
The result above is wrong, and should be fixed during the next
homsets overhaul.
"""
return []
| false | true |
f72c08bf0eb0d7eb846f18ba055eede40647f94e | 30,345 | py | Python | sdk/python/pulumi_google_native/apigee/v1/organization.py | AaronFriel/pulumi-google-native | 75d1cda425e33d4610348972cd70bddf35f1770d | [
"Apache-2.0"
] | 44 | 2021-04-18T23:00:48.000Z | 2022-02-14T17:43:15.000Z | sdk/python/pulumi_google_native/apigee/v1/organization.py | AaronFriel/pulumi-google-native | 75d1cda425e33d4610348972cd70bddf35f1770d | [
"Apache-2.0"
] | 354 | 2021-04-16T16:48:39.000Z | 2022-03-31T17:16:39.000Z | sdk/python/pulumi_google_native/apigee/v1/organization.py | AaronFriel/pulumi-google-native | 75d1cda425e33d4610348972cd70bddf35f1770d | [
"Apache-2.0"
] | 8 | 2021-04-24T17:46:51.000Z | 2022-01-05T10:40:21.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['OrganizationArgs', 'Organization']
@pulumi.input_type
class OrganizationArgs:
    """
    The set of arguments for constructing an Apigee ``Organization`` resource.

    NOTE(review): this class is machine-generated by the Pulumi SDK generator.
    The ``@pulumi.input_type`` decorator introspects ``__init__`` and the
    property accessors below, so their structure must not be hand-edited;
    change the generator instead.
    """

    def __init__(__self__, *,
                 analytics_region: pulumi.Input[str],
                 parent: pulumi.Input[str],
                 runtime_type: pulumi.Input['OrganizationRuntimeType'],
                 addons_config: Optional[pulumi.Input['GoogleCloudApigeeV1AddonsConfigArgs']] = None,
                 attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 authorized_network: Optional[pulumi.Input[str]] = None,
                 billing_type: Optional[pulumi.Input['OrganizationBillingType']] = None,
                 customer_name: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 portal_disabled: Optional[pulumi.Input[bool]] = None,
                 properties: Optional[pulumi.Input['GoogleCloudApigeeV1PropertiesArgs']] = None,
                 runtime_database_encryption_key_name: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input['OrganizationType']] = None):
        """
        The set of arguments for constructing a Organization resource.
        :param pulumi.Input[str] analytics_region: Primary GCP region for analytics data storage. For valid values, see [Create an Apigee organization](https://cloud.google.com/apigee/docs/api-platform/get-started/create-org).
        :param pulumi.Input['OrganizationRuntimeType'] runtime_type: Runtime type of the Apigee organization based on the Apigee subscription purchased.
        :param pulumi.Input['GoogleCloudApigeeV1AddonsConfigArgs'] addons_config: Addon configurations of the Apigee organization.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] attributes: Not used by Apigee.
        :param pulumi.Input[str] authorized_network: Compute Engine network used for Service Networking to be peered with Apigee runtime instances. See [Getting started with the Service Networking API](https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started). Valid only when [RuntimeType](#RuntimeType) is set to `CLOUD`. The value must be set before the creation of a runtime instance and can be updated only when there are no runtime instances. For example: `default`. Apigee also supports shared VPC (that is, the host network project is not the same as the one that is peering with Apigee). See [Shared VPC overview](https://cloud.google.com/vpc/docs/shared-vpc). To use a shared VPC network, use the following format: `projects/{host-project-id}/{region}/networks/{network-name}`. For example: `projects/my-sharedvpc-host/global/networks/mynetwork` **Note:** Not supported for Apigee hybrid.
        :param pulumi.Input['OrganizationBillingType'] billing_type: Billing type of the Apigee organization. See [Apigee pricing](https://cloud.google.com/apigee/pricing).
        :param pulumi.Input[str] customer_name: Not used by Apigee.
        :param pulumi.Input[str] description: Description of the Apigee organization.
        :param pulumi.Input[str] display_name: Display name for the Apigee organization. Unused, but reserved for future use.
        :param pulumi.Input[bool] portal_disabled: Configuration for the Portals settings.
        :param pulumi.Input['GoogleCloudApigeeV1PropertiesArgs'] properties: Properties defined in the Apigee organization profile.
        :param pulumi.Input[str] runtime_database_encryption_key_name: Cloud KMS key name used for encrypting the data that is stored and replicated across runtime instances. Update is not allowed after the organization is created. Required when [RuntimeType](#RuntimeType) is `CLOUD`. If not specified when [RuntimeType](#RuntimeType) is `TRIAL`, a Google-Managed encryption key will be used. For example: "projects/foo/locations/us/keyRings/bar/cryptoKeys/baz". **Note:** Not supported for Apigee hybrid.
        :param pulumi.Input['OrganizationType'] type: Not used by Apigee.
        """
        # Required arguments are recorded unconditionally.
        pulumi.set(__self__, "analytics_region", analytics_region)
        pulumi.set(__self__, "parent", parent)
        pulumi.set(__self__, "runtime_type", runtime_type)
        # Optional arguments are only recorded when explicitly supplied, so
        # that unset inputs stay absent rather than becoming explicit Nones.
        if addons_config is not None:
            pulumi.set(__self__, "addons_config", addons_config)
        if attributes is not None:
            pulumi.set(__self__, "attributes", attributes)
        if authorized_network is not None:
            pulumi.set(__self__, "authorized_network", authorized_network)
        if billing_type is not None:
            pulumi.set(__self__, "billing_type", billing_type)
        if customer_name is not None:
            pulumi.set(__self__, "customer_name", customer_name)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if portal_disabled is not None:
            pulumi.set(__self__, "portal_disabled", portal_disabled)
        if properties is not None:
            pulumi.set(__self__, "properties", properties)
        if runtime_database_encryption_key_name is not None:
            pulumi.set(__self__, "runtime_database_encryption_key_name", runtime_database_encryption_key_name)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="analyticsRegion")
    def analytics_region(self) -> pulumi.Input[str]:
        """
        Primary GCP region for analytics data storage. For valid values, see [Create an Apigee organization](https://cloud.google.com/apigee/docs/api-platform/get-started/create-org).
        """
        return pulumi.get(self, "analytics_region")

    @analytics_region.setter
    def analytics_region(self, value: pulumi.Input[str]):
        pulumi.set(self, "analytics_region", value)

    @property
    @pulumi.getter
    def parent(self) -> pulumi.Input[str]:
        # NOTE(review): undocumented by the generator; presumably the parent
        # GCP resource under which the organization is created -- confirm
        # against the Apigee organizations.create API.
        return pulumi.get(self, "parent")

    @parent.setter
    def parent(self, value: pulumi.Input[str]):
        pulumi.set(self, "parent", value)

    @property
    @pulumi.getter(name="runtimeType")
    def runtime_type(self) -> pulumi.Input['OrganizationRuntimeType']:
        """
        Runtime type of the Apigee organization based on the Apigee subscription purchased.
        """
        return pulumi.get(self, "runtime_type")

    @runtime_type.setter
    def runtime_type(self, value: pulumi.Input['OrganizationRuntimeType']):
        pulumi.set(self, "runtime_type", value)

    @property
    @pulumi.getter(name="addonsConfig")
    def addons_config(self) -> Optional[pulumi.Input['GoogleCloudApigeeV1AddonsConfigArgs']]:
        """
        Addon configurations of the Apigee organization.
        """
        return pulumi.get(self, "addons_config")

    @addons_config.setter
    def addons_config(self, value: Optional[pulumi.Input['GoogleCloudApigeeV1AddonsConfigArgs']]):
        pulumi.set(self, "addons_config", value)

    @property
    @pulumi.getter
    def attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Not used by Apigee.
        """
        return pulumi.get(self, "attributes")

    @attributes.setter
    def attributes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "attributes", value)

    @property
    @pulumi.getter(name="authorizedNetwork")
    def authorized_network(self) -> Optional[pulumi.Input[str]]:
        """
        Compute Engine network used for Service Networking to be peered with Apigee runtime instances. See [Getting started with the Service Networking API](https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started). Valid only when [RuntimeType](#RuntimeType) is set to `CLOUD`. The value must be set before the creation of a runtime instance and can be updated only when there are no runtime instances. For example: `default`. Apigee also supports shared VPC (that is, the host network project is not the same as the one that is peering with Apigee). See [Shared VPC overview](https://cloud.google.com/vpc/docs/shared-vpc). To use a shared VPC network, use the following format: `projects/{host-project-id}/{region}/networks/{network-name}`. For example: `projects/my-sharedvpc-host/global/networks/mynetwork` **Note:** Not supported for Apigee hybrid.
        """
        return pulumi.get(self, "authorized_network")

    @authorized_network.setter
    def authorized_network(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "authorized_network", value)

    @property
    @pulumi.getter(name="billingType")
    def billing_type(self) -> Optional[pulumi.Input['OrganizationBillingType']]:
        """
        Billing type of the Apigee organization. See [Apigee pricing](https://cloud.google.com/apigee/pricing).
        """
        return pulumi.get(self, "billing_type")

    @billing_type.setter
    def billing_type(self, value: Optional[pulumi.Input['OrganizationBillingType']]):
        pulumi.set(self, "billing_type", value)

    @property
    @pulumi.getter(name="customerName")
    def customer_name(self) -> Optional[pulumi.Input[str]]:
        """
        Not used by Apigee.
        """
        return pulumi.get(self, "customer_name")

    @customer_name.setter
    def customer_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "customer_name", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Description of the Apigee organization.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        Display name for the Apigee organization. Unused, but reserved for future use.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="portalDisabled")
    def portal_disabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Configuration for the Portals settings.
        """
        return pulumi.get(self, "portal_disabled")

    @portal_disabled.setter
    def portal_disabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "portal_disabled", value)

    @property
    @pulumi.getter
    def properties(self) -> Optional[pulumi.Input['GoogleCloudApigeeV1PropertiesArgs']]:
        """
        Properties defined in the Apigee organization profile.
        """
        return pulumi.get(self, "properties")

    @properties.setter
    def properties(self, value: Optional[pulumi.Input['GoogleCloudApigeeV1PropertiesArgs']]):
        pulumi.set(self, "properties", value)

    @property
    @pulumi.getter(name="runtimeDatabaseEncryptionKeyName")
    def runtime_database_encryption_key_name(self) -> Optional[pulumi.Input[str]]:
        """
        Cloud KMS key name used for encrypting the data that is stored and replicated across runtime instances. Update is not allowed after the organization is created. Required when [RuntimeType](#RuntimeType) is `CLOUD`. If not specified when [RuntimeType](#RuntimeType) is `TRIAL`, a Google-Managed encryption key will be used. For example: "projects/foo/locations/us/keyRings/bar/cryptoKeys/baz". **Note:** Not supported for Apigee hybrid.
        """
        return pulumi.get(self, "runtime_database_encryption_key_name")

    @runtime_database_encryption_key_name.setter
    def runtime_database_encryption_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "runtime_database_encryption_key_name", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input['OrganizationType']]:
        """
        Not used by Apigee.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input['OrganizationType']]):
        pulumi.set(self, "type", value)
class Organization(pulumi.CustomResource):
    """
    An Apigee organization resource. Creating this resource calls the Apigee
    organizations API; see [Create an Apigee organization](https://cloud.google.com/apigee/docs/api-platform/get-started/create-org).

    NOTE(review): machine-generated by the Pulumi SDK generator; do not
    hand-edit -- the Pulumi engine relies on the exact property names and
    the ``_internal_init`` protocol below.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 addons_config: Optional[pulumi.Input[pulumi.InputType['GoogleCloudApigeeV1AddonsConfigArgs']]] = None,
                 analytics_region: Optional[pulumi.Input[str]] = None,
                 attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 authorized_network: Optional[pulumi.Input[str]] = None,
                 billing_type: Optional[pulumi.Input['OrganizationBillingType']] = None,
                 customer_name: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 parent: Optional[pulumi.Input[str]] = None,
                 portal_disabled: Optional[pulumi.Input[bool]] = None,
                 properties: Optional[pulumi.Input[pulumi.InputType['GoogleCloudApigeeV1PropertiesArgs']]] = None,
                 runtime_database_encryption_key_name: Optional[pulumi.Input[str]] = None,
                 runtime_type: Optional[pulumi.Input['OrganizationRuntimeType']] = None,
                 type: Optional[pulumi.Input['OrganizationType']] = None,
                 __props__=None):
        """
        Creates an Apigee organization. See [Create an Apigee organization](https://cloud.google.com/apigee/docs/api-platform/get-started/create-org).
        Auto-naming is currently not supported for this resource.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['GoogleCloudApigeeV1AddonsConfigArgs']] addons_config: Addon configurations of the Apigee organization.
        :param pulumi.Input[str] analytics_region: Primary GCP region for analytics data storage. For valid values, see [Create an Apigee organization](https://cloud.google.com/apigee/docs/api-platform/get-started/create-org).
        :param pulumi.Input[Sequence[pulumi.Input[str]]] attributes: Not used by Apigee.
        :param pulumi.Input[str] authorized_network: Compute Engine network used for Service Networking to be peered with Apigee runtime instances. See [Getting started with the Service Networking API](https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started). Valid only when [RuntimeType](#RuntimeType) is set to `CLOUD`. The value must be set before the creation of a runtime instance and can be updated only when there are no runtime instances. For example: `default`. Apigee also supports shared VPC (that is, the host network project is not the same as the one that is peering with Apigee). See [Shared VPC overview](https://cloud.google.com/vpc/docs/shared-vpc). To use a shared VPC network, use the following format: `projects/{host-project-id}/{region}/networks/{network-name}`. For example: `projects/my-sharedvpc-host/global/networks/mynetwork` **Note:** Not supported for Apigee hybrid.
        :param pulumi.Input['OrganizationBillingType'] billing_type: Billing type of the Apigee organization. See [Apigee pricing](https://cloud.google.com/apigee/pricing).
        :param pulumi.Input[str] customer_name: Not used by Apigee.
        :param pulumi.Input[str] description: Description of the Apigee organization.
        :param pulumi.Input[str] display_name: Display name for the Apigee organization. Unused, but reserved for future use.
        :param pulumi.Input[bool] portal_disabled: Configuration for the Portals settings.
        :param pulumi.Input[pulumi.InputType['GoogleCloudApigeeV1PropertiesArgs']] properties: Properties defined in the Apigee organization profile.
        :param pulumi.Input[str] runtime_database_encryption_key_name: Cloud KMS key name used for encrypting the data that is stored and replicated across runtime instances. Update is not allowed after the organization is created. Required when [RuntimeType](#RuntimeType) is `CLOUD`. If not specified when [RuntimeType](#RuntimeType) is `TRIAL`, a Google-Managed encryption key will be used. For example: "projects/foo/locations/us/keyRings/bar/cryptoKeys/baz". **Note:** Not supported for Apigee hybrid.
        :param pulumi.Input['OrganizationRuntimeType'] runtime_type: Runtime type of the Apigee organization based on the Apigee subscription purchased.
        :param pulumi.Input['OrganizationType'] type: Not used by Apigee.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: OrganizationArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Creates an Apigee organization. See [Create an Apigee organization](https://cloud.google.com/apigee/docs/api-platform/get-started/create-org).
        Auto-naming is currently not supported for this resource.
        :param str resource_name: The name of the resource.
        :param OrganizationArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: an OrganizationArgs
        # object is unpacked into keyword arguments for _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(OrganizationArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       addons_config: Optional[pulumi.Input[pulumi.InputType['GoogleCloudApigeeV1AddonsConfigArgs']]] = None,
                       analytics_region: Optional[pulumi.Input[str]] = None,
                       attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       authorized_network: Optional[pulumi.Input[str]] = None,
                       billing_type: Optional[pulumi.Input['OrganizationBillingType']] = None,
                       customer_name: Optional[pulumi.Input[str]] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       display_name: Optional[pulumi.Input[str]] = None,
                       parent: Optional[pulumi.Input[str]] = None,
                       portal_disabled: Optional[pulumi.Input[bool]] = None,
                       properties: Optional[pulumi.Input[pulumi.InputType['GoogleCloudApigeeV1PropertiesArgs']]] = None,
                       runtime_database_encryption_key_name: Optional[pulumi.Input[str]] = None,
                       runtime_type: Optional[pulumi.Input['OrganizationRuntimeType']] = None,
                       type: Optional[pulumi.Input['OrganizationType']] = None,
                       __props__=None):
        # Normalize and validate the resource options before registration.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the property bag from the
            # keyword arguments. Required inputs may only be omitted when an
            # existing resource URN is supplied (opts.urn).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = OrganizationArgs.__new__(OrganizationArgs)
            __props__.__dict__["addons_config"] = addons_config
            if analytics_region is None and not opts.urn:
                raise TypeError("Missing required property 'analytics_region'")
            __props__.__dict__["analytics_region"] = analytics_region
            __props__.__dict__["attributes"] = attributes
            __props__.__dict__["authorized_network"] = authorized_network
            __props__.__dict__["billing_type"] = billing_type
            __props__.__dict__["customer_name"] = customer_name
            __props__.__dict__["description"] = description
            __props__.__dict__["display_name"] = display_name
            if parent is None and not opts.urn:
                raise TypeError("Missing required property 'parent'")
            __props__.__dict__["parent"] = parent
            __props__.__dict__["portal_disabled"] = portal_disabled
            __props__.__dict__["properties"] = properties
            __props__.__dict__["runtime_database_encryption_key_name"] = runtime_database_encryption_key_name
            if runtime_type is None and not opts.urn:
                raise TypeError("Missing required property 'runtime_type'")
            __props__.__dict__["runtime_type"] = runtime_type
            __props__.__dict__["type"] = type
            # Output-only properties start as None and are populated by the
            # provider after the create call completes.
            __props__.__dict__["ca_certificate"] = None
            __props__.__dict__["created_at"] = None
            __props__.__dict__["environments"] = None
            __props__.__dict__["expires_at"] = None
            __props__.__dict__["last_modified_at"] = None
            __props__.__dict__["name"] = None
            __props__.__dict__["project"] = None
            __props__.__dict__["state"] = None
        super(Organization, __self__).__init__(
            'google-native:apigee/v1:Organization',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Organization':
        """
        Get an existing Organization resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # Every property starts as None; actual values are read back from
        # the provider using the supplied id.
        __props__ = OrganizationArgs.__new__(OrganizationArgs)
        __props__.__dict__["addons_config"] = None
        __props__.__dict__["analytics_region"] = None
        __props__.__dict__["attributes"] = None
        __props__.__dict__["authorized_network"] = None
        __props__.__dict__["billing_type"] = None
        __props__.__dict__["ca_certificate"] = None
        __props__.__dict__["created_at"] = None
        __props__.__dict__["customer_name"] = None
        __props__.__dict__["description"] = None
        __props__.__dict__["display_name"] = None
        __props__.__dict__["environments"] = None
        __props__.__dict__["expires_at"] = None
        __props__.__dict__["last_modified_at"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["portal_disabled"] = None
        __props__.__dict__["project"] = None
        __props__.__dict__["properties"] = None
        __props__.__dict__["runtime_database_encryption_key_name"] = None
        __props__.__dict__["runtime_type"] = None
        __props__.__dict__["state"] = None
        __props__.__dict__["type"] = None
        return Organization(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="addonsConfig")
    def addons_config(self) -> pulumi.Output['outputs.GoogleCloudApigeeV1AddonsConfigResponse']:
        """
        Addon configurations of the Apigee organization.
        """
        return pulumi.get(self, "addons_config")

    @property
    @pulumi.getter(name="analyticsRegion")
    def analytics_region(self) -> pulumi.Output[str]:
        """
        Primary GCP region for analytics data storage. For valid values, see [Create an Apigee organization](https://cloud.google.com/apigee/docs/api-platform/get-started/create-org).
        """
        return pulumi.get(self, "analytics_region")

    @property
    @pulumi.getter
    def attributes(self) -> pulumi.Output[Sequence[str]]:
        """
        Not used by Apigee.
        """
        return pulumi.get(self, "attributes")

    @property
    @pulumi.getter(name="authorizedNetwork")
    def authorized_network(self) -> pulumi.Output[str]:
        """
        Compute Engine network used for Service Networking to be peered with Apigee runtime instances. See [Getting started with the Service Networking API](https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started). Valid only when [RuntimeType](#RuntimeType) is set to `CLOUD`. The value must be set before the creation of a runtime instance and can be updated only when there are no runtime instances. For example: `default`. Apigee also supports shared VPC (that is, the host network project is not the same as the one that is peering with Apigee). See [Shared VPC overview](https://cloud.google.com/vpc/docs/shared-vpc). To use a shared VPC network, use the following format: `projects/{host-project-id}/{region}/networks/{network-name}`. For example: `projects/my-sharedvpc-host/global/networks/mynetwork` **Note:** Not supported for Apigee hybrid.
        """
        return pulumi.get(self, "authorized_network")

    @property
    @pulumi.getter(name="billingType")
    def billing_type(self) -> pulumi.Output[str]:
        """
        Billing type of the Apigee organization. See [Apigee pricing](https://cloud.google.com/apigee/pricing).
        """
        return pulumi.get(self, "billing_type")

    @property
    @pulumi.getter(name="caCertificate")
    def ca_certificate(self) -> pulumi.Output[str]:
        """
        Base64-encoded public certificate for the root CA of the Apigee organization. Valid only when [RuntimeType](#RuntimeType) is `CLOUD`.
        """
        return pulumi.get(self, "ca_certificate")

    @property
    @pulumi.getter(name="createdAt")
    def created_at(self) -> pulumi.Output[str]:
        """
        Time that the Apigee organization was created in milliseconds since epoch.
        """
        return pulumi.get(self, "created_at")

    @property
    @pulumi.getter(name="customerName")
    def customer_name(self) -> pulumi.Output[str]:
        """
        Not used by Apigee.
        """
        return pulumi.get(self, "customer_name")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[str]:
        """
        Description of the Apigee organization.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[str]:
        """
        Display name for the Apigee organization. Unused, but reserved for future use.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter
    def environments(self) -> pulumi.Output[Sequence[str]]:
        """
        List of environments in the Apigee organization.
        """
        return pulumi.get(self, "environments")

    @property
    @pulumi.getter(name="expiresAt")
    def expires_at(self) -> pulumi.Output[str]:
        """
        Time that the Apigee organization is scheduled for deletion.
        """
        return pulumi.get(self, "expires_at")

    @property
    @pulumi.getter(name="lastModifiedAt")
    def last_modified_at(self) -> pulumi.Output[str]:
        """
        Time that the Apigee organization was last modified in milliseconds since epoch.
        """
        return pulumi.get(self, "last_modified_at")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of the Apigee organization.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="portalDisabled")
    def portal_disabled(self) -> pulumi.Output[bool]:
        """
        Configuration for the Portals settings.
        """
        return pulumi.get(self, "portal_disabled")

    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        Project ID associated with the Apigee organization.
        """
        return pulumi.get(self, "project")

    @property
    @pulumi.getter
    def properties(self) -> pulumi.Output['outputs.GoogleCloudApigeeV1PropertiesResponse']:
        """
        Properties defined in the Apigee organization profile.
        """
        return pulumi.get(self, "properties")

    @property
    @pulumi.getter(name="runtimeDatabaseEncryptionKeyName")
    def runtime_database_encryption_key_name(self) -> pulumi.Output[str]:
        """
        Cloud KMS key name used for encrypting the data that is stored and replicated across runtime instances. Update is not allowed after the organization is created. Required when [RuntimeType](#RuntimeType) is `CLOUD`. If not specified when [RuntimeType](#RuntimeType) is `TRIAL`, a Google-Managed encryption key will be used. For example: "projects/foo/locations/us/keyRings/bar/cryptoKeys/baz". **Note:** Not supported for Apigee hybrid.
        """
        return pulumi.get(self, "runtime_database_encryption_key_name")

    @property
    @pulumi.getter(name="runtimeType")
    def runtime_type(self) -> pulumi.Output[str]:
        """
        Runtime type of the Apigee organization based on the Apigee subscription purchased.
        """
        return pulumi.get(self, "runtime_type")

    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        """
        State of the organization. Values other than ACTIVE means the resource is not ready to use.
        """
        return pulumi.get(self, "state")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Not used by Apigee.
        """
        return pulumi.get(self, "type")
| 52.958115 | 929 | 0.680046 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['OrganizationArgs', 'Organization']
@pulumi.input_type
class OrganizationArgs:
    """
    The set of arguments for constructing an Apigee ``Organization`` resource.

    NOTE(review): machine-generated (comment-stripped copy); ``@pulumi.input_type``
    introspects ``__init__`` and the property accessors, so do not hand-edit.
    """
    def __init__(__self__, *,
                 analytics_region: pulumi.Input[str],
                 parent: pulumi.Input[str],
                 runtime_type: pulumi.Input['OrganizationRuntimeType'],
                 addons_config: Optional[pulumi.Input['GoogleCloudApigeeV1AddonsConfigArgs']] = None,
                 attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 authorized_network: Optional[pulumi.Input[str]] = None,
                 billing_type: Optional[pulumi.Input['OrganizationBillingType']] = None,
                 customer_name: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 portal_disabled: Optional[pulumi.Input[bool]] = None,
                 properties: Optional[pulumi.Input['GoogleCloudApigeeV1PropertiesArgs']] = None,
                 runtime_database_encryption_key_name: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input['OrganizationType']] = None):
        """
        Record the organization arguments; required inputs are stored
        unconditionally, optional ones only when explicitly supplied.
        """
        pulumi.set(__self__, "analytics_region", analytics_region)
        pulumi.set(__self__, "parent", parent)
        pulumi.set(__self__, "runtime_type", runtime_type)
        if addons_config is not None:
            pulumi.set(__self__, "addons_config", addons_config)
        if attributes is not None:
            pulumi.set(__self__, "attributes", attributes)
        if authorized_network is not None:
            pulumi.set(__self__, "authorized_network", authorized_network)
        if billing_type is not None:
            pulumi.set(__self__, "billing_type", billing_type)
        if customer_name is not None:
            pulumi.set(__self__, "customer_name", customer_name)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if portal_disabled is not None:
            pulumi.set(__self__, "portal_disabled", portal_disabled)
        if properties is not None:
            pulumi.set(__self__, "properties", properties)
        if runtime_database_encryption_key_name is not None:
            pulumi.set(__self__, "runtime_database_encryption_key_name", runtime_database_encryption_key_name)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="analyticsRegion")
    def analytics_region(self) -> pulumi.Input[str]:
        """Primary GCP region for analytics data storage."""
        return pulumi.get(self, "analytics_region")

    @analytics_region.setter
    def analytics_region(self, value: pulumi.Input[str]):
        pulumi.set(self, "analytics_region", value)

    @property
    @pulumi.getter
    def parent(self) -> pulumi.Input[str]:
        # NOTE(review): undocumented by the generator; presumably the parent
        # GCP resource for the organization -- confirm against the API.
        return pulumi.get(self, "parent")

    @parent.setter
    def parent(self, value: pulumi.Input[str]):
        pulumi.set(self, "parent", value)

    @property
    @pulumi.getter(name="runtimeType")
    def runtime_type(self) -> pulumi.Input['OrganizationRuntimeType']:
        """Runtime type of the Apigee organization based on the subscription purchased."""
        return pulumi.get(self, "runtime_type")

    @runtime_type.setter
    def runtime_type(self, value: pulumi.Input['OrganizationRuntimeType']):
        pulumi.set(self, "runtime_type", value)

    @property
    @pulumi.getter(name="addonsConfig")
    def addons_config(self) -> Optional[pulumi.Input['GoogleCloudApigeeV1AddonsConfigArgs']]:
        """Addon configurations of the Apigee organization."""
        return pulumi.get(self, "addons_config")

    @addons_config.setter
    def addons_config(self, value: Optional[pulumi.Input['GoogleCloudApigeeV1AddonsConfigArgs']]):
        pulumi.set(self, "addons_config", value)

    @property
    @pulumi.getter
    def attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """Not used by Apigee."""
        return pulumi.get(self, "attributes")

    @attributes.setter
    def attributes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "attributes", value)

    @property
    @pulumi.getter(name="authorizedNetwork")
    def authorized_network(self) -> Optional[pulumi.Input[str]]:
        """Compute Engine network peered with Apigee runtime instances; valid only for CLOUD runtime type."""
        return pulumi.get(self, "authorized_network")

    @authorized_network.setter
    def authorized_network(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "authorized_network", value)

    @property
    @pulumi.getter(name="billingType")
    def billing_type(self) -> Optional[pulumi.Input['OrganizationBillingType']]:
        """Billing type of the Apigee organization."""
        return pulumi.get(self, "billing_type")

    @billing_type.setter
    def billing_type(self, value: Optional[pulumi.Input['OrganizationBillingType']]):
        pulumi.set(self, "billing_type", value)

    @property
    @pulumi.getter(name="customerName")
    def customer_name(self) -> Optional[pulumi.Input[str]]:
        """Not used by Apigee."""
        return pulumi.get(self, "customer_name")

    @customer_name.setter
    def customer_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "customer_name", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Description of the Apigee organization."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """Display name for the Apigee organization; unused, reserved for future use."""
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="portalDisabled")
    def portal_disabled(self) -> Optional[pulumi.Input[bool]]:
        """Configuration for the Portals settings."""
        return pulumi.get(self, "portal_disabled")

    @portal_disabled.setter
    def portal_disabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "portal_disabled", value)

    @property
    @pulumi.getter
    def properties(self) -> Optional[pulumi.Input['GoogleCloudApigeeV1PropertiesArgs']]:
        """Properties defined in the Apigee organization profile."""
        return pulumi.get(self, "properties")

    @properties.setter
    def properties(self, value: Optional[pulumi.Input['GoogleCloudApigeeV1PropertiesArgs']]):
        pulumi.set(self, "properties", value)

    @property
    @pulumi.getter(name="runtimeDatabaseEncryptionKeyName")
    def runtime_database_encryption_key_name(self) -> Optional[pulumi.Input[str]]:
        """Cloud KMS key name used for encrypting runtime data; required when runtime type is CLOUD."""
        return pulumi.get(self, "runtime_database_encryption_key_name")

    @runtime_database_encryption_key_name.setter
    def runtime_database_encryption_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "runtime_database_encryption_key_name", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input['OrganizationType']]:
        """Not used by Apigee."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input['OrganizationType']]):
        pulumi.set(self, "type", value)
class Organization(pulumi.CustomResource):
    """An Apigee organization resource (google-native:apigee/v1:Organization).

    Generated-style resource wrapper: the two ``__init__`` overloads accept
    either an ``OrganizationArgs`` object or individual keyword inputs;
    ``_internal_init`` performs the actual registration with the engine.
    """
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 addons_config: Optional[pulumi.Input[pulumi.InputType['GoogleCloudApigeeV1AddonsConfigArgs']]] = None,
                 analytics_region: Optional[pulumi.Input[str]] = None,
                 attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 authorized_network: Optional[pulumi.Input[str]] = None,
                 billing_type: Optional[pulumi.Input['OrganizationBillingType']] = None,
                 customer_name: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 parent: Optional[pulumi.Input[str]] = None,
                 portal_disabled: Optional[pulumi.Input[bool]] = None,
                 properties: Optional[pulumi.Input[pulumi.InputType['GoogleCloudApigeeV1PropertiesArgs']]] = None,
                 runtime_database_encryption_key_name: Optional[pulumi.Input[str]] = None,
                 runtime_type: Optional[pulumi.Input['OrganizationRuntimeType']] = None,
                 type: Optional[pulumi.Input['OrganizationType']] = None,
                 __props__=None):
        """Create an Organization from individual keyword inputs."""
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: OrganizationArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """Create an Organization from a pre-built OrganizationArgs."""
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads: if an OrganizationArgs was
        # supplied, unpack its fields into keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(OrganizationArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 addons_config: Optional[pulumi.Input[pulumi.InputType['GoogleCloudApigeeV1AddonsConfigArgs']]] = None,
                 analytics_region: Optional[pulumi.Input[str]] = None,
                 attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 authorized_network: Optional[pulumi.Input[str]] = None,
                 billing_type: Optional[pulumi.Input['OrganizationBillingType']] = None,
                 customer_name: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 parent: Optional[pulumi.Input[str]] = None,
                 portal_disabled: Optional[pulumi.Input[bool]] = None,
                 properties: Optional[pulumi.Input[pulumi.InputType['GoogleCloudApigeeV1PropertiesArgs']]] = None,
                 runtime_database_encryption_key_name: Optional[pulumi.Input[str]] = None,
                 runtime_type: Optional[pulumi.Input['OrganizationRuntimeType']] = None,
                 type: Optional[pulumi.Input['OrganizationType']] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the props bag from the inputs.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = OrganizationArgs.__new__(OrganizationArgs)
            __props__.__dict__["addons_config"] = addons_config
            # analytics_region, parent and runtime_type are required unless
            # looking up an existing resource by URN.
            if analytics_region is None and not opts.urn:
                raise TypeError("Missing required property 'analytics_region'")
            __props__.__dict__["analytics_region"] = analytics_region
            __props__.__dict__["attributes"] = attributes
            __props__.__dict__["authorized_network"] = authorized_network
            __props__.__dict__["billing_type"] = billing_type
            __props__.__dict__["customer_name"] = customer_name
            __props__.__dict__["description"] = description
            __props__.__dict__["display_name"] = display_name
            if parent is None and not opts.urn:
                raise TypeError("Missing required property 'parent'")
            __props__.__dict__["parent"] = parent
            __props__.__dict__["portal_disabled"] = portal_disabled
            __props__.__dict__["properties"] = properties
            __props__.__dict__["runtime_database_encryption_key_name"] = runtime_database_encryption_key_name
            if runtime_type is None and not opts.urn:
                raise TypeError("Missing required property 'runtime_type'")
            __props__.__dict__["runtime_type"] = runtime_type
            __props__.__dict__["type"] = type
            # Output-only fields start as None; the provider fills them in.
            __props__.__dict__["ca_certificate"] = None
            __props__.__dict__["created_at"] = None
            __props__.__dict__["environments"] = None
            __props__.__dict__["expires_at"] = None
            __props__.__dict__["last_modified_at"] = None
            __props__.__dict__["name"] = None
            __props__.__dict__["project"] = None
            __props__.__dict__["state"] = None
        super(Organization, __self__).__init__(
            'google-native:apigee/v1:Organization',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Organization':
        """Look up an existing Organization by its provider-assigned id.

        All properties are initialized to None; the engine populates them
        from the live resource state.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = OrganizationArgs.__new__(OrganizationArgs)
        __props__.__dict__["addons_config"] = None
        __props__.__dict__["analytics_region"] = None
        __props__.__dict__["attributes"] = None
        __props__.__dict__["authorized_network"] = None
        __props__.__dict__["billing_type"] = None
        __props__.__dict__["ca_certificate"] = None
        __props__.__dict__["created_at"] = None
        __props__.__dict__["customer_name"] = None
        __props__.__dict__["description"] = None
        __props__.__dict__["display_name"] = None
        __props__.__dict__["environments"] = None
        __props__.__dict__["expires_at"] = None
        __props__.__dict__["last_modified_at"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["portal_disabled"] = None
        __props__.__dict__["project"] = None
        __props__.__dict__["properties"] = None
        __props__.__dict__["runtime_database_encryption_key_name"] = None
        __props__.__dict__["runtime_type"] = None
        __props__.__dict__["state"] = None
        __props__.__dict__["type"] = None
        return Organization(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="addonsConfig")
    def addons_config(self) -> pulumi.Output['outputs.GoogleCloudApigeeV1AddonsConfigResponse']:
        """The resource's 'addons_config' output."""
        return pulumi.get(self, "addons_config")
    @property
    @pulumi.getter(name="analyticsRegion")
    def analytics_region(self) -> pulumi.Output[str]:
        """The resource's 'analytics_region' output."""
        return pulumi.get(self, "analytics_region")
    @property
    @pulumi.getter
    def attributes(self) -> pulumi.Output[Sequence[str]]:
        """The resource's 'attributes' output."""
        return pulumi.get(self, "attributes")
    @property
    @pulumi.getter(name="authorizedNetwork")
    def authorized_network(self) -> pulumi.Output[str]:
        """The resource's 'authorized_network' output."""
        return pulumi.get(self, "authorized_network")
    @property
    @pulumi.getter(name="billingType")
    def billing_type(self) -> pulumi.Output[str]:
        """The resource's 'billing_type' output."""
        return pulumi.get(self, "billing_type")
    @property
    @pulumi.getter(name="caCertificate")
    def ca_certificate(self) -> pulumi.Output[str]:
        """The resource's 'ca_certificate' output."""
        return pulumi.get(self, "ca_certificate")
    @property
    @pulumi.getter(name="createdAt")
    def created_at(self) -> pulumi.Output[str]:
        """The resource's 'created_at' output."""
        return pulumi.get(self, "created_at")
    @property
    @pulumi.getter(name="customerName")
    def customer_name(self) -> pulumi.Output[str]:
        """The resource's 'customer_name' output."""
        return pulumi.get(self, "customer_name")
    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[str]:
        """The resource's 'description' output."""
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[str]:
        """The resource's 'display_name' output."""
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter
    def environments(self) -> pulumi.Output[Sequence[str]]:
        """The resource's 'environments' output."""
        return pulumi.get(self, "environments")
    @property
    @pulumi.getter(name="expiresAt")
    def expires_at(self) -> pulumi.Output[str]:
        """The resource's 'expires_at' output."""
        return pulumi.get(self, "expires_at")
    @property
    @pulumi.getter(name="lastModifiedAt")
    def last_modified_at(self) -> pulumi.Output[str]:
        """The resource's 'last_modified_at' output."""
        return pulumi.get(self, "last_modified_at")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """The resource's 'name' output."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="portalDisabled")
    def portal_disabled(self) -> pulumi.Output[bool]:
        """The resource's 'portal_disabled' output."""
        return pulumi.get(self, "portal_disabled")
    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """The resource's 'project' output."""
        return pulumi.get(self, "project")
    @property
    @pulumi.getter
    def properties(self) -> pulumi.Output['outputs.GoogleCloudApigeeV1PropertiesResponse']:
        """The resource's 'properties' output."""
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter(name="runtimeDatabaseEncryptionKeyName")
    def runtime_database_encryption_key_name(self) -> pulumi.Output[str]:
        """The resource's 'runtime_database_encryption_key_name' output."""
        return pulumi.get(self, "runtime_database_encryption_key_name")
    @property
    @pulumi.getter(name="runtimeType")
    def runtime_type(self) -> pulumi.Output[str]:
        """The resource's 'runtime_type' output."""
        return pulumi.get(self, "runtime_type")
    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        """The resource's 'state' output."""
        return pulumi.get(self, "state")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """The resource's 'type' output."""
        return pulumi.get(self, "type")
| true | true |
f72c08e934eb334e53153f585231168d33ac7fca | 35,118 | py | Python | src/search.py | luisriverag/MagnetMagnet | 99f1805da0fa6399e4b28ab40d5784170bb91e82 | [
"MIT"
] | 44 | 2020-03-14T15:53:11.000Z | 2022-01-31T11:28:59.000Z | src/search.py | luisriverag/MagnetMagnet | 99f1805da0fa6399e4b28ab40d5784170bb91e82 | [
"MIT"
] | 6 | 2020-04-30T15:53:34.000Z | 2021-09-09T14:38:02.000Z | src/search.py | luisriverag/MagnetMagnet | 99f1805da0fa6399e4b28ab40d5784170bb91e82 | [
"MIT"
] | 16 | 2020-04-30T13:18:21.000Z | 2022-01-27T17:50:56.000Z | import math
import re
import pyperclip
import requests
from bs4 import BeautifulSoup
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
import src.mglobals
path = src.mglobals.base_path
class Ui_searchMainWindow(object):
def copied_success_message(self):
successMessageBox = QMessageBox()
successMessageBox.setIcon(QMessageBox.Information)
successMessageBox.setText(
"Magnet links have been successfully copied to the clipboard.")
successMessageBox.setWindowTitle("Task Completed!")
successMessageBox.setStandardButtons(QMessageBox.Ok)
icon = QIcon()
icon.addPixmap(QPixmap(src.mglobals.icon), QIcon.Normal, QIcon.Off)
successMessageBox.setWindowIcon(icon)
successMessageBox.exec_()
def copy(self):
choice_row = self.tableTableWidget.currentRow()
choice_magnet = self.magnets[choice_row]
pyperclip.copy(choice_magnet)
self.copied_success_message()
def callback(self):
query = self.queryLineEdit.text()
limit = self.limitSlider.value()
def resize():
self.tableTableWidget.resizeColumnToContents(0)
self.tableTableWidget.resizeColumnToContents(1)
self.tableTableWidget.resizeColumnToContents(2)
self.tableTableWidget.resizeColumnToContents(3)
self.tableTableWidget.resizeColumnToContents(4)
def searched_success_message():
successMessageBox = QMessageBox()
successMessageBox.setIcon(QMessageBox.Information)
successMessageBox.setText(
"Magnet links have been successfully scraped.")
successMessageBox.setWindowTitle("Task Completed!")
successMessageBox.setStandardButtons(QMessageBox.Ok)
icon = QIcon()
icon.addPixmap(QPixmap(src.mglobals.icon), QIcon.Normal, QIcon.Off)
successMessageBox.setWindowIcon(icon)
successMessageBox.exec_()
def error_message():
errorMessageBox = QMessageBox()
errorMessageBox.setIcon(QMessageBox.Information)
errorMessageBox.setText(
"Something went wrong! Please inform me through GitHub!")
errorMessageBox.setWindowTitle("Error!")
errorMessageBox.setStandardButtons(QMessageBox.Ok)
icon = QIcon()
icon.addPixmap(QPixmap(src.mglobals.icon), QIcon.Normal, QIcon.Off)
errorMessageBox.setWindowIcon(icon)
errorMessageBox.exec_()
def x1377():
try:
main_link = "https://1377x.to/search/" + query + '/1/'
main_request = requests.get(
main_link, headers={'User-Agent': 'Mozilla/5.0'})
main_source = main_request.content
main_soup = BeautifulSoup(main_source, 'lxml')
limit_counter = 0
page_links_soup = main_soup.findAll(
'a', attrs={'href': re.compile("^/torrent/")})
for page_link in page_links_soup:
if limit_counter < limit:
page_link = "https://1377x.to" + page_link.get('href')
page_request = requests.get(
page_link, headers={'User-Agent': 'Mozilla/5.0'})
page_source = page_request.content
page_soup = BeautifulSoup(page_source, 'lxml')
title = (page_soup.find('h1').text).replace("\n", " ")
seeder = page_soup.find('span', class_="seeds").text
leecher = page_soup.find('span', class_="leeches").text
size = page_soup.findAll('span')[15].text
date = page_soup.findAll('span')[19].text
magnet = page_soup.find(
'a', attrs={'href': re.compile("^magnet:?")}).get('href')
row_position = self.tableTableWidget.rowCount()
self.tableTableWidget.insertRow(row_position)
self.tableTableWidget.setItem(
row_position, 0, QTableWidgetItem(title))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(seeder))
self.tableTableWidget.setItem(row_position, 1, item)
self.tableTableWidget.setItem(
row_position, 2, QTableWidgetItem(leecher))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(leecher))
self.tableTableWidget.setItem(row_position, 2, item)
self.tableTableWidget.setItem(
row_position, 3, QTableWidgetItem(size))
self.tableTableWidget.setItem(
row_position, 4, QTableWidgetItem(date))
self.tableTableWidget.setItem(
row_position, 5, QTableWidgetItem("1377x"))
self.magnets.append(magnet)
limit_counter = limit_counter + 1
except:
error_message()
def kat():
try:
main_link = "https://kat.rip/usearch/" + query
main_request = requests.get(
main_link, headers={'User-Agent': 'Mozilla/5.0'})
main_source = main_request.content
main_soup = BeautifulSoup(main_source, 'lxml')
titles_soup = main_soup.findAll('a', class_="cellMainLink")
seeders_soup = main_soup.findAll('td', class_="green center")
leechers_soup = main_soup.findAll(
'td', class_="red lasttd center")
sizes_soup = main_soup.findAll('td', class_="nobr center")
dates_soup = main_soup.findAll(
'td', class_="center", title=True)
magnets_soup = main_soup.findAll(
'a', attrs={'href': re.compile("^magnet:?"), 'title': "Torrent magnet link"})
titles = []
seeders = []
leechers = []
sizes = []
dates = []
limit_counter = 0
for title in titles_soup:
if limit_counter < limit:
titles.append(title.text)
limit_counter = limit_counter + 1
limit_counter = 0
for seeder in seeders_soup:
if limit_counter < limit:
seeders.append(seeder.text)
limit_counter = limit_counter + 1
limit_counter = 0
for leecher in leechers_soup:
if limit_counter < limit:
leechers.append(leecher.text)
limit_counter = limit_counter + 1
limit_counter = 0
for size in sizes_soup:
if limit_counter < limit:
sizes.append(size.text)
limit_counter = limit_counter + 1
limit_counter = 0
for date in dates_soup:
if limit_counter < limit:
dates.append(date.text)
limit_counter = limit_counter + 1
limit_counter = 0
count1 = 0
for magnet in magnets_soup:
if limit_counter < limit:
self.magnets.append(magnet.get('href'))
limit_counter = limit_counter + 1
count1 = count1 + 1
count2 = 0
while count2 < count1:
row_position = self.tableTableWidget.rowCount()
self.tableTableWidget.insertRow(row_position)
self.tableTableWidget.setItem(
row_position, 0, QTableWidgetItem(titles[count2]))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(seeders[count2]))
self.tableTableWidget.setItem(row_position, 1, item)
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(leechers[count2]))
self.tableTableWidget.setItem(row_position, 2, item)
self.tableTableWidget.setItem(
row_position, 3, QTableWidgetItem(sizes[count2]))
self.tableTableWidget.setItem(
row_position, 4, QTableWidgetItem(dates[count2]))
self.tableTableWidget.setItem(
row_position, 5, QTableWidgetItem("KAT"))
count2 = count2 + 1
except:
error_message()
def nyaa():
try:
main_link = 'https://nyaa.si/?q=' + query
main_request = requests.get(
main_link, headers={'User-Agent': 'Mozilla/5.0'})
main_source = main_request.content
main_soup = BeautifulSoup(main_source, 'lxml')
titles_soup = main_soup.findAll('a', title=True, class_=False, attrs={
'href': re.compile("^/view/")})
seeders_soup = main_soup.findAll('td', class_="text-center")
leechers_soup = main_soup.findAll('td', class_="text-center")
sizes_soup = main_soup.findAll('td', class_="text-center")
dates_soup = main_soup.findAll('td', class_="text-center")
magnets_soup = main_soup.findAll(
'a', attrs={'href': re.compile("^magnet:?")})
titles = []
seeders = []
leechers = []
sizes = []
dates = []
limit_counter = 0
for title in titles_soup:
if limit_counter < limit:
titles.append(title.text)
limit_counter = limit_counter + 1
limit_counter = 0
for seeder in seeders_soup:
if limit_counter < limit*6:
seeders.append(seeder.text)
limit_counter = limit_counter + 1
limit_counter = 0
for leecher in leechers_soup:
if limit_counter < limit*6:
leechers.append(leecher.text)
limit_counter = limit_counter + 1
limit_counter = 0
for size in sizes_soup:
if limit_counter < limit*6:
sizes.append(size.text)
limit_counter = limit_counter + 1
limit_counter = 0
for date in dates_soup:
if limit_counter < limit*6:
dates.append(date.text)
limit_counter = limit_counter + 1
limit_counter = 0
count1 = 0
for magnet in magnets_soup:
if limit_counter < limit:
self.magnets.append(magnet.get('href'))
limit_counter = limit_counter + 1
count1 = count1 + 1
seeder1 = seeders[3]
seeders.pop(0)
seeders.pop(1)
seeders.pop(2)
seeders.pop(3)
seeders = seeders[6-1::6]
seeders.insert(0, seeder1)
leecher1 = leechers[4]
leechers.pop(0)
leechers.pop(1)
leechers.pop(2)
leechers.pop(3)
leechers.pop(4)
leechers = leechers[6-1::6]
leechers.insert(0, leecher1)
size1 = sizes[1]
sizes.pop(0)
sizes.pop(1)
sizes = sizes[6-1::6]
sizes.insert(0, size1)
date1 = dates[2]
dates.pop(0)
dates.pop(1)
dates.pop(2)
dates = dates[6-1::6]
dates.insert(0, date1)
count2 = 0
while count2 < count1:
row_position = self.tableTableWidget.rowCount()
self.tableTableWidget.insertRow(row_position)
self.tableTableWidget.setItem(
row_position, 0, QTableWidgetItem(titles[count2]))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(seeders[count2]))
self.tableTableWidget.setItem(row_position, 1, item)
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(leechers[count2]))
self.tableTableWidget.setItem(row_position, 2, item)
self.tableTableWidget.setItem(
row_position, 3, QTableWidgetItem(sizes[count2]))
self.tableTableWidget.setItem(
row_position, 4, QTableWidgetItem(dates[count2]))
self.tableTableWidget.setItem(
row_position, 5, QTableWidgetItem("Nyaa"))
count2 = count2 + 1
except:
error_message()
def rarbg():
try:
token_url = "https://torrentapi.org/pubapi_v2.php?get_token=get_token&app_id=MagnetMagnet"
token_request = requests.get(token_url, headers={'User-Agent': 'Mozilla/5.0'})
token = token_request.json()["token"]
main_link = 'https://torrentapi.org/pubapi_v2.php?mode=search&search_string=' + \
query + '&token=' + token + '&format=json_extended&app_id=MagnetMagnet'
main_request = requests.get(
main_link, headers={'User-Agent': 'Mozilla/5.0'})
main_source = main_request.json()["torrent_results"]
limit_counter = 0
titles = []
seeders = []
leechers = []
sizes = []
dates = []
for item in main_source:
if limit_counter < limit:
def convert_size(size):
if size == 0:
return "0B"
size_name = ("B", "KB", "MB", "GB",
"TB", "PB", "EB", "ZB", "YB")
i = int(math.floor(math.log(size, 1024)))
p = math.pow(1024, i)
s = round(size / p, 2)
size = "%s %s" % (s, size_name[i])
return size
titles.append(item["title"])
seeders.append(item["seeders"])
leechers.append(item["leechers"])
sizes.append(convert_size(item["size"]))
dates.append(item["pubdate"])
self.magnets.append(item["download"])
limit_counter += 1
else:
pass
print(titles)
count2 = 0
while count2 < limit_counter:
row_position = self.tableTableWidget.rowCount()
self.tableTableWidget.insertRow(row_position)
self.tableTableWidget.setItem(
row_position, 0, QTableWidgetItem(titles[count2]))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(seeders[count2]))
self.tableTableWidget.setItem(row_position, 1, item)
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(leechers[count2]))
self.tableTableWidget.setItem(row_position, 2, item)
self.tableTableWidget.setItem(
row_position, 3, QTableWidgetItem(sizes[count2]))
self.tableTableWidget.setItem(
row_position, 4, QTableWidgetItem(dates[count2]))
self.tableTableWidget.setItem(
row_position, 5, QTableWidgetItem("RARBG"))
count2 = count2 + 1
except:
error_message()
def tpb():
try:
main_link = 'https://tpb.party/search/' + query + '/1/99/0/'
main_request = requests.get(
main_link, headers={'User-Agent': 'Mozilla/5.0'})
main_source = main_request.content
main_soup = BeautifulSoup(main_source, 'lxml')
titles_soup = main_soup.findAll('div', class_="detName")
seeders_soup = main_soup.findAll(
'td', attrs={'align': "right"})
seeders_soup = seeders_soup[0::2]
leechers_soup = main_soup.findAll(
'td', attrs={'align': "right"})
leechers_soup = leechers_soup[1::2]
sizes_soup = main_soup.findAll('font', class_="detDesc")
dates_soup = main_soup.findAll('font', class_="detDesc")
magnets_soup = main_soup.findAll(
'a', attrs={'href': re.compile("^magnet")})
titles = []
seeders = []
leechers = []
sizes = []
dates = []
limit_counter = 0
for title in titles_soup:
if limit_counter < limit:
title = title.text.replace("\n", "")
titles.append(title)
limit_counter = limit_counter + 1
limit_counter = 0
for seeder in seeders_soup:
if limit_counter < limit:
seeders.append(seeder.text)
limit_counter = limit_counter + 1
limit_counter = 0
for leecher in leechers_soup:
if limit_counter < limit:
leechers.append(leecher.text)
limit_counter = limit_counter + 1
limit_counter = 0
for size in sizes_soup:
if limit_counter < limit:
size = size.text.split(", ")
size = size[1].replace("Size ", "")
sizes.append(size)
limit_counter = limit_counter + 1
limit_counter = 0
for date in dates_soup:
if limit_counter < limit:
date = date.text.split(", ")
date = date[0].replace("Uploaded ", "")
dates.append(date)
limit_counter = limit_counter + 1
count1 = 0
limit_counter = 0
for magnet in magnets_soup:
if limit_counter < limit:
self.magnets.append(magnet.get('href'))
count1 = count1 + 1
limit_counter = limit_counter + 1
count2 = 0
while count2 < count1:
row_position = self.tableTableWidget.rowCount()
self.tableTableWidget.insertRow(row_position)
self.tableTableWidget.setItem(
row_position, 0, QTableWidgetItem(titles[count2]))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(seeders[count2]))
self.tableTableWidget.setItem(row_position, 1, item)
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(leechers[count2]))
self.tableTableWidget.setItem(row_position, 2, item)
self.tableTableWidget.setItem(
row_position, 3, QTableWidgetItem(sizes[count2]))
self.tableTableWidget.setItem(
row_position, 4, QTableWidgetItem(dates[count2]))
self.tableTableWidget.setItem(
row_position, 5, QTableWidgetItem("TPB"))
count2 = count2 + 1
except:
error_message()
if (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
nyaa()
rarbg()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
nyaa()
rarbg()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
nyaa()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
rarbg()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
nyaa()
rarbg()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
nyaa()
rarbg()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
nyaa()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
rarbg()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
nyaa()
rarbg()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
nyaa()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
rarbg()
tpb()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
nyaa()
rarbg()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
nyaa()
tpb()
resize()
searched_success_message()
elif (self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
nyaa()
rarbg()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.nyaaCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
nyaa()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
rarbg()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
tpb()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
nyaa()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
rarbg()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
tpb()
resize()
searched_success_message()
elif (self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
nyaa()
rarbg()
resize()
searched_success_message()
elif (self.nyaaCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
nyaa()
tpb()
resize()
searched_success_message()
elif (self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
rarbg()
tpb()
resize()
searched_success_message()
elif self.x1377CheckBox.isChecked():
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
resize()
searched_success_message()
elif self.katCheckBox.isChecked():
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
resize()
searched_success_message()
elif self.nyaaCheckBox.isChecked():
self.tableTableWidget.setRowCount(0)
self.magnets = []
nyaa()
resize()
searched_success_message()
elif self.rarbgCheckBox.isChecked():
self.tableTableWidget.setRowCount(0)
self.magnets = []
rarbg()
resize()
searched_success_message()
elif self.tpbCheckBox.isChecked():
self.tableTableWidget.setRowCount(0)
self.magnets = []
tpb()
resize()
searched_success_message()
    def setupUi(self, searchMainWindow):
        """Build the search window's widgets (Qt Designer-style boilerplate).

        Creates the query box, five source checkboxes, the sortable results
        table, the per-source result-limit slider (0-20) and the search
        button, then wires the signals.
        """
        searchMainWindow.setObjectName("searchMainWindow")
        searchMainWindow.resize(1500, 400)
        font = QFont()
        font.setFamily("Bahnschrift Light")
        font.setPointSize(11)
        searchMainWindow.setFont(font)
        icon = QIcon()
        icon.addPixmap(QPixmap(src.mglobals.icon), QIcon.Normal, QIcon.Off)
        searchMainWindow.setWindowIcon(icon)
        self.centralwidget = QWidget(searchMainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Search query input.
        self.queryLineEdit = QLineEdit(self.centralwidget)
        self.queryLineEdit.setGeometry(QRect(30, 20, 200, 20))
        font = QFont()
        font.setPointSize(9)
        self.queryLineEdit.setFont(font)
        self.queryLineEdit.setObjectName("queryLineEdit")
        self.x1377CheckBox = QCheckBox(self.centralwidget)
        self.x1377CheckBox.setGeometry(QRect(30, 70, 90, 20))
        self.x1377CheckBox.setObjectName("x1377CheckBox")
        # Results table: Title / Seeders / Leechers / Size / Date / Source.
        self.tableTableWidget = QTableWidget(self.centralwidget)
        self.tableTableWidget.setGeometry(QRect(260, 20, 1161, 360))
        self.tableTableWidget.setObjectName("tableTableWidget")
        self.tableTableWidget.setColumnCount(6)
        self.tableTableWidget.setRowCount(0)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(0, item)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(1, item)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(2, item)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(3, item)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(4, item)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(5, item)
        # Clicking a header sorts the table (see NOTE(review) in copy()).
        self.tableTableWidget.setSortingEnabled(True)
        self.katCheckBox = QCheckBox(self.centralwidget)
        self.katCheckBox.setGeometry(QRect(30, 110, 90, 20))
        self.katCheckBox.setObjectName("katCheckBox")
        self.nyaaCheckBox = QCheckBox(self.centralwidget)
        self.nyaaCheckBox.setGeometry(QRect(30, 150, 90, 20))
        self.nyaaCheckBox.setObjectName("nyaaCheckBox")
        self.rarbgCheckBox = QCheckBox(self.centralwidget)
        self.rarbgCheckBox.setGeometry(QRect(30, 190, 90, 20))
        self.rarbgCheckBox.setObjectName("rarbgCheckBox")
        self.tpbCheckBox = QCheckBox(self.centralwidget)
        self.tpbCheckBox.setGeometry(QRect(30, 230, 90, 20))
        self.tpbCheckBox.setObjectName("tpbCheckBox")
        self.searchPushButton = QPushButton(self.centralwidget)
        self.searchPushButton.setGeometry(QRect(30, 350, 90, 30))
        font = QFont()
        font.setPointSize(8)
        self.searchPushButton.setFont(font)
        self.searchPushButton.setObjectName("searchPushButton")
        # Vertical slider capping results per source (0-20, default 10).
        self.limitSlider = QSlider(self.centralwidget)
        self.limitSlider.setGeometry(QRect(1450, 40, 22, 320))
        self.limitSlider.setMaximum(20)
        self.limitSlider.setPageStep(2)
        self.limitSlider.setSliderPosition(10)
        self.limitSlider.setOrientation(Qt.Vertical)
        self.limitSlider.setObjectName("limitSlider")
        # Min/max labels flanking the slider.
        self.minimumLabel = QLabel(self.centralwidget)
        self.minimumLabel.setGeometry(QRect(1452, 365, 16, 16))
        font = QFont()
        font.setPointSize(9)
        self.minimumLabel.setFont(font)
        self.minimumLabel.setAlignment(Qt.AlignCenter)
        self.minimumLabel.setObjectName("minimumLabel")
        self.maximumLabel = QLabel(self.centralwidget)
        self.maximumLabel.setGeometry(QRect(1452, 20, 16, 16))
        font = QFont()
        font.setPointSize(9)
        self.maximumLabel.setFont(font)
        self.maximumLabel.setAlignment(Qt.AlignCenter)
        self.maximumLabel.setObjectName("maximumLabel")
        searchMainWindow.setCentralWidget(self.centralwidget)
        # Search button runs the scrape; clicking any table cell copies
        # that row's magnet link.
        self.searchPushButton.clicked.connect(self.callback)
        self.tableTableWidget.itemClicked.connect(self.copy)
        self.retranslateUi(searchMainWindow)
        QMetaObject.connectSlotsByName(searchMainWindow)
    def retranslateUi(self, searchMainWindow):
        """Install all user-visible strings: window title, source checkbox
        labels, result-table column headers, and the slider min/max labels."""
        _translate = QCoreApplication.translate
        searchMainWindow.setWindowTitle(_translate(
            "searchMainWindow", "MagnetMagnet - Search"))
        self.x1377CheckBox.setText(_translate("searchMainWindow", "1377x"))
        # Six result-table column headers, in display order.
        item = self.tableTableWidget.horizontalHeaderItem(0)
        item.setText(_translate("searchMainWindow", "Titles"))
        item = self.tableTableWidget.horizontalHeaderItem(1)
        item.setText(_translate("searchMainWindow", "Seeders"))
        item = self.tableTableWidget.horizontalHeaderItem(2)
        item.setText(_translate("searchMainWindow", "Leechers"))
        item = self.tableTableWidget.horizontalHeaderItem(3)
        item.setText(_translate("searchMainWindow", "Sizes"))
        item = self.tableTableWidget.horizontalHeaderItem(4)
        item.setText(_translate("searchMainWindow", "Dates"))
        item = self.tableTableWidget.horizontalHeaderItem(5)
        item.setText(_translate("searchMainWindow", "Source"))
        self.katCheckBox.setText(_translate("searchMainWindow", "KAT"))
        self.nyaaCheckBox.setText(_translate("searchMainWindow", "Nyaa"))
        self.rarbgCheckBox.setText(_translate("searchMainWindow", "RARBG"))
        self.tpbCheckBox.setText(_translate("searchMainWindow", "TPB"))
        self.searchPushButton.setText(_translate("searchMainWindow", "Search"))
        # Labels for the result-limit slider endpoints (0..20).
        self.minimumLabel.setText(_translate("searchMainWindow", "0"))
        self.maximumLabel.setText(_translate("searchMainWindow", "20"))
| 44.173585 | 179 | 0.539154 | import math
import re
import pyperclip
import requests
from bs4 import BeautifulSoup
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
import src.mglobals
path = src.mglobals.base_path
class Ui_searchMainWindow(object):
def copied_success_message(self):
successMessageBox = QMessageBox()
successMessageBox.setIcon(QMessageBox.Information)
successMessageBox.setText(
"Magnet links have been successfully copied to the clipboard.")
successMessageBox.setWindowTitle("Task Completed!")
successMessageBox.setStandardButtons(QMessageBox.Ok)
icon = QIcon()
icon.addPixmap(QPixmap(src.mglobals.icon), QIcon.Normal, QIcon.Off)
successMessageBox.setWindowIcon(icon)
successMessageBox.exec_()
def copy(self):
choice_row = self.tableTableWidget.currentRow()
choice_magnet = self.magnets[choice_row]
pyperclip.copy(choice_magnet)
self.copied_success_message()
def callback(self):
query = self.queryLineEdit.text()
limit = self.limitSlider.value()
def resize():
self.tableTableWidget.resizeColumnToContents(0)
self.tableTableWidget.resizeColumnToContents(1)
self.tableTableWidget.resizeColumnToContents(2)
self.tableTableWidget.resizeColumnToContents(3)
self.tableTableWidget.resizeColumnToContents(4)
def searched_success_message():
successMessageBox = QMessageBox()
successMessageBox.setIcon(QMessageBox.Information)
successMessageBox.setText(
"Magnet links have been successfully scraped.")
successMessageBox.setWindowTitle("Task Completed!")
successMessageBox.setStandardButtons(QMessageBox.Ok)
icon = QIcon()
icon.addPixmap(QPixmap(src.mglobals.icon), QIcon.Normal, QIcon.Off)
successMessageBox.setWindowIcon(icon)
successMessageBox.exec_()
def error_message():
errorMessageBox = QMessageBox()
errorMessageBox.setIcon(QMessageBox.Information)
errorMessageBox.setText(
"Something went wrong! Please inform me through GitHub!")
errorMessageBox.setWindowTitle("Error!")
errorMessageBox.setStandardButtons(QMessageBox.Ok)
icon = QIcon()
icon.addPixmap(QPixmap(src.mglobals.icon), QIcon.Normal, QIcon.Off)
errorMessageBox.setWindowIcon(icon)
errorMessageBox.exec_()
def x1377():
try:
main_link = "https://1377x.to/search/" + query + '/1/'
main_request = requests.get(
main_link, headers={'User-Agent': 'Mozilla/5.0'})
main_source = main_request.content
main_soup = BeautifulSoup(main_source, 'lxml')
limit_counter = 0
page_links_soup = main_soup.findAll(
'a', attrs={'href': re.compile("^/torrent/")})
for page_link in page_links_soup:
if limit_counter < limit:
page_link = "https://1377x.to" + page_link.get('href')
page_request = requests.get(
page_link, headers={'User-Agent': 'Mozilla/5.0'})
page_source = page_request.content
page_soup = BeautifulSoup(page_source, 'lxml')
title = (page_soup.find('h1').text).replace("\n", " ")
seeder = page_soup.find('span', class_="seeds").text
leecher = page_soup.find('span', class_="leeches").text
size = page_soup.findAll('span')[15].text
date = page_soup.findAll('span')[19].text
magnet = page_soup.find(
'a', attrs={'href': re.compile("^magnet:?")}).get('href')
row_position = self.tableTableWidget.rowCount()
self.tableTableWidget.insertRow(row_position)
self.tableTableWidget.setItem(
row_position, 0, QTableWidgetItem(title))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(seeder))
self.tableTableWidget.setItem(row_position, 1, item)
self.tableTableWidget.setItem(
row_position, 2, QTableWidgetItem(leecher))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(leecher))
self.tableTableWidget.setItem(row_position, 2, item)
self.tableTableWidget.setItem(
row_position, 3, QTableWidgetItem(size))
self.tableTableWidget.setItem(
row_position, 4, QTableWidgetItem(date))
self.tableTableWidget.setItem(
row_position, 5, QTableWidgetItem("1377x"))
self.magnets.append(magnet)
limit_counter = limit_counter + 1
except:
error_message()
def kat():
try:
main_link = "https://kat.rip/usearch/" + query
main_request = requests.get(
main_link, headers={'User-Agent': 'Mozilla/5.0'})
main_source = main_request.content
main_soup = BeautifulSoup(main_source, 'lxml')
titles_soup = main_soup.findAll('a', class_="cellMainLink")
seeders_soup = main_soup.findAll('td', class_="green center")
leechers_soup = main_soup.findAll(
'td', class_="red lasttd center")
sizes_soup = main_soup.findAll('td', class_="nobr center")
dates_soup = main_soup.findAll(
'td', class_="center", title=True)
magnets_soup = main_soup.findAll(
'a', attrs={'href': re.compile("^magnet:?"), 'title': "Torrent magnet link"})
titles = []
seeders = []
leechers = []
sizes = []
dates = []
limit_counter = 0
for title in titles_soup:
if limit_counter < limit:
titles.append(title.text)
limit_counter = limit_counter + 1
limit_counter = 0
for seeder in seeders_soup:
if limit_counter < limit:
seeders.append(seeder.text)
limit_counter = limit_counter + 1
limit_counter = 0
for leecher in leechers_soup:
if limit_counter < limit:
leechers.append(leecher.text)
limit_counter = limit_counter + 1
limit_counter = 0
for size in sizes_soup:
if limit_counter < limit:
sizes.append(size.text)
limit_counter = limit_counter + 1
limit_counter = 0
for date in dates_soup:
if limit_counter < limit:
dates.append(date.text)
limit_counter = limit_counter + 1
limit_counter = 0
count1 = 0
for magnet in magnets_soup:
if limit_counter < limit:
self.magnets.append(magnet.get('href'))
limit_counter = limit_counter + 1
count1 = count1 + 1
count2 = 0
while count2 < count1:
row_position = self.tableTableWidget.rowCount()
self.tableTableWidget.insertRow(row_position)
self.tableTableWidget.setItem(
row_position, 0, QTableWidgetItem(titles[count2]))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(seeders[count2]))
self.tableTableWidget.setItem(row_position, 1, item)
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(leechers[count2]))
self.tableTableWidget.setItem(row_position, 2, item)
self.tableTableWidget.setItem(
row_position, 3, QTableWidgetItem(sizes[count2]))
self.tableTableWidget.setItem(
row_position, 4, QTableWidgetItem(dates[count2]))
self.tableTableWidget.setItem(
row_position, 5, QTableWidgetItem("KAT"))
count2 = count2 + 1
except:
error_message()
def nyaa():
try:
main_link = 'https://nyaa.si/?q=' + query
main_request = requests.get(
main_link, headers={'User-Agent': 'Mozilla/5.0'})
main_source = main_request.content
main_soup = BeautifulSoup(main_source, 'lxml')
titles_soup = main_soup.findAll('a', title=True, class_=False, attrs={
'href': re.compile("^/view/")})
seeders_soup = main_soup.findAll('td', class_="text-center")
leechers_soup = main_soup.findAll('td', class_="text-center")
sizes_soup = main_soup.findAll('td', class_="text-center")
dates_soup = main_soup.findAll('td', class_="text-center")
magnets_soup = main_soup.findAll(
'a', attrs={'href': re.compile("^magnet:?")})
titles = []
seeders = []
leechers = []
sizes = []
dates = []
limit_counter = 0
for title in titles_soup:
if limit_counter < limit:
titles.append(title.text)
limit_counter = limit_counter + 1
limit_counter = 0
for seeder in seeders_soup:
if limit_counter < limit*6:
seeders.append(seeder.text)
limit_counter = limit_counter + 1
limit_counter = 0
for leecher in leechers_soup:
if limit_counter < limit*6:
leechers.append(leecher.text)
limit_counter = limit_counter + 1
limit_counter = 0
for size in sizes_soup:
if limit_counter < limit*6:
sizes.append(size.text)
limit_counter = limit_counter + 1
limit_counter = 0
for date in dates_soup:
if limit_counter < limit*6:
dates.append(date.text)
limit_counter = limit_counter + 1
limit_counter = 0
count1 = 0
for magnet in magnets_soup:
if limit_counter < limit:
self.magnets.append(magnet.get('href'))
limit_counter = limit_counter + 1
count1 = count1 + 1
seeder1 = seeders[3]
seeders.pop(0)
seeders.pop(1)
seeders.pop(2)
seeders.pop(3)
seeders = seeders[6-1::6]
seeders.insert(0, seeder1)
leecher1 = leechers[4]
leechers.pop(0)
leechers.pop(1)
leechers.pop(2)
leechers.pop(3)
leechers.pop(4)
leechers = leechers[6-1::6]
leechers.insert(0, leecher1)
size1 = sizes[1]
sizes.pop(0)
sizes.pop(1)
sizes = sizes[6-1::6]
sizes.insert(0, size1)
date1 = dates[2]
dates.pop(0)
dates.pop(1)
dates.pop(2)
dates = dates[6-1::6]
dates.insert(0, date1)
count2 = 0
while count2 < count1:
row_position = self.tableTableWidget.rowCount()
self.tableTableWidget.insertRow(row_position)
self.tableTableWidget.setItem(
row_position, 0, QTableWidgetItem(titles[count2]))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(seeders[count2]))
self.tableTableWidget.setItem(row_position, 1, item)
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(leechers[count2]))
self.tableTableWidget.setItem(row_position, 2, item)
self.tableTableWidget.setItem(
row_position, 3, QTableWidgetItem(sizes[count2]))
self.tableTableWidget.setItem(
row_position, 4, QTableWidgetItem(dates[count2]))
self.tableTableWidget.setItem(
row_position, 5, QTableWidgetItem("Nyaa"))
count2 = count2 + 1
except:
error_message()
def rarbg():
try:
token_url = "https://torrentapi.org/pubapi_v2.php?get_token=get_token&app_id=MagnetMagnet"
token_request = requests.get(token_url, headers={'User-Agent': 'Mozilla/5.0'})
token = token_request.json()["token"]
main_link = 'https://torrentapi.org/pubapi_v2.php?mode=search&search_string=' + \
query + '&token=' + token + '&format=json_extended&app_id=MagnetMagnet'
main_request = requests.get(
main_link, headers={'User-Agent': 'Mozilla/5.0'})
main_source = main_request.json()["torrent_results"]
limit_counter = 0
titles = []
seeders = []
leechers = []
sizes = []
dates = []
for item in main_source:
if limit_counter < limit:
def convert_size(size):
if size == 0:
return "0B"
size_name = ("B", "KB", "MB", "GB",
"TB", "PB", "EB", "ZB", "YB")
i = int(math.floor(math.log(size, 1024)))
p = math.pow(1024, i)
s = round(size / p, 2)
size = "%s %s" % (s, size_name[i])
return size
titles.append(item["title"])
seeders.append(item["seeders"])
leechers.append(item["leechers"])
sizes.append(convert_size(item["size"]))
dates.append(item["pubdate"])
self.magnets.append(item["download"])
limit_counter += 1
else:
pass
print(titles)
count2 = 0
while count2 < limit_counter:
row_position = self.tableTableWidget.rowCount()
self.tableTableWidget.insertRow(row_position)
self.tableTableWidget.setItem(
row_position, 0, QTableWidgetItem(titles[count2]))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(seeders[count2]))
self.tableTableWidget.setItem(row_position, 1, item)
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(leechers[count2]))
self.tableTableWidget.setItem(row_position, 2, item)
self.tableTableWidget.setItem(
row_position, 3, QTableWidgetItem(sizes[count2]))
self.tableTableWidget.setItem(
row_position, 4, QTableWidgetItem(dates[count2]))
self.tableTableWidget.setItem(
row_position, 5, QTableWidgetItem("RARBG"))
count2 = count2 + 1
except:
error_message()
def tpb():
try:
main_link = 'https://tpb.party/search/' + query + '/1/99/0/'
main_request = requests.get(
main_link, headers={'User-Agent': 'Mozilla/5.0'})
main_source = main_request.content
main_soup = BeautifulSoup(main_source, 'lxml')
titles_soup = main_soup.findAll('div', class_="detName")
seeders_soup = main_soup.findAll(
'td', attrs={'align': "right"})
seeders_soup = seeders_soup[0::2]
leechers_soup = main_soup.findAll(
'td', attrs={'align': "right"})
leechers_soup = leechers_soup[1::2]
sizes_soup = main_soup.findAll('font', class_="detDesc")
dates_soup = main_soup.findAll('font', class_="detDesc")
magnets_soup = main_soup.findAll(
'a', attrs={'href': re.compile("^magnet")})
titles = []
seeders = []
leechers = []
sizes = []
dates = []
limit_counter = 0
for title in titles_soup:
if limit_counter < limit:
title = title.text.replace("\n", "")
titles.append(title)
limit_counter = limit_counter + 1
limit_counter = 0
for seeder in seeders_soup:
if limit_counter < limit:
seeders.append(seeder.text)
limit_counter = limit_counter + 1
limit_counter = 0
for leecher in leechers_soup:
if limit_counter < limit:
leechers.append(leecher.text)
limit_counter = limit_counter + 1
limit_counter = 0
for size in sizes_soup:
if limit_counter < limit:
size = size.text.split(", ")
size = size[1].replace("Size ", "")
sizes.append(size)
limit_counter = limit_counter + 1
limit_counter = 0
for date in dates_soup:
if limit_counter < limit:
date = date.text.split(", ")
date = date[0].replace("Uploaded ", "")
dates.append(date)
limit_counter = limit_counter + 1
count1 = 0
limit_counter = 0
for magnet in magnets_soup:
if limit_counter < limit:
self.magnets.append(magnet.get('href'))
count1 = count1 + 1
limit_counter = limit_counter + 1
count2 = 0
while count2 < count1:
row_position = self.tableTableWidget.rowCount()
self.tableTableWidget.insertRow(row_position)
self.tableTableWidget.setItem(
row_position, 0, QTableWidgetItem(titles[count2]))
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(seeders[count2]))
self.tableTableWidget.setItem(row_position, 1, item)
item = QTableWidgetItem()
item.setData(Qt.DisplayRole, int(leechers[count2]))
self.tableTableWidget.setItem(row_position, 2, item)
self.tableTableWidget.setItem(
row_position, 3, QTableWidgetItem(sizes[count2]))
self.tableTableWidget.setItem(
row_position, 4, QTableWidgetItem(dates[count2]))
self.tableTableWidget.setItem(
row_position, 5, QTableWidgetItem("TPB"))
count2 = count2 + 1
except:
error_message()
if (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
nyaa()
rarbg()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
nyaa()
rarbg()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
nyaa()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
rarbg()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
nyaa()
rarbg()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
nyaa()
rarbg()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
nyaa()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
rarbg()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
nyaa()
rarbg()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
nyaa()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
rarbg()
tpb()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
nyaa()
rarbg()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
nyaa()
tpb()
resize()
searched_success_message()
elif (self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
nyaa()
rarbg()
tpb()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.katCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
kat()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.nyaaCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
nyaa()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
rarbg()
resize()
searched_success_message()
elif (self.x1377CheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
tpb()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.nyaaCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
nyaa()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
rarbg()
resize()
searched_success_message()
elif (self.katCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
tpb()
resize()
searched_success_message()
elif (self.nyaaCheckBox.isChecked() and self.rarbgCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
nyaa()
rarbg()
resize()
searched_success_message()
elif (self.nyaaCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
nyaa()
tpb()
resize()
searched_success_message()
elif (self.rarbgCheckBox.isChecked() and self.tpbCheckBox.isChecked()):
self.tableTableWidget.setRowCount(0)
self.magnets = []
rarbg()
tpb()
resize()
searched_success_message()
elif self.x1377CheckBox.isChecked():
self.tableTableWidget.setRowCount(0)
self.magnets = []
x1377()
resize()
searched_success_message()
elif self.katCheckBox.isChecked():
self.tableTableWidget.setRowCount(0)
self.magnets = []
kat()
resize()
searched_success_message()
elif self.nyaaCheckBox.isChecked():
self.tableTableWidget.setRowCount(0)
self.magnets = []
nyaa()
resize()
searched_success_message()
elif self.rarbgCheckBox.isChecked():
self.tableTableWidget.setRowCount(0)
self.magnets = []
rarbg()
resize()
searched_success_message()
elif self.tpbCheckBox.isChecked():
self.tableTableWidget.setRowCount(0)
self.magnets = []
tpb()
resize()
searched_success_message()
    def setupUi(self, searchMainWindow):
        """Build the search window: query box, five source checkboxes, the
        sortable six-column result table, the result-limit slider, and the
        search button; then wire the signals and translate the strings."""
        searchMainWindow.setObjectName("searchMainWindow")
        searchMainWindow.resize(1500, 400)
        font = QFont()
        font.setFamily("Bahnschrift Light")
        font.setPointSize(11)
        searchMainWindow.setFont(font)
        icon = QIcon()
        icon.addPixmap(QPixmap(src.mglobals.icon), QIcon.Normal, QIcon.Off)
        searchMainWindow.setWindowIcon(icon)
        self.centralwidget = QWidget(searchMainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Search query input.
        self.queryLineEdit = QLineEdit(self.centralwidget)
        self.queryLineEdit.setGeometry(QRect(30, 20, 200, 20))
        font = QFont()
        font.setPointSize(9)
        self.queryLineEdit.setFont(font)
        self.queryLineEdit.setObjectName("queryLineEdit")
        # Torrent-source checkboxes (1377x, KAT, Nyaa, RARBG, TPB).
        self.x1377CheckBox = QCheckBox(self.centralwidget)
        self.x1377CheckBox.setGeometry(QRect(30, 70, 90, 20))
        self.x1377CheckBox.setObjectName("x1377CheckBox")
        # Result table: Titles/Seeders/Leechers/Sizes/Dates/Source.
        self.tableTableWidget = QTableWidget(self.centralwidget)
        self.tableTableWidget.setGeometry(QRect(260, 20, 1161, 360))
        self.tableTableWidget.setObjectName("tableTableWidget")
        self.tableTableWidget.setColumnCount(6)
        self.tableTableWidget.setRowCount(0)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(0, item)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(1, item)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(2, item)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(3, item)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(4, item)
        item = QTableWidgetItem()
        self.tableTableWidget.setHorizontalHeaderItem(5, item)
        self.tableTableWidget.setSortingEnabled(True)
        self.katCheckBox = QCheckBox(self.centralwidget)
        self.katCheckBox.setGeometry(QRect(30, 110, 90, 20))
        self.katCheckBox.setObjectName("katCheckBox")
        self.nyaaCheckBox = QCheckBox(self.centralwidget)
        self.nyaaCheckBox.setGeometry(QRect(30, 150, 90, 20))
        self.nyaaCheckBox.setObjectName("nyaaCheckBox")
        self.rarbgCheckBox = QCheckBox(self.centralwidget)
        self.rarbgCheckBox.setGeometry(QRect(30, 190, 90, 20))
        self.rarbgCheckBox.setObjectName("rarbgCheckBox")
        self.tpbCheckBox = QCheckBox(self.centralwidget)
        self.tpbCheckBox.setGeometry(QRect(30, 230, 90, 20))
        self.tpbCheckBox.setObjectName("tpbCheckBox")
        self.searchPushButton = QPushButton(self.centralwidget)
        self.searchPushButton.setGeometry(QRect(30, 350, 90, 30))
        font = QFont()
        font.setPointSize(8)
        self.searchPushButton.setFont(font)
        self.searchPushButton.setObjectName("searchPushButton")
        # Per-source result-limit slider (0..20, default 10).
        self.limitSlider = QSlider(self.centralwidget)
        self.limitSlider.setGeometry(QRect(1450, 40, 22, 320))
        self.limitSlider.setMaximum(20)
        self.limitSlider.setPageStep(2)
        self.limitSlider.setSliderPosition(10)
        self.limitSlider.setOrientation(Qt.Vertical)
        self.limitSlider.setObjectName("limitSlider")
        self.minimumLabel = QLabel(self.centralwidget)
        self.minimumLabel.setGeometry(QRect(1452, 365, 16, 16))
        font = QFont()
        font.setPointSize(9)
        self.minimumLabel.setFont(font)
        self.minimumLabel.setAlignment(Qt.AlignCenter)
        self.minimumLabel.setObjectName("minimumLabel")
        self.maximumLabel = QLabel(self.centralwidget)
        self.maximumLabel.setGeometry(QRect(1452, 20, 16, 16))
        font = QFont()
        font.setPointSize(9)
        self.maximumLabel.setFont(font)
        self.maximumLabel.setAlignment(Qt.AlignCenter)
        self.maximumLabel.setObjectName("maximumLabel")
        searchMainWindow.setCentralWidget(self.centralwidget)
        # Signals: the search button runs the scrape; clicking a result row
        # copies its magnet link.
        self.searchPushButton.clicked.connect(self.callback)
        self.tableTableWidget.itemClicked.connect(self.copy)
        self.retranslateUi(searchMainWindow)
        QMetaObject.connectSlotsByName(searchMainWindow)
    def retranslateUi(self, searchMainWindow):
        """Install all user-visible strings: window title, source checkbox
        labels, result-table column headers, and the slider min/max labels."""
        _translate = QCoreApplication.translate
        searchMainWindow.setWindowTitle(_translate(
            "searchMainWindow", "MagnetMagnet - Search"))
        self.x1377CheckBox.setText(_translate("searchMainWindow", "1377x"))
        # Six result-table column headers, in display order.
        item = self.tableTableWidget.horizontalHeaderItem(0)
        item.setText(_translate("searchMainWindow", "Titles"))
        item = self.tableTableWidget.horizontalHeaderItem(1)
        item.setText(_translate("searchMainWindow", "Seeders"))
        item = self.tableTableWidget.horizontalHeaderItem(2)
        item.setText(_translate("searchMainWindow", "Leechers"))
        item = self.tableTableWidget.horizontalHeaderItem(3)
        item.setText(_translate("searchMainWindow", "Sizes"))
        item = self.tableTableWidget.horizontalHeaderItem(4)
        item.setText(_translate("searchMainWindow", "Dates"))
        item = self.tableTableWidget.horizontalHeaderItem(5)
        item.setText(_translate("searchMainWindow", "Source"))
        self.katCheckBox.setText(_translate("searchMainWindow", "KAT"))
        self.nyaaCheckBox.setText(_translate("searchMainWindow", "Nyaa"))
        self.rarbgCheckBox.setText(_translate("searchMainWindow", "RARBG"))
        self.tpbCheckBox.setText(_translate("searchMainWindow", "TPB"))
        self.searchPushButton.setText(_translate("searchMainWindow", "Search"))
        # Labels for the result-limit slider endpoints (0..20).
        self.minimumLabel.setText(_translate("searchMainWindow", "0"))
        self.maximumLabel.setText(_translate("searchMainWindow", "20"))
| true | true |
f72c091ee41e3a1bdb0176021673692667130530 | 1,631 | py | Python | mcp/lib/Slack.py | vspeter/MCP | 1f8ee863d64a2c276c47f1f8c4803983bff88fb7 | [
"Apache-2.0"
] | null | null | null | mcp/lib/Slack.py | vspeter/MCP | 1f8ee863d64a2c276c47f1f8c4803983bff88fb7 | [
"Apache-2.0"
] | null | null | null | mcp/lib/Slack.py | vspeter/MCP | 1f8ee863d64a2c276c47f1f8c4803983bff88fb7 | [
"Apache-2.0"
] | 1 | 2021-02-24T16:51:11.000Z | 2021-02-24T16:51:11.000Z | import json
import logging
from urllib import request, parse
class Slack():
    """Minimal Slack ``chat.postMessage`` client used for MCP notifications.

    Posting is best-effort: a missing API token disables it entirely, and
    network/API failures are logged as warnings and otherwise ignored, so a
    notification problem never breaks the caller.
    """

    # Icon emoji used for messages, roughly by severity.
    NOTSET = ':loudspeaker:'
    DEBUG = ':speaker:'
    INFO = ':information_source:'
    WARNING = ':warning:'
    ERROR = ':exclamation:'
    CRITICAL = ':boom:'
    SUCCESS = ':+1:'
    DONE = ':checkered_flag:'

    def __init__(self, proc, api_token, channel_name, site=None, proxy=None):
        """
        :param proc: process name, used to build the posting username
        :param api_token: Slack API token; ``None`` disables posting
        :param channel_name: channel to post to
        :param site: optional site name folded into the username
        :param proxy: optional proxy URL used for both http and https
        """
        self.api_token = api_token
        self.channel_name = channel_name
        self.user_name = 'mcp({0})-{1}'.format(site, proc) if site else 'mcp-{0}'.format(proc)
        self.slack_api_base_url = 'https://slack.com/api'
        if proxy:
            self.opener = request.build_opener(request.ProxyHandler({'http': proxy, 'https': proxy}))
        else:
            self.opener = request.build_opener(request.ProxyHandler({}))

    def post_message(self, message, emoji=NOTSET):
        """Post *message* to the configured channel, best-effort only."""
        if self.api_token is None:
            return

        data = parse.urlencode({
            'token': self.api_token,
            'channel': self.channel_name,
            'username': self.user_name,
            'text': message,
            'icon_emoji': emoji
        })
        url = '{0}/{1}'.format(self.slack_api_base_url, 'chat.postMessage')
        try:
            resp = self.opener.open(url, data=data.encode())
        except Exception as e:
            logging.warning('Slack: Got Exception "{0}" when posting message'.format(e))
            return

        rc = resp.read()
        resp.close()

        try:
            rc = json.loads(rc)
        # json.loads raises json.JSONDecodeError (a ValueError subclass) for
        # malformed JSON; the previous `except TypeError` never caught it.
        except ValueError:
            logging.warning('Slack: Response not valid JSON.')
            return

        # Slack reports failures as {"ok": false, "error": ...}; the previous
        # membership test (`'ok' not in rc`) treated any response containing
        # an "ok" key — including failures — as success.
        if not rc.get('ok'):
            logging.warning('Slack: Failed to post message {0}'.format(rc))
            return
| 28.614035 | 101 | 0.619865 | import json
import logging
from urllib import request, parse
class Slack():
    """Posts notification messages to a Slack channel via chat.postMessage."""

    # Message icon emoji, roughly by severity.
    NOTSET = ':loudspeaker:'
    DEBUG = ':speaker:'
    INFO = ':information_source:'
    WARNING = ':warning:'
    ERROR = ':exclamation:'
    CRITICAL = ':boom:'
    SUCCESS = ':+1:'
    DONE = ':checkered_flag:'

    def __init__(self, proc, api_token, channel_name, site=None, proxy=None):
        """Remember the posting identity and build the (optionally proxied)
        URL opener used for all requests."""
        self.api_token = api_token
        self.channel_name = channel_name
        if site:
            self.user_name = 'mcp({0})-{1}'.format(site, proc)
        else:
            self.user_name = 'mcp-{0}'.format(proc)
        self.slack_api_base_url = 'https://slack.com/api'
        proxies = {'http': proxy, 'https': proxy} if proxy else {}
        self.opener = request.build_opener(request.ProxyHandler(proxies))

    def post_message(self, message, emoji=NOTSET):
        """Send *message* to the channel; a missing token disables posting."""
        if self.api_token is None:
            return
        payload = parse.urlencode({
            'token': self.api_token,
            'channel': self.channel_name,
            'username': self.user_name,
            'text': message,
            'icon_emoji': emoji,
        })
        endpoint = '{0}/{1}'.format(self.slack_api_base_url, 'chat.postMessage')
        try:
            response = self.opener.open(endpoint, data=payload.encode())
        except Exception as e:
            logging.warning('Slack: Got Exception "{0}" when posting message'.format(e))
            return
        body = response.read()
        response.close()
        try:
            body = json.loads(body)
        except TypeError:
            logging.warning('Slack: Response not valid JSON.')
            return
        if 'ok' not in body:
            logging.warning('Slack: Failed to post message {0}'.format(body))
            return
f72c09ac05b9052d87975715d4ccabff57f26a58 | 12,756 | py | Python | LaserRender.py | gsboylan/meerk40t | 7607b034368a428dfc5cab56629032d6074c756d | [
"MIT"
] | null | null | null | LaserRender.py | gsboylan/meerk40t | 7607b034368a428dfc5cab56629032d6074c756d | [
"MIT"
] | null | null | null | LaserRender.py | gsboylan/meerk40t | 7607b034368a428dfc5cab56629032d6074c756d | [
"MIT"
] | null | null | null | from math import floor
import wx
from PIL import Image
from ZMatrix import ZMatrix
from svgelements import *
"""
Laser Render provides GUI relevant methods of displaying the given project.
"""
# Bit flags for the renderer's draw_mode bitfield. A set bit SUPPRESSES the
# named feature (e.g. DRAW_MODE_FILLS set means fills are not drawn); see
# LaserRender.render/draw_path for how the bits are tested.
DRAW_MODE_FILLS = 0x000001
DRAW_MODE_GUIDES = 0x000002
DRAW_MODE_GRID = 0x000004
DRAW_MODE_LASERPATH = 0x000008
DRAW_MODE_RETICLE = 0x000010
DRAW_MODE_SELECTION = 0x000020
DRAW_MODE_STROKES = 0x000040
DRAW_MODE_CACHE = 0x000080  # Set means do not cache.
DRAW_MODE_REFRESH = 0x000100
DRAW_MODE_ANIMATE = 0x000200
DRAW_MODE_PATH = 0x000400
DRAW_MODE_IMAGE = 0x000800
DRAW_MODE_TEXT = 0x001000
DRAW_MODE_BACKGROUND = 0x002000
# NOTE(review): ICONS/TREE use seven hex digits (bits 18 and 19), skipping
# bits 14-17 — confirm the gap is intentional before reusing those bits.
DRAW_MODE_ICONS = 0x0040000
DRAW_MODE_TREE = 0x0080000
DRAW_MODE_INVERT = 0x400000
DRAW_MODE_FLIPXY = 0x800000
def swizzlecolor(c):
    """Return the color swizzled into wx's 0xBBGGRR integer layout.

    Accepts None (returned unchanged), a plain integer (converted through
    svgelements.Color), or any object exposing red/green/blue attributes.
    """
    if c is None:
        return None
    color = Color(c) if isinstance(c, int) else c
    return (color.blue << 16) | (color.green << 8) | color.red
class LaserRender:
    def __init__(self, device):
        """Bind the renderer to a device and allocate reusable wx drawing
        objects (pen/brush/color are mutated per element rather than
        reallocated on every draw call)."""
        self.device = device
        self.cache = None
        self.pen = wx.Pen()
        self.brush = wx.Brush()
        self.color = wx.Colour()
    def render(self, elements, gc, draw_mode=None, zoomscale=1):
        """
        Render scene information.

        :param elements: iterable of elements (Path, SVGImage, SVGText, Group)
        :param gc: wx.GraphicsContext to draw into
        :param draw_mode: DRAW_MODE_* bitfield; defaults to the device's value
        :param zoomscale: current zoom factor, forwarded to the draw routines
        :return:
        """
        if draw_mode is None:
            draw_mode = self.device.draw_mode
        # If any of the PATH/IMAGE/TEXT suppression bits are set, keep only
        # the element types whose suppression bit is clear.
        if draw_mode & (DRAW_MODE_TEXT | DRAW_MODE_IMAGE | DRAW_MODE_PATH) != 0:
            types = []
            if draw_mode & DRAW_MODE_PATH == 0:
                types.append(Path)
            if draw_mode & DRAW_MODE_IMAGE == 0:
                types.append(SVGImage)
            if draw_mode & DRAW_MODE_TEXT == 0:
                types.append(SVGText)
            elements = [e for e in elements if type(e) in types]
        for element in elements:
            try:
                element.draw(element, gc, draw_mode, zoomscale=zoomscale)
            except AttributeError:
                # First time this element is drawn: attach the matching draw
                # routine to the element itself, then draw it. Subsequent
                # renders call element.draw directly.
                if isinstance(element, Path):
                    element.draw = self.draw_path
                elif isinstance(element, SVGImage):
                    element.draw = self.draw_image
                elif isinstance(element, SVGText):
                    element.draw = self.draw_text
                elif isinstance(element, Group):
                    element.draw = self.draw_group
                else:
                    continue
                element.draw(element, gc, draw_mode, zoomscale=zoomscale)
def make_path(self, gc, path):
p = gc.CreatePath()
first_point = path.first_point
if first_point is not None:
p.MoveToPoint(first_point[0], first_point[1])
for e in path:
if isinstance(e, Move):
p.MoveToPoint(e.end[0], e.end[1])
elif isinstance(e, Line):
p.AddLineToPoint(e.end[0], e.end[1])
elif isinstance(e, Close):
p.CloseSubpath()
elif isinstance(e, QuadraticBezier):
p.AddQuadCurveToPoint(e.control[0], e.control[1],
e.end[0], e.end[1])
elif isinstance(e, CubicBezier):
p.AddCurveToPoint(e.control1[0], e.control1[1],
e.control2[0], e.control2[1],
e.end[0], e.end[1])
elif isinstance(e, Arc):
for curve in e.as_cubic_curves():
p.AddCurveToPoint(curve.control1[0], curve.control1[1],
curve.control2[0], curve.control2[1],
curve.end[0], curve.end[1])
return p
def set_pen(self, gc, stroke, width=1.0):
if width < 1.0:
width = 1.0
c = stroke
if c is not None and c != 'none':
swizzle_color = swizzlecolor(c)
self.color.SetRGBA(swizzle_color | c.alpha << 24) # wx has BBGGRR
self.pen.SetColour(self.color)
self.pen.SetWidth(width)
gc.SetPen(self.pen)
else:
gc.SetPen(wx.TRANSPARENT_PEN)
def set_brush(self, gc, fill):
c = fill
if c is not None and c != 'none':
swizzle_color = swizzlecolor(c)
self.color.SetRGBA(swizzle_color | c.alpha << 24) # wx has BBGGRR
self.brush.SetColour(self.color)
gc.SetBrush(self.brush)
else:
gc.SetBrush(wx.TRANSPARENT_BRUSH)
def set_element_pen(self, gc, element, zoomscale=1.0):
try:
sw = Length(element.stroke_width).value(ppi=96.0)
# if sw < 3.0:
# sw = 3.0
except AttributeError:
sw = 1.0
if sw is None:
sw = 1.0
limit = zoomscale**.5
if sw < limit:
sw = limit
self.set_pen(gc, element.stroke, width=sw)
def set_element_brush(self, gc, element):
self.set_brush(gc, element.fill)
def draw_path(self, element, gc, draw_mode, zoomscale=1.0):
"""Default draw routine for the laser path element."""
try:
matrix = element.transform
except AttributeError:
matrix = Matrix()
if not hasattr(element, 'cache') or element.cache is None:
cache = self.make_path(gc, element)
element.cache = cache
gc.PushState()
gc.ConcatTransform(wx.GraphicsContext.CreateMatrix(gc, ZMatrix(matrix)))
self.set_element_pen(gc, element, zoomscale=zoomscale)
self.set_element_brush(gc, element)
if draw_mode & DRAW_MODE_FILLS == 0 and element.fill is not None:
gc.FillPath(element.cache)
if draw_mode & DRAW_MODE_STROKES == 0 and element.stroke is not None:
gc.StrokePath(element.cache)
gc.PopState()
def draw_text(self, element, gc, draw_mode, zoomscale=1.0):
try:
matrix = element.transform
except AttributeError:
matrix = Matrix()
if hasattr(element, 'wxfont'):
font = element.wxfont
else:
if element.font_size < 1:
if element.font_size > 0:
element.transform.pre_scale(element.font_size,
element.font_size,
element.x,
element.y)
element.font_size = 1 # No zero sized fonts.
font = wx.Font(element.font_size, wx.SWISS, wx.NORMAL, wx.BOLD)
try:
f = []
if element.font_family is not None:
f.append(str(element.font_family))
if element.font_face is not None:
f.append(str(element.font_face))
if element.font_weight is not None:
f.append(str(element.font_weight))
f.append("%d" % element.font_size)
font.SetNativeFontInfoUserDesc(' '.join(f))
except:
pass
element.wxfont = font
gc.PushState()
gc.ConcatTransform(wx.GraphicsContext.CreateMatrix(gc, ZMatrix(matrix)))
self.set_element_pen(gc, element, zoomscale=zoomscale)
self.set_element_brush(gc, element)
if element.fill is None or element.fill == 'none':
gc.SetFont(font, wx.BLACK)
else:
gc.SetFont(font, wx.Colour(swizzlecolor(element.fill)))
text = element.text
x = element.x
y = element.y
if text is not None:
w, h = element.width, element.height
element.width, element.height = gc.GetTextExtent(element.text)
if w != element.width and h != element.height:
element.modified()
if not hasattr(element, 'anchor') or element.anchor == 'start':
y -= element.height
elif element.anchor == 'middle':
x -= (element.width / 2)
y -= element.height
elif element.anchor == 'end':
x -= element.width
y -= element.height
gc.DrawText(text, x, y)
gc.PopState()
def draw_image(self, node, gc, draw_mode, zoomscale=1.0):
try:
matrix = node.transform
except AttributeError:
matrix = Matrix()
gc.PushState()
gc.ConcatTransform(wx.GraphicsContext.CreateMatrix(gc, ZMatrix(matrix)))
if draw_mode & DRAW_MODE_CACHE == 0:
cache = None
try:
cache = node.cache
except AttributeError:
pass
if cache is None:
try:
max_allowed = node.max_allowed
except AttributeError:
max_allowed = 2048
node.c_width, node.c_height = node.image.size
node.cache = self.make_thumbnail(node.image, maximum=max_allowed)
gc.DrawBitmap(node.cache, 0, 0, node.c_width, node.c_height)
else:
node.c_width, node.c_height = node.image.size
cache = self.make_thumbnail(node.image)
gc.DrawBitmap(cache, 0, 0, node.c_width, node.c_height)
gc.PopState()
def draw_group(self, element, gc, draw_mode, zoomscale=1.0):
pass
def make_raster(self, elements, bounds, width=None, height=None, bitmap=False, step=1):
if bounds is None:
return None
xmin, ymin, xmax, ymax = bounds
xmax = ceil(xmax)
ymax = ceil(ymax)
xmin = floor(xmin)
ymin = floor(ymin)
image_width = int(xmax - xmin)
if image_width == 0:
image_width = 1
image_height = int(ymax - ymin)
if image_height == 0:
image_height = 1
if width is None:
width = image_width
if height is None:
height = image_height
width /= float(step)
height /= float(step)
width = int(width)
height = int(height)
bmp = wx.Bitmap(width, height, 32)
dc = wx.MemoryDC()
dc.SelectObject(bmp)
dc.SetBackground(wx.WHITE_BRUSH)
dc.Clear()
matrix = Matrix()
matrix.post_translate(-xmin, -ymin)
scale_x = width / float(image_width)
scale_y = height / float(image_height)
scale = min(scale_x, scale_y)
matrix.post_scale(scale)
gc = wx.GraphicsContext.Create(dc)
gc.SetInterpolationQuality(wx.INTERPOLATION_BEST)
gc.PushState()
gc.ConcatTransform(wx.GraphicsContext.CreateMatrix(gc, ZMatrix(matrix)))
gc.SetBrush(wx.WHITE_BRUSH)
gc.DrawRectangle(xmin - 1, ymin - 1, xmax + 1, ymax + 1)
if not isinstance(elements, (list, tuple)):
elements = [elements]
self.render(elements, gc, draw_mode=DRAW_MODE_CACHE)
gc.PopState()
img = bmp.ConvertToImage()
buf = img.GetData()
image = Image.frombuffer("RGB", tuple(bmp.GetSize()), bytes(buf), "raw", "RGB", 0, 1)
gc.Destroy()
del dc
if bitmap:
return bmp
return image
def make_thumbnail(self, pil_data, maximum=None, width=None, height=None):
"""Resizes the given pil image into wx.Bitmap object that fits the constraints."""
image_width, image_height = pil_data.size
if width is not None and height is None:
height = width * image_height / float(image_width)
if width is None and height is not None:
width = height * image_width / float(image_height)
if width is None and height is None:
width = image_width
height = image_height
if maximum is not None and (width > maximum or height > maximum):
scale_x = maximum / width
scale_y = maximum / height
scale = min(scale_x, scale_y)
width = int(round(width * scale))
height = int(round(height * scale))
if image_width != width or image_height != height:
pil_data = pil_data.copy().resize((width, height))
else:
pil_data = pil_data.copy()
if pil_data.mode != "RGBA":
pil_data = pil_data.convert('RGBA')
pil_bytes = pil_data.tobytes()
return wx.Bitmap.FromBufferRGBA(width, height, pil_bytes)
| 37.189504 | 94 | 0.541863 | from math import floor
import wx
from PIL import Image
from ZMatrix import ZMatrix
from svgelements import *
DRAW_MODE_FILLS = 0x000001
DRAW_MODE_GUIDES = 0x000002
DRAW_MODE_GRID = 0x000004
DRAW_MODE_LASERPATH = 0x000008
DRAW_MODE_RETICLE = 0x000010
DRAW_MODE_SELECTION = 0x000020
DRAW_MODE_STROKES = 0x000040
DRAW_MODE_CACHE = 0x000080
DRAW_MODE_REFRESH = 0x000100
DRAW_MODE_ANIMATE = 0x000200
DRAW_MODE_PATH = 0x000400
DRAW_MODE_IMAGE = 0x000800
DRAW_MODE_TEXT = 0x001000
DRAW_MODE_BACKGROUND = 0x002000
DRAW_MODE_ICONS = 0x0040000
DRAW_MODE_TREE = 0x0080000
DRAW_MODE_INVERT = 0x400000
DRAW_MODE_FLIPXY = 0x800000
def swizzlecolor(c):
if c is None:
return None
if isinstance(c, int):
c = Color(c)
return c.blue << 16 | c.green << 8 | c.red
class LaserRender:
def __init__(self, device):
self.device = device
self.cache = None
self.pen = wx.Pen()
self.brush = wx.Brush()
self.color = wx.Colour()
def render(self, elements, gc, draw_mode=None, zoomscale=1):
if draw_mode is None:
draw_mode = self.device.draw_mode
if draw_mode & (DRAW_MODE_TEXT | DRAW_MODE_IMAGE | DRAW_MODE_PATH) != 0:
types = []
if draw_mode & DRAW_MODE_PATH == 0:
types.append(Path)
if draw_mode & DRAW_MODE_IMAGE == 0:
types.append(SVGImage)
if draw_mode & DRAW_MODE_TEXT == 0:
types.append(SVGText)
elements = [e for e in elements if type(e) in types]
for element in elements:
try:
element.draw(element, gc, draw_mode, zoomscale=zoomscale)
except AttributeError:
if isinstance(element, Path):
element.draw = self.draw_path
elif isinstance(element, SVGImage):
element.draw = self.draw_image
elif isinstance(element, SVGText):
element.draw = self.draw_text
elif isinstance(element, Group):
element.draw = self.draw_group
else:
continue
element.draw(element, gc, draw_mode, zoomscale=zoomscale)
def make_path(self, gc, path):
p = gc.CreatePath()
first_point = path.first_point
if first_point is not None:
p.MoveToPoint(first_point[0], first_point[1])
for e in path:
if isinstance(e, Move):
p.MoveToPoint(e.end[0], e.end[1])
elif isinstance(e, Line):
p.AddLineToPoint(e.end[0], e.end[1])
elif isinstance(e, Close):
p.CloseSubpath()
elif isinstance(e, QuadraticBezier):
p.AddQuadCurveToPoint(e.control[0], e.control[1],
e.end[0], e.end[1])
elif isinstance(e, CubicBezier):
p.AddCurveToPoint(e.control1[0], e.control1[1],
e.control2[0], e.control2[1],
e.end[0], e.end[1])
elif isinstance(e, Arc):
for curve in e.as_cubic_curves():
p.AddCurveToPoint(curve.control1[0], curve.control1[1],
curve.control2[0], curve.control2[1],
curve.end[0], curve.end[1])
return p
def set_pen(self, gc, stroke, width=1.0):
if width < 1.0:
width = 1.0
c = stroke
if c is not None and c != 'none':
swizzle_color = swizzlecolor(c)
self.color.SetRGBA(swizzle_color | c.alpha << 24)
self.pen.SetColour(self.color)
self.pen.SetWidth(width)
gc.SetPen(self.pen)
else:
gc.SetPen(wx.TRANSPARENT_PEN)
def set_brush(self, gc, fill):
c = fill
if c is not None and c != 'none':
swizzle_color = swizzlecolor(c)
self.color.SetRGBA(swizzle_color | c.alpha << 24)
self.brush.SetColour(self.color)
gc.SetBrush(self.brush)
else:
gc.SetBrush(wx.TRANSPARENT_BRUSH)
def set_element_pen(self, gc, element, zoomscale=1.0):
try:
sw = Length(element.stroke_width).value(ppi=96.0)
except AttributeError:
sw = 1.0
if sw is None:
sw = 1.0
limit = zoomscale**.5
if sw < limit:
sw = limit
self.set_pen(gc, element.stroke, width=sw)
def set_element_brush(self, gc, element):
self.set_brush(gc, element.fill)
def draw_path(self, element, gc, draw_mode, zoomscale=1.0):
try:
matrix = element.transform
except AttributeError:
matrix = Matrix()
if not hasattr(element, 'cache') or element.cache is None:
cache = self.make_path(gc, element)
element.cache = cache
gc.PushState()
gc.ConcatTransform(wx.GraphicsContext.CreateMatrix(gc, ZMatrix(matrix)))
self.set_element_pen(gc, element, zoomscale=zoomscale)
self.set_element_brush(gc, element)
if draw_mode & DRAW_MODE_FILLS == 0 and element.fill is not None:
gc.FillPath(element.cache)
if draw_mode & DRAW_MODE_STROKES == 0 and element.stroke is not None:
gc.StrokePath(element.cache)
gc.PopState()
def draw_text(self, element, gc, draw_mode, zoomscale=1.0):
try:
matrix = element.transform
except AttributeError:
matrix = Matrix()
if hasattr(element, 'wxfont'):
font = element.wxfont
else:
if element.font_size < 1:
if element.font_size > 0:
element.transform.pre_scale(element.font_size,
element.font_size,
element.x,
element.y)
element.font_size = 1
font = wx.Font(element.font_size, wx.SWISS, wx.NORMAL, wx.BOLD)
try:
f = []
if element.font_family is not None:
f.append(str(element.font_family))
if element.font_face is not None:
f.append(str(element.font_face))
if element.font_weight is not None:
f.append(str(element.font_weight))
f.append("%d" % element.font_size)
font.SetNativeFontInfoUserDesc(' '.join(f))
except:
pass
element.wxfont = font
gc.PushState()
gc.ConcatTransform(wx.GraphicsContext.CreateMatrix(gc, ZMatrix(matrix)))
self.set_element_pen(gc, element, zoomscale=zoomscale)
self.set_element_brush(gc, element)
if element.fill is None or element.fill == 'none':
gc.SetFont(font, wx.BLACK)
else:
gc.SetFont(font, wx.Colour(swizzlecolor(element.fill)))
text = element.text
x = element.x
y = element.y
if text is not None:
w, h = element.width, element.height
element.width, element.height = gc.GetTextExtent(element.text)
if w != element.width and h != element.height:
element.modified()
if not hasattr(element, 'anchor') or element.anchor == 'start':
y -= element.height
elif element.anchor == 'middle':
x -= (element.width / 2)
y -= element.height
elif element.anchor == 'end':
x -= element.width
y -= element.height
gc.DrawText(text, x, y)
gc.PopState()
def draw_image(self, node, gc, draw_mode, zoomscale=1.0):
try:
matrix = node.transform
except AttributeError:
matrix = Matrix()
gc.PushState()
gc.ConcatTransform(wx.GraphicsContext.CreateMatrix(gc, ZMatrix(matrix)))
if draw_mode & DRAW_MODE_CACHE == 0:
cache = None
try:
cache = node.cache
except AttributeError:
pass
if cache is None:
try:
max_allowed = node.max_allowed
except AttributeError:
max_allowed = 2048
node.c_width, node.c_height = node.image.size
node.cache = self.make_thumbnail(node.image, maximum=max_allowed)
gc.DrawBitmap(node.cache, 0, 0, node.c_width, node.c_height)
else:
node.c_width, node.c_height = node.image.size
cache = self.make_thumbnail(node.image)
gc.DrawBitmap(cache, 0, 0, node.c_width, node.c_height)
gc.PopState()
def draw_group(self, element, gc, draw_mode, zoomscale=1.0):
pass
def make_raster(self, elements, bounds, width=None, height=None, bitmap=False, step=1):
if bounds is None:
return None
xmin, ymin, xmax, ymax = bounds
xmax = ceil(xmax)
ymax = ceil(ymax)
xmin = floor(xmin)
ymin = floor(ymin)
image_width = int(xmax - xmin)
if image_width == 0:
image_width = 1
image_height = int(ymax - ymin)
if image_height == 0:
image_height = 1
if width is None:
width = image_width
if height is None:
height = image_height
width /= float(step)
height /= float(step)
width = int(width)
height = int(height)
bmp = wx.Bitmap(width, height, 32)
dc = wx.MemoryDC()
dc.SelectObject(bmp)
dc.SetBackground(wx.WHITE_BRUSH)
dc.Clear()
matrix = Matrix()
matrix.post_translate(-xmin, -ymin)
scale_x = width / float(image_width)
scale_y = height / float(image_height)
scale = min(scale_x, scale_y)
matrix.post_scale(scale)
gc = wx.GraphicsContext.Create(dc)
gc.SetInterpolationQuality(wx.INTERPOLATION_BEST)
gc.PushState()
gc.ConcatTransform(wx.GraphicsContext.CreateMatrix(gc, ZMatrix(matrix)))
gc.SetBrush(wx.WHITE_BRUSH)
gc.DrawRectangle(xmin - 1, ymin - 1, xmax + 1, ymax + 1)
if not isinstance(elements, (list, tuple)):
elements = [elements]
self.render(elements, gc, draw_mode=DRAW_MODE_CACHE)
gc.PopState()
img = bmp.ConvertToImage()
buf = img.GetData()
image = Image.frombuffer("RGB", tuple(bmp.GetSize()), bytes(buf), "raw", "RGB", 0, 1)
gc.Destroy()
del dc
if bitmap:
return bmp
return image
def make_thumbnail(self, pil_data, maximum=None, width=None, height=None):
image_width, image_height = pil_data.size
if width is not None and height is None:
height = width * image_height / float(image_width)
if width is None and height is not None:
width = height * image_width / float(image_height)
if width is None and height is None:
width = image_width
height = image_height
if maximum is not None and (width > maximum or height > maximum):
scale_x = maximum / width
scale_y = maximum / height
scale = min(scale_x, scale_y)
width = int(round(width * scale))
height = int(round(height * scale))
if image_width != width or image_height != height:
pil_data = pil_data.copy().resize((width, height))
else:
pil_data = pil_data.copy()
if pil_data.mode != "RGBA":
pil_data = pil_data.convert('RGBA')
pil_bytes = pil_data.tobytes()
return wx.Bitmap.FromBufferRGBA(width, height, pil_bytes)
| true | true |
f72c09ef26d7bd7db7719016288f760059b46e7e | 561 | py | Python | starry_night.py | weijun-github/some-python-codes | db3d4b4ceb8b7c8ce0bd4b61da6227cd9e994718 | [
"MIT"
] | null | null | null | starry_night.py | weijun-github/some-python-codes | db3d4b4ceb8b7c8ce0bd4b61da6227cd9e994718 | [
"MIT"
] | null | null | null | starry_night.py | weijun-github/some-python-codes | db3d4b4ceb8b7c8ce0bd4b61da6227cd9e994718 | [
"MIT"
] | null | null | null | from turtle import *
from random import randint, random
def draw_star(points, size, col, x, y):
    """Draw a filled star with *points* tips of edge length *size*,
    colored *col*, starting at screen position (x, y)."""
    # Jump to the start position without drawing a line.
    penup()
    goto(x, y)
    pendown()
    # Exterior turn angle that produces a star polygon for odd point counts.
    turn = 180 - (180 / points)
    color(col)
    begin_fill()
    for _ in range(points):
        forward(size)
        right(turn)
    end_fill()
# main code: dark-blue "sky", then the loop below draws random stars forever.
Screen().bgcolor("dark blue")
speed(0)  # 0 = fastest turtle animation speed
while True:
points = randint(2,5) * 2 + 1
size = randint(10,50)
col = (random(),random(),random())
x = randint(-350,300)
y = randint(-250,250)
draw_star(points,size,col,x,y) | 20.777778 | 39 | 0.57041 | from turtle import *
from random import randint, random
def draw_star(points,size,col,x,y):
penup()
goto(x,y)
pendown()
angle = 180 - (180 / points)
color(col)
begin_fill()
for i in range(points):
forward(size)
right(angle)
end_fill()
Screen().bgcolor("dark blue")
speed(0)
while True:
points = randint(2,5) * 2 + 1
size = randint(10,50)
col = (random(),random(),random())
x = randint(-350,300)
y = randint(-250,250)
draw_star(points,size,col,x,y) | true | true |
f72c09f7de9a8713a8c125758c01f1bfe2118188 | 13,359 | py | Python | fanficfare/adapters/adapter_bloodtiesfancom.py | Kolbo5/FanFicFare | cf2ae9b12631bfeeb9198ca686f9e58d4579aeb3 | [
"Apache-2.0"
] | null | null | null | fanficfare/adapters/adapter_bloodtiesfancom.py | Kolbo5/FanFicFare | cf2ae9b12631bfeeb9198ca686f9e58d4579aeb3 | [
"Apache-2.0"
] | null | null | null | fanficfare/adapters/adapter_bloodtiesfancom.py | Kolbo5/FanFicFare | cf2ae9b12631bfeeb9198ca686f9e58d4579aeb3 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2011 Fanficdownloader team, 2018 FanFicFare team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Software: eFiction
from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
from bs4.element import Tag
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
# py2 vs py3 transition
from ..six import text_type as unicode
from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
# By virtue of being recent and requiring both is_adult and user/pass,
# adapter_fanficcastletvnet.py is the best choice for learning to
# write adapters--especially for sites that use the eFiction system.
# Most sites that have ".../viewstory.php?sid=123" in the story URL
# are eFiction.
# For non-eFiction sites, it can be considerably more complex, but
# this is still a good starting point.
# In general an 'adapter' needs to do these five things:
# - 'Register' correctly with the downloader
# - Site Login (if needed)
# - 'Are you adult?' check (if needed--some do one, some the other, some both)
# - Grab the chapter list
# - Grab the story meta-data (some (non-eFiction) adapters have to get it from the author page)
# - Grab the chapter texts
# Search for XXX comments--that's where things are most likely to need changing.
# This function is called by the downloader in all adapter_*.py files
# in this dir to register the adapter class. So it needs to be
# updated to reflect the class below it. That, plus getSiteDomain()
# take care of 'Registering'.
def getClass():
    """Registry hook: return the adapter class this module provides."""
    return BloodTiesFansComAdapter
# Class name has to be unique. Our convention is camel case the
# sitename with Adapter at the end. www is skipped.
class BloodTiesFansComAdapter(BaseSiteAdapter):
    """eFiction adapter for http://bloodties-fans.com/fiction/.

    Handles login, the 'are you adult?' warning redirect, scraping the
    chapter list and story metadata, and fetching chapter text.
    """
    def __init__(self, config, url):
        BaseSiteAdapter.__init__(self, config, url)
        self.is_adult=False
        # get storyId from url--url validation guarantees query is only sid=1234
        self.story.setMetadata('storyId',self.parsedUrl.query.split('=',)[1])
        # normalized story URL.
        self._setURL('http://' + self.getSiteDomain() + '/fiction/viewstory.php?sid='+self.story.getMetadata('storyId'))
        # Each adapter needs to have a unique site abbreviation.
        self.story.setMetadata('siteabbrev','btf')
        # Site date format, e.g. "05 Jan 2012".
        # http://docs.python.org/library/datetime.html#strftime-strptime-behavior
        self.dateformat = "%d %b %Y"
    @staticmethod # must be @staticmethod, don't remove it.
    def getSiteDomain():
        # The site domain. Does have www here, if it uses it.
        return 'bloodties-fans.com'
    @classmethod
    def getSiteExampleURLs(cls):
        return "http://"+cls.getSiteDomain()+"/fiction/viewstory.php?sid=1234"
    def getSiteURLPattern(self):
        return re.escape("http://"+self.getSiteDomain()+"/fiction/viewstory.php?sid=")+r"\d+$"
    ## Login seems to be reasonably standard across eFiction sites.
    def needToLoginCheck(self, data):
        """Return True if *data* is a page demanding that we log in first."""
        if 'Registered Users Only' in data \
                or 'There is no such account on our website' in data \
                or "That password doesn't match the one in our database" in data:
            return True
        else:
            return False
    def performLogin(self, url):
        """Log in to the site; raise FailedToLogin when the site rejects us.

        Returns True on success.  Credentials come from the adapter's
        runtime username/password when set, otherwise from configuration.
        """
        params = {}
        if self.password:
            params['penname'] = self.username
            params['password'] = self.password
        else:
            params['penname'] = self.getConfig("username")
            params['password'] = self.getConfig("password")
        params['cookiecheck'] = '1'
        params['submit'] = 'Submit'
        loginUrl = 'http://' + self.getSiteDomain() + '/fiction/user.php?action=login'
        logger.debug("Will now login to URL (%s) as (%s)" % (loginUrl,
                                                             params['penname']))
        d = self._fetchUrl(loginUrl, params)
        # A successful login shows the "Member Account" page.
        if "Member Account" not in d:
            logger.info("Failed to login to URL %s as %s" % (loginUrl,
                                                             params['penname']))
            # Fixed: original had an unreachable "return False" after this raise.
            raise exceptions.FailedToLogin(url,params['penname'])
        else:
            return True
    ## Getting the chapter list and the meta data, plus 'is adult' checking.
    def extractChapterUrlsAndMetadata(self):
        """Populate story metadata and the chapter list from the story index.

        Raises StoryDoesNotExist, AdultCheckRequired or AccessDenied as
        appropriate.
        """
        if self.is_adult or self.getConfig("is_adult"):
            # Warning numbers vary by site and rating; 4 is tried first and
            # the real number is recovered from the 'continue' link below.
            addurl = "&ageconsent=ok&warning=4"
        else:
            addurl=""
        # index=1 makes sure we see the story chapter index. Some
        # sites skip that for one-chapter stories.
        url = self.url+'&index=1'+addurl
        logger.debug("URL: "+url)
        try:
            data = self._fetchUrl(url)
        except HTTPError as e:
            if e.code == 404:
                raise exceptions.StoryDoesNotExist(self.url)
            else:
                raise e
        # Look for the warning pass url embedded in the 'continue' link, e.g.
        # viewstory.php?sid=561&warning=4
        m = re.search(r"'viewstory.php\?sid=\d+((?:&ageconsent=ok)?&warning=\d+)'",data)
        if m is not None:
            if self.is_adult or self.getConfig("is_adult"):
                # We tried the default and still got a warning, so pull the
                # warning number from the 'continue' link and reload.
                addurl = m.group(1)
                # NOTE(review): this replace is a no-op as written; it reads
                # like an "&amp;"->"&" fixup whose entity was lost during
                # text processing -- confirm against upstream source.
                addurl = addurl.replace("&","&")
                url = self.url+'&index=1'+addurl
                logger.debug("URL 2nd try: "+url)
                try:
                    data = self._fetchUrl(url)
                except HTTPError as e:
                    if e.code == 404:
                        raise exceptions.StoryDoesNotExist(self.url)
                    else:
                        raise e
            else:
                raise exceptions.AdultCheckRequired(self.url)
        if "Access denied. This story has not been validated by the adminstrators of this site." in data:
            raise exceptions.AccessDenied(self.getSiteDomain() +" says: Access denied. This story has not been validated by the adminstrators of this site.")
        # use BeautifulSoup HTML parser to make everything easier to find.
        soup = self.make_soup(data)
        ## Title
        a = soup.find('a', href=re.compile(r'viewstory.php\?sid='+self.story.getMetadata('storyId')+"$"))
        self.story.setMetadata('title',stripHTML(a))
        # Find authorid and URL from... author url.
        a = soup.find('a', href=re.compile(r"viewuser.php\?uid=\d+"))
        self.story.setMetadata('authorId',a['href'].split('=')[1])
        self.story.setMetadata('authorUrl','http://'+self.host+'/fiction/'+a['href'])
        self.story.setMetadata('author',a.string)
        # Find the chapters:
        for chapter in soup.findAll('a', href=re.compile(r'viewstory.php\?sid='+self.story.getMetadata('storyId')+r"&chapter=\d+$")):
            # just in case there's tags, like <i> in chapter titles.
            self.add_chapter(chapter,'http://'+self.host+'/fiction/'+chapter['href']+addurl)
        # eFiction sites don't help us out a lot with their meta data
        # formating, so it's a little ugly.
        # (Removed an unused nested helper 'defaultGetattr' that was never called.)
        listbox = soup.find('div',{'class':'listbox'})
        # <strong>Rating:</strong> M<br /> etc
        labels = listbox.findAll('strong')
        for labelspan in labels:
            value = labelspan.nextSibling
            label = labelspan.string
            if 'Summary' in label:
                ## Everything until the next strong tag.
                svalue = ""
                while not isinstance(value,Tag) or value.name != 'strong':
                    svalue += unicode(value)
                    value = value.nextSibling
                self.setDescription(url,svalue)
            if 'Rating' in label:
                self.story.setMetadata('rating', value)
            if 'Words' in label:
                value=re.sub(r"\|",r"",value)
                self.story.setMetadata('numWords', value)
            if 'Categories' in label:
                cats = labelspan.parent.findAll('a',href=re.compile(r'browse.php\?type=categories'))
                catstext = [cat.string for cat in cats]
                for cat in catstext:
                    self.story.addToList('category',cat.string)
            if 'Characters' in label:
                chars = labelspan.parent.findAll('a',href=re.compile(r'browse.php\?type=characters'))
                charstext = [char.string for char in chars]
                for char in charstext:
                    self.story.addToList('characters',char.string)
            if 'Completed' in label:
                if 'Yes' in value:
                    self.story.setMetadata('status', 'Completed')
                else:
                    self.story.setMetadata('status', 'In-Progress')
            if 'Published' in label:
                value=re.sub(r"\|",r"",value)
                self.story.setMetadata('datePublished', makeDate(stripHTML(value), self.dateformat))
            if 'Updated' in label:
                value=re.sub(r"\|",r"",value)
                self.story.setMetadata('dateUpdated', makeDate(stripHTML(value), self.dateformat))
        # Ships and genres live outside the <strong> loop because the site
        # changed *most*, but not *all*, labels to <strong>.
        ships = listbox.findAll('a',href=re.compile(r'browse.php.type=class&(amp;)?type_id=2'))
        shipstext = [ship.string for ship in ships]
        for ship in shipstext:
            self.story.addToList('ships',ship.string)
        genres = listbox.findAll('a',href=re.compile(r'browse.php\?type=class&(amp;)?type_id=1'))
        genrestext = [genre.string for genre in genres]
        for genre in genrestext:
            self.story.addToList('genre',genre.string)
        try:
            # Find Series name from series URL, then locate this story's
            # position within the series listing.
            a = soup.find('a', href=re.compile(r"viewseries.php\?seriesid=\d+"))
            series_name = a.string
            series_url = 'http://'+self.host+'/fiction/'+a['href']
            seriessoup = self.make_soup(self._fetchUrl(series_url))
            storyas = seriessoup.findAll('a', href=re.compile(r'^viewstory.php\?sid=\d+$'))
            i=1
            for a in storyas:
                if a['href'] == ('viewstory.php?sid='+self.story.getMetadata('storyId')):
                    self.setSeries(series_name, i)
                    self.story.setMetadata('seriesUrl',series_url)
                    break
                i+=1
        except Exception:
            # Series info is optional; ignore any failure parsing it.
            # (Narrowed from a bare except so KeyboardInterrupt/SystemExit pass.)
            pass
    # grab the text for an individual chapter.
    def getChapterText(self, url):
        """Fetch one chapter page and return its sanitized story HTML."""
        logger.debug('Getting chapter text from: %s' % url)
        soup = self.make_soup(self._fetchUrl(url))
        div = soup.find('div', {'id' : 'story'})
        if div is None:
            raise exceptions.FailedToDownload("Error downloading Chapter: %s!  Missing required element!" % url)
        return self.utf8FromSoup(url,div)
| 40.853211 | 157 | 0.601991 |
from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import re
from bs4.element import Tag
from ..htmlcleanup import stripHTML
from .. import exceptions as exceptions
from ..six import text_type as unicode
from ..six.moves.urllib.error import HTTPError
from .base_adapter import BaseSiteAdapter, makeDate
# This function is called by the downloader in all adapter_*.py files
# in this dir to register the adapter class. So it needs to be
# updated to reflect the class below it. That, plus getSiteDomain()
# take care of 'Registering'.
def getClass():
return BloodTiesFansComAdapter # XXX
# Class name has to be unique. Our convention is camel case the
# sitename with Adapter at the end. www is skipped.
class BloodTiesFansComAdapter(BaseSiteAdapter): # XXX
def __init__(self, config, url):
BaseSiteAdapter.__init__(self, config, url)
self.is_adult=False
# get storyId from url--url validation guarantees query is only sid=1234
self.story.setMetadata('storyId',self.parsedUrl.query.split('=',)[1])
# normalized story URL.
# XXX Most sites don't have the /fanfic part. Replace all to remove it usually.
self._setURL('http://' + self.getSiteDomain() + '/fiction/viewstory.php?sid='+self.story.getMetadata('storyId'))
self.story.setMetadata('siteabbrev','btf')
"%d %b %Y"
@staticmethod
def getSiteDomain():
# The site domain. Does have www here, if it uses it.
return 'bloodties-fans.com' # XXX
@classmethod
def getSiteExampleURLs(cls):
return "http://"+cls.getSiteDomain()+"/fiction/viewstory.php?sid=1234"
def getSiteURLPattern(self):
return re.escape("http://"+self.getSiteDomain()+"/fiction/viewstory.php?sid=")+r"\d+$"
## Login seems to be reasonably standard across eFiction sites.
def needToLoginCheck(self, data):
if 'Registered Users Only' in data \
or 'There is no such account on our website' in data \
or "That password doesn't match the one in our database" in data:
return True
else:
return False
def performLogin(self, url):
params = {}
if self.password:
params['penname'] = self.username
params['password'] = self.password
else:
params['penname'] = self.getConfig("username")
params['password'] = self.getConfig("password")
params['cookiecheck'] = '1'
params['submit'] = 'Submit'
loginUrl = 'http://' + self.getSiteDomain() + '/fiction/user.php?action=login'
logger.debug("Will now login to URL (%s) as (%s)" % (loginUrl,
params['penname']))
d = self._fetchUrl(loginUrl, params)
if "Member Account" not in d :
logger.info("Failed to login to URL %s as %s" % (loginUrl,
params['penname']))
raise exceptions.FailedToLogin(url,params['penname'])
return False
else:
return True
or self.getConfig("is_adult"):
# you need a different number. print data at that point
# and see what the 'click here to continue' url says.
# Furthermore, there's a couple sites now with more than
# fussy about it. midnightwhispers has three: 4, 2 & 1.
# we'll try 1 first.
addurl = "&ageconsent=ok&warning=4"
else:
addurl=""
url = self.url+'&index=1'+addurl
logger.debug("URL: "+url)
try:
data = self._fetchUrl(url)
except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
raise e
# look for the warning pass url. nfacommunity uses
# &warning= -- actually, so do other sites. Must be an
# eFiction book.
# viewstory.php?sid=561&warning=4
# viewstory.php?sid=561&warning=1
# viewstory.php?sid=561&warning=2
#print data
#m = re.search(r"'viewstory.php\?sid=1882(&warning=4)'",data)
m = re.search(r"'viewstory.php\?sid=\d+((?:&ageconsent=ok)?&warning=\d+)'",data)
if m != None:
if self.is_adult or self.getConfig("is_adult"):
# We tried the default and still got a warning, so
# let's pull the warning number from the 'continue'
addurl = m.group(1)
addurl = addurl.replace("&","&")
url = self.url+'&index=1'+addurl
logger.debug("URL 2nd try: "+url)
try:
data = self._fetchUrl(url)
except HTTPError as e:
if e.code == 404:
raise exceptions.StoryDoesNotExist(self.url)
else:
raise e
else:
raise exceptions.AdultCheckRequired(self.url)
if "Access denied. This story has not been validated by the adminstrators of this site." in data:
raise exceptions.AccessDenied(self.getSiteDomain() +" says: Access denied. This story has not been validated by the adminstrators of this site.")
soup = self.make_soup(data)
a = soup.find('a', href=re.compile(r'viewstory.php\?sid='+self.story.getMetadata('storyId')+"$"))
self.story.setMetadata('title',stripHTML(a))
a = soup.find('a', href=re.compile(r"viewuser.php\?uid=\d+"))
self.story.setMetadata('authorId',a['href'].split('=')[1])
self.story.setMetadata('authorUrl','http://'+self.host+'/fiction/'+a['href'])
self.story.setMetadata('author',a.string)
for chapter in soup.findAll('a', href=re.compile(r'viewstory.php\?sid='+self.story.getMetadata('storyId')+r"&chapter=\d+$")):
self.add_chapter(chapter,'http://'+self.host+'/fiction/'+chapter['href']+addurl)
# eFiction sites don't help us out a lot with their meta data
# utility method
def defaultGetattr(d, k):
    """Return d[k], or the empty string when the lookup fails for any reason."""
    try:
        return d[k]
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit; `Exception` keeps the same "best effort" behavior for
        # lookup errors (KeyError, TypeError, ...) without hiding those.
        return ""
listbox = soup.find('div',{'class':'listbox'})
# <strong>Rating:</strong> M<br /> etc
labels = listbox.findAll('strong')
for labelspan in labels:
value = labelspan.nextSibling
label = labelspan.string
if 'Summary' in label:
## Everything until the next strong tag.
svalue = ""
while not isinstance(value,Tag) or value.name != 'strong':
svalue += unicode(value)
value = value.nextSibling
self.setDescription(url,svalue)
#self.story.setMetadata('description',stripHTML(svalue))
if 'Rating' in label:
self.story.setMetadata('rating', value)
if 'Words' in label:
value=re.sub(r"\|",r"",value)
self.story.setMetadata('numWords', value)
if 'Categories' in label:
cats = labelspan.parent.findAll('a',href=re.compile(r'browse.php\?type=categories'))
catstext = [cat.string for cat in cats]
for cat in catstext:
self.story.addToList('category',cat.string)
if 'Characters' in label:
chars = labelspan.parent.findAll('a',href=re.compile(r'browse.php\?type=characters'))
charstext = [char.string for char in chars]
for char in charstext:
self.story.addToList('characters',char.string)
if 'Completed' in label:
if 'Yes' in value:
self.story.setMetadata('status', 'Completed')
else:
self.story.setMetadata('status', 'In-Progress')
if 'Published' in label:
value=re.sub(r"\|",r"",value)
self.story.setMetadata('datePublished', makeDate(stripHTML(value), self.dateformat))
if 'Updated' in label:
value=re.sub(r"\|",r"",value)
self.story.setMetadata('dateUpdated', makeDate(stripHTML(value), self.dateformat))
# moved outside because they changed *most*, but not *all* labels to <strong>
ships = listbox.findAll('a',href=re.compile(r'browse.php.type=class&(amp;)?type_id=2')) # crappy html: & vs & in url.
shipstext = [ship.string for ship in ships]
for ship in shipstext:
self.story.addToList('ships',ship.string)
genres = listbox.findAll('a',href=re.compile(r'browse.php\?type=class&(amp;)?type_id=1')) # crappy html: & vs & in url.
genrestext = [genre.string for genre in genres]
for genre in genrestext:
self.story.addToList('genre',genre.string)
try:
# Find Series name from series URL.
a = soup.find('a', href=re.compile(r"viewseries.php\?seriesid=\d+"))
series_name = a.string
series_url = 'http://'+self.host+'/fiction/'+a['href']
# use BeautifulSoup HTML parser to make everything easier to find.
seriessoup = self.make_soup(self._fetchUrl(series_url))
storyas = seriessoup.findAll('a', href=re.compile(r'^viewstory.php\?sid=\d+$'))
i=1
for a in storyas:
if a['href'] == ('viewstory.php?sid='+self.story.getMetadata('storyId')):
self.setSeries(series_name, i)
self.story.setMetadata('seriesUrl',series_url)
break
i+=1
except:
# I find it hard to care if the series parsing fails
pass
# grab the text for an individual chapter.
def getChapterText(self, url):
    """Fetch one chapter page and return the story text as sanitized HTML.

    Raises FailedToDownload when the page lacks the expected
    <div id="story"> container.
    """
    logger.debug('Getting chapter text from: %s' % url)
    page = self.make_soup(self._fetchUrl(url))
    story = page.find('div', {'id' : 'story'})
    if story is None:
        raise exceptions.FailedToDownload("Error downloading Chapter: %s! Missing required element!" % url)
    return self.utf8FromSoup(url, story)
| true | true |
f72c0a308bec3bcb670b9a62046fd98d6431cf98 | 109 | py | Python | callbacks/__init__.py | ivancreator/sgotgbot | fea3a8234610d1dee473688959167f430d43ad75 | [
"MIT"
] | null | null | null | callbacks/__init__.py | ivancreator/sgotgbot | fea3a8234610d1dee473688959167f430d43ad75 | [
"MIT"
] | null | null | null | callbacks/__init__.py | ivancreator/sgotgbot | fea3a8234610d1dee473688959167f430d43ad75 | [
"MIT"
] | null | null | null | from aiogram.utils.callback_data import CallbackData
cb_account = CallbackData('account', 'action', 'value') | 36.333333 | 55 | 0.798165 | from aiogram.utils.callback_data import CallbackData
cb_account = CallbackData('account', 'action', 'value') | true | true |
f72c0a6ac92672124a848a9846ceb3739ebe1129 | 6,194 | py | Python | module/LP.py | banboooo044/optimization | a15614b367712d6046311eac311214d27999fc7c | [
"MIT"
] | null | null | null | module/LP.py | banboooo044/optimization | a15614b367712d6046311eac311214d27999fc7c | [
"MIT"
] | null | null | null | module/LP.py | banboooo044/optimization | a15614b367712d6046311eac311214d27999fc7c | [
"MIT"
] | null | null | null | # date : 2/11/2019
# author : takeshi
import pandas as pd
import numpy as np
from IPython.display import display
def linprog(c, A, comp, b, maximize=True):
    '''
    Maximize (or minimize) a linear objective function subject to linear
    equality and inequality constraints, using the two-phase simplex method.

    Linear Programming is intended to solve the following problem form:
    Maximize:       c * x
    Subject to:     A * x [comp] b ,  (x >= 0)

    Parameters
    ----------
    c : array_like
        Coefficients of the linear objective function to be maximized.
    A : array_like
        2-D array which, when matrix-multiplied by x,
        gives the values of constraints at x.
    comp : array_like
        1-D array representing the sign of (in)equality of each constraint (row):
        -1 means (<=), 0 means (=), 1 means (>=).
    b : array_like
        1-D array of values representing the RHS of each constraint (row).
    maximize : bool, optional
        If True (default), the objective is maximized; if False, minimized.

    Returns
    -------
    pandas.DataFrame
        Final simplex table.
        The optimal solution is table['Value'], and the optimal value is
        table['Value']['z'].
        If x is a (1 * n) matrix, x_i (i >= n) is a slack variable.
    '''

    def optimize(table, target):
        # Run simplex pivots until the `target` row ('w' in phase 1,
        # 'z' in phase 2) has no negative coefficient on a non-basic variable.
        if not __debug__:
            if target == 'w':
                print("Phase 1 : find initial solution")
            else:
                if maximize:
                    print("Phase 2 : Maximize the linear objective function")
                else:
                    print("Phase 2 : Minimize the linear objective function")
        baseIndex = table.index.values
        nonBaseIndex = np.setdiff1d(
            np.vectorize(lambda i: 'x' + str(i))(np.arange(len(table.columns) - 1)),
            baseIndex)
        for i in range(100000):
            if not __debug__:
                # BUG FIX: was "roop {0}".foramt(i) -- AttributeError at runtime.
                print("loop {0}".format(i))
                display(table)
            nonBaseTable = table.loc[target, nonBaseIndex]
            # Optimality test: no improving (negative) coefficient remains.
            if (nonBaseTable < -1e-8).values.sum() == 0:
                return table
            # Entering variable: most negative coefficient in the target row.
            # BUG FIX: `idxmax(axis=1)` on a Series raises ValueError on
            # modern pandas; a Series only has axis 0.
            nextIndex = nonBaseTable.map(lambda x: -x).idxmax()
            # Leaving variable: minimum-ratio test over the constraint rows
            # (the rows above the target row; negative ratios are excluded).
            idx = table.index.get_loc(target)
            tmpLine = (table['Value'].iloc[:idx] / table.loc[:, nextIndex].iloc[:idx])
            prevIndex = str(tmpLine.map(lambda x: float('inf') if x < 0 else x).idxmin())
            nonBaseIndex[np.where(nonBaseIndex == nextIndex)] = prevIndex
            table = table.rename(index={prevIndex: nextIndex})
            # Normalize the pivot row, then eliminate the pivot column
            # from every other row (including the objective rows).
            table.loc[nextIndex] /= table.at[nextIndex, nextIndex]
            pivotLine = table.loc[nextIndex]
            unPivotIndex = list(table.index.drop(nextIndex))
            table.loc[unPivotIndex] = table.loc[unPivotIndex].apply(
                lambda x: x - (x.at[nextIndex] * pivotLine), axis=1)
        print("cannot find base solutions")

    # Work on copies so the caller's arrays are never mutated (the original
    # implementation flipped rows of A/comp/b in place).  This also accepts
    # plain Python lists, not only ndarrays.
    c = np.array(c)
    A = np.array(A)
    comp = np.array(comp)
    b = np.array(b)
    # Minimization is handled by maximizing -c and negating the result.
    if not maximize:
        c = (-c)
    n = A.shape[0]
    slackVariableNum = 0
    artificialVariableNum = 0
    slackVariable = [0] * n
    artificialVariable = [0] * n
    for i in range(n):
        # Make every RHS non-negative by flipping the whole row.
        if b[i] < 0:
            A[i] = -A[i]
            comp[i] = -comp[i]
            b[i] = -b[i]
        if comp[i] == -1:
            # <=  : add one slack variable
            slackVariableNum += 1
            slackVariable[i] = 1
        elif comp[i] == 0:
            # =   : add one artificial variable
            artificialVariableNum += 1
            artificialVariable[i] = 1
        else:
            # >=  : add a surplus (negative slack) and an artificial variable
            slackVariableNum += 1
            artificialVariableNum += 1
            slackVariable[i] = -1
            artificialVariable[i] = 1
    variableNum = c.shape[0] + slackVariableNum + artificialVariableNum
    addVariableNum = slackVariableNum + artificialVariableNum
    baseIndex = np.empty(n)
    baseValue = np.empty(n)
    A_ = np.append(A, np.zeros((n, addVariableNum)), axis=1)
    slackIter = c.shape[0]
    artificialIter = c.shape[0] + slackVariableNum
    # Choose the initial basis.  An artificial variable (when present) takes
    # priority over a slack variable, so each basic variable appears in
    # exactly one constraint row:
    #   baseIndex[i] := index of the variable appearing only in row i
    #   baseValue[i] := its value ( = b[i] )
    for i in range(n):
        if slackVariable[i] != 0:
            A_[i, slackIter] = slackVariable[i]
            if slackVariable[i] > 0:
                baseIndex[i], baseValue[i] = slackIter, b[i]
            slackIter += 1
        if artificialVariable[i] != 0:
            A_[i, artificialIter] = artificialVariable[i]
            baseIndex[i], baseValue[i] = artificialIter, b[i]
            artificialIter += 1
    # Phase 1 table: prepend the Value column, append the w and z rows.
    exA = np.append(baseValue.reshape(n, 1), A_, axis=1)
    c_ = np.array([0] * (c.shape[0] + slackVariableNum) + [-1] * artificialVariableNum)
    c_ = c_[np.vectorize(int)(baseIndex)]
    w = (c_ @ exA).reshape(1, variableNum + 1)
    z = np.append(np.append(np.zeros(1), -c), np.array([0] * addVariableNum)).reshape(1, variableNum + 1)
    table = np.append(np.append(exA, w, axis=0), z, axis=0)
    df = pd.DataFrame(table,
                      columns=['Value'] + ['x' + str(i) for i in range(variableNum)],
                      index=list(np.vectorize(lambda i: 'x' + str(int(i)))(baseIndex)) + ['w', 'z'])
    # Phase 1: drive the artificial variables out of the basis.
    table = optimize(df, 'w')
    # Phase 2: drop the artificial columns and the w row, then optimize z.
    if artificialVariableNum != 0:
        table = table.iloc[:, :-artificialVariableNum]
        variableNum -= artificialVariableNum
    table = table.drop('w')
    result = optimize(table, 'z')
    if not maximize:
        # BUG FIX: was the chained assignment result['Value']['z'] = ...,
        # which does not write through under pandas copy-on-write.
        result.loc['z', 'Value'] = -result.loc['z', 'Value']
    return result
## Example usage: run this module directly to solve a small LP.
if __name__ == '__main__':
    # maximize 2 * x_0 + 3 * x_1
    # constraints :
    #   1 * x_0 + 2 * x_1 <= 10
    #   2 * x_0 + 1 * x_1 <= 8
    #   ( x_0 >= 0 , x_1 >= 0)
    c = np.array([ 2,3])
    A = np.array([ [1,2],
                   [2,1] ])
    comp = np.array([-1,-1])   # both rows are <= constraints
    b = np.array([10,8])
    # solve (maximize=True); optimum is x_0=2, x_1=4 with value 16
    df = linprog(c,A,comp,b,True)
    # result: final simplex table; df['Value']['z'] is the optimal value
    print(df)
| 34.99435 | 118 | 0.56474 |
import pandas as pd
import numpy as np
from IPython.display import display
def linprog(c,A,comp,b,maximize=True):
def optimize(table,target):
if not __debug__:
if target == 'w':
print("Phase 1 : find initial solution")
else:
if maximize:
print("Phase 2 : Maximize the liner objective function")
else:
print("Phase 2 : Minimize the liner objective function")
baseIndex = table.index.values
nonBaseIndex = np.setdiff1d(np.vectorize(lambda i : 'x' + str(i))(np.arange(len(table.columns)-1)) ,baseIndex)
for i in range(100000):
if not __debug__:
print("roop {0}".foramt(i))
display(table)
nonBaseTable = table.loc[target,nonBaseIndex]
if ((nonBaseTable < -1e-8).values.sum()) == 0:
return table
nextIndex = (nonBaseTable.map(lambda x: -x)).idxmax(axis=1)
idx = table.index.get_loc(target)
tmpLine = (table['Value'].iloc[:idx] / table.loc[ : ,nextIndex].iloc[:idx] )
prevIndex = str(tmpLine.map(lambda x: float('inf') if x < 0 else x ).idxmin())
nonBaseIndex[np.where(nonBaseIndex == nextIndex)] = prevIndex
table = table.rename(index={prevIndex : nextIndex})
table.loc[nextIndex] /= table.at[nextIndex,nextIndex]
pivotLine = table.loc[nextIndex]
unPivotIndex = list(table.index.drop(nextIndex))
table.loc[unPivotIndex] = table.loc[unPivotIndex].apply(lambda x: x - (x.at[nextIndex]*pivotLine) ,axis=1)
print("cannot find base solutions")
if not maximize:
c = (-c)
n,m = A.shape
slackVariableNum = 0
artificialVariableNum = 0
slackVariable = [0] * n
artificialVariable = [0] * n
for i in range(n):
if b[i] < 0:
A[i] = -A[i]
comp[i] = -comp[i]
b[i] = -b[i]
if comp[i] == -1:
slackVariableNum += 1
slackVariable[i] = 1
elif comp[i] == 0:
artificialVariableNum += 1
artificialVariable[i] = 1
else:
slackVariableNum += 1
artificialVariableNum += 1
slackVariable[i] = -1
artificialVariable[i] = 1
variableNum = c.shape[0] + slackVariableNum + artificialVariableNum
addVariableNum = slackVariableNum + artificialVariableNum
baseIndex = np.empty(n)
baseValue = np.empty(n)
A_ = np.append(A , np.zeros((n,addVariableNum)),axis=1)
slackIter = c.shape[0]
artificialIter = c.shape[0] + slackVariableNum
for i in range(n):
if slackVariable[i] != 0:
A_[i,slackIter] = slackVariable[i]
if slackVariable[i] > 0:
baseIndex[i],baseValue[i] = slackIter, b[i]
slackIter += 1
if artificialVariable[i] != 0:
A_[i,artificialIter] = artificialVariable[i]
baseIndex[i],baseValue[i] = artificialIter, b[i]
artificialIter += 1
exA = np.append(baseValue.reshape(n,1),A_,axis=1)
c_ = np.array([0]*(c.shape[0] + slackVariableNum) + [-1]*(artificialVariableNum))
c_ = c_[np.vectorize(int)(baseIndex)]
w = (c_ @ exA).reshape(1,variableNum+1)
z = np.append(np.append(np.zeros(1),-c),np.array([0]*addVariableNum)).reshape(1,variableNum+1)
table = np.append(np.append(exA,w,axis=0),z,axis=0)
df = pd.DataFrame(table,
columns=['Value']+[ 'x' + str(i) for i in range(variableNum)],
index= list(np.vectorize(lambda i: 'x' + str(int(i)))(baseIndex)) + ['w','z']
)
table = optimize(df,'w')
if artificialVariableNum != 0:
table = table.iloc[:,:-artificialVariableNum]
variableNum -= artificialVariableNum
table = table.drop('w')
result = optimize(table,'z')
if not maximize:
result['Value']['z'] = -result['Value']['z']
return result
if __name__ == '__main__':
c = np.array([ 2,3])
A = np.array([ [1,2],
[2,1] ])
comp = np.array([-1,-1])
b = np.array([10,8])
df = linprog(c,A,comp,b,True)
print(df)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.